wadrc-bcp-scripts 0.0.6
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +7 -0
- data/.svn/entries +41 -0
- data/.svn/format +1 -0
- data/.svn/prop-base/reconstruct_dti_test.rb.svn-base +5 -0
- data/.svn/text-base/reconstruct_dti_test.rb.svn-base +25 -0
- data/Gemfile +4 -0
- data/README.md +5 -0
- data/Rakefile +7 -0
- data/VERSION +1 -0
- data/bin/FS_local_recon +98 -0
- data/bin/FieldmapAtWaisman.rb +35 -0
- data/bin/createFieldmap.rb +33 -0
- data/bin/preprocess_dti.rb +110 -0
- data/bin/run_dti_fit_for_study.sh +26 -0
- data/bin/run_dti_fit_script.sh +28 -0
- data/bin/tensor_transpose.rb +45 -0
- data/lib/.svn/entries +54 -0
- data/lib/.svn/format +1 -0
- data/lib/.svn/prop-base/dti_wrapper.rb.svn-base +5 -0
- data/lib/.svn/prop-base/dtifit_processing.rb.svn-base +5 -0
- data/lib/.svn/text-base/dti_wrapper.rb.svn-base +33 -0
- data/lib/.svn/text-base/dtifit_processing.rb.svn-base +95 -0
- data/lib/additions/NetSshConnectionSession.rb +20 -0
- data/lib/wadrc-bcp-scripts.rb +13 -0
- data/lib/wadrc-bcp-scripts/basic_task.rb +137 -0
- data/lib/wadrc-bcp-scripts/dtitask.rb +156 -0
- data/lib/wadrc-bcp-scripts/fieldmap_classes.rb +166 -0
- data/lib/wadrc-bcp-scripts/freesurfer_roi_task.rb +134 -0
- data/lib/wadrc-bcp-scripts/tensor.rb +25 -0
- data/lib/wadrc-bcp-scripts/version.rb +3 -0
- data/spec/.svn/entries +28 -0
- data/spec/.svn/format +1 -0
- data/spec/FS_local_recon_spec.rb +22 -0
- data/spec/blueprints.rb +12 -0
- data/spec/dtitask_spec.rb +95 -0
- data/spec/examples/johnson.alz.snodrest.visit2.yaml +14 -0
- data/spec/examples/johnson.tbi.aware.visit1.yaml +11 -0
- data/spec/examples/johnson.wrap140.visit1.yaml +12 -0
- data/spec/examples/johnson.wrap140.visit1_TR12s.yaml +12 -0
- data/spec/examples/spec.yaml +22 -0
- data/spec/factories.rb +11 -0
- data/spec/helper_spec.rb +8 -0
- data/test/.svn/entries +41 -0
- data/test/.svn/format +1 -0
- data/test/.svn/prop-base/reconstruct_dti_test.rb.svn-base +5 -0
- data/test/.svn/text-base/reconstruct_dti_test.rb.svn-base +25 -0
- data/test/reconstruct_dti_test.rb +25 -0
- metadata +188 -0
data/lib/wadrc-bcp-scripts/fieldmap_classes.rb
ADDED
@@ -0,0 +1,166 @@
require 'find'
require 'tmpdir'
require 'fileutils'
require 'rubygems'
require 'metamri'

module Find
  def match(path)
    matched = []
    find(path) do |p|
      if yield p
        if File.file?(p) && File.readable?(path): matched << p; end
      end
    end
    return matched
  end
  module_function :match
end

module WadrcBcpScripts

  # This library contains tasks for creating Fieldmaps at a remote server
  class FieldmapTask
    DWELL_TIME = 0.688
    attr_accessor :log
    attr_accessor :incoming_tar_file

    def initialize(incoming_tar_file)
      @log = Logger.new(STDOUT)
      @log.level = Logger::DEBUG
      @log.datetime_format = "%Y-%m-%d %H:%M:%S"
      @incoming_tar_file = incoming_tar_file
    end

    def setup_paths
      @log.debug { %x[set_default_paths.sh] }
    end

    def unpack(incoming_tar_file)
      unpacked_directory = unzip_incoming_tar_file(incoming_tar_file)
      @log.debug { "Unpacked Directory: #{unpacked_directory}"}
      fieldmap_directory = File.join(unpacked_directory, 'fieldmap')
      files_to_fieldmap = Find.match(unpacked_directory) { |f| File.extname(f) == '.nii' }
      @log.debug {"Files to be fieldmapped: #{files_to_fieldmap.each { |file| File.basename(file) } } " }

      return unpacked_directory, fieldmap_directory, files_to_fieldmap
    end

    def unzip_incoming_tar_file(incoming_tar_file, output_directory = nil)
      unless output_directory
        # Unless your ruby version is greater than 1.8.7
        # output_directory = Dir.mktmpdir
        output_directory = File.join(Dir.tmpdir, File.basename(@incoming_tar_file, '.tar.gz'))
        Dir.mkdir(output_directory) unless File.exists?(output_directory)
      end
      msg = %x[tar --directory #{output_directory} -xzvf #{incoming_tar_file} ]

      return output_directory
    end

    def create_fieldmap(fieldmap_directory, output_file = nil)
      fieldmap_file = output_file ? output_file : 'fmap.nii'
      make_fmap_cmd = "make_fmap #{fieldmap_directory} #{fieldmap_file}"
      @log.info make_fmap_cmd
      system(make_fmap_cmd)
      #@log.info { %x["#{make_fmap_cmd}"] }
      return fieldmap_file
    end

    def apply_fieldmap(fieldmap_file, files_to_fieldmap, output_directory = nil)
      unless output_directory then output_directory = Dir.pwd; end
      p files_to_fieldmap
      fieldmap_correction_cmd = "fieldmap_correction #{fieldmap_file} #{DWELL_TIME} #{output_directory} #{ files_to_fieldmap.join(" ") }"
      @log.info { fieldmap_correction_cmd }
      @log.info { %x[#{fieldmap_correction_cmd}] }

=begin
      # Pretend fieldmapping works!
      files_to_fieldmap.each do |f|
        fieldmapped_filename = File.join(File.dirname(f), File.basename(f, '.nii') + '_fm.nii')
        FileUtils.copy(f, fieldmapped_filename)
        p fieldmapped_filename
      end
=end
    end

    def zip_up_fieldmapped_files(output_name, fieldmapped_files)
      tar_and_zip_cmd = "tar -czvf #{output_name} #{fieldmapped_files.join(" ")}"
      @log.info {tar_and_zip_cmd}
    end

    def cleanup(tmpdir)

      output_name='fieldmapped_files.tar.gz'

      fieldmapped_files = Find.match(tmpdir) { |f| File.fnmatch('*_fm*', f ) }
      p fieldmapped_files
      zip_up_fieldmapped_files(output_name, fieldmapped_files)

      @log.close

    end


  end

  class LocalFieldmapSetup
    attr_accessor :prefix
    attr_accessor :files_to_fieldmap_directory
    attr_accessor :dicoms_directory

    REMOTE_SCRATCH_DIR = '/scratch/johnson_fieldmaps'
    REMOTE_USER = 'johnson'
    REMOTE_HOST = 'tezpur'
    REMOTE_PROCESSING_HOST = 'jaloro'

    def initialize(prefix, files_to_fieldmap_directory, dicoms_directory)
      @prefix = prefix
      @files_to_fieldmap_directory = files_to_fieldmap_directory
      @dicoms_directory = dicoms_directory
    end

    def find_fieldmap_directory(dicoms_directory)
      visit = VisitRawDataDirectory.new(dicoms_directory)
      visit.scan
      visit.datasets.each do |ds|
        if ds.series_description =~ /.*F Map.*/ then
          fieldmap_directory = ds.directory
        end
      end

      return fieldmap_directory
    end

    def find_files_to_fieldmap(files_to_fieldmap_directory)
      files_to_fieldmap = Find.match(files_to_fieldmap_directory) { |p| File.fnmatch('r*', p) }
      return files_to_fieldmap
    end

    def create_tarfile(prefix, files_to_fieldmap, fieldmap_directory)
      local_tarfile = "#{prefix}.tar.gz"
      system("tar -czvf #{local_tarfile} #{fieldmap_directory} #{files_to_fieldmap.join(" ")}")
      return local_tarfile
    end

    def transfer_tarfile_to_move(local_tarfile)
      remote_tarfile = REMOTE_SCRATCH_DIR + File.basename(tarfile)
      system("scp #{REMOTE_USER}@#{REMOTE_HOST}:#{remote_tarfile}")
      return remote_tarfile
    end

    def execute_remote_fieldmapping(remote_tarfile)
      system("ssh johnson@${REMOTE_PROCESSING_HOST} ~/bin/createFieldmap.rb #{remote_tarfile}")
    end

    def transfer_tarfile_from_remote
      system("scp #{REMOTE_USER}@#{REMOTE_HOST}:#{remote_tarfile} .")
    end

    def unpack_tarfile_locally
      system("tar -xzvf #{local_tarfile}")
    end

  end

end
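FieldmapTask above wraps the whole remote fieldmapping pass: unpack the incoming tarball, build a fieldmap with the external make_fmap tool, apply it with fieldmap_correction, then tar up the corrected files. A minimal driver sketch, assuming both tools are on the PATH and that requiring the gem loads this class; the script name and paths are hypothetical, not the packaged bin/createFieldmap.rb:

    #!/usr/bin/env ruby
    # Hypothetical driver sketch -- not the packaged bin/createFieldmap.rb.
    require 'logger'
    require 'wadrc-bcp-scripts'   # assumed to load WadrcBcpScripts::FieldmapTask

    tarball = ARGV.first or abort "Usage: fieldmap_driver.rb <incoming.tar.gz>"

    task = WadrcBcpScripts::FieldmapTask.new(tarball)
    task.setup_paths
    unpacked_dir, fieldmap_dir, files = task.unpack(tarball)  # tmpdir, fieldmap/ subdir, *.nii files
    fmap = task.create_fieldmap(fieldmap_dir)                 # shells out to make_fmap
    task.apply_fieldmap(fmap, files, unpacked_dir)            # shells out to fieldmap_correction
    task.cleanup(unpacked_dir)                                # logs the tar command for *_fm* outputs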
data/lib/wadrc-bcp-scripts/freesurfer_roi_task.rb
ADDED
@@ -0,0 +1,134 @@
module WadrcBcpScripts
  # We want to sample to ASL perfusion data (PET data, or other data) from ROIs,
  # determined in subject space by running a high-resolution T1 through
  # Freesurfer. As part of it's recon-all, freesurfer outputs segmentation images
  # (volume parcellation) that include ~ 100 ROIs for cortical gm, wm and
  # sub-cortical structures. All we need to do is get that segmentation into a
  # common space with the image of interest, and then we can sample it to get
  # meaningful estimates of the data.
  #
  # The Process:
  # 1) Do Basic Reconstruction for Anatomical Images
  # 2) Run a high resolution T1 through Freesurfer
  # 3) Convert the aparc.a2009s+aseg.mgz back to NIfTI in T1 space for sampling.
  # 4) Rigidly register & reslice Target modality to T1 space
  # 5) Sample mean and measures of interest
  class FreesurferRoiTask < BasicTask

    def initialize(raw_directory, output_directory, config)
      @config = config
      @raw_directory = raw_directory
      @output_directory = output_directory
      @commands = ShellQueue.new(:dry_run => true)
    end

    # Do Basic Reconstruction for Anatomical Images
    def basic_anatomical_reconstruction

      # Create the T1 & other Anatomicals
      @commands << "convert_visit.rb #{@raw_directory} #{@config[:subj_processed_dir]}"

      # Create the ASL
      modality_dir = File.join(@config[:subj_processed_dir], @config[:modality])
      Dir.mkdir_p modality_dir unless File.exists? modality_dir
      Dir.chdir modality_dir do
        # fmap_make /Data/vtrak1/raw/dempsey.plaque.visit1/plq20005_1959_04072011/009
        #(Or, to search automatically: )
        @commands << "fmap_make `list_visit #{@raw_directory} -g #{@config[:modality]}`"

        # Link the T1 into the ASL directory for easy visualization if you want.

        # File.symlink("../unknown/plq02002_Ax-FSPGR-BRAVO_003.nii", "plq02002_Ax-FSPGR-BRAVO_003.nii")
      end
    end

    # Run a high resolution T1 through Freesurfer
    def run_t1_through_freesurfer
      ENV[:SUBJECTS_DIR] = @proc_options[:freesurfer_subjects_dir]

      system("recon-all -all -s #{OPTIONS[:subid]} -i #{File.join(OPTIONS[:subj_raw_dir], OPTIONS[:subid], "003/I0001.dcm")}")

      # This will run for 20 hours, and return a pretty subject directory. See below
      # for a sample manifest.
    end

    # Convert the aparc.a2009s+aseg.mgz back to NIfTI in T1 space for sampling.
    def prepare_segmentation
      aparc_base = "aparc.a2009s+aseg"
      freesurfer_subj_mri_dir = File.join(OPTIONS[:freesurfer_subjects_dir], OPTIONS[:subid], "mri")

      Dir.chdir modality_dir do
        system("mri_convert #{File.join(freesurfer_subj_mri_dir, aparc_base)}.mgz #{apar_base}.nii")

        # Resample the Segementation image to the T1 space (using nearest neighbor so as
        # to not change any values):
        system("
          flirt -in aparc.a2009s+aseg.nii -ref plq20005_Ax-FSPGR-BRAVO_003.nii \
          -out raparc.a2009s+aseg.nii -applyxfm -init $FSLDIR/etc/flirtsch/ident.mat \
          -interp nearestneighbour"
        )
      end

      # You could resample with SPM as well (Coregister - Write) but this is a nice
      # command line option. For the actual registration, we are using SPM because
      # it's a somewhat better (qualitatively) algorithm.
    end

    # Rigidly register & reslice Target modality to T1 space
    def register_modality_to_t1
      # For ASL, we ill use the PD image because it's information is closer to
      # anatomical than the computed flow maps, bringing along the flow maps.

      # system("spm8")
      # Click Coregister - Estimate and Reslice
      # Reference Image: Select the BRAVO
      # Source Image: Select the PD Map
      # Other Images: Select the ASL Map
      # Use other defaults (NMI, etc.)
    end

    # Sample mean and measures of interest
    def sample_roi
      system("3dROIstats -mask_f2short -mask raparc.a2009s+aseg.nii plq20005_Ax-FSPGR-BRAVO_003.nii rASL_plq20005_fmap.nii > stats.txt")
    end


  end

  # Heroes in a half shell - Turtle Power!
  #
  # Manage a list of shell commands.
  # q = ShellQueue.new(:dry_run => true)
  # q << "ls"
  # q << "time"
  # q.run!
  class ShellQueue
    attr_reader :dry_run, :commands, :completed_commands, :failed_commands

    # Initialize a queue with an options hash.
    def initialize(options = {:dry_run => false})
      @commands = Array.new
      @dry_run = options[:dry_run]
    end

    # Run a queue (or print if dry_run)
    def run!
      while @commands.length > 0
        command = @commands.shift
        puts command
        @run_success = run command unless @dry_run
      end
    end

    def <<(cmd)
      @commands << cmd
      run!
    end

    # Expose >>, << array methods to commands array.
    def method_missing(m, *args, &block)
      @commands.send(m, *args, &block)
    end

  end
end
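FreesurferRoiTask pushes its shell commands through the ShellQueue defined at the bottom of the file; because the queue is built with :dry_run => true, each appended command is printed by run! rather than executed. A minimal sketch of that pattern, assuming the class files load the same way dtitask_spec.rb loads its dependencies; the commands themselves are placeholders:

    # Dry-run sketch of ShellQueue: appended commands are printed, not run.
    require 'wadrc-bcp-scripts/basic_task'
    require 'wadrc-bcp-scripts/freesurfer_roi_task'

    queue = WadrcBcpScripts::ShellQueue.new(:dry_run => true)
    queue << "recon-all -all -s subj001 -i /path/to/raw/003/I0001.dcm"    # printed by run!
    queue << "mri_convert aparc.a2009s+aseg.mgz aparc.a2009s+aseg.nii"    # printed by run!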
data/lib/wadrc-bcp-scripts/tensor.rb
ADDED
@@ -0,0 +1,25 @@
module WadrcBcpScripts

  # A class for manipulating Tensor info for DTI.
  class Tensor
    attr_accessor :data

    def initialize(filepath)
      @data = []
      open(filepath, 'r').each do |line|
        @data << line.split(/[\,\:\s]+/).each { |val| val.strip }
      end
    end

    # Write out Data to a file.
    def to_fsl_txt(output_file = 'out.txt')
      puts "Writing " + output_file
      open(output_file, 'w') do |file|
        @data.transpose.each do |line|
          file.puts line.join(' ')
        end
      end
    end
  end

end
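Tensor reads a comma-, colon-, or whitespace-delimited gradient table and to_fsl_txt writes its transpose, one row per line. A minimal sketch; the file names are placeholders and every row is assumed to have the same number of entries so Array#transpose succeeds:

    require 'wadrc-bcp-scripts/tensor'

    # Turn a column-per-direction table into FSL-style space-separated rows.
    tensor = WadrcBcpScripts::Tensor.new('25_directions_bvectors.txt')   # placeholder input
    tensor.to_fsl_txt('bvectors_rows.txt')                               # prints "Writing bvectors_rows.txt"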
data/spec/.svn/entries
ADDED
@@ -0,0 +1,28 @@
8

dir
146
file:///Data/vtrak1/SysAdmin/lab_repository/trunk/ImageProcessing/spec
file:///Data/vtrak1/SysAdmin/lab_repository



2009-05-08T17:32:47.685248Z
146
erik


svn:special svn:externals svn:needs-lock











fd7843e3-2c87-496d-a67a-9fe0c7cb9cb9

data/spec/.svn/format
ADDED
@@ -0,0 +1 @@
8
data/spec/FS_local_recon_spec.rb
ADDED
@@ -0,0 +1,22 @@
$:.unshift File.join(File.dirname(__FILE__),'..','bin')

require 'helper_spec'
load 'FS_local_recon'

describe "Perform local recon" do

  # before(:all) do
  #   @normalizer = Normalizer.new('/tmp/awr011_8414_06182009')
  # end

  it "should run local recon on a subject" do
    hostname = 'localhost'
    subject = "tami99999"
    options = {:server_analysis_dir => $MRI_DATA, :autorecon_args => ['-autorecon2-wm', '-autorecon3']}
    lambda { run!(hostname, subject, options) }.should_not raise_error
  end

  # after(:each) do
  # end

end
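The spec drives FS_local_recon through its run! entry point with a hostname, subject ID, and options hash. A hedged sketch of the same call outside RSpec; the load path, analysis directory, and subject ID are placeholders, and the option keys simply mirror the spec above:

    # Mirrors the spec: FS_local_recon (in bin/) is loaded and its run! is called.
    $:.unshift '/path/to/wadrc-bcp-scripts/bin'   # placeholder path to the gem's bin/
    load 'FS_local_recon'

    run!('localhost', 'subj001',
         :server_analysis_dir => '/path/to/analyses',   # placeholder
         :autorecon_args      => ['-autorecon2-wm', '-autorecon3'])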
data/spec/blueprints.rb
ADDED
@@ -0,0 +1,12 @@
require 'machinist/object'

Dtitask.blueprint do
  config({
    :bvectors_file=>"/Data/vtrak1/analyses/barb/cathy_temp/25_directions_bvectors.txt",
    :bvectors_file=>"/Data/vtrak1/analyses/barb/cathy_temp/25_directions_bvectors.txt",
    :bvalues_file=>"/Data/vtrak1/analyses/barb/cathy_temp/25_directions_bvalues.txt",
    :file_glob=>"'*.dcm'",
    :volumes=>26,
    :dry_run=>true,
    :slices_per_volume=>48 })
end
data/spec/dtitask_spec.rb
ADDED
@@ -0,0 +1,95 @@
$:.unshift File.join(File.dirname(__FILE__),'..','lib')

require 'helper_spec'
require 'wadrc-bcp-scripts/basic_task'
require 'wadrc-bcp-scripts/dtitask'

describe "Exception Testing for DtiTask" do
  before(:all) do
    $LOG = Logger.new(STDOUT)
  end

  before(:each) do
    # @valid_config = {
    # :slice_order=>"altplus",
    # :bvectors_file=>"/Data/vtrak1/analyses/barb/cathy_temp/25_directions_bvectors.txt",
    # :bvalues_file=>"/Data/vtrak1/analyses/barb/cathy_temp/25_directions_bvalues.txt",
    # :file_glob=>"'*.dcm'",
    # :force_overwrite=>true,
    # :volumes=>26,
    # :dry_run=>true,
    # :slices_per_volume=>48
    # }

    @valid_config = {
      :slice_order=>"altplus",
      :bvectors_file=>"/Data/vtrak1/preprocessed/visits/johnson.alz.snodrest.visit2/DTI_info/preproc_dti/enc12_rows.txt",
      :bvalues_file=>"/Data/vtrak1/preprocessed/visits/johnson.alz.snodrest.visit2/DTI_info/preproc_dti/bvalues.txt",
      :file_glob=>"'I*'",
      :force_overwrite=>true,
      :volumes=>13,
      :dry_run=>true,
      :slices_per_volume=>39,
      :rotate=>true
    }
    @valid_dtitask = WadrcBcpScripts::Dtitask.new(@valid_config)

    @subid = 'alz021_2'
    @valid_input_directory = File.join($MRI_DATA, @subid, 'anatomicals', 'S9')
  end

  it "should raise an IOError if tensor_files do not exist." do
    File.stub!(:exists?).and_return(false)
    lambda { @valid_dtitask.ensure_file_exists(@valid_config[:bvectors_file])}.should raise_error(IOError, "#{@valid_config[:bvectors_file]} not found.")
  end

  it "should not raise an IOError if tensor_files do exist." do
    File.stub!(:exists?).and_return(true)
    lambda { @valid_dtitask.ensure_file_exists(@valid_config[:bvectors_file])}.should_not raise_error(IOError, "#{@valid_config[:bvectors_file]} not found.")
  end

  it "should raise an error if required keys are not in config file." do
    missing_key = :bvectors_file
    invalid_config = @valid_config
    invalid_config.delete(missing_key)
    lambda { WadrcBcpScripts::Dtitask.new(invalid_config).config_requires(missing_key) }.should raise_error(ScriptError, "Missing Keys: #{missing_key}")
  end

  it "should create a valid command array when given correct configuration using rotbvecs" do
    dir = Dir.tmpdir
    valid_command = [
      "to3d -prefix #{@subid}.nii -session #{dir} -time:zt #{@valid_config[:slices_per_volume]} #{@valid_config[:volumes]} 8000 altplus #{@valid_input_directory}/#{@valid_config[:file_glob]}",
      "eddy_correct #{dir}/#{@subid}.nii #{dir}/#{@subid}_ecc.nii 0",
      "rotbvecs #{@valid_config[:bvectors_file]} #{dir}/#{@subid}_#{File.basename(@valid_config[:bvectors_file])} #{dir}/#{@subid}_ecc.ecclog",
      "bet #{dir}/#{@subid}_ecc #{dir}/#{@subid}_ecc_brain -f 0.1 -g 0 -n -m",
      "dtifit --data=#{dir}/#{@subid}_ecc.nii --out=#{dir}/#{@subid}_dti --mask=#{dir}/#{@subid}_ecc_brain_mask --bvecs=#{dir}/#{@subid}_#{File.basename(@valid_config[:bvectors_file])} --bvals=#{@valid_config[:bvalues_file]}"
    ]
    cmd = @valid_dtitask.construct_commands(@valid_input_directory, dir, @subid).collect! {|cmd| cmd.squeeze(" ") }
    cmd.should == valid_command
  end

  it "should create a valid command array when given correct configuration NOT using rotbvecs" do
    dir = Dir.tmpdir
    config = @valid_config.dup
    config[:rotate] = false
    valid_command = [
      "to3d -prefix #{@subid}.nii -session #{dir} -time:zt #{@valid_config[:slices_per_volume]} #{@valid_config[:volumes]} 8000 altplus #{@valid_input_directory}/#{@valid_config[:file_glob]}",
      "eddy_correct #{dir}/#{@subid}.nii #{dir}/#{@subid}_ecc.nii 0",
      "bet #{dir}/#{@subid}_ecc #{dir}/#{@subid}_ecc_brain -f 0.1 -g 0 -n -m",
      "dtifit --data=#{dir}/#{@subid}_ecc.nii --out=#{dir}/#{@subid}_dti --mask=#{dir}/#{@subid}_ecc_brain_mask --bvecs=#{@valid_config[:bvectors_file]} --bvals=#{@valid_config[:bvalues_file]}"
    ]
    cmd = WadrcBcpScripts::Dtitask.new(config).construct_commands(@valid_input_directory, dir, @subid).collect! {|cmd| cmd.squeeze(" ") }
    cmd.should == valid_command
  end

  it "should sucessfully run through fixture data for johnson.alz.visit2" do
    Dir.mktmpdir do |dir|
      cmd = @valid_dtitask.construct_commands(@valid_input_directory, dir, @subid).collect! {|cmd| cmd.squeeze(" ") }
      puts cmd.join("; ")
      system(cmd.join("; ")).should == true
    end
  end

  # after(:each) do
  # end
end
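The specs above pin down Dtitask's public surface: it is built from a config hash, validates required keys, and construct_commands(input_dir, output_dir, subid) returns the to3d, eddy_correct, rotbvecs, bet, and dtifit command strings in order. A minimal sketch of driving it directly; the paths and subject ID are placeholders and :dry_run keeps everything at the printing stage:

    require 'logger'
    require 'tmpdir'
    require 'wadrc-bcp-scripts/basic_task'
    require 'wadrc-bcp-scripts/dtitask'

    $LOG = Logger.new(STDOUT)   # the spec sets this up in before(:all)

    # Placeholder paths and IDs; keys mirror the spec's @valid_config.
    config = {
      :slice_order       => 'altplus',
      :bvectors_file     => '/path/to/enc12_rows.txt',
      :bvalues_file      => '/path/to/bvalues.txt',
      :file_glob         => "'I*'",
      :force_overwrite   => true,
      :volumes           => 13,
      :slices_per_volume => 39,
      :rotate            => true,
      :dry_run           => true
    }

    task = WadrcBcpScripts::Dtitask.new(config)
    task.construct_commands('/path/to/raw/S9', Dir.tmpdir, 'subj001').each do |cmd|
      puts cmd   # inspect the to3d/eddy_correct/rotbvecs/bet/dtifit pipeline first
    end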