wadrc-bcp-scripts 0.0.6

Sign up to get free protection for your applications and to get access to all the features.
Files changed (48) hide show
  1. data/.gitignore +7 -0
  2. data/.svn/entries +41 -0
  3. data/.svn/format +1 -0
  4. data/.svn/prop-base/reconstruct_dti_test.rb.svn-base +5 -0
  5. data/.svn/text-base/reconstruct_dti_test.rb.svn-base +25 -0
  6. data/Gemfile +4 -0
  7. data/README.md +5 -0
  8. data/Rakefile +7 -0
  9. data/VERSION +1 -0
  10. data/bin/FS_local_recon +98 -0
  11. data/bin/FieldmapAtWaisman.rb +35 -0
  12. data/bin/createFieldmap.rb +33 -0
  13. data/bin/preprocess_dti.rb +110 -0
  14. data/bin/run_dti_fit_for_study.sh +26 -0
  15. data/bin/run_dti_fit_script.sh +28 -0
  16. data/bin/tensor_transpose.rb +45 -0
  17. data/lib/.svn/entries +54 -0
  18. data/lib/.svn/format +1 -0
  19. data/lib/.svn/prop-base/dti_wrapper.rb.svn-base +5 -0
  20. data/lib/.svn/prop-base/dtifit_processing.rb.svn-base +5 -0
  21. data/lib/.svn/text-base/dti_wrapper.rb.svn-base +33 -0
  22. data/lib/.svn/text-base/dtifit_processing.rb.svn-base +95 -0
  23. data/lib/additions/NetSshConnectionSession.rb +20 -0
  24. data/lib/wadrc-bcp-scripts.rb +13 -0
  25. data/lib/wadrc-bcp-scripts/basic_task.rb +137 -0
  26. data/lib/wadrc-bcp-scripts/dtitask.rb +156 -0
  27. data/lib/wadrc-bcp-scripts/fieldmap_classes.rb +166 -0
  28. data/lib/wadrc-bcp-scripts/freesurfer_roi_task.rb +134 -0
  29. data/lib/wadrc-bcp-scripts/tensor.rb +25 -0
  30. data/lib/wadrc-bcp-scripts/version.rb +3 -0
  31. data/spec/.svn/entries +28 -0
  32. data/spec/.svn/format +1 -0
  33. data/spec/FS_local_recon_spec.rb +22 -0
  34. data/spec/blueprints.rb +12 -0
  35. data/spec/dtitask_spec.rb +95 -0
  36. data/spec/examples/johnson.alz.snodrest.visit2.yaml +14 -0
  37. data/spec/examples/johnson.tbi.aware.visit1.yaml +11 -0
  38. data/spec/examples/johnson.wrap140.visit1.yaml +12 -0
  39. data/spec/examples/johnson.wrap140.visit1_TR12s.yaml +12 -0
  40. data/spec/examples/spec.yaml +22 -0
  41. data/spec/factories.rb +11 -0
  42. data/spec/helper_spec.rb +8 -0
  43. data/test/.svn/entries +41 -0
  44. data/test/.svn/format +1 -0
  45. data/test/.svn/prop-base/reconstruct_dti_test.rb.svn-base +5 -0
  46. data/test/.svn/text-base/reconstruct_dti_test.rb.svn-base +25 -0
  47. data/test/reconstruct_dti_test.rb +25 -0
  48. metadata +188 -0
@@ -0,0 +1,45 @@
1
+ #!/usr/bin/env ruby
2
+ $:.unshift File.join(File.dirname(__FILE__),'..','lib')
3
+
4
+ require 'optparse'
5
+ require 'tensor'
6
+
7
+ def run!
8
+ # Parse CLI Options and Spec File
9
+ options = parse_options
10
+
11
+ # Create a DTI Preprocessing Flow Task and run it.
12
+ tensor = Tensor.new(options[:tensor_file])
13
+ tensor.to_fsl_txt(options[:output_file])
14
+
15
+ end
16
+
17
+ def parse_options
18
+ options = Hash.new
19
+
20
+ parser = OptionParser.new do |opts|
21
+ opts.banner = "Usage: #{File.basename(__FILE__)} [options] input_tensor output_transposed_tensor"
22
+
23
+ # opts.on('-t', '--tensor TENSOR_FILE', "Tensor File.") do |tensor_file|
24
+ # options[:tensor_file] = tensor_file
25
+ # end
26
+
27
+ opts.on_tail('-h', '--help', "Show this message") { puts(parser); exit }
28
+ opts.on_tail("Example: #{File.basename(__FILE__)} 40_direction.txt 40_direction_transposed.txt")
29
+ end
30
+ parser.parse!(ARGV)
31
+
32
+ options[:tensor_file] = ARGV[0]
33
+ options[:output_file] = ARGV[1]
34
+
35
+ unless ARGV.size == 2
36
+ puts(parser); exit
37
+ end
38
+
39
+ return options
40
+ end
41
+
42
+
43
+ if File.basename(__FILE__) == File.basename($0)
44
+ run!
45
+ end
@@ -0,0 +1,54 @@
1
+ 8
2
+
3
+ dir
4
+ 146
5
+ file:///Data/vtrak1/SysAdmin/lab_repository/trunk/ImageProcessing/lib
6
+ file:///Data/vtrak1/SysAdmin/lab_repository
7
+
8
+
9
+
10
+ 2009-05-08T17:32:47.685248Z
11
+ 146
12
+ erik
13
+
14
+
15
+ svn:special svn:externals svn:needs-lock
16
+
17
+
18
+
19
+
20
+
21
+
22
+
23
+
24
+
25
+
26
+
27
+ fd7843e3-2c87-496d-a67a-9fe0c7cb9cb9
28
+
29
+ dtifit_processing.rb
30
+ file
31
+
32
+
33
+
34
+
35
+ 2009-05-08T17:33:56.000000Z
36
+ b2bef44f643323c28603d4dcb8dd1da8
37
+ 2009-05-08T17:32:47.685248Z
38
+ 146
39
+ erik
40
+ has-props
41
+
42
+ dti_wrapper.rb
43
+ file
44
+
45
+
46
+
47
+
48
+ 2009-05-08T17:33:57.000000Z
49
+ 22334daf49e150cb1646a6a6ac7a2033
50
+ 2009-05-08T17:32:47.685248Z
51
+ 146
52
+ erik
53
+ has-props
54
+
@@ -0,0 +1 @@
1
+ 8
@@ -0,0 +1,5 @@
1
+ K 14
2
+ svn:executable
3
+ V 0
4
+
5
+ END
@@ -0,0 +1,5 @@
1
+ K 14
2
+ svn:executable
3
+ V 0
4
+
5
+ END
@@ -0,0 +1,33 @@
1
+ #!/bin/env ruby
2
+ ## This is a temporary wrapper to execute DTI data.
3
+
4
+ require 'dtifit_processing'
5
+
6
+ =begin rdoc
7
+ This library provides basic processing for Diffusion Tensor Images (DTI)
8
+ This command-line processing script takes raw DTI dicoms and outputs FA, MD &
9
+ associated diffusion maps (eigenvalues & eigenvectors).
10
+
11
+ Currently, the script assumes raw data are unzipped and the output directory
12
+ exists, the glob and DTI params are constant across DTI series.
13
+ =end
14
+
15
+
16
+ #def run!
17
+ # input_directory = '/Data/vtrak1/raw/wrap140/wrp002_5938_03072008/017'
18
+ # output_directory = '/Data/vtrak1/preprocessed/visits/wrap140.visit1/wrp002/dti'
19
+ # subject_prefix = 'wrp002'
20
+ #
21
+ # ReconstructDTI.reconstruct!(input_directory, output_directory, subject_prefix)
22
+ #end
23
+ #
24
+ #run!
25
+
26
+
27
+ if __FILE__ == $0
28
+ if ARGV.size != 3
29
+ puts "Usage: dtifit_processing.rb input_directory output_directory subject_prefix"
30
+ else
31
+ ReconstructDTI.reconstruct!(ARGV[0], ARGV[1], ARGV[2])
32
+ end
33
+ end
@@ -0,0 +1,95 @@
1
+ #!/bin/env ruby
2
+
3
+ =begin rdoc
4
+ This library provides basic processing for Diffusion Tensor Images (DTI).
5
+
6
+ The main function reconstruct! takes 3 arguments: A directory of raw DTI dicoms,
7
+ an output directory and a filename prefix. A set of batch commands using standard
8
+ imaging tools (AFNI & FSL) are generated and executed to create Fractional
9
+ Anisotropy (FA), Mean Diffusivity (MD) and associated diffusion maps
10
+ (eigenvalues & eigenvectors) in the output directory.
11
+
12
+ The script depends on AFNI to be in the path for reconstruction (to3d) and
13
+ FSL to be in the path for DTI Data Fitting (eddy_correct, bet & dtifit)
14
+
15
+ =end
16
+
17
+ require 'fileutils'
18
+
19
+ class ReconstructDTI
20
+
21
+ BVECTORS_FILE = '/Data/vtrak1/preprocessed/progs/wrap140.visit1/DTI_Fitting/25_directions_UWhospital_magnet_horizontal_spaces.csv' # Vector directions of the gradient for each of the 25 directtions in this 25-direction DTI sequence. A 3x26 matrix.
22
+ BVALUES_FILE = '/Data/vtrak1/preprocessed/progs/wrap140.visit1/DTI_Fitting/25_directions_bvalues.txt' # Magnitude of the direction vectors. A 1x26 matrix of 0 for the first (B0) and then 1000 for each direction afterwards.
23
+
24
+ FILE_GLOB = "'I*.dcm'"
25
+ VOLUMES = 26
26
+ SLICES_PER_VOLUME = 49
27
+ SLICE_ORDER = 'altplus'
28
+
29
+ # Checks for some required helper applications that must be installed on the
30
+ # system prior to use. It returns false if there are no missing
31
+ # processing program binaries and returns the names of the missing programs.
32
+ def self.missing_required_binaries?
33
+ missing_binaries = []
34
+ ['to3d', 'eddy_correct', 'bet', 'dtifit'].each do |required_binary|
35
+ if system("which #{required_binary} > /dev/null") == false
36
+ missing_binaries << required_binary
37
+ end
38
+ end
39
+
40
+ if missing_binaries.size == 0
41
+ return false
42
+ else
43
+ return missing_binaries
44
+ end
45
+
46
+ end
47
+
48
+ # Constructs the commands used in the script from constants and variables
49
+ # passed in from the command line.
50
+ def self.construct_commands
51
+ to3d_recon_options = "-time:zt #{SLICES_PER_VOLUME} #{VOLUMES} 1 #{SLICE_ORDER} #{@@input_directory}/#{FILE_GLOB}"
52
+
53
+ recon_cmd = "to3d -prefix #{@@file_prefix}.nii -session #{@@output_directory} #{to3d_recon_options}"
54
+ eddy_cmd = "eddy_correct #{@@output_directory}/#{@@file_prefix}.nii #{@@output_directory}/#{@@file_prefix}_ecc.nii 0"
55
+ mask_cmd = "bet #{@@output_directory}/#{@@file_prefix}_ecc #{@@output_directory}/#{@@file_prefix}_ecc_brain -f 0.1 -g 0 -n -m"
56
+ dtifit_cmd = "dtifit --data=#{@@output_directory}/#{@@file_prefix}_ecc.nii --out=#{@@output_directory}/#{@@file_prefix}_dti --mask=#{@@output_directory}/#{@@file_prefix}_ecc_brain_mask --bvecs=#{BVECTORS_FILE} --bvals=#{BVALUES_FILE}"
57
+
58
+ @@batch_cmd = [recon_cmd, eddy_cmd, mask_cmd, dtifit_cmd].join("; ")
59
+ end
60
+
61
+ # Sets variables passed in from the command-line, constructs the processing
62
+ # commands and then executes them.
63
+ #
64
+ # Throws an IOError if input_directory is not found on the filesystem or
65
+ # output directory already exists.
66
+ def self.reconstruct!(input_directory, output_directory, file_prefix)
67
+ @@input_directory = File.expand_path(input_directory)
68
+ @@output_directory = File.expand_path(output_directory)
69
+ @@file_prefix = file_prefix
70
+
71
+ if missing_required_binaries?
72
+ puts "You are missing some required processing programs:"
73
+ missing_required_binaries?.each { |requirement| puts requirement }
74
+ puts "Please install the missing programs or run this script from a properly configured workstation."
75
+ raise(Error)
76
+ end
77
+
78
+ raise(IOError, "#{@@input_directory}: not found.") if not File.directory?(@@input_directory)
79
+ raise(IOError, "#{@@output_directory} already exists.") if File.directory?(@@output_directory)
80
+ FileUtils.mkdir_p(@@output_directory)
81
+
82
+
83
+ construct_commands
84
+
85
+ puts @@batch_cmd
86
+ system @@batch_cmd
87
+ end
88
+
89
+ end
90
+
91
+ # Code for when executing from the command line.
92
+ if __FILE__ == $0
93
+ puts "Libraries for DTI processing. To use this with the command-line, use dti_wrapper.rb"
94
+ end
95
+
@@ -0,0 +1,20 @@
1
+ # Reopen Net::SSH to allow for a "realtime" ssh run that prints out stdout
2
+ # immediately upon receipt, so you can interactively watch the results.
3
+ class Net::SSH::Connection::Session
4
+ def exec_realtime(cmd)
5
+ open_channel do |channel|
6
+ channel.exec(cmd) do |ch, success|
7
+ abort "could not execute command: #{cmd}" unless success
8
+
9
+ channel.on_data do |ch, data|
10
+ puts "#{data}"
11
+ end
12
+
13
+ channel.on_extended_data do |ch, type, data|
14
+ warn "ERROR: #{data}"
15
+ end
16
+ end
17
+ end
18
+ loop
19
+ end
20
+ end
@@ -0,0 +1,13 @@
1
+ $LOAD_PATH.unshift(File.dirname(__FILE__))
2
+
3
+ require 'net/ssh'
4
+
5
+ require 'wadrc-bcp-scripts/basic_task'
6
+ require 'wadrc-bcp-scripts/dtitask'
7
+ require 'wadrc-bcp-scripts/fieldmap_classes'
8
+ require 'wadrc-bcp-scripts/tensor'
9
+ require 'wadrc-bcp-scripts/version'
10
+ require 'additions/NetSshConnectionSession'
11
+
12
+ # WADRC Basic Common Processing (BCP) Scripts
13
+ module WadrcBcpScripts; end
@@ -0,0 +1,137 @@
1
+ require 'fileutils'
2
+ require 'escoffier'
3
+
4
+ module WadrcBcpScripts
5
+
6
+ # Environment and configuration checks common between processing streams.
7
+ class BasicTask
8
+ # Task Configuration Options Hash
9
+ attr_accessor :config
10
+
11
+ # Check for some required helper applications that must be installed on the
12
+ # system prior to use. It returns true if there are no missing
13
+ # processing program binaries, otherwise it puts them to the screen and
14
+ # raises a ScriptError.
15
+ def environment_requires(*args)
16
+ missing_binaries = []
17
+ args.each do |required_binary|
18
+ if system("which #{required_binary.to_s} > /dev/null") == false
19
+ missing_binaries << required_binary
20
+ end
21
+ end
22
+
23
+ begin
24
+ unless missing_binaries.size == 0
25
+ error = "
26
+ Warning: The following processing tools weren't found on your system.
27
+ - #{missing_binaries.join(', ')}
28
+
29
+
30
+ Please install the missing programs, run this script from a properly configured workstation,
31
+ or use the dry_run option to output your script to the terminal.\n "
32
+ puts error
33
+ raise(ScriptError, "Missing #{missing_binaries.join(", ")}")
34
+ end
35
+ end
36
+
37
+ return
38
+ end
39
+
40
+ # Check for required keys in the @config hash.
41
+ def config_requires(*args)
42
+ missing_keys = []
43
+ args.each do |required_key|
44
+ unless @config.has_key?(required_key)
45
+ missing_keys << required_key
46
+ end
47
+ end
48
+
49
+ unless missing_keys.size == 0
50
+ error = "
51
+ Warning: Misconfiguration detected.
52
+ You are missing the following keys from your spec file:
53
+ - #{missing_keys.join(', ')}
54
+
55
+
56
+ Please install the missing programs, run this script from a properly configured workstation,
57
+ or use the dry_run option to output your script to the terminal.\n "
58
+ puts error
59
+ raise(ScriptError, "Missing Keys: #{missing_keys.join(", ")}")
60
+ end
61
+ end
62
+
63
+ # Basic IO Directory Checks
64
+ def check_setup(input_directory = @input_directory, output_directory = @output_directory)
65
+ # Check Input Directory
66
+ raise(IOError, "#{input_directory}: not found.") unless File.directory?(input_directory)
67
+
68
+ # Check Gradient Tensor Files
69
+ ensure_file_exists @config[:bvectors_file], @config[:bvalues_file]
70
+
71
+ unless @config[:dry_run]
72
+ # Check Working Input Directory
73
+ if @config[:force_sandbox]
74
+ path = Pathname.new(input_directory)
75
+ # @working_input_directory = path.sandbox(input_directory)
76
+ @working_input_directory = path.prep_mise(input_directory + '/', Dir.mktmpdir + '/')
77
+ @working_input_directory = File.join(@working_input_directory, File.basename(input_directory))
78
+ else
79
+ @working_input_directory = input_directory
80
+ end
81
+
82
+ # Check Output Directory and force cleanup if necessary.
83
+ colliding_files = Dir.glob(File.join(output_directory, @file_prefix) + '*')
84
+ puts colliding_files
85
+ if File.directory?(output_directory) && colliding_files.empty? == false
86
+ if @config[:force_overwrite] then colliding_files.each {|file| puts "Removing #{file}..."; File.delete(file) }
87
+ else raise(IOError, "#{output_directory} already exists. Set force_overwite in your spec file to overwrite the directory.")
88
+ end
89
+ end
90
+ FileUtils.mkdir_p(output_directory)
91
+ else
92
+ # Use the real input directory if the working directory was not assigned
93
+ # (ie during dry run)
94
+ @working_input_directory = input_directory
95
+ end
96
+
97
+ # Setup Logging
98
+ logfile = File.join(output_directory, "#{File.basename(input_directory)}_#{today}.log")
99
+ if File.writable?(output_directory) && ! @config[:dry_run]
100
+ $LOG = Logger.new(logfile)
101
+ else
102
+ $LOG = Logger.new(STDOUT)
103
+ end
104
+
105
+ # Switch CWD (default output location for rotbvecs script)
106
+ @cwd = Dir.pwd
107
+ Dir.chdir(output_directory) unless @config[:dry_run]
108
+ end
109
+
110
+ def ensure_file_exists(*args)
111
+ args.each do |file|
112
+ raise(IOError, "#{file} not found.") unless File.exists?(file)
113
+ end
114
+
115
+ end
116
+
117
+ private
118
+
119
+ def cleanup
120
+ Dir.chdir(@cwd)
121
+ puts Dir.pwd
122
+ cleanup_directories
123
+ # $LOG.close
124
+ end
125
+
126
+ # Cleanup Sandbox Directories
127
+ def cleanup_directories
128
+ if File.directory?(@working_input_directory) && (@input_directory != @working_input_directory)
129
+ FileUtils.rm_r @working_input_directory
130
+ end
131
+ end
132
+
133
+ def today
134
+ [Date.today.month, Date.today.day, Date.today.year].join
135
+ end
136
+ end
137
+ end
@@ -0,0 +1,156 @@
1
+ module WadrcBcpScripts
2
+
3
+ # This library creates scripts for basic processing for Diffusion Tensor Images
4
+ # (DTI).
5
+ #
6
+ # The main function reconstruct! takes 3 arguments: A directory of raw DTI
7
+ # dicoms, an output directory and a filename prefix. A set of batch commands
8
+ # using standard imaging tools (AFNI & FSL) are generated and executed to create
9
+ # Fractional Anisotropy (FA), Mean Diffusivity (MD) and associated diffusion
10
+ # maps (eigenvalues & eigenvectors) in the output directory.
11
+ #
12
+ # The script depends on AFNI to be in the path for reconstruction (to3d) and
13
+ # FSL to be in the path for DTI Data Fitting (eddy_correct, bet & dtifit)
14
+ class Dtitask < BasicTask
15
+
16
+ # Task Configuration Options Hash
17
+ attr_accessor :config
18
+ # Source Directory of DICOMS
19
+ attr_reader :input_directory
20
+ # Source Directory of _unzipped_ DICOMS if using a Sandbox (or input_directory if not)
21
+ attr_reader :working_input_directory
22
+ # Destination Directory for DTI vectors, values, and maps
23
+ attr_reader :output_directory
24
+ # File Prefix to use for processing.
25
+ attr_reader :file_prefix
26
+
27
+
28
+ # Initialize DTItask with the following options:
29
+ #
30
+ # DTI Options
31
+ # * bvectors_file :
32
+ # * bvalues_file :
33
+ # * repetition_time : TR in milliseconds, defaults to 8000
34
+ #
35
+ #
36
+ # File Conversion Options
37
+ # * file_glob :
38
+ # * volumes :
39
+ # * slices_per_volume :
40
+ # * slice_order:
41
+ #
42
+ #
43
+ # Runtime Options
44
+ # * dry_run :
45
+ # * force_overwrite :
46
+ # * sandbox : Forces copying and unzipping to a temp directory in the case
47
+ # of zipped dicom files.
48
+ #
49
+ def initialize(config = Hash.new)
50
+ @config = config
51
+
52
+ @config[:dry_run] = true if config.empty?
53
+
54
+ begin
55
+ # Initialize Settings for File Conversion and Diffusion Directions and Values
56
+ config_requires :bvectors_file, :bvalues_file, :file_glob, :volumes,
57
+ :slices_per_volume, :slice_order
58
+
59
+ # List binaries required for the script to run.
60
+ environment_requires :to3d, :eddy_correct, :bet, :dtifit, :rotbvecs
61
+ rescue ScriptError => e
62
+ raise e unless @config[:dry_run]
63
+ end
64
+
65
+ end
66
+
67
+
68
+ # Reconstruct creates a script of commands to execute in order to prepare
69
+ # DTI data for analyses (take a raw directory of DICOMS, convert them to
70
+ # nifti, eddy current correct them, and fit them using FSL to create
71
+ # eigen vectors and values, and MD and FA maps.
72
+ #
73
+ # Throws an IOError if input_directory is not found on the filesystem or
74
+ # output directory already exists (except during a dry_run).
75
+ def reconstruct!(input_directory, output_directory, file_prefix = nil)
76
+ @input_directory = File.expand_path(input_directory)
77
+ @output_directory = File.expand_path(output_directory)
78
+ @file_prefix = file_prefix ? file_prefix : File.basename(input_directory)
79
+
80
+ introduction = "Begin processing #{File.join(@input_directory)}"; puts
81
+ puts "-" * introduction.size
82
+ puts introduction; puts
83
+
84
+ begin check_setup unless @config[:dry_run]
85
+ rescue IOError => e
86
+ puts "Error: #{e}"
87
+ exit
88
+ end
89
+
90
+ # Construct the Script, output it and run it.
91
+ batch_cmd = construct_commands(@working_input_directory, @output_directory, @file_prefix)
92
+
93
+ batch_cmd.each do |cmd|
94
+ puts cmd; $LOG.info cmd
95
+ puts `#{cmd}` unless @config[:dry_run]
96
+ puts
97
+ end
98
+
99
+ cleanup unless @config[:dry_run]
100
+
101
+ puts "Done processing #{@file_prefix}" unless @config[:dry_run]
102
+
103
+ end
104
+
105
+
106
+ # Constructs the commands used in the script from constants and variables
107
+ # set during initialization/configuration and gathered by the main
108
+ # reconstruct! function.
109
+ def construct_commands(input_directory, output_directory, file_prefix)
110
+ rep_time = @config[:repetition_time] ? @config[:repetition_time] : 8000
111
+ to3d_recon_options = "-time:zt #{@config[:slices_per_volume]} #{@config[:volumes]} #{rep_time} #{@config[:slice_order]} #{input_directory}/#{@config[:file_glob]}"
112
+
113
+ commands = Array.new
114
+
115
+ # Recon
116
+ commands << "to3d -prefix #{file_prefix}.nii -session #{output_directory} #{to3d_recon_options}"
117
+
118
+ # Eddy Current Correction
119
+ commands << "eddy_correct #{output_directory}/#{file_prefix}.nii #{output_directory}/#{file_prefix}_ecc.nii 0"
120
+
121
+ if @config[:rotate]
122
+ # Rotate_bvecs
123
+ subject_bvectors_file = File.join(output_directory, file_prefix + "_" + File.basename(@config[:bvectors_file]))
124
+ commands << "rotbvecs #{@config[:bvectors_file]} #{subject_bvectors_file} #{File.join(output_directory, file_prefix)}_ecc.ecclog"
125
+ else
126
+ subject_bvectors_file = @config[:bvectors_file]
127
+ end
128
+
129
+
130
+ # Apply Mask
131
+ if @config[:mask]
132
+ out = "#{File.join(output_directory, file_prefix)}_ecc_ss"
133
+ commands << "fslmaths #{@config[:mask]} -mul #{File.join(output_directory, file_prefix)}_ecc #{out}"
134
+ else
135
+ out = "#{File.join(output_directory, file_prefix)}_ecc"
136
+ end
137
+ commands << "bet #{out} #{out}_brain -f 0.1 -g 0 -n -m"
138
+
139
+ # Run DTI Fit
140
+ commands << "dtifit --data=#{output_directory}/#{file_prefix}_ecc.nii \
141
+ --out=#{output_directory}/#{file_prefix}_dti \
142
+ --mask=#{out}_brain_mask \
143
+ --bvecs=#{subject_bvectors_file} \
144
+ --bvals=#{@config[:bvalues_file]}"
145
+
146
+ return commands
147
+ end
148
+
149
+ end
150
+
151
+ # Message for when executing from the command line.
152
+ if __FILE__ == $0
153
+ puts "Script generation library for DTI processing. To use this with the command-line, use preprocess_dti.rb"
154
+ end
155
+
156
+ end