rpipe 0.0.1
File listing for the rpipe 0.0.1 gem (paths and added/removed line counts):
- data/.document +5 -0
- data/.gitignore +23 -0
- data/LICENSE +20 -0
- data/README +0 -0
- data/README.rdoc +33 -0
- data/Rakefile +78 -0
- data/VERSION +1 -0
- data/bin/create_driver.rb +79 -0
- data/bin/rpipe +131 -0
- data/bin/swallow_batch_run.rb +21 -0
- data/lib/core_additions.rb +5 -0
- data/lib/custom_methods/JohnsonMerit220Visit1Preproc.m +26 -0
- data/lib/custom_methods/JohnsonMerit220Visit1Preproc.rb +43 -0
- data/lib/custom_methods/JohnsonMerit220Visit1Preproc_job.m +80 -0
- data/lib/custom_methods/JohnsonMerit220Visit1Stats.m +74 -0
- data/lib/custom_methods/JohnsonMerit220Visit1Stats.rb +63 -0
- data/lib/custom_methods/JohnsonMerit220Visit1Stats_job.m +63 -0
- data/lib/custom_methods/JohnsonTbiLongitudinalSnodPreproc.m +26 -0
- data/lib/custom_methods/JohnsonTbiLongitudinalSnodPreproc.rb +41 -0
- data/lib/custom_methods/JohnsonTbiLongitudinalSnodPreproc_job.m +69 -0
- data/lib/custom_methods/JohnsonTbiLongitudinalSnodStats.m +76 -0
- data/lib/custom_methods/JohnsonTbiLongitudinalSnodStats.rb +67 -0
- data/lib/custom_methods/JohnsonTbiLongitudinalSnodStats_job.m +59 -0
- data/lib/custom_methods/ReconWithHello.rb +7 -0
- data/lib/default_logger.rb +13 -0
- data/lib/default_methods/default_preproc.rb +76 -0
- data/lib/default_methods/default_recon.rb +80 -0
- data/lib/default_methods/default_stats.rb +94 -0
- data/lib/default_methods/recon/physionoise_helper.rb +69 -0
- data/lib/default_methods/recon/raw_sequence.rb +109 -0
- data/lib/generators/job_generator.rb +36 -0
- data/lib/generators/preproc_job_generator.rb +31 -0
- data/lib/generators/recon_job_generator.rb +76 -0
- data/lib/generators/stats_job_generator.rb +70 -0
- data/lib/generators/workflow_generator.rb +128 -0
- data/lib/global_additions.rb +18 -0
- data/lib/logfile.rb +310 -0
- data/lib/matlab_helpers/CreateFunctionalVolumeStruct.m +6 -0
- data/lib/matlab_helpers/import_csv.m +32 -0
- data/lib/matlab_helpers/matlab_queue.rb +37 -0
- data/lib/matlab_helpers/prepare_onsets_xls.m +30 -0
- data/lib/rpipe.rb +254 -0
- data/rpipe.gemspec +177 -0
- data/spec/generators/preproc_job_generator_spec.rb +27 -0
- data/spec/generators/recon_job_generator_spec.rb +33 -0
- data/spec/generators/stats_job_generator_spec.rb +50 -0
- data/spec/generators/workflow_generator_spec.rb +97 -0
- data/spec/helper_spec.rb +40 -0
- data/spec/integration/johnson.merit220.visit1_spec.rb +47 -0
- data/spec/integration/johnson.tbi.longitudinal.snod_spec.rb +48 -0
- data/spec/logfile_spec.rb +96 -0
- data/spec/matlab_queue_spec.rb +40 -0
- data/spec/merit220_stats_spec.rb +81 -0
- data/spec/physio_spec.rb +98 -0
- data/test/drivers/merit220_workflow_sample.yml +15 -0
- data/test/drivers/mrt00000.yml +65 -0
- data/test/drivers/mrt00015.yml +62 -0
- data/test/drivers/mrt00015_hello.yml +41 -0
- data/test/drivers/mrt00015_withphys.yml +81 -0
- data/test/drivers/tbi000.yml +129 -0
- data/test/drivers/tbi000_separatevisits.yml +137 -0
- data/test/drivers/tmp.yml +58 -0
- data/test/fixtures/faces3_recognitionA.mat +0 -0
- data/test/fixtures/faces3_recognitionA.txt +86 -0
- data/test/fixtures/faces3_recognitionA_equal.csv +25 -0
- data/test/fixtures/faces3_recognitionA_unequal.csv +21 -0
- data/test/fixtures/faces3_recognitionB_incmisses.txt +86 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_CPd3R_40.txt +13360 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_CPd3_40.txt +13360 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_CPttl_40.txt +13360 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_CRTd3R_40.txt +13360 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_CRTd3_40.txt +13360 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_CRTttl_40.txt +13360 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_HalfTR_CRTd3R_40.txt +334 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_HalfTR_CRTd3_40.txt +334 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_HalfTR_CRTttl_40.txt +334 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_HalfTR_RRT_40.txt +334 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_HalfTR_RVT_40.txt +334 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_HalfTR_card_spline_40.txt +334 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_HalfTR_resp_spline_40.txt +334 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_RRT_40.txt +9106 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_RVT_40.txt +9106 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_TR_CRTd3R_40.txt +167 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_TR_CRTd3_40.txt +167 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_TR_CRTttl_40.txt +167 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_TR_RRT_40.txt +167 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_TR_RVT_40.txt +167 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_TR_card_spline_40.txt +167 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_TR_resp_spline_40.txt +167 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_card_spline_40.txt +13360 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_resp_spline_40.txt +9106 -0
- data/test/fixtures/physionoise_regressors/EPI__fMRI_Task1_resp_spline_downsampled_40.txt +9106 -0
- data/test/fixtures/ruport_summary.yml +123 -0
- data/test/fixtures/valid_scans.yaml +35 -0
- data/test/helper.rb +10 -0
- data/test/test_dynamic_method_inclusion.rb +10 -0
- data/test/test_includes.rb +11 -0
- data/test/test_integrative_johnson.merit220.visit1.rb +31 -0
- data/test/test_preproc.rb +11 -0
- data/test/test_recon.rb +11 -0
- data/test/test_rpipe.rb +19 -0
- data/vendor/output_catcher.rb +93 -0
- data/vendor/trollop.rb +781 -0
- metadata +260 -0
@@ -0,0 +1,94 @@
|
|
1
|
+
module DefaultStats

  # Runs the complete set of first-level statistics tasks using data in a
  # subject's "proc" directory and a preconfigured template SPM job.
  # Work is done inside @statsdir; raises IOError if required inputs are missing.
  def run_first_level_stats
    flash "Highway to the dangerzone..."
    setup_directory(@statsdir, "STATS")

    Dir.chdir(@statsdir) do
      link_files_from_proc_directory
      link_onsets_files
      customize_template_job
      run_stats_spm_job
    end
  end

  alias_method :perform, :run_first_level_stats

  # Links all the files necessary from the "proc" directory into @statsdir.
  #
  # images_wildcard            - glob for preprocessed functional images (swq*.nii)
  # motion_regressors_wildcard - glob for motion regressor text files (md_rp*.txt)
  #
  # Raises IOError when either wildcard matches nothing.
  #
  # Bugfix: removed a leftover debug `puts Dir.glob(images_wildcard), @statsdir`
  # that polluted standard output on every run.
  def link_files_from_proc_directory(images_wildcard = File.join(@procdir, "swq*.nii"), motion_regressors_wildcard = File.join(@procdir, "md_rp*.txt"))
    raise IOError, "No images to link with #{images_wildcard}" if Dir.glob(images_wildcard).empty?
    system("ln -s #{images_wildcard} #{@statsdir}")

    raise IOError, "No motion_regressors to link with #{motion_regressors_wildcard}" if Dir.glob(motion_regressors_wildcard).empty?
    system("ln -s #{motion_regressors_wildcard} #{@statsdir}")
  end

  # Links to a preconfigured onsets file, a matlab file that contains three cell
  # arrays: names, onsets, durations.
  # This file is used by SPM to configure the conditions of the functional task
  # and the onset times to use in the model.
  # Links are written to the current working directory.
  def link_onsets_files
    @onsetsfiles.each do |ofile|
      # Check if the file path is absolute. If not, link from procdir/onsets.
      opath = Pathname.new(ofile).absolute? ? ofile : File.join(@procdir, 'onsets', ofile)
      system("ln -s #{File.expand_path(opath)} #{Dir.pwd}")
    end
  end

  # Copies the template job to the current working directory, then customizes it
  # by performing a set of recursive string replacements specified in the
  # template_spec.
  def customize_template_job
    # TODO
  end

  # Finally runs the stats job and writes output to current working directory.
  def run_stats_spm_job
    # TODO
  end

  # Create onsets files using logfile responses for given conditions.
  #
  # responses - Hash with a 'directory' (glob; must resolve to exactly one
  #             existing directory) and 'logfiles' (Array of .txt button-press
  #             response logfiles formatted by Presentation's SDF), e.g.:
  #
  #   responses = { 'directory' => '/path/to/files', 'logfiles' => ['subid_taskB.txt', 'subid_taskA.txt']}
  #
  # conditions - Array of vector labels to extract from the .txt file.
  #              Each label may be either an individual condition or a hash of
  #              multiple conditions, with the combined label as the key and the
  #              separate labels as values, e.g.:
  #
  #   conditions = [:new, :old, {:misses => [:new_misses, :old_misses]} ]
  #
  # Writes .csv and .mat onsets files into the working directory and returns the
  # Array of .mat filenames.
  def create_onsets_files(responses, conditions)
    onsets_csv_files = []
    onsets_mat_files = []
    wd = Dir.pwd
    matching_directories = Dir.glob(responses['directory'])
    raise IOError, "Response directory #{responses['directory']} doesn't exist." unless File.directory?(responses['directory'])
    raise IOError, "Only one response directory currently accepted (matched directories: #{matching_directories.join(', ')})" unless matching_directories.length == 1
    Dir.chdir matching_directories.first do
      responses['logfiles'].each do |logfile|
        # Either strip off the prefix directly without changing the name...
        # prefix = File.basename(logfile, '.txt')
        # Or create a new name based on the standard logfile naming scheme:
        # mrt00000_abc_021110_faces3_recognitionA.txt
        prefix = File.basename(logfile, '.txt').split("_").values_at(0,3,4).join("_")
        log = Logfile.new(logfile, *conditions)

        # puts log.to_csv
        onsets_csv_files << log.write_csv(prefix + '.csv')
        onsets_mat_files << log.write_mat(prefix)
      end

      # Move generated files back to the original working directory.
      [onsets_csv_files, onsets_mat_files].flatten.each do |response_file|
        FileUtils.move response_file, wd
      end
    end

    return onsets_mat_files
  end

end
|
@@ -0,0 +1,69 @@
|
|
1
|
+
module DefaultRecon
  # Create Physionoise regressors for inclusion in the GLM.
  # NOTE(review): the method name contains a typo ("physiosnoise") but is kept
  # unchanged because external callers may reference it.
  def create_physiosnoise_regressors(scan_spec)
    runs = build_physionoise_run_spec(scan_spec)
    Physionoise.run_physionoise_on(runs, ["--saveFiles"])
  end

  # Generate a Physiospec listing EPI runs and their associated physiology files.
  def generate_physiospec
    physiospec = Physiospec.new(@rawdir, File.join(@rawdir, '..', 'cardiac'))
    physiospec.epis_and_associated_phys_files
  end

  # Build a run spec (an Array containing one Hash) from a scan spec.
  # This should be moved to the generators and shouldn't be used here.
  def build_physionoise_run_spec(rpipe_scan_spec)
    run = rpipe_scan_spec['physio_files'].dup
    flash "Physionoise Regressors: #{run[:phys_directory]}"
    run[:bold_reps] = rpipe_scan_spec['bold_reps']
    run[:rep_time] = rpipe_scan_spec['rep_time']
    unless Pathname.new(run[:phys_directory]).absolute?
      run[:phys_directory] = File.join(@rawdir, run[:phys_directory])
    end
    run[:run_directory] = @rawdir
    [run]
  end

  # Runs 3dRetroicor for a scan.
  # Returns the output filename if successful or raises a ScriptError if there
  # was an error.
  def run_retroicor(physio_files, file)
    icor_cmd, outfile = build_retroicor_cmd(physio_files, file)
    flash "3dRetroicor: #{file} \n #{icor_cmd}"
    if run(icor_cmd)
      return outfile
    else
      raise ScriptError, "Problem running #{icor_cmd}"
    end
  end

  # Builds a properly formed 3dRetroicor command and returns the command and
  # output filename.
  #
  # Input a physio_files hash with keys:
  #   :respiration_signal:  RESPData_epiRT_0303201014_46_27_463
  #   :respiration_trigger: RESPTrig_epiRT_0303201014_46_27_463
  #   :phys_directory:      cardiac/
  #   :cardiac_signal:      PPGData_epiRT_0303201014_46_27_463
  #   :cardiac_trigger:     PPGTrig_epiRT_0303201014_46_27_463
  #
  # Raises ScriptError when :cardiac_signal or :respiration_signal is missing.
  def build_retroicor_cmd(physio_files, file)
    # Idiom fix: `each`, not `collect`, for a side-effect-only validation loop.
    [:cardiac_signal, :respiration_signal].each do |req|
      raise ScriptError, "Missing physio config: #{req}" unless physio_files.include?(req)
    end

    prefix = 'p'

    # Bugfix: when a signal path was already absolute, the original left these
    # locals nil and interpolated empty values into the command. Start from the
    # configured path and only prepend @rawdir for relative paths.
    cardiac_signal = physio_files[:cardiac_signal]
    unless Pathname.new(cardiac_signal).absolute?
      cardiac_signal = File.join(@rawdir, physio_files[:phys_directory], cardiac_signal)
    end

    respiration_signal = physio_files[:respiration_signal]
    unless Pathname.new(respiration_signal).absolute?
      respiration_signal = File.join(@rawdir, physio_files[:phys_directory], respiration_signal)
    end

    outfile = prefix + file

    icor_format = "3dretroicor -prefix %s -card %s -resp %s %s"
    icor_options = [outfile, cardiac_signal, respiration_signal, file]
    icor_cmd = icor_format % icor_options
    return icor_cmd, outfile
  end
end
|
@@ -0,0 +1,109 @@
|
|
1
|
+
module DefaultRecon
  # An abstract class for Raw Image Sequences.
  # The Recon job calls #prepare on RawSequence instances to process them from
  # their raw state (dicoms or pfiles) to Nifti files suitable for processing.
  class RawSequence
    # scan_spec - Hash describing the scan (keys like 'dir', 'type', 'z_slices').
    # rawdir    - directory containing the raw data.
    def initialize(scan_spec, rawdir)
      @scan_spec = scan_spec
      @rawdir = rawdir
    end
  end

  # Manage a folder of raw Dicoms for Nifti file conversion.
  class DicomRawSequence < RawSequence
    # Locally copy and unzip a folder of raw Dicoms and call convert_sequence on them.
    def prepare_and_convert_sequence(outfile)
      scandir = File.join(@rawdir, @scan_spec['dir'])
      $Log.info "Dicom Reconstruction: #{scandir}"
      Pathname.new(scandir).all_dicoms do |dicoms|
        convert_sequence(dicoms, outfile)
      end
    end

    alias_method :prepare, :prepare_and_convert_sequence

    private

    # Convert a folder of unzipped Dicom files with AFNI's to3d.
    # NOTE(review): `outfile` is currently unused here — to3d always writes
    # tmp.nii; confirm whether the caller renames it afterwards.
    def convert_sequence(dicoms, outfile)
      local_scandir = File.dirname(dicoms.first)
      # Bugfix: Dir.glob returns an Array; take the first match so a path (not
      # an Array rendered as '["..."]') is interpolated into dicom_hdr below.
      second_file = Dir.glob( File.join(local_scandir, "*0002*") ).first
      wildcard = File.join(local_scandir, "*.[0-9]*")

      recon_cmd_format = 'to3d -skip_outliers %s -prefix tmp.nii "%s"'

      timing_opts = timing_options(@scan_spec, second_file)

      unless run(recon_cmd_format % [timing_opts, wildcard])
        # Bugfix: the original interpolated `scandir`, a local of
        # prepare_and_convert_sequence, causing a NameError on this error path.
        raise(IOError,"Failed to reconstruct scan: #{local_scandir}")
      end
    end

    # Determines the proper timing options to pass to to3d for functional scans.
    # Must pass a static path to the second file in the series to determine zt
    # versus tz ordering. Assumes 2sec TR's. Returns the options as a string
    # that may be empty if the scan is an anatomical.
    def timing_options(scan_spec, second_file)
      return "" if scan_spec['type'] == "anat"
      instance_offset = scan_spec['z_slices'] + 1
      if system("dicom_hdr #{second_file} | grep .*REL.Instance.*#{instance_offset}")
        return "-epan -time:tz #{scan_spec['bold_reps']} #{scan_spec['z_slices']} 2000 alt+z"
      else
        return "-epan -time:zt #{scan_spec['z_slices']} #{scan_spec['bold_reps']} 2000 alt+z"
      end
    end
  end

  # Reconstructs a PFile from raw to a Nifti file.
  class PfileRawSequence < RawSequence
    # Create a local unzipped copy of the Pfile and prepare scanner reference
    # data (ref.dat) for reconstruction.
    # Raises IOError when the pfile cannot be found.
    def initialize(scan_spec, rawdir)
      super(scan_spec, rawdir)

      base_pfile_path = File.join(@rawdir, @scan_spec['pfile'])
      pfile_path = File.exist?(base_pfile_path) ? base_pfile_path : base_pfile_path + '.bz2'

      raise IOError, "#{pfile_path} does not exist." unless File.exist?(pfile_path)

      flash "Pfile Reconstruction: #{pfile_path}"
      @pfile_data = Pathname.new(pfile_path).local_copy

      @refdat_file = @scan_spec['refdat_stem'] ||= search_for_refdat_file
      setup_refdat(@refdat_file)
    end

    # Reconstructs a single pfile using epirecon.
    # Outfile may include a '.nii' extension - a nifti file will be constructed
    # directly in this case.
    def reconstruct_sequence(outfile)
      volumes_to_skip = @scan_spec['volumes_to_skip'] ||= 3
      epirecon_cmd_format = "epirecon_ex -f %s -NAME %s -skip %d -scltype=0"
      epirecon_cmd_options = [@pfile_data, outfile, volumes_to_skip]
      epirecon_cmd = epirecon_cmd_format % epirecon_cmd_options
      raise ScriptError, "Problem running #{epirecon_cmd}" unless run(epirecon_cmd)
    end

    alias_method :prepare, :reconstruct_sequence

    private

    # Find an appropriate ref.dat file in @rawdir if not provided in the scan spec.
    # Raises ScriptError when no candidate is found.
    def search_for_refdat_file
      Dir.new(@rawdir).each do |file|
        return file if file =~ /ref.dat/
      end
      raise ScriptError, "No candidate ref.dat file found in #{@rawdir}"
    end

    # Create a new unzipped local copy of the ref.dat file and link it into
    # pwd for reconstruction. Raises IOError when the file does not exist.
    def setup_refdat(refdat_stem)
      base_refdat_path = File.join(@rawdir, refdat_stem)
      refdat_path = File.exist?(base_refdat_path) ? base_refdat_path : base_refdat_path + ".bz2"
      raise IOError, "#{refdat_path} does not exist." unless File.exist?(refdat_path)
      local_refdat_file = Pathname.new(refdat_path).local_copy
      FileUtils.ln_s(local_refdat_file, Dir.pwd, :force => true)
    end
  end
end
|
@@ -0,0 +1,36 @@
|
|
1
|
+
########################################################################################################################
|
2
|
+
# A class for parsing a data directory and creating Job Specs
|
3
|
+
# A class for parsing a data directory and creating Job Specs.
class JobGenerator
  # Configuration details are put in a spec hash and used to drive processing.
  attr_reader :spec
  # The spec hash of a previous step (i.e. the recon hash to build a preprocessing hash on.)
  attr_reader :previous_step

  # Initialize spec and previous step and set job defaults.
  def initialize(config = {})
    @spec = {}
    config_defaults = {}
    @config = config_defaults.merge(config)

    @previous_step = @config['previous_step']
    @spec['method'] = @config['method'] if @config['method']
  end

  # Raise a DriverConfigError unless every named key is present in @config.
  def config_requires(*args)
    missing_args = args.reject { |arg| @config.has_key?(arg) }
    unless missing_args.empty?
      raise DriverConfigError, "Missing Configuration for: #{missing_args.join(', ')}"
    end
  end

  # Raise a DriverConfigError if any named spec entry is nil.
  def spec_validates(*args)
    # Hardening: direct hash lookup replaces the previous eval-based check
    # (eval on interpolated strings is an injection hazard and needless here).
    invalid_args = args.select { |arg| @spec[arg].nil? }
    unless invalid_args.empty?
      raise DriverConfigError, "Job could not create: #{invalid_args.join(', ')}"
    end
  end

end
|
34
|
+
|
35
|
+
# Raised when a JobGenerator class is missing required information.
class DriverConfigError < ScriptError
end
|
@@ -0,0 +1,31 @@
|
|
1
|
+
require 'generators/job_generator'
|
2
|
+
|
3
|
+
########################################################################################################################
|
4
|
+
# A class for parsing a data directory and creating a default Preprocessing Job
|
5
|
+
# Intialize a PreprocJobGenerator with a config hash including the following optional keys:
|
6
|
+
#
|
7
|
+
# - scans : A hash of scans upon which to base preprocessing.
|
8
|
+
# Used to extract the correct bold reps for SPM.
|
9
|
+
# See #ReconJobGenerator for options for the scans hash.
|
10
|
+
#
|
11
|
+
|
12
|
+
# A class for parsing a data directory and creating a default Preprocessing Job.
# Initialize a PreprocJobGenerator with a config hash including:
#
# - scans : A hash of scans upon which to base preprocessing (required).
#           Used to extract the correct bold reps for SPM.
#           See ReconJobGenerator for options for the scans hash.
class PreprocJobGenerator < JobGenerator
  def initialize(config = {})
    config_defaults = {}
    super config_defaults.merge(config)

    @spec['step'] = 'preprocess'

    config_requires 'scans'
  end

  # Build a job spec and return it.
  # Cleanup: removed a dead local accumulator (`bold_reps = []`) that the
  # original assigned and never used.
  def build
    @spec['bold_reps'] = @config['scans'].collect do |scan|
      scan['bold_reps'] - scan['volumes_to_skip']
    end

    return @spec
  end
end
|
@@ -0,0 +1,76 @@
|
|
1
|
+
gem 'activeresource', '<=2.3.8'
|
2
|
+
$LOAD_PATH.unshift('~/projects/metamri/lib').unshift('~/code/metamri/lib')
|
3
|
+
require 'metamri'
|
4
|
+
require 'generators/job_generator'
|
5
|
+
|
6
|
+
########################################################################################################################
|
7
|
+
# A class for parsing a data directory and creating a default Reconstruction Job
|
8
|
+
# Intialize the ReconJobGenerator with the following options in a config hash.
|
9
|
+
#
|
10
|
+
# Required Options:
|
11
|
+
# - rawdir : The directory containing EPI runs
|
12
|
+
#
|
13
|
+
# Raises an IOError if the Raw Directory cannot be read, and a
|
14
|
+
# DriverConfigError if the Raw Directory is not specified.
|
15
|
+
|
16
|
+
# A class for parsing a data directory and creating a default Reconstruction Job.
# Initialize the ReconJobGenerator with the following options in a config hash.
#
# Required Options:
# - rawdir : The directory containing EPI runs
#
# Raises an IOError if the Raw Directory cannot be read, and a
# DriverConfigError if the Raw Directory is not specified.
class ReconJobGenerator < JobGenerator
  def initialize(config)
    # Add job-specific config defaults to config and initialize the JobGenerator with them.
    config_defaults = {}
    config_defaults['epi_pattern'] = /fMRI/i
    config_defaults['ignore_patterns'] = [/pcasl/i]
    config_defaults['volumes_to_skip'] = 3
    super config_defaults.merge(config)

    @spec['step'] = 'reconstruct'

    config_requires 'rawdir'
    @rawdir = @config['rawdir']
    raise IOError, "Can't find raw directory #{@rawdir}" unless File.readable?(@rawdir)
  end

  # Scan the raw directory and build a spec listing every EPI scan found.
  # Idiom fix: select/collect replace the original manual accumulation into a
  # mutable Array with an each/next loop.
  def build
    visit = VisitRawDataDirectory.new(@rawdir)
    # Scan the datasets, ignoring unwanted (very large unused) directories.
    visit.scan(:ignore_patterns => [@config['ignore_patterns']].flatten)

    # Only build hashes for EPI datasets.
    epi_datasets = visit.datasets.select do |dataset|
      dataset.series_description =~ @config['epi_pattern']
    end

    @spec['scans'] = epi_datasets.collect { |dataset| build_scan_hash(dataset) }

    return @spec
  end

  # Returns a hash describing how to reconstruct the dataset.
  def build_scan_hash(dataset)
    scan = {}
    raw_image_file = dataset.raw_image_files.first
    # phys = Physionoise.new(@rawdir, File.join(@rawdir, '..', 'cardiac' ))

    scan['dir'] = dataset.relative_dataset_path
    scan['type'] = 'func'
    scan['z_slices'] = raw_image_file.num_slices
    scan['bold_reps'] = raw_image_file.bold_reps
    scan['volumes_to_skip'] = @config['volumes_to_skip']
    scan['rep_time'] = raw_image_file.rep_time.in_seconds
    scan['label'] = dataset.series_description.escape_filename
    # scan['task'] = '?'
    # scan['physio_files'] = "#TODO"

    return scan
  end
end
|
70
|
+
|
71
|
+
# Convert Milliseconds to Seconds for TRs
|
72
|
+
# Convert milliseconds to seconds, for TR values stored in milliseconds.
# NOTE: this reopens the core Float class (monkey-patch).
class Float
  # Treat the receiver as a millisecond count and return seconds.
  def in_seconds
    fdiv(1000)
  end
end
|
@@ -0,0 +1,70 @@
|
|
1
|
+
require 'generators/job_generator'
|
2
|
+
require 'logfile'
|
3
|
+
|
4
|
+
########################################################################################################################
|
5
|
+
# A class for parsing a data directory and creating a default Stats Job
|
6
|
+
# Intialize a StatsJobGenerator with a config hash including the following optional keys:
|
7
|
+
#
|
8
|
+
# - subid : SubjectID (i.e. 'mrt00015')
|
9
|
+
# - conditions : An array of condition names for analysis.
|
10
|
+
# - scans : A hash containing scan information (labels, bold_reps, etc.)
|
11
|
+
|
12
|
+
# A class for parsing a data directory and creating a default Stats Job.
# Initialize a StatsJobGenerator with a config hash including:
#
# - subid         : Subject ID (i.e. 'mrt00015')
# - conditions    : An array of condition names for analysis.
# - scans         : A hash containing scan information (labels, bold_reps, etc.)
# - responses_dir : Directory holding behavioural response logfiles.
class StatsJobGenerator < JobGenerator
  def initialize(config = {})
    config_defaults = {}
    config_defaults['epi_task_pattern'] = /Task/i
    config_defaults['regressors_prefix'] = 'rp_a'
    super config_defaults.merge(config)

    @spec['step'] = 'stats'

    config_requires 'scans', 'subid', 'conditions', 'responses_dir'

    # Keep only task EPI scans for stats.
    @scans = []
    @config['scans'].each { |scan| @scans << scan if scan['label'] =~ @config['epi_task_pattern'] }
  end

  # Assemble and return the stats job spec.
  def build
    @spec['bold_reps'] = bold_reps
    @spec['responses'] = responses
    @spec['conditions'] = @config['conditions']
    @spec['regressorsfiles'] = regressorsfiles
    return @spec
  end

  # Usable bold reps per scan (raw reps minus skipped volumes).
  # Bugfix: the original never assigned @bold_reps (and created a dead local),
  # so the memoization guard never fired and the list was recomputed per call.
  def bold_reps
    @bold_reps ||= @scans.collect { |scan| scan['bold_reps'] - scan['volumes_to_skip'] }
  end

  # A getter/builder method for behavioural responses.
  def responses
    return @responses if @responses
    @responses = {}
    @responses['directory'] = @config['responses_dir']
    @responses['logfiles'] = logfiles

    return @responses
  end

  # Basenames of this subject's response logfiles, sorted via Logfile ordering.
  # Raises IOError when no logfiles match.
  def logfiles
    return @logfiles if @logfiles
    # Bugfix: read the directory from @config instead of @responses, so this
    # no longer raises NoMethodError when called before #responses.
    responses_dir = @config['responses_dir']
    logfiles = Dir.glob(File.join(responses_dir, @config['subid'] + "*.txt"))
    raise IOError, "No logfiles found in #{responses_dir} matching #{@config['subid']}" if logfiles.empty?
    logfiles = logfiles.collect! {|file| Logfile.new(file)}.sort
    @logfiles = logfiles.collect! {|file| File.basename(file.textfile) }
  end

  # Expected motion-regressor filenames, one per task scan.
  # Cleanup: removed a dead local accumulator and memoized with ||=.
  def regressorsfiles
    @regressorsfiles ||= @scans.collect {|scan| "%s%s_%s.txt" % [ @config['regressors_prefix'], @config['subid'], scan['label'] ]}
  end

  # True when the spec built its regressors and responses entries.
  def valid?
    spec_validates 'regressorsfiles', 'responses'
  end

end
|
@@ -0,0 +1,128 @@
|
|
1
|
+
require 'tmpdir'
|
2
|
+
require 'pathname'
|
3
|
+
|
4
|
+
require 'core_additions'
|
5
|
+
require 'generators/recon_job_generator'
|
6
|
+
require 'generators/preproc_job_generator'
|
7
|
+
require 'generators/stats_job_generator'
|
8
|
+
|
9
|
+
########################################################################################################################
|
10
|
+
# A class for parsing a data directory and creating default Driver Configurations
|
11
|
+
# Intialize a WorkflowGenerator with a Raw Directory containing Scans
|
12
|
+
# and with the following optional keys in a config hash:
|
13
|
+
#
|
14
|
+
# Directory Options:
|
15
|
+
# - processing_dir : A directory common to orig, proc and stats directories, if they are not explicitly specified..
|
16
|
+
# - origdir : A directory where dicoms will be converted to niftis and basic preprocessing occurs.
|
17
|
+
# - procdir : A directory for detailed preprocessing (normalization and smoothing)
|
18
|
+
# - statsdir : A directory where stats will be saved. (This should be a final directory.)
|
19
|
+
|
20
|
+
# A class for parsing a data directory and creating default Driver Configurations.
# Initialize a WorkflowGenerator with a raw directory containing scans
# and with the following optional keys in a config hash:
#
# Directory Options:
# - processing_dir : A directory common to orig, proc and stats directories, if they are not explicitly specified.
# - origdir        : A directory where dicoms will be converted to niftis and basic preprocessing occurs.
# - procdir        : A directory for detailed preprocessing (normalization and smoothing)
# - statsdir       : A directory where stats will be saved. (This should be a final directory.)
class WorkflowGenerator < JobGenerator
  attr_reader :spec

  def initialize(rawdir, config = Hash.new)
    config_defaults = {}
    config_defaults['conditions'] = ['new_correct', 'new_incorrect', 'old_correct', 'old_incorrect']
    config_defaults['processing_dir'] = Dir.mktmpdir
    super config_defaults.merge(config)

    @rawdir = rawdir
    @spec['rawdir'] = @rawdir
    @spec['subid'] = parse_subid
    @spec['study_procedure'] = @config['study_procedure'] ||= guess_study_procedure_from(@rawdir)

    config_requires 'responses_dir'
  end

  # Create and return a workflow spec to drive processing.
  def build
    configure_directories

    @spec['collision'] = 'destroy'

    jobs = []

    # Recon
    recon_options = {'rawdir' => @rawdir, 'epi_pattern' => /(Resting|Task)/i, }
    config_step_method(recon_options, 'recon') if @config['custom_methods']
    jobs << ReconJobGenerator.new(recon_options).build

    # Preproc
    preproc_options = {'scans' => jobs.first['scans']}
    config_step_method(preproc_options, 'preproc') if @config['custom_methods']
    jobs << PreprocJobGenerator.new(preproc_options).build

    # Stats
    stats_options = {
      'scans' => jobs.first['scans'],
      'conditions' => @config['conditions'],
      'responses_dir' => @config['responses_dir'],
      'subid' => @spec['subid']
    }
    config_step_method(stats_options, 'stats') if @config['custom_methods']
    jobs << StatsJobGenerator.new(stats_options).build

    @spec['jobs'] = jobs

    return @spec
  end

  # Guesses a Subject Id from @rawdir.
  # Takes the split basename of rawdir's parent if rawdir is a 'dicoms'
  # subdirectory, or of rawdir itself otherwise.
  def parse_subid
    subject_path = File.basename(@rawdir) == 'dicoms' ?
      Pathname.new(File.join(@rawdir, '..')).realpath : Pathname.new(@rawdir).realpath

    subject_path.basename.to_s.split('_').first
  end

  # Handle directory configuration and defaults for orig, proc and stats dirs.
  # Precedence per directory: explicit config value, then a
  # 'directory_formats' template, then a default under processing_dir.
  # Bugfix: the original indexed @config['directory_formats'] unconditionally
  # and raised NoMethodError whenever that key was absent.
  def configure_directories
    processing_dir = @config['processing_dir']
    formats = @config['directory_formats'] || {}
    @spec['origdir']  = @config['origdir']  || (formats['origdir']  && parse_directory_format(formats['origdir']))  || File.join(processing_dir, @spec['subid'] + '_orig')
    @spec['procdir']  = @config['procdir']  || (formats['procdir']  && parse_directory_format(formats['procdir']))  || File.join(processing_dir, @spec['subid'] + '_proc')
    @spec['statsdir'] = @config['statsdir'] || (formats['statsdir'] && parse_directory_format(formats['statsdir'])) || File.join(processing_dir, @spec['subid'] + '_stats')
  end

  # Replace a directory format string with respective values from the spec.
  # For example, replace the string "/Data/<study_procedure>/<subid>/stats" from
  # a workflow_driver['directory_formats']['statsdir'] with
  # "/Data/johnson.merit220.visit1/mrt00000/stats"
  def parse_directory_format(fmt)
    dir = fmt.dup
    dir.scan(/<\w*>/).each do |replacement|
      key = replacement.to_s.gsub(/(<|>)/, '')
      # NOTE(review): sub! raises TypeError when @spec[key] is nil — confirm
      # format strings only reference keys already set on the spec.
      dir.sub!(/<\w*>/, @spec[key])
    end
    return dir
  end

  # Guess a StudyProcedure from the data's raw directory.
  # A properly formed study procedure should be: <PI>.<Study>.<Description or Visit>
  # Raises a ScriptError if it couldn't guess a reasonable procedure.
  def guess_study_procedure_from(dir)
    dirs = dir.split("/")
    until dirs.empty?
      current_dir = dirs.pop
      return current_dir if current_dir =~ /\w*\.\w*\.\w*/
    end
    raise ScriptError, "Could not guess study procedure from #{dir}"
  end

  # Configure Custom Methods from the Workflow Driver.
  #
  # Custom methods may be simply set to true for a given job or listed
  # explicitly. If true, they will set the method to a camelcased version
  # of the study_procedure and step, i.e. JohnsonMerit220Visit1Stats.
  # If listed explicitly, it will set the step to the value listed.
  def config_step_method(options, step)
    if @config['custom_methods'][step].class == String
      options['method'] = @config['custom_methods'][step]
    elsif @config['custom_methods'][step] == true
      options['method'] = [@config['study_procedure'], step.capitalize].join("_").dot_camelize
    end
  end

end
|
@@ -0,0 +1,18 @@
|
|
1
|
+
require 'popen4'
|
2
|
+
|
3
|
+
# Global method to log and run system commands.
# Echoes the child's stdout and stderr, and returns true only when the
# command ran and exited with status 0.
def run(command)
  $CommandLog.info command

  status = POpen4::popen4(command) do |stdout, stderr|
    # NOTE(review): reading stdout to EOF before touching stderr can deadlock
    # if the child fills the stderr pipe buffer first — confirm command
    # outputs stay small.
    puts stdout.read.strip
    puts stderr.read.strip
  end

  # Idiom fix: the boolean expression already yields true/false, so the
  # redundant `? true : false` ternary is gone; popen4 returns nil when the
  # command could not be started.
  !status.nil? && status.exitstatus == 0
end
|
14
|
+
|
15
|
+
# Global method to display a message (with the logger's date/time stamp) on
# standard output via the global $Log logger.
def flash(message)
  $Log.info message
end
|