miga-base 1.2.15.2 → 1.2.15.3

Sign up to get free protection for your applications and to get access to all the features.
Files changed (304)
  1. checksums.yaml +4 -4
  2. data/lib/miga/cli/action/download/gtdb.rb +4 -1
  3. data/lib/miga/cli/action/gtdb_get.rb +4 -0
  4. data/lib/miga/remote_dataset/download.rb +3 -2
  5. data/lib/miga/remote_dataset.rb +25 -7
  6. data/lib/miga/taxonomy.rb +6 -0
  7. data/lib/miga/version.rb +2 -2
  8. metadata +6 -302
  9. data/utils/FastAAI/00.Libraries/01.SCG_HMMs/Archaea_SCG.hmm +0 -41964
  10. data/utils/FastAAI/00.Libraries/01.SCG_HMMs/Bacteria_SCG.hmm +0 -32439
  11. data/utils/FastAAI/00.Libraries/01.SCG_HMMs/Complete_SCG_DB.hmm +0 -62056
  12. data/utils/FastAAI/FastAAI +0 -3659
  13. data/utils/FastAAI/FastAAI-legacy/FastAAI +0 -1336
  14. data/utils/FastAAI/FastAAI-legacy/kAAI_v1.0_virus.py +0 -1296
  15. data/utils/FastAAI/README.md +0 -84
  16. data/utils/enveomics/Docs/recplot2.md +0 -244
  17. data/utils/enveomics/Examples/aai-matrix.bash +0 -66
  18. data/utils/enveomics/Examples/ani-matrix.bash +0 -66
  19. data/utils/enveomics/Examples/essential-phylogeny.bash +0 -105
  20. data/utils/enveomics/Examples/unus-genome-phylogeny.bash +0 -100
  21. data/utils/enveomics/LICENSE.txt +0 -73
  22. data/utils/enveomics/Makefile +0 -52
  23. data/utils/enveomics/Manifest/Tasks/aasubs.json +0 -103
  24. data/utils/enveomics/Manifest/Tasks/blasttab.json +0 -790
  25. data/utils/enveomics/Manifest/Tasks/distances.json +0 -161
  26. data/utils/enveomics/Manifest/Tasks/fasta.json +0 -802
  27. data/utils/enveomics/Manifest/Tasks/fastq.json +0 -291
  28. data/utils/enveomics/Manifest/Tasks/graphics.json +0 -126
  29. data/utils/enveomics/Manifest/Tasks/mapping.json +0 -137
  30. data/utils/enveomics/Manifest/Tasks/ogs.json +0 -382
  31. data/utils/enveomics/Manifest/Tasks/other.json +0 -906
  32. data/utils/enveomics/Manifest/Tasks/remote.json +0 -355
  33. data/utils/enveomics/Manifest/Tasks/sequence-identity.json +0 -650
  34. data/utils/enveomics/Manifest/Tasks/tables.json +0 -308
  35. data/utils/enveomics/Manifest/Tasks/trees.json +0 -68
  36. data/utils/enveomics/Manifest/Tasks/variants.json +0 -111
  37. data/utils/enveomics/Manifest/categories.json +0 -165
  38. data/utils/enveomics/Manifest/examples.json +0 -162
  39. data/utils/enveomics/Manifest/tasks.json +0 -4
  40. data/utils/enveomics/Pipelines/assembly.pbs/CONFIG.mock.bash +0 -69
  41. data/utils/enveomics/Pipelines/assembly.pbs/FastA.N50.pl +0 -1
  42. data/utils/enveomics/Pipelines/assembly.pbs/FastA.filterN.pl +0 -1
  43. data/utils/enveomics/Pipelines/assembly.pbs/FastA.length.pl +0 -1
  44. data/utils/enveomics/Pipelines/assembly.pbs/README.md +0 -189
  45. data/utils/enveomics/Pipelines/assembly.pbs/RUNME-2.bash +0 -112
  46. data/utils/enveomics/Pipelines/assembly.pbs/RUNME-3.bash +0 -23
  47. data/utils/enveomics/Pipelines/assembly.pbs/RUNME-4.bash +0 -44
  48. data/utils/enveomics/Pipelines/assembly.pbs/RUNME.bash +0 -50
  49. data/utils/enveomics/Pipelines/assembly.pbs/kSelector.R +0 -37
  50. data/utils/enveomics/Pipelines/assembly.pbs/newbler.pbs +0 -68
  51. data/utils/enveomics/Pipelines/assembly.pbs/newbler_preparator.pl +0 -49
  52. data/utils/enveomics/Pipelines/assembly.pbs/soap.pbs +0 -80
  53. data/utils/enveomics/Pipelines/assembly.pbs/stats.pbs +0 -57
  54. data/utils/enveomics/Pipelines/assembly.pbs/velvet.pbs +0 -63
  55. data/utils/enveomics/Pipelines/blast.pbs/01.pbs.bash +0 -38
  56. data/utils/enveomics/Pipelines/blast.pbs/02.pbs.bash +0 -73
  57. data/utils/enveomics/Pipelines/blast.pbs/03.pbs.bash +0 -21
  58. data/utils/enveomics/Pipelines/blast.pbs/BlastTab.recover_job.pl +0 -72
  59. data/utils/enveomics/Pipelines/blast.pbs/CONFIG.mock.bash +0 -98
  60. data/utils/enveomics/Pipelines/blast.pbs/FastA.split.pl +0 -1
  61. data/utils/enveomics/Pipelines/blast.pbs/README.md +0 -127
  62. data/utils/enveomics/Pipelines/blast.pbs/RUNME.bash +0 -109
  63. data/utils/enveomics/Pipelines/blast.pbs/TASK.check.bash +0 -128
  64. data/utils/enveomics/Pipelines/blast.pbs/TASK.dry.bash +0 -16
  65. data/utils/enveomics/Pipelines/blast.pbs/TASK.eo.bash +0 -22
  66. data/utils/enveomics/Pipelines/blast.pbs/TASK.pause.bash +0 -26
  67. data/utils/enveomics/Pipelines/blast.pbs/TASK.run.bash +0 -89
  68. data/utils/enveomics/Pipelines/blast.pbs/sentinel.pbs.bash +0 -29
  69. data/utils/enveomics/Pipelines/idba.pbs/README.md +0 -49
  70. data/utils/enveomics/Pipelines/idba.pbs/RUNME.bash +0 -95
  71. data/utils/enveomics/Pipelines/idba.pbs/run.pbs +0 -56
  72. data/utils/enveomics/Pipelines/trim.pbs/README.md +0 -54
  73. data/utils/enveomics/Pipelines/trim.pbs/RUNME.bash +0 -70
  74. data/utils/enveomics/Pipelines/trim.pbs/run.pbs +0 -130
  75. data/utils/enveomics/README.md +0 -42
  76. data/utils/enveomics/Scripts/AAsubs.log2ratio.rb +0 -171
  77. data/utils/enveomics/Scripts/Aln.cat.rb +0 -221
  78. data/utils/enveomics/Scripts/Aln.convert.pl +0 -35
  79. data/utils/enveomics/Scripts/AlphaDiversity.pl +0 -152
  80. data/utils/enveomics/Scripts/BedGraph.tad.rb +0 -93
  81. data/utils/enveomics/Scripts/BedGraph.window.rb +0 -71
  82. data/utils/enveomics/Scripts/BlastPairwise.AAsubs.pl +0 -102
  83. data/utils/enveomics/Scripts/BlastTab.addlen.rb +0 -63
  84. data/utils/enveomics/Scripts/BlastTab.advance.bash +0 -48
  85. data/utils/enveomics/Scripts/BlastTab.best_hit_sorted.pl +0 -55
  86. data/utils/enveomics/Scripts/BlastTab.catsbj.pl +0 -104
  87. data/utils/enveomics/Scripts/BlastTab.cogCat.rb +0 -76
  88. data/utils/enveomics/Scripts/BlastTab.filter.pl +0 -47
  89. data/utils/enveomics/Scripts/BlastTab.kegg_pep2path_rest.pl +0 -194
  90. data/utils/enveomics/Scripts/BlastTab.metaxaPrep.pl +0 -104
  91. data/utils/enveomics/Scripts/BlastTab.pairedHits.rb +0 -157
  92. data/utils/enveomics/Scripts/BlastTab.recplot2.R +0 -48
  93. data/utils/enveomics/Scripts/BlastTab.seqdepth.pl +0 -86
  94. data/utils/enveomics/Scripts/BlastTab.seqdepth_ZIP.pl +0 -119
  95. data/utils/enveomics/Scripts/BlastTab.seqdepth_nomedian.pl +0 -86
  96. data/utils/enveomics/Scripts/BlastTab.subsample.pl +0 -47
  97. data/utils/enveomics/Scripts/BlastTab.sumPerHit.pl +0 -114
  98. data/utils/enveomics/Scripts/BlastTab.taxid2taxrank.pl +0 -90
  99. data/utils/enveomics/Scripts/BlastTab.topHits_sorted.rb +0 -123
  100. data/utils/enveomics/Scripts/Chao1.pl +0 -97
  101. data/utils/enveomics/Scripts/CharTable.classify.rb +0 -234
  102. data/utils/enveomics/Scripts/EBIseq2tax.rb +0 -83
  103. data/utils/enveomics/Scripts/FastA.N50.pl +0 -60
  104. data/utils/enveomics/Scripts/FastA.extract.rb +0 -152
  105. data/utils/enveomics/Scripts/FastA.filter.pl +0 -52
  106. data/utils/enveomics/Scripts/FastA.filterLen.pl +0 -28
  107. data/utils/enveomics/Scripts/FastA.filterN.pl +0 -60
  108. data/utils/enveomics/Scripts/FastA.fragment.rb +0 -100
  109. data/utils/enveomics/Scripts/FastA.gc.pl +0 -42
  110. data/utils/enveomics/Scripts/FastA.interpose.pl +0 -93
  111. data/utils/enveomics/Scripts/FastA.length.pl +0 -38
  112. data/utils/enveomics/Scripts/FastA.mask.rb +0 -89
  113. data/utils/enveomics/Scripts/FastA.per_file.pl +0 -36
  114. data/utils/enveomics/Scripts/FastA.qlen.pl +0 -57
  115. data/utils/enveomics/Scripts/FastA.rename.pl +0 -65
  116. data/utils/enveomics/Scripts/FastA.revcom.pl +0 -23
  117. data/utils/enveomics/Scripts/FastA.sample.rb +0 -98
  118. data/utils/enveomics/Scripts/FastA.slider.pl +0 -85
  119. data/utils/enveomics/Scripts/FastA.split.pl +0 -55
  120. data/utils/enveomics/Scripts/FastA.split.rb +0 -79
  121. data/utils/enveomics/Scripts/FastA.subsample.pl +0 -131
  122. data/utils/enveomics/Scripts/FastA.tag.rb +0 -65
  123. data/utils/enveomics/Scripts/FastA.toFastQ.rb +0 -69
  124. data/utils/enveomics/Scripts/FastA.wrap.rb +0 -48
  125. data/utils/enveomics/Scripts/FastQ.filter.pl +0 -54
  126. data/utils/enveomics/Scripts/FastQ.interpose.pl +0 -90
  127. data/utils/enveomics/Scripts/FastQ.maskQual.rb +0 -89
  128. data/utils/enveomics/Scripts/FastQ.offset.pl +0 -90
  129. data/utils/enveomics/Scripts/FastQ.split.pl +0 -53
  130. data/utils/enveomics/Scripts/FastQ.tag.rb +0 -70
  131. data/utils/enveomics/Scripts/FastQ.test-error.rb +0 -81
  132. data/utils/enveomics/Scripts/FastQ.toFastA.awk +0 -24
  133. data/utils/enveomics/Scripts/GFF.catsbj.pl +0 -127
  134. data/utils/enveomics/Scripts/GenBank.add_fields.rb +0 -84
  135. data/utils/enveomics/Scripts/HMM.essential.rb +0 -351
  136. data/utils/enveomics/Scripts/HMM.haai.rb +0 -168
  137. data/utils/enveomics/Scripts/HMMsearch.extractIds.rb +0 -83
  138. data/utils/enveomics/Scripts/JPlace.distances.rb +0 -88
  139. data/utils/enveomics/Scripts/JPlace.to_iToL.rb +0 -320
  140. data/utils/enveomics/Scripts/M5nr.getSequences.rb +0 -81
  141. data/utils/enveomics/Scripts/MeTaxa.distribution.pl +0 -198
  142. data/utils/enveomics/Scripts/MyTaxa.fragsByTax.pl +0 -35
  143. data/utils/enveomics/Scripts/MyTaxa.seq-taxrank.rb +0 -49
  144. data/utils/enveomics/Scripts/NCBIacc2tax.rb +0 -92
  145. data/utils/enveomics/Scripts/Newick.autoprune.R +0 -27
  146. data/utils/enveomics/Scripts/RAxML-EPA.to_iToL.pl +0 -228
  147. data/utils/enveomics/Scripts/RecPlot2.compareIdentities.R +0 -32
  148. data/utils/enveomics/Scripts/RefSeq.download.bash +0 -48
  149. data/utils/enveomics/Scripts/SRA.download.bash +0 -55
  150. data/utils/enveomics/Scripts/TRIBS.plot-test.R +0 -36
  151. data/utils/enveomics/Scripts/TRIBS.test.R +0 -39
  152. data/utils/enveomics/Scripts/Table.barplot.R +0 -31
  153. data/utils/enveomics/Scripts/Table.df2dist.R +0 -30
  154. data/utils/enveomics/Scripts/Table.filter.pl +0 -61
  155. data/utils/enveomics/Scripts/Table.merge.pl +0 -77
  156. data/utils/enveomics/Scripts/Table.prefScore.R +0 -60
  157. data/utils/enveomics/Scripts/Table.replace.rb +0 -69
  158. data/utils/enveomics/Scripts/Table.round.rb +0 -63
  159. data/utils/enveomics/Scripts/Table.split.pl +0 -57
  160. data/utils/enveomics/Scripts/Taxonomy.silva2ncbi.rb +0 -227
  161. data/utils/enveomics/Scripts/VCF.KaKs.rb +0 -147
  162. data/utils/enveomics/Scripts/VCF.SNPs.rb +0 -88
  163. data/utils/enveomics/Scripts/aai.rb +0 -421
  164. data/utils/enveomics/Scripts/ani.rb +0 -362
  165. data/utils/enveomics/Scripts/anir.rb +0 -137
  166. data/utils/enveomics/Scripts/clust.rand.rb +0 -102
  167. data/utils/enveomics/Scripts/gi2tax.rb +0 -103
  168. data/utils/enveomics/Scripts/in_silico_GA_GI.pl +0 -96
  169. data/utils/enveomics/Scripts/lib/data/dupont_2012_essential.hmm.gz +0 -0
  170. data/utils/enveomics/Scripts/lib/data/lee_2019_essential.hmm.gz +0 -0
  171. data/utils/enveomics/Scripts/lib/enveomics.R +0 -1
  172. data/utils/enveomics/Scripts/lib/enveomics_rb/anir.rb +0 -293
  173. data/utils/enveomics/Scripts/lib/enveomics_rb/bm_set.rb +0 -175
  174. data/utils/enveomics/Scripts/lib/enveomics_rb/enveomics.rb +0 -24
  175. data/utils/enveomics/Scripts/lib/enveomics_rb/errors.rb +0 -17
  176. data/utils/enveomics/Scripts/lib/enveomics_rb/gmm_em.rb +0 -30
  177. data/utils/enveomics/Scripts/lib/enveomics_rb/jplace.rb +0 -253
  178. data/utils/enveomics/Scripts/lib/enveomics_rb/match.rb +0 -88
  179. data/utils/enveomics/Scripts/lib/enveomics_rb/og.rb +0 -182
  180. data/utils/enveomics/Scripts/lib/enveomics_rb/rbm.rb +0 -49
  181. data/utils/enveomics/Scripts/lib/enveomics_rb/remote_data.rb +0 -74
  182. data/utils/enveomics/Scripts/lib/enveomics_rb/seq_range.rb +0 -237
  183. data/utils/enveomics/Scripts/lib/enveomics_rb/stats/rand.rb +0 -31
  184. data/utils/enveomics/Scripts/lib/enveomics_rb/stats/sample.rb +0 -152
  185. data/utils/enveomics/Scripts/lib/enveomics_rb/stats.rb +0 -3
  186. data/utils/enveomics/Scripts/lib/enveomics_rb/utils.rb +0 -74
  187. data/utils/enveomics/Scripts/lib/enveomics_rb/vcf.rb +0 -135
  188. data/utils/enveomics/Scripts/ogs.annotate.rb +0 -88
  189. data/utils/enveomics/Scripts/ogs.core-pan.rb +0 -160
  190. data/utils/enveomics/Scripts/ogs.extract.rb +0 -125
  191. data/utils/enveomics/Scripts/ogs.mcl.rb +0 -186
  192. data/utils/enveomics/Scripts/ogs.rb +0 -104
  193. data/utils/enveomics/Scripts/ogs.stats.rb +0 -131
  194. data/utils/enveomics/Scripts/rbm-legacy.rb +0 -172
  195. data/utils/enveomics/Scripts/rbm.rb +0 -108
  196. data/utils/enveomics/Scripts/sam.filter.rb +0 -148
  197. data/utils/enveomics/Tests/Makefile +0 -10
  198. data/utils/enveomics/Tests/Mgen_M2288.faa +0 -3189
  199. data/utils/enveomics/Tests/Mgen_M2288.fna +0 -8282
  200. data/utils/enveomics/Tests/Mgen_M2321.fna +0 -8288
  201. data/utils/enveomics/Tests/Nequ_Kin4M.faa +0 -2970
  202. data/utils/enveomics/Tests/Xanthomonas_oryzae-PilA.tribs.Rdata +0 -0
  203. data/utils/enveomics/Tests/Xanthomonas_oryzae-PilA.txt +0 -7
  204. data/utils/enveomics/Tests/Xanthomonas_oryzae.aai-mat.tsv +0 -17
  205. data/utils/enveomics/Tests/Xanthomonas_oryzae.aai.tsv +0 -137
  206. data/utils/enveomics/Tests/a_mg.cds-go.blast.tsv +0 -123
  207. data/utils/enveomics/Tests/a_mg.reads-cds.blast.tsv +0 -200
  208. data/utils/enveomics/Tests/a_mg.reads-cds.counts.tsv +0 -55
  209. data/utils/enveomics/Tests/alkB.nwk +0 -1
  210. data/utils/enveomics/Tests/anthrax-cansnp-data.tsv +0 -13
  211. data/utils/enveomics/Tests/anthrax-cansnp-key.tsv +0 -17
  212. data/utils/enveomics/Tests/hiv1.faa +0 -59
  213. data/utils/enveomics/Tests/hiv1.fna +0 -134
  214. data/utils/enveomics/Tests/hiv2.faa +0 -70
  215. data/utils/enveomics/Tests/hiv_mix-hiv1.blast.tsv +0 -233
  216. data/utils/enveomics/Tests/hiv_mix-hiv1.blast.tsv.lim +0 -1
  217. data/utils/enveomics/Tests/hiv_mix-hiv1.blast.tsv.rec +0 -233
  218. data/utils/enveomics/Tests/phyla_counts.tsv +0 -10
  219. data/utils/enveomics/Tests/primate_lentivirus.ogs +0 -11
  220. data/utils/enveomics/Tests/primate_lentivirus.rbm/hiv1-hiv1.rbm +0 -9
  221. data/utils/enveomics/Tests/primate_lentivirus.rbm/hiv1-hiv2.rbm +0 -8
  222. data/utils/enveomics/Tests/primate_lentivirus.rbm/hiv1-siv.rbm +0 -6
  223. data/utils/enveomics/Tests/primate_lentivirus.rbm/hiv2-hiv2.rbm +0 -9
  224. data/utils/enveomics/Tests/primate_lentivirus.rbm/hiv2-siv.rbm +0 -6
  225. data/utils/enveomics/Tests/primate_lentivirus.rbm/siv-siv.rbm +0 -6
  226. data/utils/enveomics/build_enveomics_r.bash +0 -45
  227. data/utils/enveomics/enveomics.R/DESCRIPTION +0 -31
  228. data/utils/enveomics/enveomics.R/NAMESPACE +0 -39
  229. data/utils/enveomics/enveomics.R/R/autoprune.R +0 -155
  230. data/utils/enveomics/enveomics.R/R/barplot.R +0 -184
  231. data/utils/enveomics/enveomics.R/R/cliopts.R +0 -135
  232. data/utils/enveomics/enveomics.R/R/df2dist.R +0 -154
  233. data/utils/enveomics/enveomics.R/R/growthcurve.R +0 -331
  234. data/utils/enveomics/enveomics.R/R/prefscore.R +0 -79
  235. data/utils/enveomics/enveomics.R/R/recplot.R +0 -354
  236. data/utils/enveomics/enveomics.R/R/recplot2.R +0 -1631
  237. data/utils/enveomics/enveomics.R/R/tribs.R +0 -583
  238. data/utils/enveomics/enveomics.R/R/utils.R +0 -80
  239. data/utils/enveomics/enveomics.R/README.md +0 -81
  240. data/utils/enveomics/enveomics.R/data/growth.curves.rda +0 -0
  241. data/utils/enveomics/enveomics.R/data/phyla.counts.rda +0 -0
  242. data/utils/enveomics/enveomics.R/man/cash-enve.GrowthCurve-method.Rd +0 -16
  243. data/utils/enveomics/enveomics.R/man/cash-enve.RecPlot2-method.Rd +0 -16
  244. data/utils/enveomics/enveomics.R/man/cash-enve.RecPlot2.Peak-method.Rd +0 -16
  245. data/utils/enveomics/enveomics.R/man/enve.GrowthCurve-class.Rd +0 -25
  246. data/utils/enveomics/enveomics.R/man/enve.TRIBS-class.Rd +0 -46
  247. data/utils/enveomics/enveomics.R/man/enve.TRIBS.merge.Rd +0 -23
  248. data/utils/enveomics/enveomics.R/man/enve.TRIBStest-class.Rd +0 -47
  249. data/utils/enveomics/enveomics.R/man/enve.__prune.iter.Rd +0 -23
  250. data/utils/enveomics/enveomics.R/man/enve.__prune.reduce.Rd +0 -23
  251. data/utils/enveomics/enveomics.R/man/enve.__tribs.Rd +0 -40
  252. data/utils/enveomics/enveomics.R/man/enve.barplot.Rd +0 -103
  253. data/utils/enveomics/enveomics.R/man/enve.cliopts.Rd +0 -67
  254. data/utils/enveomics/enveomics.R/man/enve.col.alpha.Rd +0 -24
  255. data/utils/enveomics/enveomics.R/man/enve.col2alpha.Rd +0 -19
  256. data/utils/enveomics/enveomics.R/man/enve.df2dist.Rd +0 -45
  257. data/utils/enveomics/enveomics.R/man/enve.df2dist.group.Rd +0 -44
  258. data/utils/enveomics/enveomics.R/man/enve.df2dist.list.Rd +0 -47
  259. data/utils/enveomics/enveomics.R/man/enve.growthcurve.Rd +0 -75
  260. data/utils/enveomics/enveomics.R/man/enve.prefscore.Rd +0 -50
  261. data/utils/enveomics/enveomics.R/man/enve.prune.dist.Rd +0 -44
  262. data/utils/enveomics/enveomics.R/man/enve.recplot.Rd +0 -139
  263. data/utils/enveomics/enveomics.R/man/enve.recplot2-class.Rd +0 -45
  264. data/utils/enveomics/enveomics.R/man/enve.recplot2.ANIr.Rd +0 -24
  265. data/utils/enveomics/enveomics.R/man/enve.recplot2.Rd +0 -77
  266. data/utils/enveomics/enveomics.R/man/enve.recplot2.__counts.Rd +0 -25
  267. data/utils/enveomics/enveomics.R/man/enve.recplot2.__peakHist.Rd +0 -21
  268. data/utils/enveomics/enveomics.R/man/enve.recplot2.__whichClosestPeak.Rd +0 -19
  269. data/utils/enveomics/enveomics.R/man/enve.recplot2.changeCutoff.Rd +0 -19
  270. data/utils/enveomics/enveomics.R/man/enve.recplot2.compareIdentities.Rd +0 -47
  271. data/utils/enveomics/enveomics.R/man/enve.recplot2.coordinates.Rd +0 -29
  272. data/utils/enveomics/enveomics.R/man/enve.recplot2.corePeak.Rd +0 -18
  273. data/utils/enveomics/enveomics.R/man/enve.recplot2.extractWindows.Rd +0 -45
  274. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.Rd +0 -36
  275. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.__em_e.Rd +0 -19
  276. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.__em_m.Rd +0 -19
  277. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.__emauto_one.Rd +0 -27
  278. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.__mow_one.Rd +0 -52
  279. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.__mower.Rd +0 -17
  280. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.em.Rd +0 -51
  281. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.emauto.Rd +0 -43
  282. data/utils/enveomics/enveomics.R/man/enve.recplot2.findPeaks.mower.Rd +0 -82
  283. data/utils/enveomics/enveomics.R/man/enve.recplot2.peak-class.Rd +0 -59
  284. data/utils/enveomics/enveomics.R/man/enve.recplot2.seqdepth.Rd +0 -27
  285. data/utils/enveomics/enveomics.R/man/enve.recplot2.windowDepthThreshold.Rd +0 -36
  286. data/utils/enveomics/enveomics.R/man/enve.selvector.Rd +0 -23
  287. data/utils/enveomics/enveomics.R/man/enve.tribs.Rd +0 -68
  288. data/utils/enveomics/enveomics.R/man/enve.tribs.test.Rd +0 -28
  289. data/utils/enveomics/enveomics.R/man/enve.truncate.Rd +0 -27
  290. data/utils/enveomics/enveomics.R/man/growth.curves.Rd +0 -14
  291. data/utils/enveomics/enveomics.R/man/phyla.counts.Rd +0 -13
  292. data/utils/enveomics/enveomics.R/man/plot.enve.GrowthCurve.Rd +0 -78
  293. data/utils/enveomics/enveomics.R/man/plot.enve.TRIBS.Rd +0 -46
  294. data/utils/enveomics/enveomics.R/man/plot.enve.TRIBStest.Rd +0 -45
  295. data/utils/enveomics/enveomics.R/man/plot.enve.recplot2.Rd +0 -125
  296. data/utils/enveomics/enveomics.R/man/summary.enve.GrowthCurve.Rd +0 -19
  297. data/utils/enveomics/enveomics.R/man/summary.enve.TRIBS.Rd +0 -19
  298. data/utils/enveomics/enveomics.R/man/summary.enve.TRIBStest.Rd +0 -19
  299. data/utils/enveomics/globals.mk +0 -8
  300. data/utils/enveomics/manifest.json +0 -9
  301. data/utils/multitrim/Multitrim How-To.pdf +0 -0
  302. data/utils/multitrim/README.md +0 -67
  303. data/utils/multitrim/multitrim.py +0 -1555
  304. data/utils/multitrim/multitrim.yml +0 -13
@@ -1,168 +0,0 @@
1
- #!/usr/bin/env ruby
2
-
3
- # @author Luis M. Rodriguez-R <lmrodriguezr at gmail dot com>
4
- # @license Artistic-2.0
5
-
6
- require 'optparse'
7
-
8
- o = {q: false}
9
- ARGV << '-h' if ARGV.size==0
10
-
11
- OptionParser.new do |opt|
12
- opt.banner = "
13
- Estimates Average Amino Acid Identity (AAI) from the essential genes extracted
14
- and aligned by HMM.essential.rb (see --alignments).
15
-
16
- Usage: #{$0} [options]"
17
- opt.separator ''
18
- opt.separator 'Mandatory'
19
- opt.on('-1 PATH', 'Input alignments file for genome 1.'){ |v| o[:a] = v }
20
- opt.on('-2 PATH', 'Input alignments file for genome 2.'){ |v| o[:b] = v }
21
- opt.separator ''
22
- opt.separator 'Options'
23
- opt.on('-a', '--aln-out FILE',
24
- 'Output file containing the aligned proteins'){ |v| o[:alnout] = v }
25
- opt.on('-c', '--components FILE',
26
- 'Output file containing the components of the estimation.',
27
- 'Tab-delimited file with model name, matches, and columns.'
28
- ){ |v| o[:compout] = v }
29
- opt.on('-q', '--quiet', 'Run quietly (no STDERR output).'){ o[:q] = true }
30
- opt.on('-h', '--help', 'Display this screen.') do
31
- puts opt
32
- exit
33
- end
34
- opt.separator ''
35
- end.parse!
36
- abort '-1 is mandatory.' if o[:a].nil?
37
- abort '-2 is mandatory.' if o[:b].nil?
38
-
39
- class HList
40
- attr_accessor :list
41
-
42
- def initialize(file)
43
- @list = {}
44
- r = File.readlines(file)
45
- while not r.empty?
46
- e = HElement.new(*r.shift(3))
47
- @list[ e.model_id ] = e
48
- end
49
- end
50
-
51
- def [](model_id)
52
- list[model_id]
53
- end
54
-
55
- ##
56
- # Returns an array of HAln objects.
57
- def align(other)
58
- list.keys.map do |model_id|
59
- self[model_id].align(other[model_id]) unless other[model_id].nil?
60
- end.compact
61
- end
62
-
63
- def models
64
- list.keys
65
- end
66
- end
67
-
68
- class HElement
69
- attr_accessor :defline, :model_id, :protein_id, :protein_coords
70
- attr_accessor :model_aln, :protein_aln
71
-
72
- def initialize(defline, model_aln, protein_aln)
73
- @defline = defline.chomp
74
- @model_aln = model_aln.chomp
75
- @protein_aln = protein_aln.chomp
76
- if defline =~ /^# (.+) : (.+) : (.+)/
77
- @model_id = $1
78
- @protein_id = $2
79
- @protein_coords = $3
80
- end
81
- end
82
-
83
- def dup
84
- HElement.new(defline, model_aln, protein_aln)
85
- end
86
-
87
- ##
88
- # Returns an HAln object
89
- def align(other)
90
- return nil unless model_width == other.model_width
91
- HAln.new(self, other)
92
- end
93
-
94
- def masked_protein
95
- @masked_protein ||= model_aln.chars.
96
- each_with_index.map{ |c, pos| c == 'X' ? protein_aln[pos] : nil }.
97
- compact.join('')
98
- end
99
-
100
- def model_width
101
- masked_protein.size
102
- end
103
- end
104
-
105
- class HAln
106
- attr :protein_1, :protein_2, :model_id, :protein_1_id, :protein_2_id
107
-
108
- def initialize(a, b)
109
- @protein_1 = a.masked_protein
110
- @protein_2 = b.masked_protein
111
- @model_id = a.model_id
112
- @protein_1_id = a.protein_id + '/' + a.protein_coords
113
- @protein_2_id = b.protein_id + '/' + b.protein_coords
114
- end
115
-
116
- def stats
117
- @stats = { len: 0, gaps: 0, matches: 0 }
118
- return @stats unless @stats[:id].nil?
119
- protein_1.chars.each_with_index do |v, k|
120
- # Ignore gaps in both proteins
121
- next if v == '-' and protein_2[k] == '-'
122
- # Count matches
123
- @stats[:len] += 1
124
- if v == protein_2[k]
125
- @stats[:matches] += 1
126
- elsif v == '-' or protein_2[k] == '-'
127
- @stats[:gaps] += 1
128
- end
129
- end
130
- @stats.tap { |i| i[:id] = 100.0 * i[:matches] / i[:len] }
131
- end
132
-
133
- def stats_to_s
134
- stats.map{ |k,v| "#{k}:#{v}" }.join ' '
135
- end
136
-
137
- def to_s
138
- ["# #{model_id} | #{protein_1_id} | #{protein_2_id} | #{stats_to_s}",
139
- protein_1, protein_2, ''].join("\n")
140
- end
141
- end
142
-
143
- hlist1 = HList.new(o[:a])
144
- hlist2 = HList.new(o[:b])
145
- haln_arr = hlist1.align(hlist2)
146
-
147
- avg_identity = haln_arr.map{ |i| i.stats[:id] }.inject(:+) / haln_arr.size
148
- avg2_identity = haln_arr.map{ |i| i.stats[:id] ** 2 }.inject(:+) / haln_arr.size
149
- sd_identity = Math.sqrt( avg2_identity - avg_identity ** 2 )
150
- puts "Common models: #{haln_arr.size}"
151
- puts "All models: #{(hlist1.models | hlist1.models).size}"
152
- puts "Average identity: #{avg_identity.round(2)}%"
153
- puts "SD identity: #{sd_identity.round(2)}"
154
-
155
- if o[:alnout]
156
- File.open(o[:alnout], 'w') do |fh|
157
- haln_arr.each { |i| fh.puts i }
158
- end
159
- end
160
-
161
- if o[:compout]
162
- File.open(o[:compout], 'w') do |fh|
163
- haln_arr.each do |i|
164
- fh.puts "#{i.model_id}\t#{i.stats[:matches]}\t#{i.stats[:len]}"
165
- end
166
- end
167
- end
168
-
@@ -1,83 +0,0 @@
1
- #!/usr/bin/env ruby
2
-
3
- #
4
- # @author Luis M. Rodriguez-R <lmrodriguezr at gmail dot com>
5
- # @update Dec-01-2015
6
- # @license artistic 2.0
7
- #
8
-
9
- require "optparse"
10
-
11
- o = {quiet:false, model:true}
12
-
13
- OptionParser.new do |opts|
14
- opts.banner = "
15
- Extracts the sequence IDs and query model form a (multiple) HMMsearch report
16
- (for HMMer 3.0).
17
-
18
- Usage: #{$0} [options] < input.hmmsearch > list.txt"
19
- opts.separator ""
20
- opts.separator "Options"
21
- opts.on("-E", "--all-evalue FLOAT",
22
- "Maximum e-value of sequence to report result."
23
- ){|v| o[:all_evalue] = v.to_f }
24
- opts.on("-S", "--all-score FLOAT",
25
- "Minimum score of sequence to report result."
26
- ){|v| o[:all_score] = v.to_f }
27
- opts.on("-e", "--best-evalue FLOAT",
28
- "Maximum e-value of best domain to report result."
29
- ){|v| o[:best_evalue] = v.to_f }
30
- opts.on("-s", "--best-score FLOAT",
31
- "Minimum score of best domain to report result."
32
- ){|v| o[:best_score] = v.to_f }
33
- opts.on("-n", "--no-model",
34
- "Do not include the query model in the output list."){ o[:model]=false }
35
- opts.on("-q", "--quiet", "Run quietly."){ o[:quiet]=true }
36
- opts.on("-h", "--help", "Display this screen.") do
37
- puts opts
38
- exit
39
- end
40
- opts.separator ""
41
- end.parse!
42
-
43
- at = :header
44
- query = ""
45
- i = 0
46
- ARGF.each_line do |ln|
47
- next unless /^(#.*)$/.match(ln).nil?
48
- ln.chomp!
49
- case at
50
- when :header
51
- qm = /Query:\s+(.*?)\s+/.match(ln)
52
- qm.nil? or query=qm[1]
53
- unless /^[\-\s]+$/.match(ln).nil?
54
- at = :list
55
- i = 0
56
- STDERR.print "Parsing hits against #{query}: " unless o[:quiet]
57
- end
58
- when :list
59
- if /^\s*$/.match(ln).nil?
60
- next if ln =~ /^\s*-+ inclusion threshold -+$/
61
- ln.gsub!(/#.*/,"")
62
- row = ln.split(/\s+/)
63
- row << nil if row.count==10
64
- raise "Unable to parse seemingly malformed list of hits in line " +
65
- "#{$.}:\n#{ln}" unless row.count==11
66
- good = true
67
- good &&= ( o[:all_evalue].nil? || row[1].to_f <= o[:all_evalue] )
68
- good &&= ( o[:all_score].nil? || row[2].to_f >= o[:all_score] )
69
- good &&= ( o[:best_evalue].nil? || row[4].to_f <= o[:best_evalue] )
70
- good &&= ( o[:best_score].nil? || row[5].to_f >= o[:best_score] )
71
- if good
72
- puts row[9]+(o[:model]?"\t#{query}":"")
73
- i+=1
74
- end
75
- else
76
- at = :align
77
- STDERR.puts "#{i} results." unless o[:quiet]
78
- end
79
- when :align
80
- at = :header unless /^\/\/$/.match(ln).nil?
81
- end
82
- end
83
-
@@ -1,88 +0,0 @@
1
- #!/usr/bin/env ruby
2
-
3
- #
4
- # @author: Luis M. Rodriguez-R
5
- # @update: Jul-14-2015
6
- # @license: artistic license 2.0
7
- #
8
-
9
- $:.push File.expand_path(File.dirname(__FILE__) + '/lib')
10
- require 'enveomics_rb/jplace'
11
- require 'optparse'
12
- require 'json'
13
-
14
- o = {:q=>false}
15
- ARGV << '-h' if ARGV.size==0
16
- OptionParser.new do |opts|
17
- opts.banner = "
18
- Extracts the distance (estimated branch length) of each placed read to a given node in a JPlace file.
19
-
20
- Usage: #{$0} [options]"
21
- opts.separator ""
22
- opts.separator "Mandatory"
23
- opts.on("-i", "--in FILE", ".jplace input file containing the read placement."){ |v| o[:in]=v }
24
- opts.on("-n", "--node STR", "Index (number in curly brackets) of the node to which distances should be measured."){ |v| o[:node]=v }
25
- opts.on("-o", "--out FILE", "Ouput file."){ |v| o[:out]=v }
26
- opts.separator ""
27
- opts.separator "Other Options"
28
- opts.on("-N", "--in-node STR","Report only reads placed at this node or it's children."){ |v| o[:onlynode]=v }
29
- opts.on("-q", "--quiet", "Run quietly (no STDERR output)."){ o[:q] = true }
30
- opts.on("-h", "--help", "Display this screen.") do
31
- puts opts
32
- exit
33
- end
34
- opts.separator ""
35
- end.parse!
36
- abort "-i is mandatory" if o[:in].nil?
37
- abort "-o is mandatory" if o[:out].nil?
38
- abort "-n is mandatory" if o[:node].nil?
39
-
40
- ##### MAIN:
41
- begin
42
- $stderr.puts "Loading jplace file." unless o[:q]
43
- ifh = File.open(o[:in], 'r')
44
- jplace = JSON.load(ifh)
45
- ifh.close
46
-
47
- $stderr.puts "Parsing tree." unless o[:q]
48
- tree = JPlace::Tree.from_nwk(jplace["tree"])
49
- node = JPlace::Node.edges[ o[:node].gsub(/[{}]/,"").to_i ]
50
- from_node = o[:onlynode].nil? ? tree : JPlace::Node.edges[ o[:onlynode].gsub(/[{}]/,"").to_i ]
51
- raise "Cannot find node with index #{o[:node]}." if node.nil?
52
- raise "Cannot find node with index #{o[:onlynode]}." if from_node.nil?
53
-
54
- $stderr.puts "Parsing placements." unless o[:q]
55
- JPlace::Placement.fields = jplace["fields"]
56
- placements_n = 0
57
- jplace["placements"].each do |placement|
58
- JPlace::Node.link_placement(JPlace::Placement.new(placement))
59
- placements_n += 1
60
- end
61
- $stderr.puts " #{placements_n} placements in tree, #{node.placements.length} direct placements to {#{node.index}}." unless o[:q]
62
-
63
- # First, calculate distances
64
- from_node.pre_order do |n|
65
- d = n.distance(node)
66
- if node.path_to_root.include? n
67
- n.placements.each{ |p| p.flag = d + p.pendant_length + p.distal_length }
68
- else
69
- n.placements.each{ |p| p.flag = d + p.pendant_length - p.distal_length }
70
- end
71
- end
72
-
73
- # Finally, report results
74
- ofh = File.open(o[:out], "w")
75
- ofh.puts %w(read distance multiplicity edge_index node_name).join("\t")
76
- from_node.pre_order do |n|
77
- n.placements.each do |p|
78
- p.nm.each{ |r| ofh.puts [ r[:n], p.flag, r[:m], n.index, n.name ].join("\t") }
79
- end
80
- end
81
- ofh.close
82
- rescue => err
83
- $stderr.puts "Exception: #{err}\n\n"
84
- err.backtrace.each { |l| $stderr.puts l + "\n" }
85
- err
86
- end
87
-
88
-
@@ -1,320 +0,0 @@
1
- #!/usr/bin/env ruby
2
-
3
- # @author: Luis M. Rodriguez-R
4
- # @license: artistic license 2.0
5
-
6
- $:.push File.expand_path("../lib", __FILE__)
7
- require "enveomics_rb/enveomics"
8
- require "enveomics_rb/jplace"
9
- require "optparse"
10
- require "json"
11
-
12
# Default options: not quiet, dataset name captured from read names by regex,
# radius-proportional pies, counts-based normalization, current iToL format.
o = {q: false, regex: "^(?<dataset>.+?):.*", area: false, norm: :counts,
  olditol: false}
ARGV << "-h" if ARGV.size==0
OptionParser.new do |opts|
  opts.banner = "
Generates iToL-compatible files from a .jplace file (produced by RAxML's EPA
or pplacer), that can be used to draw pie-charts in the nodes of the reference
tree.

Usage: #{$0} [options]"
  opts.separator ""
  opts.separator "Mandatory"
  opts.on("-i", "--in FILE",
    ".jplace input file containing the read placement."){ |v| o[:in]=v }
  opts.on("-o", "--out FILE", "Base of the output files."){ |v| o[:out]=v }
  opts.separator ""
  opts.separator "Other Options"
  opts.on("-u", "--unique STR",
    "Name of the dataset (if only one is used). Conflicts with -r and -s."
    ){ |v| o[:unique]=v }
  opts.on("-r", "--regex STR",
    "Regular expression capturing the sample ID (named dataset) in read names.",
    "By default: '#{o[:regex]}'. Conflicts with -s."){ |v| o[:regex]=v }
  opts.on("-s", "--separator STR",
    "String separating the dataset name and the rest of the read name.",
    "It assumes that the read name starts by the dataset name. Conflicts with -r."
    ){ |v| o[:regex]="^(?<dataset>.+?)#{v}" }
  opts.on("-m", "--metadata FILE",
    "Datasets metadata in tab-delimited format with a header row.",
    "Valid headers: name (required), color (in Hex), size (# reads), norm (any float)."
    ){ |v| o[:metadata]=v }
  # The accepted-values array makes OptionParser reject anything outside
  # none/counts/size/norm before the block runs.
  opts.on("-n", "--norm STR", %w[none counts size norm],
    "Normalization strategy. Must be one of:",
    "none: Direct read counts are reported without normalization.",
    # FIX: the accepted value is 'counts' (see the list above); the help text
    # used to advertise the invalid value 'count'.
    "counts (default): The counts are normalized (divided) by the total counts per dataset.",
    "size: The counts are normalized (divided) by the size column in metadata (must be integer).",
    "norm: The counts are normalized (divided) by the norm column in metadata (can be any float)."
    ){ |v| o[:norm]=v.to_sym }
  opts.on("--old-itol",
    "Generate output file using the old iToL format (pre v3.0)."
    ){ |v| o[:olditol] = v }
  opts.on("-c", "--collapse FILE",
    "Internal nodes to collapse (requires rooted tree)."){ |v| o[:collapse]=v }
  opts.on("-a", "--area",
    "If set, the area of the pies is proportional to the placements. Otherwise, the radius is."
    ){ o[:area]=true }
  opts.on("-q", "--quiet", "Run quietly (no STDERR output)."){ o[:q] = true }
  opts.on("-h", "--help", "Display this screen.") do
    puts opts
    exit
  end
  opts.separator ""
  opts.separator "Quick how-to in 5 steps"
  opts.separator " 1. Create the placement file using RAxML's EPA [1] or pplacer [2]. You can use any other software"
  opts.separator " producing a compliant .jplace file [3]. If you're using multiple datasets, include the name of"
  opts.separator " the dataset somewhere in the read names."
  opts.separator " 2. If you have multiple datasets, it's convenient to create a metadata table. It's not necessary,"
  opts.separator " but it allows you to control the colors and the normalization method (see -m)."
  opts.separator " 3. Execute this script passing the .jplace file created in step 1 (see -i). If you have a single"
  opts.separator " dataset, use the option -u to give it a short name. If you have multiple datasets, use the -s"
  opts.separator " or -r options to tell the script how to find the dataset name within the read name. Note that"
  opts.separator " some programs (like CheckM) may produce nonstandard characters that won't be correctly parsed."
  opts.separator " To avoid this problem, install iconv support (gem install iconv) before running this script"
  opts.separator " (currently "+(JPlace::Tree.has_iconv? ? "" : "NOT ")+"installed)."
  opts.separator " 4. Upload the tree (.nwk file) to iToL [4]. Make sure you check 'Keep internal node IDs' in the"
  opts.separator " advanced options. In that same page, upload the dataset (.itol file), pick a name, and select"
  opts.separator " the data type 'Multi-value Bar Chart or Pie Chart'. If you used the -c option, upload the list"
  opts.separator " of nodes to collapse (.collapse file) in the 'Pre-collapsed clades' field (advanced options)."
  opts.separator " 5. Open the tree. You can now see the names of the internal nodes. If you want to collapse nodes,"
  opts.separator " simply list the nodes to collapse and go back to step 3, this time using the -c option."
  opts.separator ""
  opts.separator "References"
  opts.separator " [1] SA Berger, D Krompass and A Stamatakis, 2011, Syst Biol 60(3):291-302."
  opts.separator " http://sysbio.oxfordjournals.org/content/60/3/291"
  opts.separator " [2] FA Matsen, RB Kodner and EV Armbrust, 2010, BMC Bioinf 11:538."
  opts.separator " http://www.biomedcentral.com/1471-2105/11/538/"
  opts.separator " [3] FA Matsen, NG Hoffman, A Gallagher and A Stamatakis, 2012, PLoS ONE 7(2):e31009."
  opts.separator " http://www.plosone.org/article/info%3Adoi%2F10.1371%2Fjournal.pone.0031009"
  opts.separator " [4] I Letunic and P Bork, 2011, NAR 39(suppl 2):W475-W478."
  opts.separator " http://nar.oxfordjournals.org/content/39/suppl_2/W475.full"
  opts.separator ""
end.parse!
# FIX: -i is listed under "Mandatory" in the help but was never enforced;
# without it the script crashed later with a TypeError in File.open.
abort "-i is mandatory" if o[:in].nil?
abort "-o is mandatory" if o[:out].nil?
-
96
- ##### CLASSES:
97
-
98
# A named sample with an open-ended bag of per-dataset values: a running
# read count plus optional metadata (color, size, norm).
class Dataset
  attr_reader :name, :data

  # @param name [String] dataset (sample) identifier.
  def initialize(name)
    @name = name
    @data = {:count=>0}
  end

  # Total placement count accumulated so far (starts at 0).
  def count
    self.datum :count
  end

  # Adds +n+ reads (placement multiplicity) to the running count.
  def add_count(n)
    @data[:count] += n
  end

  # Retrieves an arbitrary metadatum by key (nil if unset).
  def datum(k)
    @data[k]
  end

  # Stores an arbitrary metadatum under key +k+.
  def add_datum(k, v)
    @data[k] = v
  end

  # Hex color for this dataset, always returned with a leading '#'. A random
  # color is generated (and cached) the first time if none was provided.
  def color
    if @data[:color].nil?
      # rand(256) covers the full 0x00..0xFF byte range (rand(255) missed 0xFF).
      @data[:color] = "#" + (1 .. 3).map{ |i|
        sprintf("%02X", rand(256)) }.join("")
    end
    # FIX: String#sub is non-destructive; the normalized value used to be
    # discarded, so a color supplied without a leading '#' was returned as-is.
    @data[:color] = @data[:color].sub(/^#?/, "#")
    self.datum :color
  end

  # Optional 'size' metadatum (# reads in the dataset), nil if unset.
  def size
    self.datum :size
  end

  # Optional 'norm' metadatum (normalizing factor), nil if unset.
  def norm
    self.datum :norm
  end
end
131
-
132
# Registry of Dataset objects keyed by name. Datasets are created lazily on
# first access via #[], so reading counts never requires pre-registration.
class Metadata
  attr_reader :datasets

  def initialize
    @datasets = {}
  end

  # Loads a tab-delimited table with a header row. The 'name' column is
  # required; 'color' (hex string), 'size' (integer) and 'norm' (float) are
  # optional and stored as data on each dataset.
  def load_table(file)
    f = File.open(file, "r")
    h = f.gets.chomp.split(/\t/)
    name_idx = h.find_index "name"
    color_idx = h.find_index "color"
    size_idx = h.find_index "size"
    norm_idx = h.find_index "norm"
    abort "The metadata table must contain a 'name' column." if name_idx.nil?
    while ln = f.gets
      vals = ln.chomp.split(/\t/)
      name = vals[name_idx]
      self[name] # Create sample, in case "name" is the only column
      self[name].add_datum(:color, vals[color_idx]) unless color_idx.nil?
      self[name].add_datum(:size, vals[size_idx].to_i) unless size_idx.nil?
      self[name].add_datum(:norm, vals[norm_idx].to_f) unless norm_idx.nil?
    end
    f.close
  end

  # Returns the dataset registered under +name+, creating it on the fly.
  def [](name)
    self << Dataset.new(name) unless @datasets.has_key?(name)
    @datasets[name]
  end

  # Registers +dataset+ under its own name (replacing any previous entry).
  def <<(dataset)
    @datasets[dataset.name] = dataset
  end

  # Names of all registered datasets, in insertion order.
  def names
    @datasets.keys
  end

  # Colors of all registered datasets, aligned with #names.
  def colors
    @datasets.values.map{ |d| d.color }
  end

  # Values of metadatum +k+ for all datasets, aligned with #names.
  def data(k)
    # FIX: Dataset#datum takes the key as an argument; `datum[k]` called it
    # with no arguments and raised ArgumentError on any use of this method.
    self.names.map{ |name| self[name].datum(k) }
  end

  # Drops every dataset except +n+ (used with the -u/--unique option).
  def set_unique!(n)
    u = self[n]
    @datasets = {}
    @datasets[n] = u
  end

  # Number of registered datasets.
  def size
    self.datasets.length
  end
end
180
-
181
##### MAIN:
# Pipeline: parse metadata -> load .jplace -> count placements per dataset ->
# optionally collapse nodes -> compute normalizing factors -> emit the iToL
# pie-chart dataset and the reformatted Newick tree.
begin
  $stderr.puts "Parsing metadata." unless o[:q]
  metadata = Metadata.new
  metadata.load_table(o[:metadata]) unless o[:metadata].nil?
  metadata.set_unique! o[:unique] unless o[:unique].nil?


  $stderr.puts "Loading jplace file." unless o[:q]
  ifh = File.open(o[:in], 'r')
  jplace = JSON.load(ifh)
  ifh.close


  $stderr.puts "Parsing tree." unless o[:q]
  tree = JPlace::Tree.from_nwk(jplace["tree"])


  $stderr.puts "Parsing placements." unless o[:q]
  JPlace::Placement.fields = jplace["fields"]
  placements_n = 0
  jplace["placements"].each do |placement|
    JPlace::Node.link_placement(JPlace::Placement.new(placement))
    placements_n += 1
  end
  $stderr.puts " #{placements_n} placements." unless o[:q]
  # First pass over the tree: attribute each read to a dataset (via -u, or by
  # matching the -r/-s regex against the read name) and accumulate its
  # multiplicity in the per-dataset count.
  tree.pre_order do |n|
    n.placements.each do |p|
      p.nm.each do |r|
        m = (o[:unique].nil? ? (/#{o[:regex]}/.match(r[:n]) or
          abort "Cannot parse read name: #{r[:n]}, placed at edge #{n.index}") :
          {:dataset=>o[:unique]})
        metadata[ m[:dataset] ].add_count(r[:m])
      end
    end
  end


  # Optional pre-collapsing of internal nodes: the -c file has one node per
  # line ("name" or "name<TAB>new_name"); the collapsed names are written to
  # the .collapse companion file for iToL's "Pre-collapsed clades" field.
  unless o[:collapse].nil?
    $stderr.puts "Collapsing nodes." unless o[:q]
    collapse = File.readlines(o[:collapse]).map do |ln|
      l = ln.chomp.split(/\t/)
      l[1] = l[0] if l[1].nil? # No replacement name given: keep the original.
      l
    end.inject({}) do |hash,ar|
      hash[ar[0]] = ar[1]
      hash
    end
    f = File.open(o[:out] + ".collapse", "w")
    coll_n = 0
    tree.pre_order do |n|
      # NOTE: "cannonical_name" is the (misspelled) JPlace::Node API name.
      if collapse.keys.include? n.cannonical_name
        n.collapse!
        n.name = collapse[n.cannonical_name]
        f.puts n.name
        coll_n += 1
      end
    end
    f.close
    $stderr.puts " #{coll_n} nodes collapsed (#{collapse.length} requested)." unless o[:q]
  end


  # Normalizing factor per dataset, depending on -n: 1.0 (none), the total
  # placement count (counts), or the size/norm metadata columns.
  $stderr.puts "Estimating normalizing factors by #{o[:norm].to_s}." unless o[:q] or o[:norm]==:none
  case o[:norm]
  when :none
    metadata.datasets.values.each{ |d| d.add_datum :norm, 1.0 }
  when :counts
    metadata.datasets.values.each{ |d| d.add_datum :norm, d.count.to_f }
  when :size
    abort "Column 'size' required in metadata." if metadata.datasets.values[0].size.nil?
    metadata.datasets.values.each{ |d| d.add_datum :norm, d.size.to_f }
  when :norm
    abort "Column 'norm' required in metadata." if metadata.datasets.values[0].norm.nil?
  end
  max_norm = metadata.datasets.values.map{ |d| d.norm }.max


  $stderr.puts "Generating iToL dataset." unless o[:q]
  f = File.open(o[:out] + ".itol.txt", "w")
  if o[:olditol]
    # Pre-v3.0 iToL: a simple LABELS/COLORS header.
    f.puts "LABELS\t" + metadata.names.join("\t")
    f.puts "COLORS\t" + metadata.colors.join("\t")
  else
    # Current iToL DATASET_PIECHART header block.
    f.puts "DATASET_PIECHART"
    f.puts "SEPARATOR TAB"
    f.puts "DATASET_LABEL\tReadPlacement"
    f.puts "COLOR\t#1f2122"
    f.puts "FIELD_LABELS\t" + metadata.names.join("\t")
    f.puts "FIELD_COLORS\t" + metadata.colors.join("\t")
    f.puts "DATA"
  end
  # Track the extreme per-node normalized sums for the summary on STDERR.
  max_norm_sum,min_norm_sum,max_norm_n,min_norm_n = 0.0,Float::INFINITY,"",""
  # Second pass: per node, re-derive the dataset of each read and accumulate
  # normalized counts, then emit one pie-chart row per node with placements.
  tree.pre_order do |n|
    ds_counts = Hash.new(0.0)
    n.placements.each do |p|
      p.nm.each do |r|
        m = (o[:unique].nil? ? (/#{o[:regex]}/.match(r[:n]) or
          abort "Cannot parse read name: #{r[:n]}, placed at edge #{n.index}") :
          {:dataset=>o[:unique]})
        ds_counts[ m[:dataset] ] += r[:m] / metadata[ m[:dataset] ].norm
      end
    end
    counts_sum = ds_counts.values.reduce(:+)
    unless counts_sum.nil? # nil when the node has no placements: skip the row.
      # In the area option, the radius is "twice" to make the smallest > 1 (since counts_sum is >= 1)
      # NOTE(review): multiplying by max_norm re-scales the normalized sum back
      # to roughly count magnitude before truncating with to_i — confirm this
      # is the intended radius unit.
      radius = (o[:area] ? 2*Math.sqrt(counts_sum/Math::PI) : counts_sum)*max_norm
      # NOTE(review): the inner |n| below shadows the node variable n of this
      # pre_order block; it holds a dataset name, not a node.
      f.puts n.cannonical_name +
        "#{"\t0.5" unless o[:olditol]}\t#{"R" if o[:olditol]}" +
        radius.to_i.to_s + "\t" +
        metadata.names.map{ |n| ds_counts[n] }.join("\t")
      if counts_sum > max_norm_sum
        max_norm_n = n.cannonical_name
        max_norm_sum = counts_sum
      end
      if counts_sum < min_norm_sum
        min_norm_n = n.cannonical_name
        min_norm_sum = counts_sum
      end
    end
  end
  f.close
  units = {none: "", counts: " per million placements",
    size: " per million reads", norm: " per normalizing unit"}
  $stderr.puts " The pie #{o[:area] ? "areas" : "radii"} are proportional to the placements#{units[o[:norm]]}." unless o[:q]
  $stderr.puts " The minimum radius (#{min_norm_n}) represents #{min_norm_sum*(([:none, :norm].include? o[:norm]) ? 1 : 1e6)} placements#{units[o[:norm]]}." unless o[:q]
  $stderr.puts " The maximum radius (#{max_norm_n}) represents #{max_norm_sum*(([:none, :norm].include? o[:norm]) ? 1 : 1e6)} placements#{units[o[:norm]]}." unless o[:q]


  # Write the tree back out as Newick (with any collapsed/renamed nodes).
  $stderr.puts "Re-formatting tree for iToL." unless o[:q]
  f = File.open(o[:out] + ".nwk", "w")
  f.puts tree.to_s + ";"
  f.close

rescue => err
  # Report the exception and full backtrace on STDERR without re-raising.
  $stderr.puts "Exception: #{err}\n\n"
  err.backtrace.each { |l| $stderr.puts l + "\n" }
  err
end