lalsuite 7.26.2.dev20251210__cp312-cp312-macosx_12_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the changes between package versions as published in their respective public registries.
- lal/.dylibs/Python +0 -0
- lal/.dylibs/libaec.0.dylib +0 -0
- lal/.dylibs/libbrotlicommon.1.2.0.dylib +0 -0
- lal/.dylibs/libbrotlidec.1.2.0.dylib +0 -0
- lal/.dylibs/libcfitsio.10.4.4.1.dylib +0 -0
- lal/.dylibs/libcrypto.3.dylib +0 -0
- lal/.dylibs/libcurl.4.dylib +0 -0
- lal/.dylibs/libfftw3.3.dylib +0 -0
- lal/.dylibs/libfftw3f.3.dylib +0 -0
- lal/.dylibs/libframel.8.48.4.dylib +0 -0
- lal/.dylibs/libgsl.28.dylib +0 -0
- lal/.dylibs/libgslcblas.0.dylib +0 -0
- lal/.dylibs/libhdf5.310.dylib +0 -0
- lal/.dylibs/libhdf5_hl.310.dylib +0 -0
- lal/.dylibs/libiconv.2.dylib +0 -0
- lal/.dylibs/libidn2.0.dylib +0 -0
- lal/.dylibs/libintl.8.dylib +0 -0
- lal/.dylibs/liblal.20.dylib +0 -0
- lal/.dylibs/liblalburst.8.dylib +0 -0
- lal/.dylibs/liblalframe.14.dylib +0 -0
- lal/.dylibs/liblalinference.23.dylib +0 -0
- lal/.dylibs/liblalinspiral.18.dylib +0 -0
- lal/.dylibs/liblalmetaio.11.dylib +0 -0
- lal/.dylibs/liblalpulsar.30.dylib +0 -0
- lal/.dylibs/liblalsimulation.37.dylib +0 -0
- lal/.dylibs/liblalsupport.14.dylib +0 -0
- lal/.dylibs/libmetaio.1.dylib +0 -0
- lal/.dylibs/libnghttp2.14.dylib +0 -0
- lal/.dylibs/libpsl.5.dylib +0 -0
- lal/.dylibs/libssl.3.dylib +0 -0
- lal/.dylibs/libsz.2.dylib +0 -0
- lal/.dylibs/libunistring.5.dylib +0 -0
- lal/.dylibs/libz.1.3.1.dylib +0 -0
- lal/.dylibs/libzstd.1.5.7.dylib +0 -0
- lal/__init__.py +145 -0
- lal/_lal.cpython-312-darwin.so +0 -0
- lal/_lal_swig.py +12 -0
- lal/antenna.py +1200 -0
- lal/git_version.py +64 -0
- lal/gpstime.py +233 -0
- lal/iterutils.py +408 -0
- lal/pipeline.py +3139 -0
- lal/rate.py +2455 -0
- lal/series.py +244 -0
- lal/utils/__init__.py +29 -0
- lal/utils/cache.py +379 -0
- lal/utils/series.py +277 -0
- lalapps/__init__.py +26 -0
- lalapps/bin/lal_cache +0 -0
- lalapps/bin/lal_fftw_wisdom +0 -0
- lalapps/bin/lal_fftwf_wisdom +0 -0
- lalapps/bin/lal_simd_detect +0 -0
- lalapps/bin/lal_tconvert +0 -0
- lalapps/bin/lal_version +0 -0
- lalapps/bin/lalapps_ComputeAntennaPattern +16 -0
- lalapps/bin/lalapps_ComputeFstatBenchmark +16 -0
- lalapps/bin/lalapps_ComputeFstatLatticeCount +16 -0
- lalapps/bin/lalapps_ComputeFstatMCUpperLimit +16 -0
- lalapps/bin/lalapps_ComputeFstatistic_v2 +16 -0
- lalapps/bin/lalapps_ComputePSD +16 -0
- lalapps/bin/lalapps_CopySFTs +16 -0
- lalapps/bin/lalapps_DistanceVsMass +0 -0
- lalapps/bin/lalapps_DriveHoughMulti +16 -0
- lalapps/bin/lalapps_FstatMetric_v2 +16 -0
- lalapps/bin/lalapps_HierarchSearchGCT +16 -0
- lalapps/bin/lalapps_HierarchicalSearch +16 -0
- lalapps/bin/lalapps_MakeSFTDAG +16 -0
- lalapps/bin/lalapps_MakeSFTs +16 -0
- lalapps/bin/lalapps_Makefakedata_v4 +16 -0
- lalapps/bin/lalapps_Makefakedata_v5 +16 -0
- lalapps/bin/lalapps_PredictFstat +16 -0
- lalapps/bin/lalapps_PrintDetectorState +16 -0
- lalapps/bin/lalapps_SFTclean +16 -0
- lalapps/bin/lalapps_SFTvalidate +16 -0
- lalapps/bin/lalapps_StringAddFrame +0 -0
- lalapps/bin/lalapps_StringSearch +0 -0
- lalapps/bin/lalapps_Weave +16 -0
- lalapps/bin/lalapps_WeaveCompare +16 -0
- lalapps/bin/lalapps_WeaveConcat +16 -0
- lalapps/bin/lalapps_WeaveSetup +16 -0
- lalapps/bin/lalapps_WriteSFTsfromSFDBs +16 -0
- lalapps/bin/lalapps_animate +0 -0
- lalapps/bin/lalapps_binj +0 -0
- lalapps/bin/lalapps_blindinj +0 -0
- lalapps/bin/lalapps_cache +16 -0
- lalapps/bin/lalapps_calfacs +0 -0
- lalapps/bin/lalapps_cbc_stochasticbank +0 -0
- lalapps/bin/lalapps_chirplen +0 -0
- lalapps/bin/lalapps_coh_PTF_inspiral +0 -0
- lalapps/bin/lalapps_coinj +0 -0
- lalapps/bin/lalapps_combine_crosscorr_toplists +16 -0
- lalapps/bin/lalapps_compareFstats +16 -0
- lalapps/bin/lalapps_compareSFTs +16 -0
- lalapps/bin/lalapps_create_time_correction_ephemeris +16 -0
- lalapps/bin/lalapps_dumpSFT +16 -0
- lalapps/bin/lalapps_effdist +0 -0
- lalapps/bin/lalapps_exc_resp +0 -0
- lalapps/bin/lalapps_fftw_wisdom +16 -0
- lalapps/bin/lalapps_fftwf_wisdom +16 -0
- lalapps/bin/lalapps_fits_header_getval +16 -0
- lalapps/bin/lalapps_fits_header_list +16 -0
- lalapps/bin/lalapps_fits_overview +16 -0
- lalapps/bin/lalapps_fits_table_list +16 -0
- lalapps/bin/lalapps_fr_ninja +0 -0
- lalapps/bin/lalapps_frextr +0 -0
- lalapps/bin/lalapps_frinfo +0 -0
- lalapps/bin/lalapps_frjoin +0 -0
- lalapps/bin/lalapps_frread +0 -0
- lalapps/bin/lalapps_frview +0 -0
- lalapps/bin/lalapps_gwf2xml +0 -0
- lalapps/bin/lalapps_heterodyne_pulsar +16 -0
- lalapps/bin/lalapps_inspawgfile +0 -0
- lalapps/bin/lalapps_inspfrinj +0 -0
- lalapps/bin/lalapps_inspinj +0 -0
- lalapps/bin/lalapps_inspiralDistance +0 -0
- lalapps/bin/lalapps_knope +16 -0
- lalapps/bin/lalapps_knope_automation_script +16 -0
- lalapps/bin/lalapps_knope_collate_results +16 -0
- lalapps/bin/lalapps_knope_result_page +16 -0
- lalapps/bin/lalapps_makeblindinj +85 -0
- lalapps/bin/lalapps_makeblindinj_himass +67 -0
- lalapps/bin/lalapps_ninja +0 -0
- lalapps/bin/lalapps_path2cache +16 -0
- lalapps/bin/lalapps_power +0 -0
- lalapps/bin/lalapps_pulsar_crosscorr_v2 +16 -0
- lalapps/bin/lalapps_pulsar_frequency_evolution +16 -0
- lalapps/bin/lalapps_pulsar_parameter_estimation_nested +16 -0
- lalapps/bin/lalapps_random_bank +0 -0
- lalapps/bin/lalapps_randombank +0 -0
- lalapps/bin/lalapps_run_pulsar_crosscorr_v2 +16 -0
- lalapps/bin/lalapps_searchsum2cache +16 -0
- lalapps/bin/lalapps_spec_avg +16 -0
- lalapps/bin/lalapps_spec_avg_long +16 -0
- lalapps/bin/lalapps_spec_coherence +16 -0
- lalapps/bin/lalapps_spininj +0 -0
- lalapps/bin/lalapps_splitSFTs +16 -0
- lalapps/bin/lalapps_splitbank +0 -0
- lalapps/bin/lalapps_ssbtodetector +16 -0
- lalapps/bin/lalapps_synthesizeBstatMC +16 -0
- lalapps/bin/lalapps_synthesizeLVStats +16 -0
- lalapps/bin/lalapps_synthesizeTransientStats +16 -0
- lalapps/bin/lalapps_tconvert +16 -0
- lalapps/bin/lalapps_tmpltbank +0 -0
- lalapps/bin/lalapps_version +0 -0
- lalapps/bin/lalapps_xtefitstoframe +0 -0
- lalapps/bin/lalburst_version +0 -0
- lalapps/bin/lalfr-cat +0 -0
- lalapps/bin/lalfr-cksum +0 -0
- lalapps/bin/lalfr-cut +0 -0
- lalapps/bin/lalfr-dump +0 -0
- lalapps/bin/lalfr-fmt +0 -0
- lalapps/bin/lalfr-paste +0 -0
- lalapps/bin/lalfr-print +0 -0
- lalapps/bin/lalfr-split +0 -0
- lalapps/bin/lalfr-stat +0 -0
- lalapps/bin/lalfr-stream +0 -0
- lalapps/bin/lalfr-vis +0 -0
- lalapps/bin/lalframe_version +0 -0
- lalapps/bin/lalinference_bench +0 -0
- lalapps/bin/lalinference_burst +0 -0
- lalapps/bin/lalinference_datadump +0 -0
- lalapps/bin/lalinference_injectedlike +0 -0
- lalapps/bin/lalinference_mpi_wrapper +59 -0
- lalapps/bin/lalinference_nest +0 -0
- lalapps/bin/lalinference_version +0 -0
- lalapps/bin/lalinspiral_version +0 -0
- lalapps/bin/lalmetaio_version +0 -0
- lalapps/bin/lalpulsar_ComputeAntennaPattern +0 -0
- lalapps/bin/lalpulsar_ComputeFstatBenchmark +0 -0
- lalapps/bin/lalpulsar_ComputeFstatLatticeCount +0 -0
- lalapps/bin/lalpulsar_ComputeFstatMCUpperLimit +0 -0
- lalapps/bin/lalpulsar_ComputeFstatistic_v2 +0 -0
- lalapps/bin/lalpulsar_ComputePSD +0 -0
- lalapps/bin/lalpulsar_DriveHoughMulti +0 -0
- lalapps/bin/lalpulsar_FstatMetric_v2 +0 -0
- lalapps/bin/lalpulsar_HierarchSearchGCT +0 -0
- lalapps/bin/lalpulsar_HierarchicalSearch +0 -0
- lalapps/bin/lalpulsar_MakeSFTs +0 -0
- lalapps/bin/lalpulsar_Makefakedata_v4 +0 -0
- lalapps/bin/lalpulsar_Makefakedata_v5 +0 -0
- lalapps/bin/lalpulsar_PredictFstat +0 -0
- lalapps/bin/lalpulsar_PrintDetectorState +0 -0
- lalapps/bin/lalpulsar_SFTclean +0 -0
- lalapps/bin/lalpulsar_SFTvalidate +0 -0
- lalapps/bin/lalpulsar_Weave +0 -0
- lalapps/bin/lalpulsar_WeaveCompare +0 -0
- lalapps/bin/lalpulsar_WeaveConcat +0 -0
- lalapps/bin/lalpulsar_WeaveSetup +0 -0
- lalapps/bin/lalpulsar_WriteSFTsfromSFDBs +0 -0
- lalapps/bin/lalpulsar_compareFstats +0 -0
- lalapps/bin/lalpulsar_compareSFTs +0 -0
- lalapps/bin/lalpulsar_create_time_correction_ephemeris +0 -0
- lalapps/bin/lalpulsar_crosscorr_v2 +0 -0
- lalapps/bin/lalpulsar_dumpSFT +0 -0
- lalapps/bin/lalpulsar_fits_header_getval +0 -0
- lalapps/bin/lalpulsar_fits_header_list +0 -0
- lalapps/bin/lalpulsar_fits_overview +0 -0
- lalapps/bin/lalpulsar_fits_table_list +0 -0
- lalapps/bin/lalpulsar_frequency_evolution +0 -0
- lalapps/bin/lalpulsar_heterodyne +0 -0
- lalapps/bin/lalpulsar_parameter_estimation_nested +0 -0
- lalapps/bin/lalpulsar_spec_avg +0 -0
- lalapps/bin/lalpulsar_spec_avg_long +0 -0
- lalapps/bin/lalpulsar_spec_coherence +0 -0
- lalapps/bin/lalpulsar_splitSFTs +0 -0
- lalapps/bin/lalpulsar_ssbtodetector +0 -0
- lalapps/bin/lalpulsar_synthesizeBstatMC +0 -0
- lalapps/bin/lalpulsar_synthesizeLVStats +0 -0
- lalapps/bin/lalpulsar_synthesizeTransientStats +0 -0
- lalapps/bin/lalpulsar_version +0 -0
- lalapps/bin/lalsim-bh-qnmode +0 -0
- lalapps/bin/lalsim-bh-ringdown +0 -0
- lalapps/bin/lalsim-bh-sphwf +0 -0
- lalapps/bin/lalsim-burst +0 -0
- lalapps/bin/lalsim-detector-noise +0 -0
- lalapps/bin/lalsim-detector-strain +0 -0
- lalapps/bin/lalsim-inject +0 -0
- lalapps/bin/lalsim-inspiral +0 -0
- lalapps/bin/lalsim-ns-eos-table +0 -0
- lalapps/bin/lalsim-ns-mass-radius +0 -0
- lalapps/bin/lalsim-ns-params +0 -0
- lalapps/bin/lalsim-sgwb +0 -0
- lalapps/bin/lalsim-unicorn +0 -0
- lalapps/bin/lalsimulation_version +0 -0
- lalapps/cosmicstring.py +691 -0
- lalapps/data/BNSMasses.dat +65022 -0
- lalapps/data/CorrelationMatrix.csv +15 -0
- lalapps/data/LALSimNeutronStarEOS_ABHT_QMC_RMF1_META.dat +1882 -0
- lalapps/data/LALSimNeutronStarEOS_ABHT_QMC_RMF2_META.dat +1939 -0
- lalapps/data/LALSimNeutronStarEOS_ABHT_QMC_RMF3_META.dat +1784 -0
- lalapps/data/LALSimNeutronStarEOS_ABHT_QMC_RMF4_META.dat +2074 -0
- lalapps/data/LALSimNeutronStarEOS_ALF1.dat +435 -0
- lalapps/data/LALSimNeutronStarEOS_ALF2.dat +453 -0
- lalapps/data/LALSimNeutronStarEOS_ALF3.dat +441 -0
- lalapps/data/LALSimNeutronStarEOS_ALF4.dat +441 -0
- lalapps/data/LALSimNeutronStarEOS_AP1.dat +212 -0
- lalapps/data/LALSimNeutronStarEOS_AP2.dat +212 -0
- lalapps/data/LALSimNeutronStarEOS_AP3.dat +212 -0
- lalapps/data/LALSimNeutronStarEOS_AP4.dat +210 -0
- lalapps/data/LALSimNeutronStarEOS_APR.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_APR4_EPP.dat +1447 -0
- lalapps/data/LALSimNeutronStarEOS_BBB2.dat +84 -0
- lalapps/data/LALSimNeutronStarEOS_BGN1H1.dat +123 -0
- lalapps/data/LALSimNeutronStarEOS_BHF_BBB2.dat +499 -0
- lalapps/data/LALSimNeutronStarEOS_BL_CHIRAL_META.dat +1534 -0
- lalapps/data/LALSimNeutronStarEOS_BPAL12.dat +61 -0
- lalapps/data/LALSimNeutronStarEOS_BSK19.dat +310 -0
- lalapps/data/LALSimNeutronStarEOS_BSK20.dat +310 -0
- lalapps/data/LALSimNeutronStarEOS_BSK21.dat +310 -0
- lalapps/data/LALSimNeutronStarEOS_ENG.dat +108 -0
- lalapps/data/LALSimNeutronStarEOS_FPS.dat +129 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_BSK14_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_DHSL59_BSK24.dat +1009 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_DHSL69_BSK24.dat +1009 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_F0_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_H1_BSK24.dat +1009 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_H2_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_H3_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_H4_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_H5_BSK24.dat +1009 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_LN55_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GMSR_SLY5_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GNH3.dat +71 -0
- lalapps/data/LALSimNeutronStarEOS_GPPVA_DD2_BSK24.dat +1009 -0
- lalapps/data/LALSimNeutronStarEOS_GPPVA_DDME2_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GPPVA_FSU2H_BSK24.dat +1009 -0
- lalapps/data/LALSimNeutronStarEOS_GPPVA_FSU2_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GPPVA_NL3WRL55_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_GS1.dat +136 -0
- lalapps/data/LALSimNeutronStarEOS_GS2.dat +100 -0
- lalapps/data/LALSimNeutronStarEOS_H1.dat +114 -0
- lalapps/data/LALSimNeutronStarEOS_H2.dat +114 -0
- lalapps/data/LALSimNeutronStarEOS_H3.dat +98 -0
- lalapps/data/LALSimNeutronStarEOS_H4.dat +664 -0
- lalapps/data/LALSimNeutronStarEOS_H5.dat +703 -0
- lalapps/data/LALSimNeutronStarEOS_H6.dat +509 -0
- lalapps/data/LALSimNeutronStarEOS_H7.dat +703 -0
- lalapps/data/LALSimNeutronStarEOS_HQC18.dat +388 -0
- lalapps/data/LALSimNeutronStarEOS_KDE0V.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_KDE0V1.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_KDE0V1_BSK24.dat +1388 -0
- lalapps/data/LALSimNeutronStarEOS_KDE0V_BSK24.dat +1398 -0
- lalapps/data/LALSimNeutronStarEOS_MPA1.dat +102 -0
- lalapps/data/LALSimNeutronStarEOS_MS1.dat +122 -0
- lalapps/data/LALSimNeutronStarEOS_MS1B.dat +126 -0
- lalapps/data/LALSimNeutronStarEOS_MS1B_PP.dat +1447 -0
- lalapps/data/LALSimNeutronStarEOS_MS1_PP.dat +1447 -0
- lalapps/data/LALSimNeutronStarEOS_MS2.dat +48 -0
- lalapps/data/LALSimNeutronStarEOS_PAL6.dat +148 -0
- lalapps/data/LALSimNeutronStarEOS_PCL2.dat +134 -0
- lalapps/data/LALSimNeutronStarEOS_PCP_BSK24_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_PS.dat +165 -0
- lalapps/data/LALSimNeutronStarEOS_QMC700.dat +117 -0
- lalapps/data/LALSimNeutronStarEOS_RG_SLY4_BSK24.dat +1010 -0
- lalapps/data/LALSimNeutronStarEOS_RS.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_RS_BSK24.dat +1356 -0
- lalapps/data/LALSimNeutronStarEOS_SK255.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SK255_BSK24.dat +1066 -0
- lalapps/data/LALSimNeutronStarEOS_SK272.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKA.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKA_BSK24.dat +1433 -0
- lalapps/data/LALSimNeutronStarEOS_SKB.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKB_BSK24.dat +1373 -0
- lalapps/data/LALSimNeutronStarEOS_SKI2.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKI2_BSK24.dat +1348 -0
- lalapps/data/LALSimNeutronStarEOS_SKI3.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKI3_BSK24.dat +1355 -0
- lalapps/data/LALSimNeutronStarEOS_SKI4.dat +497 -0
- lalapps/data/LALSimNeutronStarEOS_SKI4_BSK24.dat +1348 -0
- lalapps/data/LALSimNeutronStarEOS_SKI5.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKI6.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKI6_BSK24.dat +1358 -0
- lalapps/data/LALSimNeutronStarEOS_SKMP.dat +498 -0
- lalapps/data/LALSimNeutronStarEOS_SKOP.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SKOP_BSK24.dat +1373 -0
- lalapps/data/LALSimNeutronStarEOS_SLY.dat +99 -0
- lalapps/data/LALSimNeutronStarEOS_SLY2.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SLY230A.dat +500 -0
- lalapps/data/LALSimNeutronStarEOS_SLY230A_BSK24.dat +1116 -0
- lalapps/data/LALSimNeutronStarEOS_SLY2_BSK24.dat +1106 -0
- lalapps/data/LALSimNeutronStarEOS_SLY4.dat +100 -0
- lalapps/data/LALSimNeutronStarEOS_SLY9.dat +498 -0
- lalapps/data/LALSimNeutronStarEOS_SLY9_BSK24.dat +1083 -0
- lalapps/data/LALSimNeutronStarEOS_SQM1.dat +176 -0
- lalapps/data/LALSimNeutronStarEOS_SQM2.dat +180 -0
- lalapps/data/LALSimNeutronStarEOS_SQM3.dat +176 -0
- lalapps/data/LALSimNeutronStarEOS_WFF1.dat +109 -0
- lalapps/data/LALSimNeutronStarEOS_WFF2.dat +109 -0
- lalapps/data/LALSimNeutronStarEOS_WFF3.dat +107 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_DDLZ1_BSK24.dat +1227 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_DDME2_BSK24.dat +1272 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_DDMEX_BSK24.dat +1280 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_GM1_BSK24.dat +1288 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_MTVTC_BSK24.dat +1288 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_NL3_BSK24.dat +1230 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_PKDD_BSK24.dat +1288 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_TM1_BSK24.dat +1288 -0
- lalapps/data/LALSimNeutronStarEOS_XMLSLZ_TW99_BSK24.dat +1288 -0
- lalapps/data/LIGO-P1200087-v18-AdV_BNS_OPTIMIZED.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-AdV_DESIGN.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-AdV_EARLY_HIGH.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-AdV_EARLY_LOW.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-AdV_LATE_HIGH.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-AdV_LATE_LOW.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-AdV_MID_HIGH.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-AdV_MID_LOW.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_BNS_OPTIMIZED.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_DESIGN.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_EARLY_HIGH.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_EARLY_LOW.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_LATE_HIGH.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_LATE_LOW.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_MID_HIGH.txt +3000 -0
- lalapps/data/LIGO-P1200087-v18-aLIGO_MID_LOW.txt +3000 -0
- lalapps/data/LIGO-P1600143-v18-CE.txt +3000 -0
- lalapps/data/LIGO-P1600143-v18-CE_Pessimistic.txt +3000 -0
- lalapps/data/LIGO-P1600143-v18-CE_Wideband.txt +3000 -0
- lalapps/data/LIGO-P1600143-v18-ET_D.txt +3000 -0
- lalapps/data/LIGO-T0900288-v3-BHBH_20deg.txt +3000 -0
- lalapps/data/LIGO-T0900288-v3-High_Freq.txt +3000 -0
- lalapps/data/LIGO-T0900288-v3-NO_SRM.txt +3000 -0
- lalapps/data/LIGO-T0900288-v3-NSNS_Opt.txt +3000 -0
- lalapps/data/LIGO-T0900288-v3-ZERO_DET_high_P.txt +3000 -0
- lalapps/data/LIGO-T0900288-v3-ZERO_DET_low_P.txt +3000 -0
- lalapps/data/LIGO-T1600593-v1-KAGRA_Design.txt +4000 -0
- lalapps/data/LIGO-T1600593-v1-KAGRA_Early.txt +4000 -0
- lalapps/data/LIGO-T1600593-v1-KAGRA_Late.txt +4000 -0
- lalapps/data/LIGO-T1600593-v1-KAGRA_Mid.txt +4000 -0
- lalapps/data/LIGO-T1600593-v1-KAGRA_Opening.txt +4000 -0
- lalapps/data/LIGO-T1800042-v5-aLIGO_APLUS.txt +3000 -0
- lalapps/data/LIGO-T1800044-v5-aLIGO_DESIGN.txt +3000 -0
- lalapps/data/LIGO-T1800545-v1-AdV_O3low.txt +3000 -0
- lalapps/data/LIGO-T1800545-v1-AdV_O4.txt +3000 -0
- lalapps/data/LIGO-T1800545-v1-AdV_O4intermediate.txt +3000 -0
- lalapps/data/LIGO-T1800545-v1-KAGRA_128Mpc.txt +1000 -0
- lalapps/data/LIGO-T1800545-v1-KAGRA_25Mpc.txt +1000 -0
- lalapps/data/LIGO-T1800545-v1-KAGRA_80Mpc.txt +1000 -0
- lalapps/data/LIGO-T1800545-v1-aLIGO_140Mpc.txt +1000 -0
- lalapps/data/LIGO-T1800545-v1-aLIGO_175Mpc.txt +2792 -0
- lalapps/data/LIGO-T1800545-v1-aLIGO_O3low.txt +2792 -0
- lalapps/data/bimodalMeans.csv +3 -0
- lalapps/data/config_tiger_example.ini +150 -0
- lalapps/data/fiducialBBH.xml +67 -0
- lalapps/data/fiducialBNS.xml +67 -0
- lalapps/data/inspsrcs100Mpc.errors +38735 -0
- lalapps/data/lalinference_pipe_example.ini +573 -0
- lalapps/data/lib_pipe_example.ini +303 -0
- lalapps/data/power_pipe.ini +129 -0
- lalapps/data/unimodalMeans.csv +2 -0
- lalapps/git_version.py +64 -0
- lalburst/SimBurstUtils.py +324 -0
- lalburst/SnglBurstUtils.py +367 -0
- lalburst/__init__.py +7 -0
- lalburst/_lalburst.cpython-312-darwin.so +0 -0
- lalburst/_lalburst_swig.py +16 -0
- lalburst/binjfind.py +824 -0
- lalburst/bucluster.py +409 -0
- lalburst/burca.py +315 -0
- lalburst/burca_tailor.py +349 -0
- lalburst/cafe.py +579 -0
- lalburst/calc_likelihood.py +145 -0
- lalburst/cs_gamma.cpython-312-darwin.so +0 -0
- lalburst/date.py +118 -0
- lalburst/git_version.py +64 -0
- lalburst/offsetvector.py +278 -0
- lalburst/packing.py +170 -0
- lalburst/power.py +1457 -0
- lalburst/snglcluster.py +136 -0
- lalburst/snglcoinc.py +2637 -0
- lalburst/stringutils.py +607 -0
- lalburst/timeslides.py +236 -0
- lalframe/__init__.py +7 -0
- lalframe/_lalframe.cpython-312-darwin.so +0 -0
- lalframe/_lalframe_swig.py +14 -0
- lalframe/frread.py +324 -0
- lalframe/git_version.py +64 -0
- lalframe/utils/__init__.py +25 -0
- lalframe/utils/frtools.py +61 -0
- lalinference/__init__.py +7 -0
- lalinference/_bayespputils.cpython-312-darwin.so +0 -0
- lalinference/_lalinference.cpython-312-darwin.so +0 -0
- lalinference/_lalinference_swig.py +19 -0
- lalinference/bayespputils.py +7479 -0
- lalinference/bayestar/__init__.py +2 -0
- lalinference/bayestar/deprecation.py +72 -0
- lalinference/git_version.py +64 -0
- lalinference/imrtgr/__init__.py +0 -0
- lalinference/imrtgr/imrtgrutils.py +168 -0
- lalinference/imrtgr/nrutils.py +1366 -0
- lalinference/imrtgr/pneqns.py +250 -0
- lalinference/io/__init__.py +31 -0
- lalinference/io/hdf5.py +365 -0
- lalinference/lalinference_pipe_utils.py +3617 -0
- lalinference/nest2pos.py +151 -0
- lalinference/plot/__init__.py +34 -0
- lalinference/plot/spindisk.py +104 -0
- lalinference/tiger/__init__.py +0 -0
- lalinference/tiger/make_injtimes.py +634 -0
- lalinference/tiger/omegascans_dag.py +691 -0
- lalinference/tiger/postproc.py +1338 -0
- lalinference/wrapper.py +231 -0
- lalinspiral/__init__.py +7 -0
- lalinspiral/_lalinspiral.cpython-312-darwin.so +0 -0
- lalinspiral/_lalinspiral_swig.py +18 -0
- lalinspiral/_thinca.cpython-312-darwin.so +0 -0
- lalinspiral/git_version.py +64 -0
- lalinspiral/inspinjfind.py +485 -0
- lalinspiral/thinca.py +509 -0
- lalmetaio/__init__.py +7 -0
- lalmetaio/_lalmetaio.cpython-312-darwin.so +0 -0
- lalmetaio/_lalmetaio_swig.py +14 -0
- lalmetaio/git_version.py +64 -0
- lalpulsar/NstarTools.py +259 -0
- lalpulsar/PulsarParametersWrapper.py +938 -0
- lalpulsar/__init__.py +7 -0
- lalpulsar/_lalpulsar.cpython-312-darwin.so +0 -0
- lalpulsar/_lalpulsar_swig.py +17 -0
- lalpulsar/git_version.py +64 -0
- lalpulsar/knope_utils.py +6497 -0
- lalpulsar/lineFileParser.py +264 -0
- lalpulsar/metric_utils.py +78 -0
- lalpulsar/piecewise_model/__init__.py +7 -0
- lalpulsar/piecewise_model/basis_functions.py +156 -0
- lalpulsar/piecewise_model/class_definitions.py +323 -0
- lalpulsar/piecewise_model/errors.py +37 -0
- lalpulsar/piecewise_model/estimating_knots.py +833 -0
- lalpulsar/piecewise_model/gte_and_other_methods.py +189 -0
- lalpulsar/piecewise_model/mols_for_gte.py +269 -0
- lalpulsar/piecewise_model/pw_fstat.py +813 -0
- lalpulsar/piecewise_model/pw_model_simulations.py +156 -0
- lalpulsar/piecewise_model/sampling_methods.py +186 -0
- lalpulsar/piecewise_model/semicoherent_metric_methods.py +375 -0
- lalpulsar/piecewise_model/tbank_estimates.py +293 -0
- lalpulsar/public_sft_directory.py +82 -0
- lalpulsar/pulsarhtmlutils.py +1395 -0
- lalpulsar/pulsarpputils.py +3638 -0
- lalpulsar/simulateCW.py +602 -0
- lalpulsar/simulateHeterodynedCW.py +591 -0
- lalsimulation/__init__.py +7 -0
- lalsimulation/_lalsimulation.cpython-312-darwin.so +0 -0
- lalsimulation/_lalsimulation_swig.py +14 -0
- lalsimulation/git_version.py +64 -0
- lalsimulation/gwsignal/__init__.py +9 -0
- lalsimulation/gwsignal/core/__init__.py +2 -0
- lalsimulation/gwsignal/core/conditioning_subroutines.py +196 -0
- lalsimulation/gwsignal/core/errors.py +136 -0
- lalsimulation/gwsignal/core/gw.py +206 -0
- lalsimulation/gwsignal/core/parameter_conventions.py +122 -0
- lalsimulation/gwsignal/core/utils.py +329 -0
- lalsimulation/gwsignal/core/waveform.py +725 -0
- lalsimulation/gwsignal/core/waveform_conditioning.py +455 -0
- lalsimulation/gwsignal/models/__init__.py +29 -0
- lalsimulation/gwsignal/models/pyseobnr_model.py +452 -0
- lalsimulation/nrfits/NRSur3dq8Remnant.py +92 -0
- lalsimulation/nrfits/NRSur7dq4Remnant.py +469 -0
- lalsimulation/nrfits/__init__.py +1 -0
- lalsimulation/nrfits/eval_fits.py +364 -0
- lalsimulation/nrfits/nrfits.py +78 -0
- lalsimulation/nrfits/pn_spin_evolution_wrapper.py +92 -0
- lalsimulation/nrfits/quaternion_utils.py +74 -0
- lalsimulation/tilts_at_infinity/__init__.py +2 -0
- lalsimulation/tilts_at_infinity/calc_tilts_prec_avg_regularized.py +1424 -0
- lalsimulation/tilts_at_infinity/hybrid_spin_evolution.py +461 -0
- lalsimulation/tilts_at_infinity/tilts_at_infinity_utils.py +167 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesBurstPPAnalysis +305 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesBurstPostProc +1364 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesCombinePTMCMCh5s +100 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesCombinePosteriors +235 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesCompPos +1121 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesDIEvidence +68 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesGraceDBinfo +182 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesMCMC2pos +314 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesPPAnalysis +322 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesPlotSpinDisk +42 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesPosToSimBurst +227 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesPosToSimInspiral +307 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesPostProc +1345 -0
- lalsuite-7.26.2.dev20251210.data/scripts/cbcBayesThermoInt +107 -0
- lalsuite-7.26.2.dev20251210.data/scripts/imrtgr_imr_consistency_test +796 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_cache +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_fftw_wisdom +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_fftwf_wisdom +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_path2cache +148 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_searchsum2cache +172 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_simd_detect +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_tconvert +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lal_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ComputeAntennaPattern +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ComputeFstatBenchmark +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ComputeFstatLatticeCount +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ComputeFstatMCUpperLimit +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ComputeFstatistic_v2 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ComputePSD +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_CopySFTs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_DistanceVsMass +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_DriveHoughMulti +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_FstatMetric_v2 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_HierarchSearchGCT +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_HierarchicalSearch +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_MakeSFTDAG +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_MakeSFTs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_Makefakedata_v4 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_Makefakedata_v5 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_PredictFstat +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_PrintDetectorState +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_SFTclean +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_SFTvalidate +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_StringAddFrame +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_StringSearch +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_Weave +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_WeaveCompare +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_WeaveConcat +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_WeaveSetup +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_WriteSFTsfromSFDBs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_animate +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_binj +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_blindinj +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_cache +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_cafe +99 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_calfacs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_cbc_stochasticbank +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_chirplen +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_coh_PTF_inspiral +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_coinj +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_combine_crosscorr_toplists +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_compareFstats +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_compareSFTs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_cosmicstring_pipe +525 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_create_time_correction_ephemeris +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_dumpSFT +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_effdist +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_exc_resp +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_fftw_wisdom +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_fftwf_wisdom +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_fits_header_getval +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_fits_header_list +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_fits_overview +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_fits_table_list +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_fr_ninja +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_frextr +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_frinfo +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_frjoin +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_frread +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_frview +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_gwf2xml +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_heterodyne_pulsar +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_inspawgfile +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_inspfrinj +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_inspinj +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_inspiralDistance +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_knope +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_knope_automation_script +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_knope_collate_results +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_knope_result_page +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_makeblindinj +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_makeblindinj_himass +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ninja +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_path2cache +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_power +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_power_likelihood_pipe +219 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_power_pipe +417 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_pulsar_crosscorr_v2 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_pulsar_frequency_evolution +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_pulsar_parameter_estimation_nested +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_random_bank +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_randombank +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_run_pulsar_crosscorr_v2 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_searchsum2cache +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_spec_avg +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_spec_avg_long +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_spec_coherence +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_spininj +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_splitSFTs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_splitbank +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_ssbtodetector +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_apply_vetoes +171 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_calc_likelihood +172 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_contour_plotter +141 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_contour_plotter_largeloops +133 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_cs_gamma +110 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_cs_gamma_largeloops +119 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_final +1064 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_meas_likelihood +264 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_plot_binj +543 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_string_plot_likelihood +380 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_synthesizeBstatMC +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_synthesizeLVStats +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_synthesizeTransientStats +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_tconvert +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_tmpltbank +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalapps_xtefitstoframe +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_cluster +156 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_coinc +224 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_cut +425 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_gen_timeslides +254 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_inj_pic +254 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_injfind +170 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_plot_tisi +165 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_power_calc_likelihood +182 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_power_final +1369 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_power_meas_likelihood +206 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_power_plot_binj +934 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_power_plot_binjtf +302 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalburst_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-cat +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-cksum +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-cut +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-dump +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-fmt +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-paste +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-print +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-split +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-stat +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-stream +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalfr-vis +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalframe_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_bench +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_burst +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_burst_pp_pipe +220 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_coherence_test +139 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_compute_roq_weights +404 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_cpnest +58 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_datadump +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_evolve_spins_and_append_samples +202 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_injectedlike +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_merge_posteriors +57 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_mpi_wrapper +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_multi_pipe +144 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_nest +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_nest2pos +286 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_pipe +512 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_pp_pipe +229 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_review_test +362 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinference_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinspiral_injfind +206 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinspiral_thinca +240 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalinspiral_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalmetaio_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_ComputeAntennaPattern +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_ComputeFstatBenchmark +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_ComputeFstatLatticeCount +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_ComputeFstatMCUpperLimit +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_ComputeFstatistic_v2 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_ComputePSD +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_CopyPublicSFTs +216 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_DriveHoughMulti +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_FstatMetric_v2 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_HierarchSearchGCT +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_HierarchicalSearch +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_MakeSFTDAG +1142 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_MakeSFTs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_Makefakedata_v4 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_Makefakedata_v5 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_MoveSFTs +208 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_PiecewiseSearch +963 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_PredictFstat +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_PrintDetectorState +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_SFTclean +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_SFTvalidate +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_Weave +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_WeaveCompare +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_WeaveConcat +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_WeaveSetup +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_WriteSFTsfromSFDBs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_compareFstats +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_compareSFTs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_create_time_correction_ephemeris +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_crosscorr_v2 +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_dumpSFT +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_fits_header_getval +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_fits_header_list +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_fits_overview +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_fits_table_list +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_frequency_evolution +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_heterodyne +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_knope +145 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_knope_automation_script +731 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_knope_collate_results +675 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_knope_result_page +2977 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_parameter_estimation_nested +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_spec_avg +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_spec_avg_long +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_spec_coherence +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_splitSFTs +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_ssbtodetector +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_synthesizeBstatMC +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_synthesizeLVStats +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_synthesizeTransientStats +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalpulsar_version +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-bh-qnmode +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-bh-ringdown +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-bh-sphwf +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-burst +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-detector-noise +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-detector-strain +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-inject +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-inspiral +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-ns-eos-table +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-ns-mass-radius +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-ns-params +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-sgwb +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsim-unicorn +6 -0
- lalsuite-7.26.2.dev20251210.data/scripts/lalsimulation_version +6 -0
- lalsuite-7.26.2.dev20251210.dist-info/METADATA +90 -0
- lalsuite-7.26.2.dev20251210.dist-info/RECORD +749 -0
- lalsuite-7.26.2.dev20251210.dist-info/WHEEL +6 -0
- lalsuite-7.26.2.dev20251210.dist-info/licenses/COPYING +339 -0
- lalsuite-7.26.2.dev20251210.dist-info/top_level.txt +9 -0
lalburst/power.py
ADDED
|
@@ -0,0 +1,1457 @@
|
|
|
1
|
+
#
|
|
2
|
+
# This program is free software; you can redistribute it and/or modify it under
|
|
3
|
+
# the terms of the GNU General Public License as published by the Free Software
|
|
4
|
+
# Foundation; either version 2 of the License, or (at your option) any later
|
|
5
|
+
# version.
|
|
6
|
+
#
|
|
7
|
+
# This program is distributed in the hope that it will be useful, but WITHOUT
|
|
8
|
+
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
|
9
|
+
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
|
|
10
|
+
# details.
|
|
11
|
+
#
|
|
12
|
+
# You should have received a copy of the GNU General Public License along with
|
|
13
|
+
# this program; if not, write to the Free Software Foundation, Inc., 51
|
|
14
|
+
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
#
|
|
18
|
+
# =============================================================================
|
|
19
|
+
#
|
|
20
|
+
# Preamble
|
|
21
|
+
#
|
|
22
|
+
# =============================================================================
|
|
23
|
+
#
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
"""
|
|
27
|
+
Excess power pipeline construction tools.
|
|
28
|
+
"""
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
import errno
|
|
32
|
+
import os
|
|
33
|
+
import sys
|
|
34
|
+
import time
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
import igwn_segments as segments
|
|
38
|
+
from igwn_segments import utils as segmentsUtils
|
|
39
|
+
import lal
|
|
40
|
+
from lal import iterutils
|
|
41
|
+
from lal import pipeline
|
|
42
|
+
from lal.utils import CacheEntry
|
|
43
|
+
import lalburst
|
|
44
|
+
from . import cafe
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
__author__ = "Duncan Brown <duncan@gravity.phys.uwm.edu>, Kipp Cannon <kipp@gravity.phys.uwm.edu>"
|
|
48
|
+
__date__ = "$Date$"
|
|
49
|
+
__version__ = "$Revision$"
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
#
|
|
53
|
+
# =============================================================================
|
|
54
|
+
#
|
|
55
|
+
# Helpers
|
|
56
|
+
#
|
|
57
|
+
# =============================================================================
|
|
58
|
+
#
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def get_universe(config_parser):
	"""Return the Condor universe named in the [condor] section."""
	universe = config_parser.get("condor", "universe")
	return universe
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def get_accounting_group(config_parser):
	"""Return the Condor accounting group named in the [condor] section."""
	group = config_parser.get("condor", "accounting_group")
	return group
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def get_executable(config_parser, name):
	"""Return the path of executable *name* from the [condor] section."""
	path = config_parser.get("condor", name)
	return path
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def get_out_dir(config_parser):
	"""Return the log/output directory named in the [pipeline] section."""
	out_dir = config_parser.get("pipeline", "out_dir")
	return out_dir
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def get_cache_dir(config_parser):
	"""Return the cache directory named in the [pipeline] section."""
	cache_dir = config_parser.get("pipeline", "cache_dir")
	return cache_dir
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def get_triggers_dir(config_parser):
	"""Return the triggers directory named in the [pipeline] section."""
	triggers_dir = config_parser.get("pipeline", "triggers_dir")
	return triggers_dir
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def make_dir_if_not_exists(dir):
	"""
	Create directory dir.  It is not an error if the directory
	already exists;  any other OSError is propagated to the caller.
	"""
	try:
		os.mkdir(dir)
	except OSError as e:
		if e.errno != errno.EEXIST:
			# OK if directory exists, otherwise report error.  bare
			# raise (instead of "raise e") preserves the original
			# traceback
			raise
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def make_dag_directories(config_parser):
	"""Create the cache and output directories named in the config file."""
	for directory in (get_cache_dir(config_parser), get_out_dir(config_parser)):
		make_dir_if_not_exists(directory)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def get_files_per_bucluster(config_parser):
	"""Return the number of files per lalburst_cluster job ([pipeline] section)."""
	count = config_parser.getint("pipeline", "files_per_bucluster")
	return count
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def get_files_per_bucut(config_parser):
	"""Return the number of files per lalburst_cut job ([pipeline] section)."""
	count = config_parser.getint("pipeline", "files_per_bucut")
	return count
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def get_files_per_burca(config_parser):
	"""Return the number of files per burca job ([pipeline] section)."""
	count = config_parser.getint("pipeline", "files_per_burca")
	return count
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
def get_files_per_binjfind(config_parser):
	"""Return the number of files per binjfind job ([pipeline] section)."""
	count = config_parser.getint("pipeline", "files_per_binjfind")
	return count
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
class TimingParameters(object):
	"""
	Container for the timing parameters of the excess power analysis.

	The raw values are read from the [lalapps_power] section of the
	configuration file;  the remaining values are derived from them by
	the lalburst library.
	"""
	def __init__(self, config_parser):
		# raw values from the configuration file
		self.resample_rate = config_parser.getfloat("lalapps_power", "resample-rate")
		self.window_length = config_parser.getint("lalapps_power", "window-length")
		self.max_tile_length = int(config_parser.getfloat("lalapps_power", "max-tile-duration") * self.resample_rate)
		self.tile_stride_fraction = config_parser.getfloat("lalapps_power", "tile-stride-fraction")
		self.filter_corruption = config_parser.getint("lalapps_power", "filter-corruption")
		self.max_tile_bandwidth = config_parser.getfloat("lalapps_power", "max-tile-bandwidth")

		# additional computed parameters from library code
		derived = lalburst.EPGetTimingParameters(
			self.window_length,
			self.max_tile_length,
			self.tile_stride_fraction,
			config_parser.getint("lalapps_power", "psd-average-points")
		)
		self.psd_length, self.psd_shift, self.window_shift, self.window_pad, self.tiling_length = derived
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def make_cache_entry(input_cache, description, path):
	"""
	Construct a single CacheEntry summarizing the entries in
	input_cache:  the observatory is the "+"-joined sorted instrument
	list, the segment is the extent of all entries' segments, and the
	URL points at path (or /dev/null when path is empty).
	"""
	# summarize segment information
	seglists = segments.segmentlistdict()
	for c in input_cache:
		seglists |= c.segmentlistdict

	# obtain sorted instrument list, excluding None.  NB:  in Python 3
	# dict.keys() is a view with no .remove()/.sort(), so build a real
	# list instead of mutating the view
	instruments = sorted(instrument for instrument in seglists if instrument is not None)

	# remove empty segment lists to allow extent_all() to work.
	# iterate over a copy of the keys because we delete during
	# iteration
	for instrument in list(seglists):
		if not seglists[instrument]:
			del seglists[instrument]

	# make the URL
	if path:
		url = "file://localhost%s" % os.path.abspath(path)
	else:
		# FIXME: old version of CacheEntry allowed None for URL,
		# new version doesn't. correct fix is to modify calling
		# code to not try to initialize the output cache until
		# after the input is known, but for now we'll just do this
		# stupid hack.
		url = "file://localhost/dev/null"

	# construct a cache entry from the instruments and
	# segments that remain
	return CacheEntry("+".join(instruments) or None, description, seglists.extent_all(), url)
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def collect_output_caches(parents):
	"""
	Return a list of (cache_entry, parent) pairs for every output
	cache entry of every parent node, sorted by the entries' segments.
	"""
	pairs = []
	for parent in parents:
		for cache_entry in parent.get_output_cache():
			pairs.append((cache_entry, parent))
	pairs.sort(key = lambda pair: pair[0].segment)
	return pairs
|
|
174
|
+
|
|
175
|
+
|
|
176
|
+
def match_nodes_to_caches(nodes, caches):
	"""
	For each cache, get the set of nodes whose output files it
	contains.  A node is allowed to provide more than one output file,
	and thus can be listed in more than one set.  Also returns the
	count of nodes that appear in no group at all.
	"""
	all_nodes = set(nodes)

	# cache_entry --> node look-up table
	index = {cache_entry: node for node in all_nodes for cache_entry in node.get_output_cache()}

	# form one node group per input cache.  ([set()] * len(caches)
	# would alias a single set, hence the per-cache construction
	node_groups = [{index[cache_entry] for cache_entry in cache} for cache in caches]

	# how many nodes didn't get used?
	unused = len(all_nodes) - len(set.union(*node_groups))

	# done
	return node_groups, unused
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def cache_span(cache):
	"""Return the segment spanning all entries in cache, from the
	earliest start to the latest end."""
	start = min(cache_entry.segment[0] for cache_entry in cache)
	end = max(cache_entry.segment[1] for cache_entry in cache)
	return segments.segment(start, end)
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
#
|
|
211
|
+
# How to write an output cache
|
|
212
|
+
#
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def write_output_cache(nodes, filename):
	"""
	Write the output cache entries of nodes to filename, one entry per
	line, sorted by segment.
	"""
	# NB:  the Python 2 file() builtin does not exist in Python 3;
	# use open() in a context manager so the handle is also closed
	with open(filename, "w") as f:
		for cache_entry, node in collect_output_caches(nodes):
			print(str(cache_entry), file=f)
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
#
|
|
222
|
+
# =============================================================================
|
|
223
|
+
#
|
|
224
|
+
# DAG Node and Job Class
|
|
225
|
+
#
|
|
226
|
+
# =============================================================================
|
|
227
|
+
#
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
class RMJob(pipeline.CondorDAGJob):
	"""
	Condor job that deletes files by running /bin/rm --force in the
	local universe.
	"""
	def __init__(self, config_parser):
		"""
		config_parser = ConfigParser object
		"""
		pipeline.CondorDAGJob.__init__(self, "local", "/bin/rm")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "rm-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "rm-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_opt("force", "")
		self.set_sub_file("rm.sub")
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
class RMNode(pipeline.CondorDAGNode):
	"""
	Condor DAG node that removes the files in its input cache.
	"""
	def __init__(self, job):
		pipeline.CondorDAGNode.__init__(self, job)
		self.input_cache = set()
		self.output_cache = set()
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def add_input_cache(self, cache):
		# record the entries and pass each file to rm on the
		# command line
		self.input_cache |= cache
		for entry in cache:
			pipeline.CondorDAGNode.add_file_arg(self, entry.path)

	def get_output_cache(self):
		# rm produces no output files
		return set()
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
class BurstInjJob(pipeline.CondorDAGJob, pipeline.AnalysisJob):
	"""
	A lalapps_binj job used by the power pipeline.  The static options
	are read from the [lalapps_binj] section in the ini file.  The
	stdout and stderr from the job are directed to the logs directory.
	The job runs in the universe specified in the ini file.  The path
	to the executable is determined from the ini file.
	"""
	def __init__(self, config_parser):
		"""
		config_parser = ConfigParser object
		"""
		pipeline.CondorDAGJob.__init__(self, get_universe(config_parser), get_executable(config_parser, "lalapps_binj"))
		pipeline.AnalysisJob.__init__(self, config_parser)

		# do this many injections between flow and fhigh inclusively
		if config_parser.has_option("pipeline", "injection_bands"):
			self.injection_bands = config_parser.getint("pipeline", "injection_bands")
		else:
			self.injection_bands = None

		self.add_ini_opts(config_parser, "lalapps_binj")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalapps_binj-$(macrogpsstarttime)-$(macrogpsendtime)-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalapps_binj-$(macrogpsstarttime)-$(macrogpsendtime)-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.set_sub_file("lalapps_binj.sub")

		self.output_dir = "."

		# one injection every time-step seconds
		self.time_step = config_parser.getfloat("lalapps_binj", "time-step")
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
class BurstInjNode(pipeline.CondorDAGNode, pipeline.AnalysisNode):
	"""
	Condor DAG node running lalapps_binj.  The output file name is
	derived from the GPS start/end times and the user tag, all of
	which must be set before the output cache can be computed.
	"""
	def __init__(self, job):
		pipeline.CondorDAGNode.__init__(self, job)
		pipeline.AnalysisNode.__init__(self)
		# user tag; must be set before the output name can be computed
		self.__usertag = None
		# populated lazily by get_output_cache()
		self.output_cache = []
		self.output_dir = os.path.join(os.getcwd(), self.job().output_dir)
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def set_user_tag(self, tag):
		# fix:  the guard belongs on the setter, not the getter.
		# the tag is baked into the cached output name, so it must
		# not change after the cache has been computed (this matches
		# PowerNode.set_user_tag())
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		self.__usertag = tag
		self.add_var_opt("user-tag", self.__usertag)

	def get_user_tag(self):
		# reading the tag is always safe
		return self.__usertag

	def set_time_slide_file(self, filename):
		self.add_var_opt("time-slide-file", filename)

	def get_time_slide_file(self):
		return self.get_opts().get("macrotimeslidefile", None)

	def set_start(self, start):
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		self.add_var_opt("gps-start-time", start)

	def get_start(self):
		return self.get_opts().get("macrogpsstarttime", None)

	def set_end(self, end):
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		self.add_var_opt("gps-end-time", end)

	def get_end(self):
		return self.get_opts().get("macrogpsendtime", None)

	def get_output_cache(self):
		"""
		Returns a LAL cache of the output file name.  Calling this
		method also induces the output name to get set, so it must
		be called at least once.
		"""
		if not self.output_cache:
			# FIXME: instruments hardcoded to "everything"
			self.output_cache = [CacheEntry("G1+H1+H2+L1+T1+V1", self.__usertag, segments.segment(lal.LIGOTimeGPS(self.get_start()), lal.LIGOTimeGPS(self.get_end())), "file://localhost" + os.path.abspath(self.get_output()))]
		return self.output_cache

	def get_output_files(self):
		raise NotImplementedError

	def get_output(self):
		# compute (and cache via set_output()) the output name on
		# first use
		if self._AnalysisNode__output is None:
			if None in (self.get_start(), self.get_end(), self.__usertag):
				raise ValueError("start time, end time, or user tag has not been set")
			# fix:  cast each endpoint separately (the stored opts
			# may not support subtraction directly);  matches
			# PowerNode.get_output().  the unused "seg" local has
			# been removed
			self.set_output(os.path.join(self.output_dir, "G1+H1+H2+L1+T1+V1-INJECTIONS_%s-%d-%d.xml.gz" % (self.__usertag, int(self.get_start()), int(self.get_end()) - int(self.get_start()))))
		return self._AnalysisNode__output
|
|
355
|
+
|
|
356
|
+
|
|
357
|
+
class PowerJob(pipeline.CondorDAGJob, pipeline.AnalysisJob):
	"""
	A lalapps_power job used by the power pipeline.  The static options
	are read from the [lalapps_power] and [lalapps_power_<inst>]
	sections in the ini file.  The stdout and stderr from the job are
	directed to the logs directory.  The job runs in the universe
	specified in the ini file.  The path to the executable is determined
	from the ini file.
	"""
	def __init__(self, config_parser):
		"""
		config_parser = ConfigParser object
		"""
		pipeline.CondorDAGJob.__init__(self, get_universe(config_parser), get_executable(config_parser, "lalapps_power"))
		pipeline.AnalysisJob.__init__(self, config_parser)
		self.add_ini_opts(config_parser, "lalapps_power")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalapps_power-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalapps_power-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.set_sub_file("lalapps_power.sub")

		self.output_dir = "."
|
|
380
|
+
|
|
381
|
+
|
|
382
|
+
class PowerNode(pipeline.AnalysisNode):
	# Condor DAG node running lalapps_power for one instrument and one
	# GPS interval.
	#
	# NOTE(review): this class calls pipeline.CondorDAGNode.__init__()
	# and touches CondorDAGNode's private macro dict, but only lists
	# AnalysisNode as a base class — presumably AnalysisNode derives
	# from CondorDAGNode in lal.pipeline; confirm (compare BurstInjNode,
	# which lists both bases explicitly).
	def __init__(self, job):
		pipeline.CondorDAGNode.__init__(self, job)
		pipeline.AnalysisNode.__init__(self)
		# user tag; must be set before the output name can be computed
		self.__usertag = None
		# list of CacheEntry objects describing this node's output;
		# empty until get_output_cache() is first called
		self.output_cache = []
		self.output_dir = os.path.join(os.getcwd(), self.job().output_dir)
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def set_ifo(self, instrument):
		"""
		Load additional options from the per-instrument section in
		the config file.
		"""
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		pipeline.AnalysisNode.set_ifo(self, instrument)
		# append every option from the [lalapps_power_<instrument>]
		# section of the config file as a command-line argument
		for optvalue in self.job()._AnalysisJob__cp.items("lalapps_power_%s" % instrument):
			self.add_var_arg("--%s %s" % optvalue)

	def set_user_tag(self, tag):
		# the tag is baked into the cached output name, so it must
		# not change after the output cache has been computed
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		self.__usertag = tag
		self.add_var_opt("user-tag", self.__usertag)

	def get_user_tag(self):
		return self.__usertag

	def get_output_cache(self):
		"""
		Returns a LAL cache of the output file name.  Calling this
		method also induces the output name to get set, so it must
		be called at least once.
		"""
		if not self.output_cache:
			self.output_cache = [CacheEntry(self.get_ifo(), self.__usertag, segments.segment(lal.LIGOTimeGPS(self.get_start()), lal.LIGOTimeGPS(self.get_end())), "file://localhost" + os.path.abspath(self.get_output()))]
		return self.output_cache

	def get_output_files(self):
		raise NotImplementedError

	def get_output(self):
		# compute (and cache, via set_output()) the output file name
		# on first use
		if self._AnalysisNode__output is None:
			if None in (self.get_start(), self.get_end(), self.get_ifo(), self.__usertag):
				raise ValueError("start time, end time, ifo, or user tag has not been set")
			# NOTE(review): seg is computed but never used
			seg = segments.segment(lal.LIGOTimeGPS(self.get_start()), lal.LIGOTimeGPS(self.get_end()))
			self.set_output(os.path.join(self.output_dir, "%s-POWER_%s-%d-%d.xml.gz" % (self.get_ifo(), self.__usertag, int(self.get_start()), int(self.get_end()) - int(self.get_start()))))
		return self._AnalysisNode__output

	def set_mdccache(self, file):
		"""
		Set the MDC frame cache to use.  The cache file is passed
		to the job with the --mdc-cache argument and registered as
		an input file.  @param file: path of the MDC frame cache
		file.
		"""
		self.add_var_opt("mdc-cache", file)
		self.add_input_file(file)

	def set_injection_file(self, file):
		"""
		Set the name of the XML file from which to read a list of
		software injections.
		"""
		self.add_var_opt("injection-file", file)
		self.add_input_file(file)
|
|
448
|
+
|
|
449
|
+
|
|
450
|
+
class LigolwAddNode(pipeline.LigolwAddNode):
	"""
	ligolw_add DAG node that tracks its input and output caches, writes
	its input file list to a cache file, and can optionally delete its
	input files after merging.
	"""
	def __init__(self, job, remove_input, *args):
		pipeline.LigolwAddNode.__init__(self, job, *args)
		self.input_cache = []
		self.output_cache = []
		self.cache_dir = os.path.join(os.getcwd(), self.job().cache_dir)
		self.output_dir = os.path.join(os.getcwd(), ".")	# "." == self.job().output_dir except the job class doesn't yet have this info
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()
		self.remove_input = bool(remove_input)
		if self.remove_input:
			self.add_var_arg("--remove-input")

	def __update_output_cache(self, observatory = None, segment = None):
		# rebuild the single-entry output cache from the current
		# input cache, optionally overriding observatory/segment
		del self.output_cache[:]
		cache_entry = make_cache_entry(self.input_cache, None, self._AnalysisNode__output)
		if observatory is not None:
			cache_entry.observatory = observatory
		if segment is not None:
			cache_entry.segment = segment
		# fix:  list.append() returns None;  the original rebound
		# cache_entry to None for no reason
		self.output_cache.append(cache_entry)

	def set_name(self, *args):
		pipeline.LigolwAddNode.set_name(self, *args)
		self.cache_name = os.path.join(self.cache_dir, "%s.cache" % self.get_name())
		self.add_var_opt("input-cache", self.cache_name)

	def add_input_cache(self, cache):
		self.input_cache.extend(cache)
		self.__update_output_cache()

	def set_output(self, path = None, observatory = None, segment = None):
		pipeline.LigolwAddNode.set_output(self, path)
		self.__update_output_cache(observatory = observatory, segment = segment)

	def add_preserve_cache(self, cache):
		# exempt these files from --remove-input deletion
		if self.remove_input:
			for c in cache:
				self.add_var_arg("--remove-input-except %s" % c.path)

	def get_input_cache(self):
		return self.input_cache

	def get_output_cache(self):
		return self.output_cache

	def write_input_files(self, *args):
		# fix:  the Python 2 file() builtin does not exist in
		# Python 3;  use open() in a context manager so the handle
		# is flushed and closed before the parent class runs
		with open(self.cache_name, "w") as f:
			for c in self.input_cache:
				print(str(c), file=f)
		pipeline.LigolwAddNode.write_input_files(self, *args)

	def get_output_files(self):
		raise NotImplementedError

	def get_output(self):
		raise NotImplementedError
|
|
506
|
+
|
|
507
|
+
|
|
508
|
+
class BucutJob(pipeline.CondorDAGJob):
	"""
	Condor job wrapping the lalburst_cut executable.  Static options
	come from the [lalburst_cut] section of the ini file;  the number
	of files handled per node comes from [pipeline] files_per_bucut.
	"""
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", get_executable(config_parser, "lalburst_cut"))
		self.set_sub_file("lalburst_cut.sub")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalburst_cut-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalburst_cut-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_condor_cmd("Requirements", "Memory > 1100")
		self.add_ini_opts(config_parser, "lalburst_cut")

		self.files_per_bucut = get_files_per_bucut(config_parser)
		if self.files_per_bucut < 1:
			raise ValueError("files_per_bucut < 1")
|
|
522
|
+
|
|
523
|
+
|
|
524
|
+
class BucutNode(pipeline.CondorDAGNode):
	"""
	Condor DAG node running one lalburst_cut instance.  The files are
	edited in place, so the output cache is the input cache.
	"""
	def __init__(self, *args):
		pipeline.CondorDAGNode.__init__(self, *args)
		self.input_cache = []
		# in-place edit:  outputs are the inputs
		self.output_cache = self.input_cache
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def add_input_cache(self, cache):
		"""
		Add entries, passing each file as a command line argument.
		"""
		self.input_cache.extend(cache)
		for entry in cache:
			pipeline.CondorDAGNode.add_file_arg(self, entry.path)
			self.add_output_file(entry.path)

	def add_file_arg(self, filename):
		# deliberately disabled:  use add_input_cache() instead
		raise NotImplementedError

	def get_input_cache(self):
		"""
		Return the list of input cache entries.
		"""
		return self.input_cache

	def get_output_cache(self):
		"""
		Return the output cache (same list as the input cache).
		"""
		return self.output_cache

	def get_output_files(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError

	def get_output(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError
|
|
552
|
+
|
|
553
|
+
|
|
554
|
+
class BuclusterJob(pipeline.CondorDAGJob):
	"""
	Condor job definition for lalburst_cluster.
	"""
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", get_executable(config_parser, "lalburst_cluster"))
		self.set_sub_file("lalburst_cluster.sub")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalburst_cluster-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalburst_cluster-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_condor_cmd("Requirements", "Memory > 1100")
		self.add_ini_opts(config_parser, "lalburst_cluster")

		# nodes write their input cache files into this directory
		self.cache_dir = get_cache_dir(config_parser)

		# how many files each lalburst_cluster instance processes
		self.files_per_bucluster = get_files_per_bucluster(config_parser)
		if self.files_per_bucluster < 1:
			raise ValueError("files_per_bucluster < 1")
|
|
570
|
+
|
|
571
|
+
|
|
572
|
+
class BuclusterNode(pipeline.CondorDAGNode):
	"""
	Condor DAG node running one lalburst_cluster instance.  The files
	are processed in place, so the output cache is the input cache.
	The input file list is passed via a cache file named after the
	node.
	"""
	def __init__(self, *args):
		pipeline.CondorDAGNode.__init__(self, *args)
		self.input_cache = []
		# in-place edit:  outputs are the inputs
		self.output_cache = self.input_cache
		self.cache_dir = os.path.join(os.getcwd(), self.job().cache_dir)
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def set_name(self, *args):
		"""
		Set the node's name and derive the input cache file name and
		--input-cache option from it.
		"""
		pipeline.CondorDAGNode.set_name(self, *args)
		self.cache_name = os.path.join(self.cache_dir, "%s.cache" % self.get_name())
		self.add_var_opt("input-cache", self.cache_name)

	def add_input_cache(self, cache):
		"""
		Append entries to the input cache.
		"""
		self.input_cache.extend(cache)

	def add_file_arg(self, filename):
		# deliberately disabled:  use add_input_cache() instead
		raise NotImplementedError

	def write_input_files(self, *args):
		"""
		Write this node's input cache file, then defer to the parent
		class.  Bug fix:  the original used the file() builtin,
		which does not exist in Python 3, and never closed the file;
		use open() in a context manager instead.
		"""
		with open(self.cache_name, "w") as f:
			for c in self.input_cache:
				print(str(c), file=f)
		pipeline.CondorDAGNode.write_input_files(self, *args)

	def get_input_cache(self):
		"""
		Return the list of input cache entries.
		"""
		return self.input_cache

	def get_output_cache(self):
		"""
		Return the output cache (same list as the input cache).
		"""
		return self.output_cache

	def get_output_files(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError

	def get_output(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError
|
|
608
|
+
|
|
609
|
+
|
|
610
|
+
class BinjfindJob(pipeline.CondorDAGJob):
	"""
	Condor job definition for lalburst_injfind.
	"""
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", get_executable(config_parser, "lalburst_injfind"))
		self.set_sub_file("lalburst_injfind.sub")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalburst_injfind-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalburst_injfind-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_ini_opts(config_parser, "lalburst_injfind")

		# how many files each lalburst_injfind instance processes
		self.files_per_binjfind = get_files_per_binjfind(config_parser)
		if self.files_per_binjfind < 1:
			raise ValueError("files_per_binjfind < 1")
|
|
623
|
+
|
|
624
|
+
|
|
625
|
+
class BinjfindNode(pipeline.CondorDAGNode):
	"""
	Condor DAG node running one lalburst_injfind instance.  The files
	are processed in place, so the output cache is the input cache.
	"""
	def __init__(self, *args):
		pipeline.CondorDAGNode.__init__(self, *args)
		self.input_cache = []
		# in-place edit:  outputs are the inputs
		self.output_cache = self.input_cache
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def add_input_cache(self, cache):
		"""
		Add entries, passing each file as a command line argument.
		"""
		self.input_cache.extend(cache)
		for entry in cache:
			pipeline.CondorDAGNode.add_file_arg(self, entry.path)
			self.add_output_file(entry.path)

	def add_file_arg(self, filename):
		# deliberately disabled:  use add_input_cache() instead
		raise NotImplementedError

	def get_input_cache(self):
		"""
		Return the list of input cache entries.
		"""
		return self.input_cache

	def get_output_cache(self):
		"""
		Return the output cache (same list as the input cache).
		"""
		return self.output_cache

	def get_output_files(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError

	def get_output(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError
|
|
653
|
+
|
|
654
|
+
|
|
655
|
+
class BurcaJob(pipeline.CondorDAGJob):
	"""
	Condor job definition for lalburst_coinc.  The memory requirement
	is parameterized per-node via the macrominram macro.
	"""
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", get_executable(config_parser, "lalburst_coinc"))
		self.set_sub_file("lalburst_coinc.sub")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalburst_coinc-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalburst_coinc-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_condor_cmd("Requirements", "Memory >= $(macrominram)")
		self.add_ini_opts(config_parser, "lalburst_coinc")

		# how many files each lalburst_coinc instance processes
		self.files_per_burca = get_files_per_burca(config_parser)
		if self.files_per_burca < 1:
			raise ValueError("files_per_burca < 1")
|
|
669
|
+
|
|
670
|
+
|
|
671
|
+
class Burca2Job(pipeline.CondorDAGJob):
	"""
	Condor job definition for the second lalburst_coinc pass,
	configured from the [lalburst_coinc2] section.
	"""
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", get_executable(config_parser, "lalburst_coinc"))
		self.set_sub_file("lalburst_coinc2.sub")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalburst_coinc2-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalburst_coinc2-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_ini_opts(config_parser, "lalburst_coinc2")

		# nodes write their input cache files into this directory
		self.cache_dir = get_cache_dir(config_parser)
|
|
682
|
+
|
|
683
|
+
|
|
684
|
+
class BurcaNode(pipeline.CondorDAGNode):
	"""
	Condor DAG node running one lalburst_coinc instance.  The files
	are processed in place, so the output cache is the input cache.
	The node's minimum RAM requirement scales with the longest input
	segment.
	"""
	def __init__(self, *args):
		pipeline.CondorDAGNode.__init__(self, *args)
		self.input_cache = []
		# in-place edit:  outputs are the inputs
		self.output_cache = self.input_cache
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def add_input_cache(self, cache):
		"""
		Add entries, passing each file as a command line argument,
		and update the macrominram macro from the longest input
		segment.
		"""
		self.input_cache.extend(cache)
		for entry in cache:
			pipeline.CondorDAGNode.add_file_arg(self, entry.path)
			self.add_output_file(entry.path)
		longest_duration = max(abs(entry.segment) for entry in self.input_cache)
		if longest_duration > 25000:
			# ask for >= 1300 MB
			minram = 1300
		elif longest_duration > 10000:
			# ask for >= 800 MB
			minram = 800
		else:
			# run on any node
			minram = 0
		self.add_macro("macrominram", minram)

	def add_file_arg(self, filename):
		# deliberately disabled:  use add_input_cache() instead
		raise NotImplementedError

	def get_input_cache(self):
		"""
		Return the list of input cache entries.
		"""
		return self.input_cache

	def get_output_cache(self):
		"""
		Return the output cache (same list as the input cache).
		"""
		return self.output_cache

	def get_output_files(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError

	def get_output(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError

	def set_coincidence_segments(self, seglist):
		"""
		Restrict coincidence tests to the given segment list.
		"""
		self.add_var_arg("--coincidence-segments %s" % ",".join(segmentsUtils.to_range_strings(seglist)))
|
|
725
|
+
|
|
726
|
+
|
|
727
|
+
class SQLiteJob(pipeline.CondorDAGJob):
	"""
	Condor job definition for ligolw_sqlite.
	"""
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", get_executable(config_parser, "ligolw_sqlite"))
		self.set_sub_file("ligolw_sqlite.sub")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "ligolw_sqlite-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "ligolw_sqlite-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_ini_opts(config_parser, "ligolw_sqlite")
|
|
736
|
+
|
|
737
|
+
|
|
738
|
+
class SQLiteNode(pipeline.CondorDAGNode):
	"""
	Condor DAG node running one ligolw_sqlite instance.  The output
	cache is computed lazily on first request, after which the inputs
	and output may no longer be changed.
	"""
	def __init__(self, *args):
		pipeline.CondorDAGNode.__init__(self, *args)
		self.input_cache = []
		self.output_cache = []
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def add_input_cache(self, cache):
		"""
		Add entries, passing each file as a command line argument.
		Disallowed once the output cache has been computed.
		"""
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		self.input_cache.extend(cache)
		for entry in cache:
			pipeline.CondorDAGNode.add_file_arg(self, entry.path)
			self.add_output_file(entry.path)

	def add_file_arg(self, filename):
		# deliberately disabled:  use add_input_cache() instead
		raise NotImplementedError

	def set_output(self, filename):
		"""
		Record the database file name in the macrodatabase macro.
		Disallowed once the output cache has been computed.
		"""
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		self.add_macro("macrodatabase", filename)

	def get_input_cache(self):
		"""
		Return the list of input cache entries.
		"""
		return self.input_cache

	def get_output_cache(self):
		"""
		Return the output cache, computing it from the input cache
		and the macrodatabase macro on first call.
		"""
		if not self.output_cache:
			self.output_cache = [make_cache_entry(self.input_cache, None, self.get_opts()["macrodatabase"])]
		return self.output_cache

	def get_output_files(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError

	def get_output(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
class BurcaTailorJob(pipeline.CondorDAGJob):
	"""
	Condor job definition for lalburst_power_meas_likelihood.
	"""
	def __init__(self, config_parser):
		pipeline.CondorDAGJob.__init__(self, "vanilla", get_executable(config_parser, "lalburst_power_meas_likelihood"))
		self.set_sub_file("lalburst_power_meas_likelihood.sub")
		out_dir = get_out_dir(config_parser)
		self.set_stdout_file(os.path.join(out_dir, "lalburst_power_meas_likelihood-$(cluster)-$(process).out"))
		self.set_stderr_file(os.path.join(out_dir, "lalburst_power_meas_likelihood-$(cluster)-$(process).err"))
		self.add_condor_cmd("getenv", "True")
		self.add_condor_cmd("accounting_group", get_accounting_group(config_parser))
		self.add_ini_opts(config_parser, "lalburst_power_meas_likelihood")

		# nodes write their input cache files into this directory
		self.cache_dir = get_cache_dir(config_parser)
		# nodes place their output files here
		self.output_dir = "."
|
|
789
|
+
|
|
790
|
+
|
|
791
|
+
class BurcaTailorNode(pipeline.CondorDAGNode):
	"""
	Condor DAG node running one lalburst_power_meas_likelihood
	instance.  The output cache is populated by set_output(), after
	which the inputs may no longer be changed.
	"""
	def __init__(self, *args):
		pipeline.CondorDAGNode.__init__(self, *args)
		self.input_cache = []
		self.output_cache = []
		self.cache_dir = os.path.join(os.getcwd(), self.job().cache_dir)
		self.output_dir = os.path.join(os.getcwd(), self.job().output_dir)
		self._CondorDAGNode__macros["initialdir"] = os.getcwd()

	def set_name(self, *args):
		"""
		Set the node's name and derive the input cache file name
		from it.
		"""
		pipeline.CondorDAGNode.set_name(self, *args)
		self.cache_name = os.path.join(self.cache_dir, "%s.cache" % self.get_name())

	def add_input_cache(self, cache):
		"""
		Add entries, passing each file as a command line argument.
		Disallowed once the output cache has been computed.
		"""
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		self.input_cache.extend(cache)
		for c in cache:
			filename = c.path
			pipeline.CondorDAGNode.add_file_arg(self, filename)
			self.add_output_file(filename)

	def add_file_arg(self, filename):
		# deliberately disabled:  use add_input_cache() instead
		raise NotImplementedError

	def set_output(self, description):
		"""
		Derive the output file name from the input cache and
		description, set the --output option, populate the output
		cache, and return the file name.  Disallowed once the output
		cache has been computed.
		"""
		if self.output_cache:
			raise AttributeError("cannot change attributes after computing output cache")
		cache_entry = make_cache_entry(self.input_cache, description, "")
		filename = os.path.join(self.output_dir, "%s-%s-%d-%d.xml.gz" % (cache_entry.observatory, cache_entry.description, int(cache_entry.segment[0]), int(abs(cache_entry.segment))))
		self.add_var_opt("output", filename)
		cache_entry.url = "file://localhost" + os.path.abspath(filename)
		del self.output_cache[:]
		self.output_cache.append(cache_entry)
		return filename

	def get_input_cache(self):
		"""
		Return the list of input cache entries.
		"""
		return self.input_cache

	def get_output_cache(self):
		"""
		Return the output cache.  Raises AttributeError if
		set_output() has not been called.
		"""
		if not self.output_cache:
			raise AttributeError("must call set_output(description) first")
		return self.output_cache

	def write_input_files(self, *args):
		# oh. my. god. this is fscked.
		# only write the cache file if this node was configured (see
		# make_burca_tailor_fragment) to read its inputs from one.
		# Bug fix:  the original used the file() builtin, which does
		# not exist in Python 3, and never closed the file; use
		# open() in a context manager instead.
		for arg in self.get_args():
			if "--add-from-cache" in arg:
				with open(self.cache_name, "w") as f:
					for c in self.input_cache:
						print(str(c), file=f)
				pipeline.CondorDAGNode.write_input_files(self, *args)
				break

	def get_output_files(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError

	def get_output(self):
		# deliberately disabled:  use get_output_cache() instead
		raise NotImplementedError
|
|
850
|
+
|
|
851
|
+
|
|
852
|
+
#
|
|
853
|
+
# =============================================================================
|
|
854
|
+
#
|
|
855
|
+
# DAG Job Types
|
|
856
|
+
#
|
|
857
|
+
# =============================================================================
|
|
858
|
+
#
|
|
859
|
+
|
|
860
|
+
|
|
861
|
+
#
|
|
862
|
+
# This is *SUCH* a hack I don't know where to begin. Please, shoot me.
|
|
863
|
+
#
|
|
864
|
+
|
|
865
|
+
|
|
866
|
+
# module-level job definitions, populated by init_job_types().  rmjob
# was missing from this list even though init_job_types() declares it
# global and make_burca_tailor_fragment() reads it; default it to None
# like the others so the module attribute always exists.
datafindjob = None
rmjob = None
binjjob = None
powerjob = None
lladdjob = None
binjfindjob = None
bucutjob = None
buclusterjob = None
burcajob = None
burca2job = None
sqlitejob = None
burcatailorjob = None
|
|
877
|
+
|
|
878
|
+
|
|
879
|
+
def init_job_types(config_parser, job_types = ("datafind", "rm", "binj", "power", "lladd", "binjfind", "bucluster", "bucut", "burca", "burca2", "sqlite", "burcatailor")):
	"""
	Construct definitions of the submit files.  For each name in
	job_types, build the corresponding job object and store it in the
	module-level global of the same name (e.g. "burca" populates
	burcajob).  Job types not listed are left untouched.
	"""
	# NOTE(review): llb2mjob is declared global here but no job type
	# ever assigns it — presumably a leftover; confirm before removing
	global datafindjob, rmjob, binjjob, powerjob, lladdjob, binjfindjob, buclusterjob, llb2mjob, bucutjob, burcajob, burca2job, sqlitejob, burcatailorjob

	# ligo_data_find
	if "datafind" in job_types:
		datafindjob = pipeline.LSCDataFindJob(os.path.join(os.getcwd(), get_cache_dir(config_parser)), os.path.join(os.getcwd(), get_out_dir(config_parser)), config_parser)

	# rm
	if "rm" in job_types:
		rmjob = RMJob(config_parser)

	# lalapps_binj
	if "binj" in job_types:
		binjjob = BurstInjJob(config_parser)

	# lalapps_power
	if "power" in job_types:
		powerjob = PowerJob(config_parser)

	# ligolw_add
	if "lladd" in job_types:
		lladdjob = pipeline.LigolwAddJob(os.path.join(get_out_dir(config_parser)), config_parser)
		lladdjob.cache_dir = get_cache_dir(config_parser)

	# lalburst_injfind
	if "binjfind" in job_types:
		binjfindjob = BinjfindJob(config_parser)

	# lalburst_cut
	if "bucut" in job_types:
		bucutjob = BucutJob(config_parser)

	# lalburst_cluster
	if "bucluster" in job_types:
		buclusterjob = BuclusterJob(config_parser)

	# lalburst_coinc
	if "burca" in job_types:
		burcajob = BurcaJob(config_parser)

	# lalburst_coinc2
	if "burca2" in job_types:
		burca2job = Burca2Job(config_parser)

	# ligolw_sqlite
	if "sqlite" in job_types:
		sqlitejob = SQLiteJob(config_parser)

	# lalburst_power_meas_likelihood
	if "burcatailor" in job_types:
		burcatailorjob = BurcaTailorJob(config_parser)
|
|
933
|
+
|
|
934
|
+
|
|
935
|
+
#
|
|
936
|
+
# =============================================================================
|
|
937
|
+
#
|
|
938
|
+
# Segmentation
|
|
939
|
+
#
|
|
940
|
+
# =============================================================================
|
|
941
|
+
#
|
|
942
|
+
|
|
943
|
+
|
|
944
|
+
def psds_from_job_length(timing_params, t):
	"""
	Return the number of PSDs that can fit into a job of length t
	seconds.  In general, the return value is a non-integer.
	"""
	if t < 0:
		raise ValueError(t)
	# job length in samples, with the filter transients removed from
	# both ends
	samples = t * timing_params.resample_rate - 2 * timing_params.filter_corruption
	if samples < timing_params.psd_length:
		return 0
	return (samples - timing_params.psd_length) / timing_params.psd_shift + 1
|
|
956
|
+
|
|
957
|
+
|
|
958
|
+
def job_length_from_psds(timing_params, psds):
	"""
	From the analysis parameters and a count of PSDs, return the length
	of the job in seconds.
	"""
	if psds < 1:
		raise ValueError(psds)
	# samples spanned by the PSDs, plus the filter transients on both
	# ends
	samples = (psds - 1) * timing_params.psd_shift + timing_params.psd_length
	samples += 2 * timing_params.filter_corruption
	# convert to seconds
	return samples / timing_params.resample_rate
|
|
971
|
+
|
|
972
|
+
|
|
973
|
+
def split_segment(timing_params, segment, psds_per_job):
	"""
	Split the data segment into correctly-overlapping segments.  We
	try to have the number of PSDs in each segment be equal to
	psds_per_job, but with a short segment at the end if needed.
	"""
	# in seconds
	joblength = job_length_from_psds(timing_params, psds_per_job)
	# in samples
	joboverlap = 2 * timing_params.filter_corruption + (timing_params.psd_length - timing_params.psd_shift)
	# in seconds
	joboverlap /= timing_params.resample_rate

	# tile the segment with full-length jobs, each starting one
	# (joblength - joboverlap) stride after the previous
	segs = segments.segmentlist()
	t = segment[0]
	while t + joblength <= segment[1]:
		segs.append(segments.segment(t, t + joblength) & segment)
		t += joblength - joboverlap

	# if whole PSDs fit in the remainder, append one short job to
	# cover it
	extra_psds = int(psds_from_job_length(timing_params, float(segment[1] - t)))
	if extra_psds:
		segs.append(segments.segment(t, t + job_length_from_psds(timing_params, extra_psds)))
	return segs
|
|
996
|
+
|
|
997
|
+
|
|
998
|
+
def segment_ok(timing_params, segment):
	"""
	Return True if the segment can be analyzed using lalapps_power,
	i.e. if it is long enough to hold at least one PSD.
	"""
	return 1.0 <= psds_from_job_length(timing_params, float(abs(segment)))
|
|
1003
|
+
|
|
1004
|
+
|
|
1005
|
+
def remove_too_short_segments(seglistdict, timing_params):
	"""
	Remove segments from seglistdict that are too short to analyze.

	CAUTION: this function modifies seglistdict in place.
	"""
	for seglist in seglistdict.values():
		# keep only the segments segment_ok() accepts
		iterutils.inplace_filter(lambda seg: segment_ok(timing_params, seg), seglist)
|
|
1013
|
+
|
|
1014
|
+
|
|
1015
|
+
#
|
|
1016
|
+
# =============================================================================
|
|
1017
|
+
#
|
|
1018
|
+
# Single Node Fragments
|
|
1019
|
+
#
|
|
1020
|
+
# =============================================================================
|
|
1021
|
+
#
|
|
1022
|
+
|
|
1023
|
+
|
|
1024
|
+
# seconds of padding subtracted from the start of each datafind query
# (see make_datafind_fragment)
datafind_pad = 512
|
|
1025
|
+
|
|
1026
|
+
|
|
1027
|
+
def make_datafind_fragment(dag, instrument, seg):
	"""
	Add a ligo_data_find node to dag to locate frame files for
	instrument covering seg (padded at the start by datafind_pad
	seconds), and return the node in a set.
	"""
	node = pipeline.LSCDataFindNode(datafindjob)
	node.set_name("ligo_data_find-%s-%d-%d" % (instrument, int(seg[0]), int(abs(seg))))
	node.set_start(seg[0] - datafind_pad)
	node.set_end(seg[1] + 1)
	# FIXME: argh, I need the node to know what instrument it's for,
	# but can't call set_ifo() because that adds a --channel-name
	# command line argument (!?)
	node._AnalysisNode__ifo = instrument
	node.set_observatory(instrument[0])
	if node.get_type() is None:
		# fall back to the frame type named in the [datafind]
		# config section for this instrument
		node.set_type(datafindjob.get_config_file().get("datafind", "type_%s" % instrument))
	node.set_retry(3)
	dag.add_node(node)
	return set([node])
|
|
1042
|
+
|
|
1043
|
+
|
|
1044
|
+
def make_lladd_fragment(dag, parents, tag, segment = None, input_cache = None, remove_input = False, preserve_cache = None, extra_input_cache = None):
	"""
	Add a ligolw_add node to dag that merges the outputs of parents
	(or the explicitly-supplied input_cache) into a single file named
	from tag and the spanned segment.  Returns the node in a set.
	"""
	node = LigolwAddNode(lladdjob, remove_input = remove_input)

	# link to parents
	for parent in parents:
		node.add_parent(parent)

	# build input cache
	if input_cache is None:
		# default is to use all output files from parents
		for parent in parents:
			node.add_input_cache(parent.get_output_cache())
	else:
		# but calling code can provide its own collection
		node.add_input_cache(input_cache)
	if extra_input_cache is not None:
		# sometimes it helps to add some extra
		node.add_input_cache(extra_input_cache)
	if preserve_cache is not None:
		node.add_preserve_cache(preserve_cache)

	# construct names for the node and output file, and override the
	# segment if needed
	[cache_entry] = node.get_output_cache()
	if segment is None:
		segment = cache_entry.segment
	node.set_name("lladd_%s_%d_%d" % (tag, int(segment[0]), int(abs(segment))))
	node.set_output(os.path.join(node.output_dir, "%s-%s-%d-%d.xml.gz" % (cache_entry.observatory, tag, int(segment[0]), int(abs(segment)))), segment = segment)

	node.set_retry(3)
	dag.add_node(node)
	return set([node])
|
|
1076
|
+
|
|
1077
|
+
|
|
1078
|
+
def make_power_fragment(dag, parents, instrument, seg, tag, framecache, injargs = {}):
	"""
	Add a lalapps_power analysis node to dag for instrument and seg,
	linked as a child of each node in parents, and return the node in
	a set.  injargs maps extra option names to values (the leading
	"--" is added here); the default dict is never mutated.
	"""
	node = PowerNode(powerjob)
	node.set_name("lalapps_power_%s_%s_%d_%d" % (tag, instrument, int(seg[0]), int(abs(seg))))
	# bug fix:  map() is lazy in Python 3, so the original
	# map(node.add_parent, parents) never created the parent/child
	# links; iterate explicitly
	for parent in parents:
		node.add_parent(parent)
	# FIXME: PowerNode should not be subclassed from AnalysisNode,
	# because that class is too hard-coded.  For example, there is no
	# way to switch to analysing gaussian noise except to comment out
	# this line in the code.
	node.set_cache(framecache)
	node.set_ifo(instrument)
	node.set_start(seg[0])
	node.set_end(seg[1])
	node.set_user_tag(tag)
	# bug fix:  dict.iteritems() does not exist in Python 3; use
	# .items()
	for arg, value in injargs.items():
		# this is a hack, but I can't be bothered
		node.add_var_arg("--%s %s" % (arg, value))
	dag.add_node(node)
	return set([node])
|
|
1096
|
+
|
|
1097
|
+
|
|
1098
|
+
def make_binj_fragment(dag, seg, time_slides_cache_entry, tag, offset, flow = None, fhigh = None):
	"""
	Add a lalapps_binj injection-generation node to dag covering seg,
	with the start time shifted by offset injection periods.  flow
	and fhigh, if given, are passed through the macroflow/macrofhigh
	macros.  Returns the node in a set.
	"""
	# adjust start time to be commensurate with injection period
	start = seg[0] - seg[0] % binjjob.time_step + binjjob.time_step * offset

	node = BurstInjNode(binjjob)
	node.set_time_slide_file(time_slides_cache_entry.path)
	node.set_start(start)
	node.set_end(seg[1])
	if flow is not None:
		node.set_name("lalapps_binj_%s_%d_%d" % (tag, int(start), int(flow)))
	else:
		node.set_name("lalapps_binj_%s_%d" % (tag, int(start)))
	node.set_user_tag(tag)
	if flow is not None:
		node.add_macro("macroflow", flow)
	if fhigh is not None:
		node.add_macro("macrofhigh", fhigh)
	# seed derived from wall-clock time and start time; not
	# reproducible across runs by design
	node.add_macro("macroseed", int(time.time()%100 + start))
	dag.add_node(node)
	return set([node])
|
|
1118
|
+
|
|
1119
|
+
|
|
1120
|
+
def make_binjfind_fragment(dag, parents, tag, verbose = False):
	"""
	Partition the output files of parents into chunks of
	files_per_binjfind and add one lalburst_injfind node per chunk.
	Returns the set of new nodes.
	"""
	input_cache = collect_output_caches(parents)
	nodes = set()
	while input_cache:
		# detach the next chunk of (cache entry, parent) pairs
		chunk = input_cache[:binjfindjob.files_per_binjfind]
		del input_cache[:binjfindjob.files_per_binjfind]
		node = BinjfindNode(binjfindjob)
		node.add_input_cache([cache_entry for cache_entry, parent in chunk])
		for parent in set(parent for cache_entry, parent in chunk):
			node.add_parent(parent)
		seg = cache_span(node.get_input_cache())
		node.set_name("lalburst_injfind_%s_%d_%d" % (tag, int(seg[0]), int(abs(seg))))
		node.add_macro("macrocomment", tag)
		dag.add_node(node)
		nodes.add(node)
	return nodes
|
|
1135
|
+
|
|
1136
|
+
|
|
1137
|
+
def make_bucluster_fragment(dag, parents, tag, verbose = False):
	"""
	Partition the output files of parents into chunks of
	files_per_bucluster and add one lalburst_cluster node per chunk.
	Returns the set of new nodes.
	"""
	input_cache = collect_output_caches(parents)
	nodes = set()
	while input_cache:
		node = BuclusterNode(buclusterjob)
		node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:buclusterjob.files_per_bucluster]])
		for parent in set(parent for cache_entry, parent in input_cache[:buclusterjob.files_per_bucluster]):
			node.add_parent(parent)
		del input_cache[:buclusterjob.files_per_bucluster]
		seg = cache_span(node.get_input_cache())
		node.set_name("lalburst_cluster_%s_%d_%d" % (tag, int(seg[0]), int(abs(seg))))
		node.add_macro("macrocomment", tag)
		node.set_retry(3)
		dag.add_node(node)
		nodes.add(node)
	return nodes
|
|
1153
|
+
|
|
1154
|
+
|
|
1155
|
+
def make_bucut_fragment(dag, parents, tag, verbose = False):
	"""
	Partition the output files of parents into chunks of
	files_per_bucut and add one lalburst_cut node per chunk.  Returns
	the set of new nodes.
	"""
	input_cache = collect_output_caches(parents)
	nodes = set()
	while input_cache:
		node = BucutNode(bucutjob)
		node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:bucutjob.files_per_bucut]])
		for parent in set(parent for cache_entry, parent in input_cache[:bucutjob.files_per_bucut]):
			node.add_parent(parent)
		del input_cache[:bucutjob.files_per_bucut]
		seg = cache_span(node.get_input_cache())
		node.set_name("lalburst_cut_%s_%d_%d" % (tag, int(seg[0]), int(abs(seg))))
		node.add_macro("macrocomment", tag)
		dag.add_node(node)
		nodes.add(node)
	return nodes
|
|
1170
|
+
|
|
1171
|
+
|
|
1172
|
+
def make_burca_fragment(dag, parents, tag, coincidence_segments = None, verbose = False):
	"""
	Partition the output files of parents into chunks of
	files_per_burca and add one lalburst_coinc node per chunk.
	Returns the set of new nodes.
	"""
	input_cache = collect_output_caches(parents)
	if coincidence_segments is not None:
		# it doesn't make sense to supply this keyword argument
		# for more than one input file
		assert len(input_cache) == 1
	nodes = set()
	while input_cache:
		node = BurcaNode(burcajob)
		node.add_input_cache([cache_entry for (cache_entry, parent) in input_cache[:burcajob.files_per_burca]])
		for parent in set(parent for cache_entry, parent in input_cache[:burcajob.files_per_burca]):
			node.add_parent(parent)
		del input_cache[:burcajob.files_per_burca]
		seg = cache_span(node.get_input_cache())
		node.set_name("lalburst_coinc_%s_%d_%d" % (tag, int(seg[0]), int(abs(seg))))
		if coincidence_segments is not None:
			node.set_coincidence_segments(coincidence_segments)
		node.add_macro("macrocomment", tag)
		dag.add_node(node)
		nodes.add(node)
	return nodes
|
|
1193
|
+
|
|
1194
|
+
|
|
1195
|
+
def make_sqlite_fragment(dag, parents, tag, verbose = False):
	"""
	Add one ligolw_sqlite node per output file of parents, converting
	each .xml.gz file to a .sqlite database.  Returns the set of new
	nodes.
	"""
	nodes = set()
	for cache_entry, parent in collect_output_caches(parents):
		node = SQLiteNode(sqlitejob)
		node.add_input_cache([cache_entry])
		node.add_parent(parent)
		# number the nodes sequentially within this tag
		node.set_name("ligolw_sqlite_%s_%d" % (tag, len(nodes)))
		node.set_output(cache_entry.path.replace(".xml.gz", ".sqlite"))
		dag.add_node(node)
		nodes.add(node)
	return nodes
|
|
1207
|
+
|
|
1208
|
+
|
|
1209
|
+
def make_burca_tailor_fragment(dag, input_cache, seg, tag):
	"""
	Build the likelihood parameter measurement stage:  partition the
	input files into per-node groups bounded by an estimated cost,
	then add a final node that merges the per-group outputs and an rm
	node that deletes the now-redundant intermediate files.  Returns a
	set containing the single merge node.
	"""
	# pop() entries off the end of a reverse-sorted list to process
	# the cache in time order
	remaining = sorted(input_cache, reverse = True)
	nodes = set()
	max_cost_per_job = 25	# 10000 s -equivalent files
	while remaining:
		cache = []
		cost = 0
		while remaining and cost <= max_cost_per_job:
			cache.append(remaining.pop())
			# cost proportional to segment duration squared
			cost += (float(abs(cache[-1].segment)) / 10000.0)**2
		node = BurcaTailorNode(burcatailorjob)
		node.add_input_cache(cache)
		node.set_name("lalburst_power_meas_likelihood_%s_%d_%d_%d" % (tag, int(seg[0]), int(abs(seg)), len(nodes)))
		node.set_output("%s_%d" % (tag, len(nodes)))
		dag.add_node(node)
		nodes.add(node)
	# final node merges the outputs of the per-group nodes
	node = BurcaTailorNode(burcatailorjob)
	node.set_name("lalburst_power_meas_likelihood_%s_%d_%d" % (tag, int(seg[0]), int(abs(seg))))
	for parent in nodes:
		node.add_parent(parent)
		node.add_input_cache(parent.get_output_cache())
	# replace the job's default arguments with a cache-driven merge
	del node.get_args()[:]
	node.add_var_arg("--add-from-cache %s" % node.cache_name)
	node.set_output(tag)
	dag.add_node(node)
	# delete the intermediate files once the merge has consumed them
	delete_cache = set(node.get_input_cache()) - set(node.get_output_cache())
	if delete_cache:
		rmnode = RMNode(rmjob)
		rmnode.set_name("lalburst_power_meas_likelihood_rm_%s_%d_%d" % (tag, int(seg[0]), int(abs(seg))))
		rmnode.add_parent(node)
		rmnode.add_input_cache(delete_cache)
		dag.add_node(rmnode)
	return set([node])
|
|
1244
|
+
|
|
1245
|
+
|
|
1246
|
+
def make_burca2_fragment(dag, coinc_cache, likelihood_parents, tag):
	"""
	Add second-pass coincidence (lalburst_coinc2) nodes to the DAG.
	The likelihood data produced by likelihood_parents is written to a
	cache file which every node is pointed at, and the coincidence
	files in coinc_cache are partitioned into per-node groups bounded
	by an estimated cost.  Returns the set of nodes added.
	"""
	# FIXME: pass a node set instead of a cache
	#input_cache = collect_output_caches(coinc_parents)
	coinc_cache = list(coinc_cache)
	coinc_cache.sort(reverse = True)

	likelihood_data_cache_filename = os.path.join(burca2job.cache_dir, "burca2_%s.cache" % tag)
	# BUGFIX: the Python 2 file() builtin does not exist in Python 3;
	# use open() in a context manager so the handle is also closed
	# deterministically.
	with open(likelihood_data_cache_filename, "w") as likelihood_data_cache_file:
		for cache_entry in [cache_entry for node in likelihood_parents for cache_entry in node.get_output_cache()]:
			print(str(cache_entry), file=likelihood_data_cache_file)

	nodes = set()
	max_cost_per_job = 10	# 10000 s -equivalent files
	while coinc_cache:
		cache = []
		cost = 0
		while coinc_cache and cost <= max_cost_per_job:
			cache.append(coinc_cache.pop())
			# cost proportional to segment duration squared
			cost += (float(abs(cache[-1].segment)) / 10000.0)**2
		node = BurcaNode(burca2job)
		node.set_name("lalburst_coinc2_%s_%d" % (tag, len(nodes)))
		node.add_macro("macrocomment", tag)
		node.add_var_arg("--likelihood-data-cache %s" % likelihood_data_cache_filename)
		node.add_input_cache(cache)
		for parent in likelihood_parents:
			node.add_parent(parent)
		dag.add_node(node)
		nodes.add(node)
	return nodes
|
|
1276
|
+
|
|
1277
|
+
|
|
1278
|
+
#
|
|
1279
|
+
# =============================================================================
|
|
1280
|
+
#
|
|
1281
|
+
# ligo_data_find Stage
|
|
1282
|
+
#
|
|
1283
|
+
# =============================================================================
|
|
1284
|
+
#
|
|
1285
|
+
|
|
1286
|
+
|
|
1287
|
+
def make_datafind_stage(dag, seglists, verbose = False):
	"""
	Add the ligo_data_find nodes required to cover the segments in
	seglists (a segmentlistdict mapping instrument to segmentlist) to
	the DAG.  Returns the set of nodes added.
	"""
	if verbose:
		print("building ligo_data_find jobs ...", file=sys.stderr)

	#
	# Fill gaps smaller than the padding added to each datafind job.
	# Filling in the gaps ensures that exactly 1 datafind job is
	# suitable for each lalapps_power job, and also hugely reduces the
	# number of ligo_data_find nodes in the DAG.
	#

	filled = seglists.copy().protract(datafind_pad / 2).contract(datafind_pad / 2)

	#
	# Build the nodes.  Do this in time order to assist depth-first job
	# submission on clusters.
	#
	# BUGFIX: dict.iteritems() was removed in Python 3;  use .items().
	#

	segs = [(seg, instrument) for instrument, seglist in filled.items() for seg in seglist]
	segs.sort()

	nodes = set()
	for seg, instrument in segs:
		if verbose:
			print("making datafind job for %s spanning %s" % (instrument, seg), file=sys.stderr)
		new_nodes = make_datafind_fragment(dag, instrument, seg)
		nodes |= new_nodes

		# add a post script to check the file list
		#required_segs_string = ",".join(segmentsUtils.to_range_strings(seglists[instrument] & segments.segmentlist([seg])))
		#for node in new_nodes:
		#	node.set_post_script(datafindjob.get_config_file().get("condor", "LSCdataFindcheck") + " --dagman-return $RETURN --stat --gps-segment-list %s %s" % (required_segs_string, node.get_output()))

	return nodes
|
|
1321
|
+
|
|
1322
|
+
|
|
1323
|
+
#
|
|
1324
|
+
# =============================================================================
|
|
1325
|
+
#
|
|
1326
|
+
# Analyze All Segments in a segmentlistdict Using lalapps_power
|
|
1327
|
+
#
|
|
1328
|
+
# =============================================================================
|
|
1329
|
+
#
|
|
1330
|
+
|
|
1331
|
+
|
|
1332
|
+
#
|
|
1333
|
+
# one segment
|
|
1334
|
+
#
|
|
1335
|
+
|
|
1336
|
+
|
|
1337
|
+
def make_power_segment_fragment(dag, datafindnodes, instrument, segment, tag, timing_params, psds_per_job, binjnodes = set(), verbose = False):
	"""
	Construct a DAG fragment for an entire segment, splitting the
	segment into multiple trigger generator jobs.
	"""
	# only one frame cache file can be provided as input, and only one
	# injection description file can be provided as input;  the
	# single-element unpacking assignments enforce the counts
	[framecache] = [node.get_output() for node in datafindnodes]
	if binjnodes:
		[simfile] = [cache_entry.path for node in binjnodes for cache_entry in node.get_output_cache()]
		injargs = {"injection-file": simfile}
	else:
		injargs = {}
	seglist = split_segment(timing_params, segment, psds_per_job)
	if verbose:
		print("Segment split: " + str(seglist), file=sys.stderr)
	# one trigger generator fragment per sub-segment;  union the node
	# sets they return
	return set().union(*(make_power_fragment(dag, datafindnodes | binjnodes, instrument, seg, tag, framecache, injargs = injargs) for seg in seglist))
|
|
1358
|
+
|
|
1359
|
+
|
|
1360
|
+
#
|
|
1361
|
+
# all segments
|
|
1362
|
+
#
|
|
1363
|
+
|
|
1364
|
+
|
|
1365
|
+
def make_single_instrument_stage(dag, datafinds, seglistdict, tag, timing_params, psds_per_job, binjnodes = set(), verbose = False):
	"""
	Add the single-instrument trigger generator jobs for every segment
	in seglistdict (a segmentlistdict mapping instrument to
	segmentlist) to the DAG, wiring each one to the unique datafind
	node that covers it.  Returns the list of nodes added.

	Raises ValueError if a segment is not covered by exactly one
	datafind node.
	"""
	nodes = []
	# BUGFIX: dict.iteritems() was removed in Python 3;  use .items().
	for instrument, seglist in seglistdict.items():
		for seg in seglist:
			if verbose:
				print("generating %s fragment %s" % (instrument, str(seg)), file=sys.stderr)

			# find the datafind job this job is going to need
			dfnodes = set([node for node in datafinds if (node.get_ifo() == instrument) and (seg in segments.segment(node.get_start(), node.get_end()))])
			if len(dfnodes) != 1:
				raise ValueError("error, not exactly 1 datafind is suitable for trigger generator job at %s in %s" % (str(seg), instrument))

			# trigger generator jobs
			nodes += make_power_segment_fragment(dag, dfnodes, instrument, seg, tag, timing_params, psds_per_job, binjnodes = binjnodes, verbose = verbose)

	# done
	return nodes
|
|
1382
|
+
|
|
1383
|
+
|
|
1384
|
+
#
|
|
1385
|
+
# =============================================================================
|
|
1386
|
+
#
|
|
1387
|
+
# Coincidence Post-Processing
|
|
1388
|
+
#
|
|
1389
|
+
# =============================================================================
|
|
1390
|
+
#
|
|
1391
|
+
|
|
1392
|
+
|
|
1393
|
+
def group_coinc_parents(parents, offset_vectors, extentlimit = None, verbose = False):
	"""
	Group the output files of the parent nodes for the coincidence
	stage using ligolw_cafe.  Returns a list of (segment, parent
	group, cache, clip segment) tuples, one per coincidence job, where
	clip segment is None unless extentlimit forced a boundary.
	Returns an empty list if offset_vectors is empty.
	"""
	if not offset_vectors:
		# no-op
		return []

	if verbose:
		print("Grouping jobs for coincidence analysis:", file=sys.stderr)

	#
	# use ligolw_cafe to group each output file according to how they
	# need to be combined to perform the coincidence analysis
	#

	seglists, bins = cafe.ligolw_cafe([cache_entry for parent in parents for cache_entry in parent.get_output_cache()], offset_vectors, extentlimit = extentlimit, verbose = verbose)

	#
	# retrieve the file caches and segments.  note that ligolw_cafe
	# returns the bins sorted by segment, so we do too.  (cafe_bin
	# renamed from "bin" to avoid shadowing the builtin.)
	#

	caches = [frozenset(cafe_bin.objects) for cafe_bin in bins]
	assert len(set(caches)) == len(caches)
	segs = [cache_span(cafe_bin.objects) for cafe_bin in bins]

	#
	# determine the clipping boundaries to use for each coincidence job
	# if an extentlimit has been imposed
	#

	clipsegs = [None] * len(bins)
	if extentlimit is not None:
		extents = [cafe_bin.extent for cafe_bin in bins]
		for i, extent in enumerate(extents):
			# clip only against neighbouring bins whose
			# extents overlap this one
			lo = segments.NegInfinity if i == 0 or extents[i - 1].disjoint(extent) else extent[0]
			hi = segments.PosInfinity if i >= len(extents) - 1 or extents[i + 1].disjoint(extent) else extent[1]
			if lo is not segments.NegInfinity or hi is not segments.PosInfinity:
				clipsegs[i] = segments.segment(lo, hi)

	#
	# match parents to caches
	#

	if verbose:
		print("Matching jobs to caches ...", file=sys.stderr)
	parent_groups, unused = match_nodes_to_caches(parents, caches)
	if verbose and unused:
		# there were parents that didn't match any caches.  this
		# happens if ligolw_cafe decides their outputs aren't
		# needed
		print("Notice: %d jobs (of %d) produce output that will not be used by a coincidence job" % (unused, len(parents)), file=sys.stderr)

	#
	# done.  BUGFIX: zip() is a one-shot iterator in Python 3;  return
	# a list so callers can index or iterate it more than once, as the
	# original Python 2 code allowed.
	#

	return list(zip(segs, parent_groups, caches, clipsegs))
|