2 changes: 1 addition & 1 deletion .github/workflows/bank-compress-workflow.yml
@@ -34,7 +34,7 @@ jobs:
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
python -m pip install --upgrade pip setuptools
python -m pip install --upgrade pip "setuptools<82.0.0"
pip install GitPython # This shouldn't really be needed!
pip install -r requirements.txt
pip install .
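Throughout the CI workflows, `setuptools` is now pinned below 82.0.0. A hedged reading: a recent setuptools release apparently breaks the build of PyCBC or one of its dependencies, so every workflow installs the last known-good series instead of the latest. A minimal sketch of a sanity check one could add to CI (the helper name and the use of `packaging` are illustrative assumptions, not part of this PR):

```python
# Hedged sketch: assert the environment honoured the "setuptools<82.0.0" pin.
from importlib.metadata import version   # stdlib, Python 3.8+
from packaging.version import Version    # available in pip/setuptools environments

def check_setuptools_pin(upper: str = "82.0.0") -> None:
    installed = Version(version("setuptools"))
    if installed >= Version(upper):
        raise RuntimeError(f"setuptools {installed} violates the <{upper} pin")

check_setuptools_pin()
```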
19 changes: 18 additions & 1 deletion .github/workflows/basic-tests.yml
@@ -25,7 +25,7 @@ jobs:
run: |
sudo apt-get -o Acquire::Retries=3 update
sudo apt-get -o Acquire::Retries=3 install *fftw3* mpi intel-mkl* graphviz
pip install tox pip setuptools --upgrade
pip install tox pip "setuptools<82.0.0" --upgrade
- name: Cache LAL auxiliary data files
id: cache-lal-aux-data
uses: actions/cache@v4
@@ -41,6 +41,23 @@ jobs:
--remote-name https://zenodo.org/records/14999310/files/SEOBNRv4ROM_v2.0.hdf5 \
--remote-name https://zenodo.org/records/14999310/files/SEOBNRv4ROM_v3.0.hdf5
popd
- name: Cache example GW data
id: cache-example-gw-data
uses: actions/cache@v4
with:
key: example-gw-data
path: |
docs/_include/*_TDI_v2.gwf
docs/_include/*_GWOSC_4KHZ_R1-1126257415-4096.gwf
docs/_include/*_LOSC_CLN_4_V1-1187007040-2048.gwf
examples/inference/lisa_smbhb_ldc/*_psd.txt
examples/inference/lisa_smbhb_ldc/*_TDI_v2.gwf
examples/inference/lisa_smbhb_ldc/MBHB_params_v2_LISA_frame.pkl
examples/inference/margtime/*.gwf
examples/inference/multisignal/*.gwf
examples/inference/relative/*.gwf
examples/inference/relmarg/*.gwf
examples/inference/single/*.gwf
- name: run pycbc test suite
run: |
export LAL_DATA_PATH=$HOME/lal_aux_data
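The new cache step above stores the example gravitational-wave data frames that the test suite would otherwise download on every run. Note that the key is the fixed string `example-gw-data`, so once saved, the same snapshot is restored on every later run; adding or renaming example files would require bumping the key. A small illustrative sketch of what the `path:` globs cover (the list is abridged and the script is hypothetical, not part of this PR):

```python
# Hedged sketch: report which example GW data files a restored cache would cover.
from glob import glob

CACHE_GLOBS = [
    "docs/_include/*_TDI_v2.gwf",
    "examples/inference/margtime/*.gwf",
    # ... remaining `path:` entries from the workflow ...
]

cached = sorted(p for pattern in CACHE_GLOBS for p in glob(pattern))
print(f"{len(cached)} example data files present for caching")
```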
2 changes: 1 addition & 1 deletion .github/workflows/inference-workflow.yml
@@ -29,7 +29,7 @@ jobs:
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
python -m pip install --upgrade pip setuptools
python -m pip install --upgrade pip "setuptools<82.0.0"
pip install GitPython # This shouldn't really be needed!
pip install -r requirements.txt
pip install .
2 changes: 1 addition & 1 deletion .github/workflows/mac-test.yml
@@ -50,7 +50,7 @@ jobs:
run: |
conda install \
pip \
setuptools \
"setuptools<82.0.0" \
tox

- name: Run basic pycbc test suite
2 changes: 1 addition & 1 deletion .github/workflows/search-workflow.yml
@@ -34,7 +34,7 @@ jobs:
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
python -m pip install --upgrade pip setuptools
python -m pip install --upgrade pip "setuptools<82.0.0"
pip install GitPython # This shouldn't really be needed!
pip install -r requirements.txt
pip install .
2 changes: 1 addition & 1 deletion .github/workflows/tmpltbank-workflow.yml
@@ -33,7 +33,7 @@ jobs:
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
python -m pip install --upgrade pip setuptools
python -m pip install --upgrade pip "setuptools<82.0.0"
pip install GitPython # This shouldn't really be needed!
pip install -r requirements.txt
pip install sbank
2 changes: 1 addition & 1 deletion .github/workflows/tut-test.yml
@@ -24,7 +24,7 @@ jobs:
run: |
sudo apt-get -o Acquire::Retries=3 update
sudo apt-get -o Acquire::Retries=3 install *fftw3* mpi intel-mkl*
pip install tox pip setuptools notebook --upgrade
pip install tox pip "setuptools<82.0.0" notebook --upgrade
pip install .
- name: retrieving pycbc tutorials
run: |
2 changes: 1 addition & 1 deletion .github/workflows/workflow-tests.yml
@@ -38,7 +38,7 @@ jobs:
- run: sudo apt-get -o Acquire::Retries=3 install *fftw3* intel-mkl*
- name: Install pycbc
run: |
python -m pip install --upgrade pip setuptools
python -m pip install --upgrade pip "setuptools<82.0.0"
pip install GitPython # This shouldn't really be needed!
pip install -r requirements.txt
pip install .
4 changes: 2 additions & 2 deletions Dockerfile
@@ -8,7 +8,7 @@ ADD docker/etc/cvmfs/config-osg.opensciencegrid.org.conf /etc/cvmfs/config-osg.o

# Set up extra repositories
RUN <<EOF
dnf -y install https://ecsft.cern.ch/dist/cvmfs/cvmfs-release/cvmfs-release-latest.noarch.rpm
dnf -y install https://cvmrepo.s3.cern.ch/cvmrepo/yum/cvmfs-release-latest.noarch.rpm
dnf -y install cvmfs cvmfs-config-default
dnf makecache
dnf -y groupinstall "Development Tools" "Scientific Support"
@@ -36,7 +36,7 @@ dnf -y install \
which \
zlib-devel
alternatives --set python /usr/bin/python3.9
python -m pip install --upgrade pip setuptools wheel cython
python -m pip install --upgrade pip "setuptools<82.0.0" wheel cython
python -m pip install mkl ipython jupyter jupyterhub jupyterlab lalsuite
dnf -y install https://repo.opensciencegrid.org/osg/3.5/el8/testing/x86_64/osg-wn-client-3.5-5.osg35.el8.noarch.rpm
# force old version of stashcp as a workaround to
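Two independent fixes land in the Dockerfile: the CVMFS release RPM is now fetched from what appears to be CERN's S3-backed yum mirror (`cvmrepo.s3.cern.ch`) rather than the old `ecsft.cern.ch` path, presumably because the latter no longer serves the package reliably, and the same `setuptools<82.0.0` pin used in the CI workflows is applied to the image build.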
21 changes: 15 additions & 6 deletions bin/pygrb/pycbc_pygrb_minifollowups
@@ -27,6 +27,7 @@ import os
import argparse
import logging

from igwn_segments import segmentlist
import pycbc.workflow as wf
from pycbc.workflow.core import FileList, resolve_url_to_file
import pycbc.workflow.minifollowups as mini
@@ -181,11 +182,17 @@ if veto_file:
veto_file = os.path.join(start_rundir, args.veto_file)
veto_file = wf.resolve_url_to_file(veto_file)

seg_file_abs_paths = [os.path.join(start_rundir, f) for f in args.seg_files]

# Convert the segments files to a FileList
seg_files = wf.FileList([
wf.resolve_url_to_file(os.path.join(start_rundir, f))
for f in args.seg_files
])
seg_files = wf.FileList(
[wf.resolve_url_to_file(f) for f in seg_file_abs_paths]
)

# The Q-scan jobs need to be told the valid science segments to deal with
# triggers near the boundaries correctly. We use the offsource segment as a
# proxy for the valid science segments, and ignore vetoed time inside it.
valid_segs = segmentlist([ppu._read_seg_files(seg_file_abs_paths)['off']])

# (Loudest) off/on-source events are on time-slid data so the
# try will succeed, as it finds the time shift columns.
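The new `valid_segs` gives the Q-scan jobs a science-segment proxy built from the off-source segment, so that triggers near segment boundaries are handled correctly. A hedged toy example of the `igwn_segments` objects involved (the GPS times are invented; `_read_seg_files` returns a dict keyed by segment type, of which only `'off'` is used here):

```python
# Hedged sketch of building a one-element segmentlist, as done for valid_segs.
from igwn_segments import segment, segmentlist

off_source = segment(1126250000, 1126255000)   # hypothetical off-source GPS span
valid_segs = segmentlist([off_source])

print(1126252000 in valid_segs)   # True: this GPS time lies inside valid data
```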
@@ -220,7 +227,8 @@ for num_event in range(num_events):
for ifo in ifos:
files += mini.make_qscan_plot(workflow, ifo, gps_time,
args.output_dir,
tags=tags)
tags=tags,
data_segments=valid_segs)
# Handle off/on-source loudest triggers follow-up (which may be on slid
# data in the case of the off-source)
else:
@@ -235,7 +243,8 @@ for num_event in range(num_events):
tags=tags)
files += mini.make_qscan_plot(workflow, ifo, ifo_time,
args.output_dir,
tags=tags)
tags=tags,
data_segments=valid_segs)

layouts += list(layout.grouper(files, 2))

2 changes: 1 addition & 1 deletion bin/pygrb/pycbc_pygrb_page_tables
@@ -579,7 +579,7 @@ if onsource_file:

# Table data: assemble human readable message when no trigger is recovered
if not loud_on_bestnr_trigs:
td = [list("-" * len(format_strings))]
td = [["-"] for _ in format_strings]
td[0][0] = "There are no events"
td = [np.asarray(d) for d in td]
html_table = pycbc.results.html_table(td, th,
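The old construction built a single column containing `len(format_strings)` dash entries, which does not match a table with `len(format_strings)` column headers; the fix builds one single-row column per header. A hedged toy illustration (the column formats are invented):

```python
# Hedged illustration of the table-data fix with three hypothetical columns.
format_strings = ["%s", "%.2f", "%.3f"]

old_td = [list("-" * len(format_strings))]   # [['-', '-', '-']]: 1 column, 3 rows
new_td = [["-"] for _ in format_strings]     # [['-'], ['-'], ['-']]: 3 columns, 1 row

new_td[0][0] = "There are no events"         # message goes in the first column
```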
22 changes: 11 additions & 11 deletions bin/pygrb/pycbc_pygrb_plot_stats_distribution
@@ -51,7 +51,7 @@ parser = ppu.pygrb_initialize_plot_parser(description=__doc__)
parser.add_argument("-F", "--trig-file", action="store", required=True,
help="Location of off-source trigger file")
parser.add_argument("-x", "--x-variable", required=True,
choices=["bestnr", "snr", "snruncut"],
choices=["bestnr", "snr", "snraftercuts"],
help="Quantity to plot on the horizontal axis.")
ppu.pygrb_add_bestnr_cut_opt(parser)
ppu.pygrb_add_slide_opts(parser)
@@ -114,13 +114,13 @@ trig_data = ppu.extract_trig_properties(
# Calculate SNR and BestNR values and maxima
time_veto_max_snr = {}
time_veto_max_bestnr = {}
time_veto_max_snr_uncut = {}
time_veto_max_snr_aftercuts = {}

for slide_id in slide_dict:
num_slide_segs = len(trial_dict[slide_id])
time_veto_max_snr[slide_id] = np.zeros(num_slide_segs)
time_veto_max_bestnr[slide_id] = np.zeros(num_slide_segs)
time_veto_max_snr_uncut[slide_id] = np.zeros(num_slide_segs)
time_veto_max_snr_aftercuts[slide_id] = np.zeros(num_slide_segs)

for slide_id in slide_dict:
for j, trial in enumerate(trial_dict[slide_id]):
@@ -134,25 +134,25 @@ for slide_id in slide_dict:
# Max BestNR
time_veto_max_bestnr[slide_id][j] = \
max(trig_data[keys[2]][slide_id][trial_cut])
# Max SNR for triggers passing SBVs
sbv_cut = trig_data[keys[2]][slide_id][:] != 0
# Max SNR for triggers passing the cut on reweighted SNR
sbv_cut = trig_data[keys[2]][slide_id][:] >= opts.newsnr_threshold
if not (trial_cut & sbv_cut).any():
continue
time_veto_max_snr_uncut[slide_id][j] =\
time_veto_max_snr_aftercuts[slide_id][j] =\
max(trig_data[keys[1]][slide_id][trial_cut & sbv_cut])

# This is the data that will be plotted
full_time_veto_max_snr = ppu.sort_stat(time_veto_max_snr)
full_time_veto_max_snr_uncut = ppu.sort_stat(time_veto_max_snr_uncut)
full_time_veto_max_snr_aftercuts = ppu.sort_stat(time_veto_max_snr_aftercuts)
_, _, full_time_veto_max_bestnr = \
ppu.max_median_stat(slide_dict, time_veto_max_bestnr, trig_data[keys[2]],
total_trials)
# The 0.'s here force the histograms to start at (0, 1) if no trial
# returned a no-event (i.e., BestNR = 0)
if full_time_veto_max_bestnr[0] != 0.:
full_time_veto_max_snr = np.concatenate(([0.], full_time_veto_max_snr))
full_time_veto_max_snr_uncut = \
np.concatenate(([0.], full_time_veto_max_snr_uncut))
full_time_veto_max_snr_aftercuts = \
np.concatenate(([0.], full_time_veto_max_snr_aftercuts))
full_time_veto_max_bestnr = \
np.concatenate(([0.], full_time_veto_max_bestnr))

@@ -164,10 +164,10 @@ logging.info("SNR and bestNR maxima calculated.")
# =========
x_label_dict = {"bestnr": "BestNR",
"snr": "SNR",
"snruncut": "SNR after signal based vetoes"}
"snraftercuts": "SNR after signal based vetoes"}
data_dict = {"bestnr": full_time_veto_max_bestnr,
"snr": full_time_veto_max_snr,
"snruncut": full_time_veto_max_snr_uncut}
"snraftercuts": full_time_veto_max_snr_aftercuts}
fig = plt.figure()
ax = fig.gca()
ax.grid(True)
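Besides the rename from `snruncut` to the more accurate `snraftercuts`, the selection itself is tightened: instead of keeping any trigger with a nonzero reweighted SNR, only triggers at or above the reweighted-SNR threshold survive. A hedged toy example of the two masks (the values and threshold are invented):

```python
# Hedged toy comparison of the old and new trigger masks.
import numpy as np

reweighted_snr = np.array([0.0, 4.2, 6.1, 7.5])
newsnr_threshold = 6.0   # hypothetical value of opts.newsnr_threshold

old_cut = reweighted_snr != 0                  # kept every nonzero trigger
new_cut = reweighted_snr >= newsnr_threshold   # keeps only triggers past the cut

print(old_cut)   # [False  True  True  True]
print(new_cut)   # [False False  True  True]
```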
20 changes: 14 additions & 6 deletions examples/inference/lisa_smbhb_ldc/get.sh
@@ -1,16 +1,24 @@
set -e

download_if_absent() {
local URL="$1"
local FILENAME=$(basename "$URL")
if [ ! -f "$FILENAME" ]; then
echo "Downloading $FILENAME"
curl -O -L --show-error --silent "$URL"
else
echo "File $FILENAME already exists, download skipped"
fi
}

for channel in A E T
do
strain_file=${channel}_TDI_v2.gwf
test -f ${strain_file} && continue
curl -LO --show-error --silent https://zenodo.org/record/7497853/files/${strain_file}
download_if_absent https://zenodo.org/record/7497853/files/${strain_file}

psd_file=${channel}_psd.txt
test -f ${psd_file} && continue
curl -LO --show-error --silent https://zenodo.org/record/7497853/files/${psd_file}
download_if_absent https://zenodo.org/record/7497853/files/${psd_file}
done

params_file=MBHB_params_v2_LISA_frame.pkl
test -f ${params_file} && continue
curl -LO --show-error --silent https://zenodo.org/record/7497853/files/${params_file}
download_if_absent https://zenodo.org/record/7497853/files/${params_file}
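The download logic is factored into a `download_if_absent` helper, so re-running the script is idempotent and each skipped file is reported explicitly; it also removes the stray `continue` that previously sat outside any loop in the params-file branch. A hedged Python analogue of the same pattern (the Zenodo URL is real, the rest is illustrative):

```python
# Hedged Python analogue of the shell helper download_if_absent.
import os
import urllib.request

def download_if_absent(url: str) -> None:
    filename = os.path.basename(url)
    if os.path.exists(filename):
        print(f"File {filename} already exists, download skipped")
        return
    print(f"Downloading {filename}")
    urllib.request.urlretrieve(url, filename)   # follows redirects, like curl -L

download_if_absent("https://zenodo.org/record/7497853/files/MBHB_params_v2_LISA_frame.pkl")
```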
2 changes: 1 addition & 1 deletion examples/inference/margtime/get.sh
@@ -5,5 +5,5 @@ do
file=${ifo}_GWOSC_4KHZ_R1-1126257415-4096.gwf
test -f ${file} && continue
curl -O -L --show-error --silent \
https://www.gwosc.org/eventapi/html/GWTC-1-confident/GW150914/v3/${ifo}_GWOSC_4KHZ_R1-1126257415-4096.gwf
https://www.gwosc.org/eventapi/html/GWTC-1-confident/GW150914/v3/${file}
done
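This is a pure deduplication: the URL now reuses the `${file}` variable defined three lines earlier instead of repeating the frame-file name. The remaining `get.sh` changes below all harden the `curl` invocations the same way: `-L` follows HTTP redirects (without it the scripts can silently save a redirect page instead of the frame file), and `--show-error` makes `curl` print a diagnostic on failure even though `--silent` is set.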
2 changes: 1 addition & 1 deletion examples/inference/multisignal/get.sh
@@ -4,5 +4,5 @@ for ifo in H-H1 L-L1 V-V1
do
file=${ifo}_LOSC_CLN_4_V1-1187007040-2048.gwf
test -f ${file} && continue
curl -O --show-error --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
curl -O -L --show-error --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
done
2 changes: 1 addition & 1 deletion examples/inference/relative/get.sh
@@ -4,5 +4,5 @@ for ifo in H-H1 L-L1 V-V1
do
file=${ifo}_LOSC_CLN_4_V1-1187007040-2048.gwf
test -f ${file} && continue
curl -O --show-error --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
curl -O -L --show-error --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
done
2 changes: 1 addition & 1 deletion examples/inference/relmarg/get.sh
@@ -2,5 +2,5 @@ for ifo in H-H1 L-L1 V-V1
do
file=${ifo}_LOSC_CLN_4_V1-1187007040-2048.gwf
test -f ${file} && continue
curl -O --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
curl -O -L --show-error --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
done
2 changes: 1 addition & 1 deletion examples/inference/single/get.sh
@@ -4,5 +4,5 @@ for ifo in H-H1 L-L1 V-V1
do
file=${ifo}_LOSC_CLN_4_V1-1187007040-2048.gwf
test -f ${file} && continue
curl -O --show-error --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
curl -O -L --show-error --silent https://dcc.ligo.org/public/0146/P1700349/001/${file}
done