diff --git a/bilby/bilby_mcmc/sampler.py b/bilby/bilby_mcmc/sampler.py index 699cea265..3e158765b 100644 --- a/bilby/bilby_mcmc/sampler.py +++ b/bilby/bilby_mcmc/sampler.py @@ -10,7 +10,6 @@ from scipy.integrate import trapezoid from scipy.optimize import differential_evolution -from ..core.likelihood import _safe_likelihood_call from ..core.result import rejection_sample from ..core.sampler.base_sampler import ( MCMCSampler, @@ -1240,11 +1239,10 @@ def log_likelihood(self, sample): params = deepcopy(_sampling_convenience_dump.parameters) params.update(sample.sample_dict) - return _safe_likelihood_call( - _sampling_convenience_dump.likelihood, - parameters=params, - use_ratio=self.use_ratio, - ) + if self.use_ratio: + return _sampling_convenience_dump.likelihood.log_likelihood_ratio(params) + else: + return _sampling_convenience_dump.likelihood.log_likelihood(params) def log_prior(self, sample): return _sampling_convenience_dump.priors.ln_prob( @@ -1403,7 +1401,7 @@ def neg_log_post(x): parameters = deepcopy(_sampling_convenience_dump.parameters) parameters.update(sample) - return -beta * _safe_likelihood_call(likelihood, parameters) - ln_prior + return -beta * likelihood.log_likelihood(parameters) - ln_prior res = differential_evolution(neg_log_post, bounds, popsize=100, init="sobol") if res.success: diff --git a/bilby/core/fisher.py b/bilby/core/fisher.py index e8a869ec7..7651a9d0c 100644 --- a/bilby/core/fisher.py +++ b/bilby/core/fisher.py @@ -3,8 +3,6 @@ import scipy.linalg from scipy.optimize import minimize -from .likelihood import _safe_likelihood_call - class FisherMatrixPosteriorEstimator(object): def __init__(self, likelihood, priors, parameters=None, fd_eps=1e-6, n_prior_samples=100): @@ -47,7 +45,7 @@ def __init__(self, likelihood, priors, parameters=None, fd_eps=1e-6, n_prior_sam self.prior_width_dict[key] = width def log_likelihood(self, sample): - return _safe_likelihood_call(self.likelihood, sample) + return self.likelihood.log_likelihood(sample) 
def calculate_iFIM(self, sample): FIM = self.calculate_FIM(sample) diff --git a/bilby/core/grid.py b/bilby/core/grid.py index 0d103d4cc..c574a7a3a 100644 --- a/bilby/core/grid.py +++ b/bilby/core/grid.py @@ -3,7 +3,6 @@ import numpy as np -from .likelihood import _safe_likelihood_call from .prior import Prior, PriorDict from .utils import ( logtrapzexp, check_directory_exists_and_if_not_mkdir, logger, @@ -313,9 +312,7 @@ def _evaluate_recursion(self, dimension, parameters): current_point = tuple([[int(np.where( parameters[name] == self.sample_points[name])[0])] for name in self.parameter_names]) - self._ln_likelihood[current_point] = _safe_likelihood_call( - self.likelihood, parameters - ) + self._ln_likelihood[current_point] = self.likelihood.log_likelihood(parameters) else: name = self.parameter_names[dimension] for ii in range(self._ln_likelihood.shape[dimension]): diff --git a/bilby/core/likelihood.py b/bilby/core/likelihood.py index 4d75033b9..5d2259030 100644 --- a/bilby/core/likelihood.py +++ b/bilby/core/likelihood.py @@ -1,94 +1,15 @@ import copy -import inspect -import os -import warnings import numpy as np from scipy.special import gammaln, xlogy from scipy.stats import multivariate_normal -from .utils import infer_parameters_from_function, infer_args_from_function_except_n_args, logger - -PARAMETERS_AS_STATE = os.environ.get("BILBY_ALLOW_PARAMETERS_AS_STATE", "WARN") -for msg in [ - "No parameters provided in likelihood call", - "Using parameters as state", - "Parameter attribute queried", - "Settings non-trivial" -]: - warnings.filterwarnings("once", message=msg) - - -def set_parameters_as_state(level): - global PARAMETERS_AS_STATE - - level = str(level).upper() - if level not in ["TRUE", "FALSE", "WARN"]: - raise ValueError("Allowed levels for parameters as state are 'TRUE', 'FALSE', and 'WARN'") - PARAMETERS_AS_STATE = level - - -def _fallback_to_parameters(obj, parameters): - - if parameters is None: - msg = ( - f"No parameters provided in 
likelihood call, falling back to values stored in {obj}. " - "This is deprecated behaviour and will be removed in Bilby version 3. " - "See https://bilby-dev.github.io/bilby/parameters for more details." - ) - if PARAMETERS_AS_STATE == "FALSE": - raise LikelihoodParameterError(msg) - elif PARAMETERS_AS_STATE == "WARN": - warnings.warn(msg, FutureWarning) - else: - logger.debug(msg) - parameters = copy.deepcopy(obj.parameters) - - return parameters - - -def _safe_likelihood_call(likelihood, parameters=None, use_ratio=False): - if use_ratio: - method = likelihood.log_likelihood_ratio - else: - method = likelihood.log_likelihood - - if "parameters" in inspect.signature(method).parameters: - logl = method(parameters=parameters) - else: - if PARAMETERS_AS_STATE == "FALSE": - raise LikelihoodParameterError( - f"Unable to call {likelihood} with {parameters} as an argument" - ) - elif PARAMETERS_AS_STATE == "WARN": - warnings.warn( - f"Using parameters as state for {likelihood}. " - "This is deprecated behaviour and will be removed in Bilby version 3. " - "See https://bilby-dev.github.io/bilby/parameters for more details.", - DeprecationWarning, - ) - likelihood.parameters.update(parameters) - logl = method() - return logl +from .utils import infer_parameters_from_function, infer_args_from_function_except_n_args class Likelihood: - def __init_subclass__(cls, **kwargs): - super().__init_subclass__(**kwargs) - if ( - "parameters" not in inspect.signature(cls.log_likelihood).parameters - or "parameters" not in inspect.signature(cls.log_likelihood_ratio).parameters - ): - warnings.warn( - f"{cls} log_likelihood or log_likelihood_ratio method does not " - "accept 'parameters' as an argument. " - "This is deprecated behaviour and will be removed in Bilby version 3. 
" - "See https://bilby-dev.github.io/bilby/parameters for more details.", - FutureWarning, - ) - - def __init__(self, parameters=None): + def __init__(self): """Empty likelihood class to be subclassed by other likelihoods Parameters @@ -96,43 +17,13 @@ def __init__(self, parameters=None): parameters: dict A dictionary of the parameter names and associated values """ - self.parameters = parameters self._meta_data = None self._marginalized_parameters = [] - @property - def parameters(self): - msg = ( - f"Parameter attribute queried for {self.__class__}. " - "This is deprecated behaviour and will be removed in Bilby version 3. " - "See https://bilby-dev.github.io/bilby/parameters for more details." - ) - if PARAMETERS_AS_STATE == "FALSE": - raise LikelihoodParameterError(msg) - elif PARAMETERS_AS_STATE == "WARN": - warnings.warn(msg, FutureWarning) - return self._parameters - - @parameters.setter - def parameters(self, parameters): - if parameters is not None: - msg = ( - f"Setting non-trivial parameters for {self.__class__}. " - "This is deprecated behaviour and will be removed in Bilby version 3. " - "See https://bilby-dev.github.io/bilby/parameters for more details." 
- ) - if PARAMETERS_AS_STATE == "FALSE": - raise LikelihoodParameterError(msg) - elif PARAMETERS_AS_STATE == "WARN": - warnings.warn(msg, FutureWarning) - else: - parameters = dict() - self._parameters = parameters - def __repr__(self): return self.__class__.__name__ - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): """ Returns @@ -150,20 +41,14 @@ def noise_log_likelihood(self): """ return np.nan - def log_likelihood_ratio(self, parameters=None): + def log_likelihood_ratio(self, parameters): """Difference between log likelihood and noise log likelihood Returns ======= float """ - try: - return self.log_likelihood(parameters=parameters) - self.noise_log_likelihood() - except TypeError: - if PARAMETERS_AS_STATE != "FALSE": - return self.log_likelihood() - self.noise_log_likelihood() - else: - raise + return self.log_likelihood(parameters=parameters) - self.noise_log_likelihood() @property def meta_data(self): @@ -193,10 +78,9 @@ class ZeroLikelihood(Likelihood): def __init__(self, likelihood): super(ZeroLikelihood, self).__init__() - self.parameters = likelihood.parameters self._parent = likelihood - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): return 0 def noise_log_likelihood(self): @@ -239,9 +123,8 @@ def func(self): """ Make func read-only """ return self._func - def model_parameters(self, parameters=None): + def model_parameters(self, parameters): """ This sets up the function only parameters (i.e. not sigma for the GaussianLikelihood) """ - parameters = _fallback_to_parameters(self, parameters) return {key: parameters[key] for key in self.function_keys} @property @@ -276,7 +159,7 @@ def y(self, y): y = np.array([y]) self._y = y - def residual(self, parameters=None): + def residual(self, parameters): """ Residual of the function against the data. 
""" return self.y - self.func(self.x, **self.model_parameters(parameters=parameters), **self.kwargs) @@ -307,8 +190,7 @@ def __init__(self, x, y, func, sigma=None, **kwargs): super(GaussianLikelihood, self).__init__(x=x, y=y, func=func, **kwargs) self.sigma = sigma - def log_likelihood(self, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def log_likelihood(self, parameters): sigma = parameters.get("sigma", self.sigma) log_l = np.sum(- (self.residual(parameters) / sigma)**2 / 2 - np.log(2 * np.pi * sigma**2) / 2) @@ -326,10 +208,7 @@ def sigma(self): that if sigma is not in parameters the attribute is used which was given at init (i.e. the known sigma as either a float or array). """ - if PARAMETERS_AS_STATE == "FALSE": - return self._sigma - else: - return self.parameters.get('sigma', self._sigma) + return self._sigma @sigma.setter def sigma(self, sigma): @@ -368,7 +247,7 @@ def __init__(self, x, y, func, **kwargs): super(PoissonLikelihood, self).__init__(x=x, y=y, func=func, **kwargs) - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): rate = self.func(self.x, **self.model_parameters(parameters=parameters), **self.kwargs) if not isinstance(rate, np.ndarray): raise ValueError( @@ -419,7 +298,7 @@ def __init__(self, x, y, func, **kwargs): """ super(ExponentialLikelihood, self).__init__(x=x, y=y, func=func, **kwargs) - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): mu = self.func(self.x, **self.model_parameters(parameters=parameters), **self.kwargs) if np.any(mu < 0.): return -np.inf @@ -477,8 +356,7 @@ def __init__(self, x, y, func, nu=None, sigma=1, **kwargs): self.nu = nu self.sigma = sigma - def log_likelihood(self, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def log_likelihood(self, parameters): nu = parameters.get("nu", self.nu) if nu <= 0.: raise ValueError("Number of degrees of freedom for Student's " @@ -506,10 +384,7 @@ def 
nu(self): values will be used. Otherwise, the attribute nu is used. The logic is that if nu is not in parameters the attribute is used which was given at init (i.e. the known nu as a float).""" - if PARAMETERS_AS_STATE == "FALSE": - return self._nu - else: - return self.parameters.get('nu', self._nu) + return self._nu @nu.setter def nu(self, nu): @@ -540,11 +415,10 @@ def __init__(self, data, n_dimensions, base="parameter_"): self.base = base self._nll = None - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): """ Since n - 1 parameters are sampled, the last parameter is 1 - the rest """ - parameters = _fallback_to_parameters(self, parameters) probs = [parameters[self.base + str(ii)] for ii in range(self.n - 1)] probs.append(1 - sum(probs)) @@ -590,8 +464,7 @@ def __init__(self, mean, cov): def dim(self): return len(self.cov[0]) - def log_likelihood(self, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def log_likelihood(self, parameters): x = np.array([parameters["x{0}".format(i)] for i in range(self.dim)]) return self.pdf.logpdf(x) @@ -623,8 +496,7 @@ def __init__(self, mean_1, mean_2, cov): def dim(self): return len(self.cov[0]) - def log_likelihood(self, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def log_likelihood(self, parameters): x = np.array([parameters["x{0}".format(i)] for i in range(self.dim)]) return -np.log(2) + np.logaddexp(self.pdf_1.logpdf(x), self.pdf_2.logpdf(x)) @@ -646,21 +518,7 @@ def __init__(self, *likelihoods): likelihoods to be combined parsed as arguments """ self.likelihoods = likelihoods - if PARAMETERS_AS_STATE == "FALSE": - params = None - else: - params = dict() - super(JointLikelihood, self).__init__(parameters=params) - self.__sync_parameters() - - def __sync_parameters(self): - """ Synchronizes parameters between the likelihoods - so that all likelihoods share a single parameter dict.""" - if PARAMETERS_AS_STATE != "FALSE": - for 
likelihood in self.likelihoods: - self.parameters.update(likelihood.parameters) - for likelihood in self.likelihoods: - likelihood.parameters = self.parameters + super(JointLikelihood, self).__init__() @property def likelihoods(self): @@ -681,7 +539,7 @@ def likelihoods(self, likelihoods): else: raise ValueError('Input likelihood is not a list of tuple. You need to set multiple likelihoods.') - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): """ This is just the sum of the log likelihoods of all parts of the joint likelihood""" return sum([likelihood.log_likelihood(parameters=parameters) for likelihood in self.likelihoods]) @@ -747,11 +605,7 @@ def __init__(self, kernel, mean_model, t, y, yerr=1e-6, gp_class=None): self.GPClass = gp_class self.gp = self.GPClass(kernel=self.kernel, mean=self.mean_model, fit_mean=True, fit_white_noise=True) self.gp.compute(self.t, yerr=self.yerr) - if PARAMETERS_AS_STATE == "FALSE": - parameters = None - else: - parameters = self.gp.get_parameter_dict() - super().__init__(parameters=parameters) + super().__init__() def set_parameters(self, parameters): """ @@ -768,8 +622,6 @@ def set_parameters(self, parameters): self.gp.set_parameter(name=name, value=value) except ValueError: pass - if PARAMETERS_AS_STATE != "FALSE": - self.parameters[name] = value class CeleriteLikelihood(_GPLikelihood): @@ -797,7 +649,7 @@ def __init__(self, kernel, mean_model, t, y, yerr=1e-6): import celerite super().__init__(kernel=kernel, mean_model=mean_model, t=t, y=y, yerr=yerr, gp_class=celerite.GP) - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): """ Calculate the log-likelihood for the Gaussian process given the current parameters. @@ -805,7 +657,6 @@ def log_likelihood(self, parameters=None): ======= float: The log-likelihood value. 
""" - parameters = _fallback_to_parameters(self, parameters) self.gp.set_parameter_vector(vector=np.array(list(parameters.values()))) try: return self.gp.log_likelihood(self.y) @@ -838,7 +689,7 @@ def __init__(self, kernel, mean_model, t, y, yerr=1e-6): import george super().__init__(kernel=kernel, mean_model=mean_model, t=t, y=y, yerr=yerr, gp_class=george.GP) - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): """ Calculate the log-likelihood for the Gaussian process given the current parameters. @@ -846,7 +697,6 @@ def log_likelihood(self, parameters=None): ======= float: The log-likelihood value. """ - parameters = _fallback_to_parameters(self, parameters) for name, value in parameters.items(): try: self.gp.set_parameter(name=name, value=value) diff --git a/bilby/core/result.py b/bilby/core/result.py index e9d9cb9ef..84b55c26c 100644 --- a/bilby/core/result.py +++ b/bilby/core/result.py @@ -7,13 +7,11 @@ from importlib import import_module from itertools import product import multiprocessing -from functools import partial import numpy as np import pandas as pd import scipy.stats from . 
import utils -from .likelihood import _safe_likelihood_call from .utils import ( logger, infer_parameters_from_function, check_directory_exists_and_if_not_mkdir, @@ -244,8 +242,11 @@ def eval_pool(this_logl): with multiprocessing.Pool(processes=npool) as pool: chunksize = max(100, n // (2 * npool)) return list(tqdm( - pool.imap(partial(_safe_likelihood_call, this_logl), - dict_samples[starting_index:], chunksize=chunksize), + pool.imap( + this_logl.log_likelihood, + dict_samples[starting_index:], + chunksize=chunksize, + ), desc='Computing likelihoods', total=n) ) diff --git a/bilby/core/sampler/base_sampler.py b/bilby/core/sampler/base_sampler.py index b81a36173..41e43db0e 100644 --- a/bilby/core/sampler/base_sampler.py +++ b/bilby/core/sampler/base_sampler.py @@ -11,7 +11,6 @@ import numpy as np from pandas import DataFrame -from ..likelihood import _safe_likelihood_call from ..prior import Constraint, DeltaFunction, Prior, PriorDict from ..result import Result, read_in_result from ..utils import ( @@ -495,9 +494,7 @@ def _verify_use_ratio(self): parameters = deepcopy(self.parameters) parameters.update(self.priors.sample()) - ratio_is_nan = np.isnan( - _safe_likelihood_call(self.likelihood, parameters, use_ratio=True) - ) + ratio_is_nan = np.isnan(self.likelihood.log_likelihood_ratio(parameters)) if self.use_ratio is True and ratio_is_nan: logger.warning( @@ -560,9 +557,10 @@ def log_likelihood(self, theta): params = deepcopy(self.parameters) params.update({key: t for key, t in zip(self._search_parameter_keys, theta)}) - return _safe_likelihood_call( - self.likelihood, parameters=params, use_ratio=self.use_ratio - ) + if self.use_ratio: + return self.likelihood.log_likelihood_ratio(params) + else: + return self.likelihood.log_likelihood(params) def get_random_draw_from_prior(self): """Get a random draw from the prior distribution @@ -1153,12 +1151,12 @@ def logl(self, v_array): search_parameter_keys = _sampling_convenience_dump.search_parameter_keys parameters = 
_sampling_convenience_dump.parameters.copy() parameters.update({key: v for key, v in zip(search_parameter_keys, v_array)}) - if priors.evaluate_constraints(parameters) > 0: - return _safe_likelihood_call( - likelihood, parameters, _sampling_convenience_dump.use_ratio - ) - else: + if priors.evaluate_constraints(parameters) == 0: return np.nan_to_num(-np.inf) + elif _sampling_convenience_dump.use_ratio: + return likelihood.log_likelihood_ratio(parameters) + else: + return likelihood.log_likelihood(parameters) def logp(self, v_array): priors = _sampling_convenience_dump.priors diff --git a/bilby/core/sampler/dynesty.py b/bilby/core/sampler/dynesty.py index 18a178a41..c309ad794 100644 --- a/bilby/core/sampler/dynesty.py +++ b/bilby/core/sampler/dynesty.py @@ -9,7 +9,6 @@ import numpy as np from pandas import DataFrame -from ..likelihood import _safe_likelihood_call from ..result import rejection_sample from ..utils import ( check_directory_exists_and_if_not_mkdir, @@ -41,28 +40,16 @@ def _log_likelihood_wrapper(theta): """Wrapper to the log likelihood. 
Needed for multiprocessing.""" from .base_sampler import _sampling_convenience_dump - if _sampling_convenience_dump.priors.evaluate_constraints( - { - key: theta[ii] - for ii, key in enumerate(_sampling_convenience_dump.search_parameter_keys) - } - ): - params = deepcopy(_sampling_convenience_dump.parameters) - params.update( - { - key: t - for key, t in zip( - _sampling_convenience_dump.search_parameter_keys, theta - ) - } - ) - return _safe_likelihood_call( - _sampling_convenience_dump.likelihood, - params, - _sampling_convenience_dump.use_ratio, - ) - else: + keys = _sampling_convenience_dump.search_parameter_keys + sampling_params = {key: t for key, t in zip(keys, theta)} + params = deepcopy(_sampling_convenience_dump.parameters) + params.update(sampling_params) + if not _sampling_convenience_dump.priors.evaluate_constraints(sampling_params): return np.nan_to_num(-np.inf) + elif _sampling_convenience_dump.use_ratio: + return _sampling_convenience_dump.likelihood.log_likelihood_ratio(params) + else: + return _sampling_convenience_dump.likelihood.log_likelihood(params) class Dynesty(NestedSampler): diff --git a/bilby/core/sampler/fake_sampler.py b/bilby/core/sampler/fake_sampler.py index c1d285605..e36b9a1c8 100644 --- a/bilby/core/sampler/fake_sampler.py +++ b/bilby/core/sampler/fake_sampler.py @@ -1,6 +1,5 @@ import numpy as np -from ..likelihood import _safe_likelihood_call from ..result import read_in_result from .base_sampler import Sampler @@ -77,7 +76,7 @@ def run_sampler(self): sample = posterior.iloc[i] params = sample.to_dict() - logl = _safe_likelihood_call(self.likelihood, params, use_ratio=True) + logl = self.likelihood.log_likelihood_ratio(params) sample.log_likelihood = logl likelihood_ratios.append(logl) diff --git a/bilby/core/sampler/ptemcee.py b/bilby/core/sampler/ptemcee.py index 529feb4cb..e41794f14 100644 --- a/bilby/core/sampler/ptemcee.py +++ b/bilby/core/sampler/ptemcee.py @@ -10,7 +10,6 @@ import pandas as pd from scipy.integrate import 
trapezoid -from ..likelihood import _safe_likelihood_call from ..utils import check_directory_exists_and_if_not_mkdir, logger, safe_file_dump from .base_sampler import LikePriorEvaluator, MCMCSampler, SamplerError, signal_wrapper @@ -348,7 +347,7 @@ def neg_log_like(params): try: parameters = copy.copy(draw) parameters.update({key: val for key, val in zip(minimize_list, params)}) - return -_safe_likelihood_call(likelihood_copy, parameters) + return -likelihood_copy.log_likelihood(parameters) except RuntimeError: return +np.inf diff --git a/bilby/core/sampler/pymc.py b/bilby/core/sampler/pymc.py index 90a607c4d..0ae413453 100644 --- a/bilby/core/sampler/pymc.py +++ b/bilby/core/sampler/pymc.py @@ -8,7 +8,6 @@ GaussianLikelihood, PoissonLikelihood, StudentTLikelihood, - _safe_likelihood_call, ) from ..prior import Cosine, DeltaFunction, MultivariateGaussian, PowerLaw, Sine from ..utils import derivatives, infer_args_from_method @@ -837,9 +836,7 @@ def perform(self, node, inputs, outputs): for i, key in enumerate(self.parameters): self.parameters[key] = theta[i] - logl = _safe_likelihood_call( - self.likelihood.log_likelihood, self.parameters - ) + logl = self.likelihood.log_likelihood(self.parameters) outputs[0][0] = np.array(logl) def grad(self, inputs, g): @@ -870,9 +867,7 @@ def perform(self, node, inputs, outputs): def lnlike(values): for i, key in enumerate(self.parameters): self.parameters[key] = values[i] - return _safe_likelihood_call( - self.likelihood.log_likelihood, self.parameters - ) + return self.likelihood.log_likelihood(self.parameters) # calculate gradients grads = derivatives( diff --git a/bilby/gw/likelihood/base.py b/bilby/gw/likelihood/base.py index e1778ddb1..8dfbcdbf5 100644 --- a/bilby/gw/likelihood/base.py +++ b/bilby/gw/likelihood/base.py @@ -6,7 +6,7 @@ import numpy as np from scipy.special import logsumexp -from ...core.likelihood import Likelihood, _fallback_to_parameters +from ...core.likelihood import Likelihood from ...core.utils 
import logger, BoundedRectBivariateSpline, create_time_series from ...core.prior import Interped, Prior, Uniform, DeltaFunction from ..detector import InterferometerList, get_empty_interferometer, calibration @@ -250,7 +250,7 @@ def _check_set_duration_and_sampling_frequency_of_waveform_generator(self): "waveform_generator.".format(attribute)) setattr(self.waveform_generator, attribute, ifo_attr) - def calculate_snrs(self, waveform_polarizations, interferometer, return_array=True, parameters=None): + def calculate_snrs(self, waveform_polarizations, interferometer, *, return_array=True, parameters): """ Compute the snrs @@ -272,7 +272,6 @@ def calculate_snrs(self, waveform_polarizations, interferometer, return_array=Tr the internal array objects. """ - parameters = _fallback_to_parameters(self, parameters) signal = self._compute_full_waveform( signal_polarizations=waveform_polarizations, interferometer=interferometer, @@ -405,11 +404,9 @@ def noise_log_likelihood(self): self._noise_log_likelihood_value = self._calculate_noise_log_likelihood() return self._noise_log_likelihood_value - def log_likelihood_ratio(self, parameters=None): - if parameters is not None: - parameters = copy.deepcopy(parameters) - else: - parameters = _fallback_to_parameters(self, parameters) + def log_likelihood_ratio(self, parameters): + parameters = copy.deepcopy(parameters) + waveform_polarizations = \ self.waveform_generator.frequency_domain_strain(parameters) if waveform_polarizations is None: @@ -438,8 +435,7 @@ def log_likelihood_ratio(self, parameters=None): return float(log_l.real) - def compute_log_likelihood_from_snrs(self, total_snrs, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def compute_log_likelihood_from_snrs(self, total_snrs, parameters): if self.calibration_marginalization: log_l = self.calibration_marginalized_likelihood( @@ -470,8 +466,7 @@ def compute_log_likelihood_from_snrs(self, total_snrs, parameters=None): return log_l - def 
compute_per_detector_log_likelihood(self, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def compute_per_detector_log_likelihood(self, parameters): waveform_polarizations = \ self.waveform_generator.frequency_domain_strain(parameters) @@ -495,7 +490,7 @@ def compute_per_detector_log_likelihood(self, parameters=None): return parameters.copy() - def generate_posterior_sample_from_marginalized_likelihood(self, parameters=None): + def generate_posterior_sample_from_marginalized_likelihood(self, parameters): """ Reconstruct the distance posterior from a run which used a likelihood which explicitly marginalised over time/distance/phase. @@ -512,7 +507,6 @@ def generate_posterior_sample_from_marginalized_likelihood(self, parameters=None This involves a deepcopy of the signal to avoid issues with waveform caching, as the signal is overwritten in place. """ - parameters = _fallback_to_parameters(self, parameters) if len(self._marginalized_parameters) > 0: signal_polarizations = copy.deepcopy( self.waveform_generator.frequency_domain_strain( @@ -539,7 +533,7 @@ def generate_posterior_sample_from_marginalized_likelihood(self, parameters=None return parameters.copy() def generate_calibration_sample_from_marginalized_likelihood( - self, signal_polarizations=None, parameters=None): + self, signal_polarizations=None, *, parameters): """ Generate a single sample from the posterior distribution for the set of calibration response curves when explicitly marginalizing over the calibration uncertainty. 
@@ -556,7 +550,6 @@ def generate_calibration_sample_from_marginalized_likelihood( """ from ...core.utils import random - parameters = _fallback_to_parameters(self, parameters) if 'recalib_index' in parameters: parameters.pop('recalib_index') parameters.update(self.get_sky_frame_parameters(parameters)) @@ -576,7 +569,7 @@ def generate_calibration_sample_from_marginalized_likelihood( return new_calibration def generate_time_sample_from_marginalized_likelihood( - self, signal_polarizations=None, parameters=None): + self, signal_polarizations=None, *, parameters): """ Generate a single sample from the posterior distribution for coalescence time when using a likelihood which explicitly marginalises over time. @@ -595,7 +588,6 @@ def generate_time_sample_from_marginalized_likelihood( new_time: float Sample from the time posterior. """ - parameters = _fallback_to_parameters(self, parameters) parameters.update(self.get_sky_frame_parameters(parameters)) if self.jitter_time: parameters['geocent_time'] += parameters['time_jitter'] @@ -655,7 +647,7 @@ def generate_time_sample_from_marginalized_likelihood( return new_time def generate_distance_sample_from_marginalized_likelihood( - self, signal_polarizations=None, parameters=None): + self, signal_polarizations=None, *, parameters): """ Generate a single sample from the posterior distribution for luminosity distance when using a likelihood which explicitly marginalises over @@ -675,7 +667,6 @@ def generate_distance_sample_from_marginalized_likelihood( new_distance: float Sample from the distance posterior. 
""" - parameters = _fallback_to_parameters(self, parameters) parameters.update(self.get_sky_frame_parameters(parameters)) if signal_polarizations is None: signal_polarizations = \ @@ -718,7 +709,7 @@ def _calculate_inner_products(self, signal_polarizations, parameters): h_inner_h += per_detector_snr.optimal_snr_squared return d_inner_h, h_inner_h - def _compute_full_waveform(self, signal_polarizations, interferometer, parameters=None): + def _compute_full_waveform(self, signal_polarizations, interferometer, parameters): """ Project the waveform polarizations against the interferometer response. This is useful for likelihood classes that don't @@ -733,11 +724,10 @@ def _compute_full_waveform(self, signal_polarizations, interferometer, parameter interferometer: bilby.gw.detector.Interferometer Interferometer to compute the response with respect to. """ - parameters = _fallback_to_parameters(self, parameters) return interferometer.get_detector_response(signal_polarizations, parameters) def generate_phase_sample_from_marginalized_likelihood( - self, signal_polarizations=None, parameters=None): + self, signal_polarizations=None, *, parameters): r""" Generate a single sample from the posterior distribution for phase when using a likelihood which explicitly marginalises over phase. @@ -758,7 +748,6 @@ def generate_phase_sample_from_marginalized_likelihood( ===== This is only valid when assumes that mu(phi) \propto exp(-2i phi). 
""" - parameters = _fallback_to_parameters(self, parameters) parameters.update(self.get_sky_frame_parameters(parameters)) if signal_polarizations is None: signal_polarizations = \ @@ -774,8 +763,7 @@ def generate_phase_sample_from_marginalized_likelihood( new_phase = Interped(phases, phase_post).sample() return new_phase - def distance_marginalized_likelihood(self, d_inner_h, h_inner_h, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def distance_marginalized_likelihood(self, d_inner_h, h_inner_h, *, parameters): d_inner_h_ref, h_inner_h_ref = self._setup_rho( d_inner_h, h_inner_h, parameters=parameters) if self.phase_marginalization: @@ -794,8 +782,7 @@ def phase_marginalized_likelihood(self, d_inner_h, h_inner_h): else: return d_inner_h - h_inner_h / 2 - def time_marginalized_likelihood(self, d_inner_h_tc_array, h_inner_h, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def time_marginalized_likelihood(self, d_inner_h_tc_array, h_inner_h, *, parameters): times = self._times if self.jitter_time: times = self._times + parameters['time_jitter'] @@ -822,8 +809,7 @@ def time_marginalized_likelihood(self, d_inner_h_tc_array, h_inner_h, parameters log_l_tc_array = np.real(d_inner_h_tc_array) - h_inner_h / 2 return logsumexp(log_l_tc_array, b=time_prior_array, axis=-1) - def get_calibration_log_likelihoods(self, signal_polarizations=None, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def get_calibration_log_likelihoods(self, signal_polarizations=None, *, parameters): parameters.update(self.get_sky_frame_parameters(parameters)) if signal_polarizations is None: signal_polarizations = \ @@ -861,8 +847,7 @@ def get_calibration_log_likelihoods(self, signal_polarizations=None, parameters= return log_l_cal_array - def calibration_marginalized_likelihood(self, d_inner_h_calibration_array, h_inner_h, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def 
calibration_marginalized_likelihood(self, d_inner_h_calibration_array, h_inner_h, *, parameters): if self.time_marginalization: log_l_cal_array = self.time_marginalized_likelihood( d_inner_h_tc_array=d_inner_h_calibration_array, @@ -881,8 +866,7 @@ def calibration_marginalized_likelihood(self, d_inner_h_calibration_array, h_inn return logsumexp(log_l_cal_array) - np.log(self.number_of_response_curves) - def _setup_rho(self, d_inner_h, optimal_snr_squared, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def _setup_rho(self, d_inner_h, optimal_snr_squared, parameters): optimal_snr_squared_ref = (optimal_snr_squared.real * parameters['luminosity_distance'] ** 2 / self._ref_dist ** 2.) @@ -890,7 +874,7 @@ def _setup_rho(self, d_inner_h, optimal_snr_squared, parameters=None): self._ref_dist) return d_inner_h_ref, optimal_snr_squared_ref - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): return self.log_likelihood_ratio(parameters=parameters) + self.noise_log_likelihood() @property @@ -1089,7 +1073,7 @@ def reference_frame(self, frame): else: raise ValueError("Unable to parse reference frame {}".format(frame)) - def get_sky_frame_parameters(self, parameters=None): + def get_sky_frame_parameters(self, parameters): """ Generate ra, dec, and geocenter time for :code:`parameters` @@ -1100,13 +1084,11 @@ def get_sky_frame_parameters(self, parameters=None): ========== parameters: dict, optional The parameters to be converted. - If not specified :code:`self.parameters` will be used. 
Returns ======= dict: dictionary containing ra, dec, and geocent_time """ - parameters = _fallback_to_parameters(self, parameters) time = parameters.get(f'{self.time_reference}_time', None) if time is None and "geocent_time" in parameters: logger.warning( diff --git a/bilby/gw/likelihood/basic.py b/bilby/gw/likelihood/basic.py index 2931ec742..da67481f0 100644 --- a/bilby/gw/likelihood/basic.py +++ b/bilby/gw/likelihood/basic.py @@ -1,6 +1,6 @@ import numpy as np -from ...core.likelihood import Likelihood, _fallback_to_parameters +from ...core.likelihood import Likelihood class BasicGravitationalWaveTransient(Likelihood): @@ -48,7 +48,7 @@ def noise_log_likelihood(self): interferometer.power_spectral_density_array) return log_l.real - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): """ Calculates the real part of log-likelihood value Returns @@ -56,7 +56,6 @@ def log_likelihood(self, parameters=None): float: The real part of the log likelihood """ - parameters = _fallback_to_parameters(self, parameters) log_l = 0 waveform_polarizations = \ self.waveform_generator.frequency_domain_strain(parameters) @@ -68,7 +67,7 @@ def log_likelihood(self, parameters=None): return log_l.real def log_likelihood_interferometer(self, waveform_polarizations, - interferometer, parameters=None): + interferometer, parameters): """ Parameters @@ -83,7 +82,6 @@ def log_likelihood_interferometer(self, waveform_polarizations, float: The real part of the log-likelihood for this interferometer """ - parameters = _fallback_to_parameters(self, parameters) signal_ifo = interferometer.get_detector_response( waveform_polarizations, parameters) diff --git a/bilby/gw/likelihood/multiband.py b/bilby/gw/likelihood/multiband.py index 056864334..7b746eefb 100644 --- a/bilby/gw/likelihood/multiband.py +++ b/bilby/gw/likelihood/multiband.py @@ -11,7 +11,6 @@ recursively_load_dict_contents_from_group, recursively_save_dict_contents_to_group ) -from ...core.likelihood 
import _fallback_to_parameters from ..prior import CBCPriorDict from ..utils import ln_i0 @@ -726,7 +725,7 @@ def _setup_time_marginalization_multiband(self): ) / 2 self.interferometers.reference_time = self._beam_pattern_reference_time - def calculate_snrs(self, waveform_polarizations, interferometer, return_array=True, parameters=None): + def calculate_snrs(self, waveform_polarizations, interferometer, *, return_array=True, parameters): """ Compute the snrs @@ -747,7 +746,6 @@ def calculate_snrs(self, waveform_polarizations, interferometer, return_array=Tr An object containing the SNR quantities. """ - parameters = _fallback_to_parameters(self, parameters) modes = { mode: value[self.unique_to_original_frequencies] @@ -807,8 +805,7 @@ def _rescale_signal(self, signal, new_distance): for mode in signal: signal[mode] *= self._ref_dist / new_distance - def generate_time_sample_from_marginalized_likelihood(self, signal_polarizations=None, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def generate_time_sample_from_marginalized_likelihood(self, signal_polarizations=None, *, parameters): parameters.update(self.get_sky_frame_parameters(parameters=parameters)) if signal_polarizations is None: signal_polarizations = \ diff --git a/bilby/gw/likelihood/relative.py b/bilby/gw/likelihood/relative.py index f4c72e8ef..a0fc8aab1 100644 --- a/bilby/gw/likelihood/relative.py +++ b/bilby/gw/likelihood/relative.py @@ -7,7 +7,6 @@ from ...core.utils import logger from ...core.prior.base import Constraint from ...core.prior import DeltaFunction -from ...core.likelihood import _fallback_to_parameters from ..utils import noise_weighted_inner_product @@ -356,8 +355,7 @@ def compute_summary_data(self): self.summary_data = summary_data - def compute_waveform_ratio_per_interferometer(self, waveform_polarizations, interferometer, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def compute_waveform_ratio_per_interferometer(self, 
waveform_polarizations, interferometer, parameters): name = interferometer.name strain = interferometer.get_detector_response( waveform_polarizations=waveform_polarizations, @@ -372,7 +370,7 @@ def compute_waveform_ratio_per_interferometer(self, waveform_polarizations, inte return [r0, r1] - def _compute_full_waveform(self, signal_polarizations, interferometer, parameters=None): + def _compute_full_waveform(self, signal_polarizations, interferometer, parameters): fiducial_waveform = self.per_detector_fiducial_waveforms[interferometer.name] r0, r1 = self.compute_waveform_ratio_per_interferometer( waveform_polarizations=signal_polarizations, @@ -390,7 +388,7 @@ def _compute_full_waveform(self, signal_polarizations, interferometer, parameter full_waveform_ratio[idxs] = duplicated_r0 + duplicated_r1 * (f[idxs] - duplicated_fm) return fiducial_waveform * full_waveform_ratio - def calculate_snrs(self, waveform_polarizations, interferometer, return_array=True, parameters=None): + def calculate_snrs(self, waveform_polarizations, interferometer, *, return_array=True, parameters): r0, r1 = self.compute_waveform_ratio_per_interferometer( waveform_polarizations=waveform_polarizations, interferometer=interferometer, diff --git a/bilby/gw/likelihood/roq.py b/bilby/gw/likelihood/roq.py index fd3ddae54..46a88d09c 100644 --- a/bilby/gw/likelihood/roq.py +++ b/bilby/gw/likelihood/roq.py @@ -5,7 +5,6 @@ from ...core.utils import ( logger, create_frequency_series, speed_of_light, radius_of_earth ) -from ...core.likelihood import _fallback_to_parameters from ..prior import CBCPriorDict from ..utils import ln_i0 @@ -39,8 +38,8 @@ class ROQGravitationalWaveTransient(GravitationalWaveTransient): roq_scale_factor: float The ROQ scale factor used. parameter_conversion: func, optional - Function to update self.parameters before bases are selected based on - the values of self.parameters. 
This enables a user to switch bases + Function to update parameters before bases are selected based on + the values of parameters. This enables a user to switch bases based on the values of parameters which are not directly used for sampling. priors: dict, bilby.prior.PriorDict @@ -371,7 +370,7 @@ def _select_prior_ranges(self, prior_ranges): dict((param_name, prior_ranges[param_name][idxs_in_prior_range]) for param_name in param_names) - def _update_basis(self, parameters=None): + def _update_basis(self, parameters): """ Update basis and frequency nodes depending on the current values of parameters @@ -380,7 +379,7 @@ def _update_basis(self, parameters=None): - frequency_nodes_linear/quadratic in self._waveform_generator.waveform_arguments """ - parameters = _fallback_to_parameters(self, parameters).copy() + parameters = parameters.copy() if self.parameter_conversion is not None: parameters = self.parameter_conversion(parameters) if self._cache["parameters"] == parameters: @@ -432,11 +431,11 @@ def waveform_generator(self): def waveform_generator(self, waveform_generator): self._waveform_generator = waveform_generator - def log_likelihood_ratio(self, parameters=None): + def log_likelihood_ratio(self, parameters): self._update_basis(parameters) return super().log_likelihood_ratio(parameters=parameters) - def calculate_snrs(self, waveform_polarizations, interferometer, return_array=True, parameters=None): + def calculate_snrs(self, waveform_polarizations, interferometer, *, return_array=True, parameters): """ Compute the snrs for ROQ @@ -446,7 +445,6 @@ def calculate_snrs(self, waveform_polarizations, interferometer, return_array=Tr interferometer: bilby.gw.detector.Interferometer """ - parameters = _fallback_to_parameters(self, parameters) self._update_basis(parameters) if self.time_marginalization: time_ref = self._beam_pattern_reference_time @@ -1172,10 +1170,9 @@ def _rescale_signal(self, signal, new_distance): for mode in signal[kind]: signal[kind][mode] *=
self._ref_dist / new_distance - def generate_time_sample_from_marginalized_likelihood(self, signal_polarizations=None, parameters=None): + def generate_time_sample_from_marginalized_likelihood(self, signal_polarizations=None, *, parameters): from ...core.utils import random - parameters = _fallback_to_parameters(self, parameters) parameters.update(self.get_sky_frame_parameters(parameters=parameters)) if signal_polarizations is None: signal_polarizations = \ diff --git a/bilby/gw/waveform_generator.py b/bilby/gw/waveform_generator.py index e0f8b4e23..742ec35f8 100644 --- a/bilby/gw/waveform_generator.py +++ b/bilby/gw/waveform_generator.py @@ -73,8 +73,10 @@ def __init__(self, duration=None, sampling_frequency=None, start_time=0, frequen self.waveform_arguments = waveform_arguments else: self.waveform_arguments = dict() - if isinstance(parameters, dict): - self.parameters = parameters + if parameters is not None: + logger.warning( + "Non null parameters passed to waveform generator. These will be ignored." + ) self._cache = dict(parameters=None, waveform=None, model=None) logger.info(f"Waveform generator instantiated: {self}") @@ -102,15 +104,13 @@ def __repr__(self): def frequency_domain_strain(self, parameters=None): """ Wrapper to source_model. - Converts self.parameters with self.parameter_conversion before handing it off to the source model. + Converts parameters with self.parameter_conversion before handing it off to the source model. Automatically refers to the time_domain_source model via NFFT if no frequency_domain_source_model is given. Parameters ========== parameters: dict, optional - Parameters to evaluate the waveform for, this overwrites - `self.parameters`. - If not provided will fall back to `self.parameters`. + Parameters to evaluate the waveform for. Returns ======= @@ -131,16 +131,14 @@ def frequency_domain_strain(self, parameters=None): def time_domain_strain(self, parameters=None): """ Wrapper to source_model. 
- Converts self.parameters with self.parameter_conversion before handing it off to the source model. + Converts parameters with self.parameter_conversion before handing it off to the source model. Automatically refers to the frequency_domain_source model via INFFT if no frequency_domain_source_model is given. Parameters ========== parameters: dict, optional - Parameters to evaluate the waveform for, this overwrites - `self.parameters`. - If not provided will fall back to `self.parameters`. + Parameters to evaluate the waveform for. Returns ======= @@ -160,8 +158,11 @@ def time_domain_strain(self, parameters=None): def _calculate_strain(self, model, model_data_points, transformation_function, transformed_model, transformed_model_data_points, parameters): - if parameters is None: - parameters = self.parameters + if parameters is None and self._cache["parameters"] is not None: + parameters = self._cache["parameters"] + elif parameters is None: + raise ValueError("No parameters passed to waveform generator.") + if parameters == self._cache['parameters'] and self._cache['model'] == model and \ self._cache['transformed_model'] == transformed_model: return self._cache['waveform'] @@ -506,9 +507,6 @@ def _from_bilby_parameters(self, **parameters): def frequency_domain_strain(self, parameters): from lalsimulation.gwsignal import GenerateFDWaveform - if parameters is None: - parameters = self.parameters - hpc = _try_waveform_call( GenerateFDWaveform, self._from_bilby_parameters(**parameters), @@ -540,9 +538,6 @@ def frequency_domain_strain(self, parameters): def time_domain_strain(self, parameters): from lalsimulation.gwsignal import GenerateTDWaveform - if parameters is None: - parameters = self.parameters - hpc = _try_waveform_call( GenerateTDWaveform, self._from_bilby_parameters(**parameters), diff --git a/bilby/hyper/likelihood.py b/bilby/hyper/likelihood.py index 4b691845e..2ca63bbbd 100644 --- a/bilby/hyper/likelihood.py +++ b/bilby/hyper/likelihood.py @@ -3,7 +3,7 @@ 
import numpy as np -from ..core.likelihood import Likelihood, _fallback_to_parameters +from ..core.likelihood import Likelihood from .model import Model from ..core.prior import PriorDict @@ -59,8 +59,7 @@ def __init__(self, posteriors, hyper_prior, sampling_prior=None, self.samples_factor =\ - self.n_posteriors * np.log(self.samples_per_posterior) - def log_likelihood_ratio(self, parameters=None): - parameters = _fallback_to_parameters(self, parameters) + def log_likelihood_ratio(self, parameters): log_l = np.sum(np.log(np.sum(self.hyper_prior.prob(self.data, **parameters) / self.data['prior'], axis=-1))) log_l += self.samples_factor @@ -69,7 +68,7 @@ def log_likelihood_ratio(self, parameters=None): def noise_log_likelihood(self): return self.evidence_factor - def log_likelihood(self, parameters=None): + def log_likelihood(self, parameters): return self.noise_log_likelihood() + self.log_likelihood_ratio(parameters=parameters) def resample_posteriors(self, max_samples=None): diff --git a/examples/core_examples/alternative_samplers/linear_regression_pymc_custom_likelihood.py b/examples/core_examples/alternative_samplers/linear_regression_pymc_custom_likelihood.py index cb381aec7..e8c35a5cd 100644 --- a/examples/core_examples/alternative_samplers/linear_regression_pymc_custom_likelihood.py +++ b/examples/core_examples/alternative_samplers/linear_regression_pymc_custom_likelihood.py @@ -75,7 +75,7 @@ def __init__(self, x, y, sigma, func): """ super(GaussianLikelihoodPyMC, self).__init__(x=x, y=y, func=func, sigma=sigma) - def log_likelihood(self, sampler=None, parameters=None): + def log_likelihood(self, sampler=None, *, parameters): """ Parameters ----------