Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

RandMaxVar update and batch acquisitions #478

Merged
merged 7 commits into from
Mar 22, 2024
Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
Changelog
=========

- Improve `randmaxvar` initialisation and batch acquisitions
- Enable using `maxiter` in `bo.utils.minimize`
- Fix surrogate model copy operation
- Fix typo in requirements.txt
Expand Down
60 changes: 45 additions & 15 deletions elfi/methods/bo/acquisition.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,16 +327,20 @@ class MaxVar(AcquisitionBase):

"""

def __init__(self, quantile_eps=.01, *args, **opts):
def __init__(self, model, prior, quantile_eps=.01, **opts):
"""Initialise MaxVar.

Parameters
----------
model : elfi.GPyRegression
Gaussian process model used to calculate the unnormalised approximate likelihood.
prior : scipy-like distribution
Prior distribution.
quantile_eps : float, optional
Quantile of the observed discrepancies used in setting the ABC threshold.

"""
super(MaxVar, self).__init__(*args, **opts)
super(MaxVar, self).__init__(model, prior=prior, **opts)
self.name = 'max_var'
self.label_fn = 'Variance of the Unnormalised Approximate Posterior'
self.quantile_eps = quantile_eps
Expand Down Expand Up @@ -492,30 +496,41 @@ class RandMaxVar(MaxVar):

"""

def __init__(self, quantile_eps=.01, sampler='nuts', n_samples=50,
limit_faulty_init=10, sigma_proposals=None, *args, **opts):
def __init__(self, model, prior, quantile_eps=.01, sampler='nuts', n_samples=50, warmup=None,
limit_faulty_init=1000, init_from_prior=False, sigma_proposals=None, **opts):
"""Initialise RandMaxVar.

Parameters
----------
model : elfi.GPyRegression
Gaussian process model used to calculate the unnormalised approximate likelihood.
prior : scipy-like distribution
Prior distribution.
quantile_eps : float, optional
Quantile of the observed discrepancies used in setting the ABC threshold.
sampler : string, optional
Name of the sampler (options: metropolis, nuts).
n_samples : int, optional
Length of the sampler's chain for obtaining the acquisitions.
warmup : int, optional
Number of samples discarded as warmup. Defaults to n_samples/2.
limit_faulty_init : int, optional
Limit for the iterations used to obtain the sampler's initial points.
init_from_prior : bool, optional
Controls whether the sampler's initial points are sampled from the prior or
a uniform distribution within model bounds. Defaults to False, i.e. uniform
sampling within model bounds.
sigma_proposals : dict, optional
Standard deviations for Gaussian proposals of each parameter for Metropolis
Markov Chain sampler. Defaults to 1/10 of surrogate model bound lengths.

"""
super(RandMaxVar, self).__init__(quantile_eps, *args, **opts)
super(RandMaxVar, self).__init__(model, prior, quantile_eps, **opts)
self.name = 'rand_max_var'
self.name_sampler = sampler
self._n_samples = n_samples
self._warmup = warmup or n_samples // 2
self._limit_faulty_init = limit_faulty_init
self._init_from_prior = init_from_prior
if self.name_sampler == 'metropolis':
self._sigma_proposals = resolve_sigmas(self.model.parameter_names,
sigma_proposals,
Expand All @@ -538,8 +553,8 @@ def acquire(self, n, t=None):

"""
if n > self._n_samples:
raise ValueError(("The number of acquisitions ({0}) has to be lower "
"than the number of the samples ({1}).").format(n, self._n_samples))
raise ValueError(("The number of acquisitions ({0}) has to be lower than the number "
"of the samples ({1}).").format(n, self._n_samples - self._warmup))

logger.debug('Acquiring the next batch of %d values', n)
gp = self.model
Expand Down Expand Up @@ -568,9 +583,15 @@ def _evaluate_logpdf(theta):
raise SystemExit("Unable to find a suitable initial point.")

# Proposing the initial point.
theta_init = np.zeros(shape=len(gp.bounds))
for idx_param, range_bound in enumerate(gp.bounds):
theta_init[idx_param] = self.random_state.uniform(range_bound[0], range_bound[1])
if self._init_from_prior:
theta_init = self.prior.rvs(random_state=self.random_state)
for idx_param, bound in enumerate(gp.bounds):
theta_init[idx_param] = np.clip(theta_init[idx_param], bound[0], bound[1])

else:
theta_init = np.zeros(shape=len(gp.bounds))
for idx_param, bound in enumerate(gp.bounds):
theta_init[idx_param] = self.random_state.uniform(bound[0], bound[1])

# Refusing to accept a faulty initial point.
if np.isinf(_evaluate_logpdf(theta_init)):
Expand All @@ -593,8 +614,13 @@ def _evaluate_logpdf(theta):
raise ValueError(
"Incompatible sampler. Please check the options in the documentation.")

# Using the last n points of the MH chain for the acquisition batch.
batch_theta = samples[-n:, :]
if n > 1:
# Remove warmup samples and return n random points
samples = samples[self._warmup:]
batch_theta = self.random_state.permutation(samples)[:n]
else:
# Return the last point
batch_theta = samples[-1:]
break

return batch_theta
Expand Down Expand Up @@ -629,13 +655,17 @@ class ExpIntVar(MaxVar):

"""

def __init__(self, quantile_eps=.01, integration='grid', d_grid=.2,
def __init__(self, model, prior, quantile_eps=.01, integration='grid', d_grid=.2,
n_samples_imp=100, iter_imp=2, sampler='nuts', n_samples=2000,
sigma_proposals=None, *args, **opts):
sigma_proposals=None, **opts):
"""Initialise ExpIntVar.

Parameters
----------
model : elfi.GPyRegression
Gaussian process model used to calculate the unnormalised approximate likelihood.
prior : scipy-like distribution
Prior distribution.
quantile_eps : float, optional
Quantile of the observed discrepancies used in setting the discrepancy threshold.
integration : str, optional
Expand All @@ -661,7 +691,7 @@ def __init__(self, quantile_eps=.01, integration='grid', d_grid=.2,
Markov Chain sampler. Defaults to 1/10 of surrogate model bound lengths.

"""
super(ExpIntVar, self).__init__(quantile_eps, *args, **opts)
super(ExpIntVar, self).__init__(model, prior, quantile_eps, **opts)
self.name = 'exp_int_var'
self.label_fn = 'Expected Loss'
self._integration = integration
Expand Down
2 changes: 1 addition & 1 deletion elfi/methods/bo/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ def minimize(fun,
for i in range(n_start_points):
result = scipy.optimize.minimize(fun, start_points[i, :],
method=method, jac=grad,
bounds=bounds, constraints=constraints,
bounds=bounds, constraints=constraints,
options={'maxiter': maxiter})
locs.append(result['x'])
vals[i] = result['fun']
Expand Down
Loading