Commit dcf1373

Merge branch 'dev' into Feature/arviz_diagnostics
2 parents: 1f22fd4 + 536dc8d

3 files changed: +47 -16 lines changed

CHANGELOG.rst (+1)

@@ -2,6 +2,7 @@ Changelog
 =========
 
 - Add convenience method for obtaining elfi samples as `InferenceData` to be used with `arviz`
+- Improve `randmaxvar` batch acquisitions and initialisation by enabling sampling from prior
 - Drop official Python support for 3.7 and 3.8 as GPy is not officially supported for these versions
 - Enable using `maxiter` in `bo.utils.minimize`
 - Fix surrogate model copy operation
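
The `InferenceData` entry above refers to a convenience method added outside this diff, so its exact API is not shown here. As a rough illustration of the target format only, arviz's own `from_dict` constructor can wrap plain sample arrays (the parameter names and shapes below are made up):

    import numpy as np
    import arviz as az

    # Hypothetical posterior draws for two parameters, shaped (chains, draws).
    rng = np.random.default_rng(0)
    posterior = {"t1": rng.normal(size=(1, 1000)),
                 "t2": rng.normal(size=(1, 1000))}

    # arviz.from_dict builds an InferenceData object usable with az.summary, az.plot_trace, etc.
    idata = az.from_dict(posterior=posterior)
    print(az.summary(idata))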

elfi/methods/bo/acquisition.py (+45 -15)

@@ -327,16 +327,20 @@ class MaxVar(AcquisitionBase):
 
     """
 
-    def __init__(self, quantile_eps=.01, *args, **opts):
+    def __init__(self, model, prior, quantile_eps=.01, **opts):
         """Initialise MaxVar.
 
         Parameters
         ----------
+        model : elfi.GPyRegression
+            Gaussian process model used to calculate the unnormalised approximate likelihood.
+        prior : scipy-like distribution
+            Prior distribution.
         quantile_eps : int, optional
             Quantile of the observed discrepancies used in setting the ABC threshold.
 
         """
-        super(MaxVar, self).__init__(*args, **opts)
+        super(MaxVar, self).__init__(model, prior=prior, **opts)
         self.name = 'max_var'
         self.label_fn = 'Variance of the Unnormalised Approximate Posterior'
         self.quantile_eps = quantile_eps
@@ -492,30 +496,41 @@ class RandMaxVar(MaxVar):
 
     """
 
-    def __init__(self, quantile_eps=.01, sampler='nuts', n_samples=50,
-                 limit_faulty_init=10, sigma_proposals=None, *args, **opts):
+    def __init__(self, model, prior, quantile_eps=.01, sampler='nuts', n_samples=50, warmup=None,
+                 limit_faulty_init=1000, init_from_prior=False, sigma_proposals=None, **opts):
         """Initialise RandMaxVar.
 
         Parameters
         ----------
+        model : elfi.GPyRegression
+            Gaussian process model used to calculate the unnormalised approximate likelihood.
+        prior : scipy-like distribution
+            Prior distribution.
         quantile_eps : int, optional
             Quantile of the observed discrepancies used in setting the ABC threshold.
         sampler : string, optional
             Name of the sampler (options: metropolis, nuts).
         n_samples : int, optional
             Length of the sampler's chain for obtaining the acquisitions.
+        warmup : int, optional
+            Number of samples discarded as warmup. Defaults to n_samples/2.
         limit_faulty_init : int, optional
             Limit for the iterations used to obtain the sampler's initial points.
+        init_from_prior : bool, optional
+            Controls whether the sampler's initial points are sampled from the prior or
+            a uniform distribution within model bounds. Defaults to model bounds.
         sigma_proposals : dict, optional
            Standard deviations for Gaussian proposals of each parameter for Metropolis
            Markov Chain sampler. Defaults to 1/10 of surrogate model bound lengths.
 
         """
-        super(RandMaxVar, self).__init__(quantile_eps, *args, **opts)
+        super(RandMaxVar, self).__init__(model, prior, quantile_eps, **opts)
        self.name = 'rand_max_var'
        self.name_sampler = sampler
        self._n_samples = n_samples
+        self._warmup = warmup or n_samples // 2
        self._limit_faulty_init = limit_faulty_init
+        self._init_from_prior = init_from_prior
        if self.name_sampler == 'metropolis':
            self._sigma_proposals = resolve_sigmas(self.model.parameter_names,
                                                   sigma_proposals,
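
With the updated signature above, the surrogate model and the prior are passed explicitly instead of via `*args`. A minimal usage sketch, assuming a fitted `elfi.GPyRegression` surrogate bound to the name `target_model` and a scipy-like `prior` object (both names are placeholders, not part of this diff):

    from elfi.methods.bo.acquisition import RandMaxVar

    # target_model: fitted elfi.GPyRegression surrogate; prior: scipy-like distribution with .rvs().
    acq = RandMaxVar(target_model, prior,
                     quantile_eps=0.01,
                     sampler='nuts',
                     n_samples=50,          # chain length; warmup defaults to n_samples // 2
                     init_from_prior=True)  # start the chain from a prior draw clipped to the bounds

    # Request a batch of 3 acquisition points from the post-warmup part of the chain.
    batch_theta = acq.acquire(3)
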
@@ -538,8 +553,8 @@ def acquire(self, n, t=None):
 
         """
         if n > self._n_samples:
-            raise ValueError(("The number of acquisitions ({0}) has to be lower "
-                              "than the number of the samples ({1}).").format(n, self._n_samples))
+            raise ValueError(("The number of acquisitions ({0}) has to be lower than the number "
+                              "of the samples ({1}).").format(n, self._n_samples - self._warmup))
 
         logger.debug('Acquiring the next batch of %d values', n)
         gp = self.model
@@ -568,9 +583,15 @@ def _evaluate_logpdf(theta):
                 raise SystemExit("Unable to find a suitable initial point.")
 
             # Proposing the initial point.
-            theta_init = np.zeros(shape=len(gp.bounds))
-            for idx_param, range_bound in enumerate(gp.bounds):
-                theta_init[idx_param] = self.random_state.uniform(range_bound[0], range_bound[1])
+            if self._init_from_prior:
+                theta_init = self.prior.rvs(random_state=self.random_state)
+                for idx_param, bound in enumerate(gp.bounds):
+                    theta_init[idx_param] = np.clip(theta_init[idx_param], bound[0], bound[1])
+
+            else:
+                theta_init = np.zeros(shape=len(gp.bounds))
+                for idx_param, bound in enumerate(gp.bounds):
+                    theta_init[idx_param] = self.random_state.uniform(bound[0], bound[1])
 
             # Refusing to accept a faulty initial point.
             if np.isinf(_evaluate_logpdf(theta_init)):
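
The added branch draws the chain's starting point from the prior and clips it into the surrogate's bounds, falling back to the old uniform draw otherwise. The same logic in standalone form, with plain scipy/numpy stand-ins for `prior`, `gp.bounds` and `random_state` (all illustrative):

    import numpy as np
    import scipy.stats as ss

    random_state = np.random.RandomState(0)
    bounds = [(-2.0, 2.0), (0.0, 5.0)]               # one (low, high) pair per parameter
    prior = ss.multivariate_normal(mean=[0.0, 2.0])  # scipy-like prior exposing .rvs()
    init_from_prior = True

    if init_from_prior:
        # Draw from the prior, then clip each coordinate into the model bounds.
        theta_init = np.atleast_1d(prior.rvs(random_state=random_state))
        for idx_param, bound in enumerate(bounds):
            theta_init[idx_param] = np.clip(theta_init[idx_param], bound[0], bound[1])
    else:
        # Previous behaviour: a uniform draw within the bounds.
        theta_init = np.array([random_state.uniform(low, high) for low, high in bounds])
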
@@ -593,8 +614,13 @@ def _evaluate_logpdf(theta):
                 raise ValueError(
                     "Incompatible sampler. Please check the options in the documentation.")
 
-            # Using the last n points of the MH chain for the acquisition batch.
-            batch_theta = samples[-n:, :]
+            if n > 1:
+                # Remove warmup samples and return n random points
+                samples = samples[self._warmup:]
+                batch_theta = self.random_state.permutation(samples)[:n]
+            else:
+                # Return the last point
+                batch_theta = samples[-1:]
             break
 
         return batch_theta
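
Where the old code always took the last n points of the chain, the new code drops the warmup draws and picks n points at random from the remainder (and keeps the single-point case as before). The same selection in isolation, with illustrative shapes:

    import numpy as np

    random_state = np.random.RandomState(0)
    n_samples, warmup, n = 50, 25, 3

    # Stand-in for the sampler's chain: n_samples draws of a 2-dimensional parameter.
    samples = random_state.normal(size=(n_samples, 2))

    if n > 1:
        # Drop warmup draws, then take n rows at random (permutation shuffles whole rows).
        post_warmup = samples[warmup:]
        batch_theta = random_state.permutation(post_warmup)[:n]
    else:
        # A single acquisition still uses the last point of the chain.
        batch_theta = samples[-1:]

    print(batch_theta.shape)  # (3, 2)
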
@@ -629,13 +655,17 @@ class ExpIntVar(MaxVar):
 
     """
 
-    def __init__(self, quantile_eps=.01, integration='grid', d_grid=.2,
+    def __init__(self, model, prior, quantile_eps=.01, integration='grid', d_grid=.2,
                  n_samples_imp=100, iter_imp=2, sampler='nuts', n_samples=2000,
-                 sigma_proposals=None, *args, **opts):
+                 sigma_proposals=None, **opts):
         """Initialise ExpIntVar.
 
         Parameters
         ----------
+        model : elfi.GPyRegression
+            Gaussian process model used to calculate the approximate unnormalised likelihood.
+        prior : scipy-like distribution
+            Prior distribution.
         quantile_eps : int, optional
             Quantile of the observed discrepancies used in setting the discrepancy threshold.
         integration : str, optional
@@ -661,7 +691,7 @@ def __init__(self, quantile_eps=.01, integration='grid', d_grid=.2,
             Markov Chain sampler. Defaults to 1/10 of surrogate model bound lengths.
 
         """
-        super(ExpIntVar, self).__init__(quantile_eps, *args, **opts)
+        super(ExpIntVar, self).__init__(model, prior, quantile_eps, **opts)
         self.name = 'exp_int_var'
         self.label_fn = 'Expected Loss'
         self._integration = integration

elfi/methods/bo/utils.py (+1 -1)

@@ -97,7 +97,7 @@ def minimize(fun,
     for i in range(n_start_points):
         result = scipy.optimize.minimize(fun, start_points[i, :],
                                          method=method, jac=grad,
-                                         bounds=bounds, constraints=constraints,
+                                         bounds=bounds, constraints=constraints,
                                          options={'maxiter': maxiter})
         locs.append(result['x'])
         vals[i] = result['fun']
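
The change above appears to be whitespace-only, but it sits in the multi-start loop that forwards `maxiter` to scipy (see the changelog entry). A self-contained sketch of that multi-start pattern, using scipy directly with a toy objective and made-up bounds:

    import numpy as np
    import scipy.optimize

    def fun(x):
        # Toy objective: a shifted quadratic bowl with its minimum at (0.5, 0.5).
        return np.sum((x - 0.5) ** 2)

    bounds = [(-1.0, 1.0), (-1.0, 1.0)]
    n_start_points, maxiter = 5, 100
    rng = np.random.RandomState(0)
    start_points = rng.uniform([b[0] for b in bounds], [b[1] for b in bounds],
                               size=(n_start_points, len(bounds)))

    locs, vals = [], np.empty(n_start_points)
    for i in range(n_start_points):
        result = scipy.optimize.minimize(fun, start_points[i, :],
                                         method='L-BFGS-B', bounds=bounds,
                                         options={'maxiter': maxiter})
        locs.append(result['x'])
        vals[i] = result['fun']

    # Keep the best of the restarts, mirroring the loop shown in the hunk above.
    x_min = locs[int(np.argmin(vals))]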
