Commit
Bug fix of verbose output when the Pareto front is not known
Julian Blank committed Sep 27, 2019
1 parent 9b808e6 commit d6f35c9
Showing 83 changed files with 16,536 additions and 137 deletions.
9 changes: 8 additions & 1 deletion TODO
@@ -2,11 +2,18 @@
# TODO


- Algorithm workflow - initialize, run, finalize -- solve does everything

- Write a test suite for all algorithms (many problems are just run once - no crash should occur)

- Infill criteria class, with Mating inheriting from it

- Add global tests of all optimization methods
- Elementwise function evaluation: the gradient is not working yet

- Report None if neither infeasible nor feasible solutions are available

- 1+1 and archive




11 changes: 6 additions & 5 deletions pymoo/algorithms/genetic_algorithm.py
@@ -68,7 +68,7 @@ def __init__(self,
if self.n_offsprings is None:
self.n_offsprings = pop_size

# other run specific data updated whenever solve is called - to share them in all methods
# other run specific data updated whenever solve is called - to share them in all algorithms
self.n_gen = None
self.pop = None
self.off = None
@@ -84,7 +84,7 @@ def _initialize(self):
if isinstance(self.sampling, np.ndarray):
pop = pop.new("X", self.sampling)
else:
pop = self.sampling.do(self.problem, pop, self.pop_size, algorithm=self)
pop = self.sampling.do(self.problem, self.pop_size, pop=pop, algorithm=self)

# in case the initial population was not evaluated
if np.any(pop.collect(lambda ind: ind.F is None, as_numpy_array=True)):
@@ -147,9 +147,10 @@ def _mating(self, pop):
is_duplicate = self.eliminate_duplicates(_off, pop, off, algorithm=self)
_off = _off[np.logical_not(is_duplicate)]

# if more offsprings than necessary - truncate them
if len(_off) > self.n_offsprings - len(off):
I = np.random.permutation(self.n_offsprings - len(off))
# if more offsprings than necessary - truncate them randomly
if len(off) + len(_off) > self.n_offsprings:
n_remaining = self.n_offsprings - len(off)
I = np.random.permutation(len(_off))[:n_remaining]
_off = _off[I]

# add to the offsprings and increase the mating counter
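The hunk above also fixes the offspring truncation: the old code permuted only the first n_remaining indices, so any candidate offspring beyond that range could never be kept. A minimal sketch of the difference, with toy numbers that are not from this commit:

import numpy as np

n_offsprings, already_created = 10, 7        # room for only 3 more offspring
_off = np.arange(7)                          # stand-in for 7 candidate offspring
n_remaining = n_offsprings - already_created

# old: permutation(3) only shuffles indices 0..2, so candidates 3..6
# can never be selected
keep_old = np.random.permutation(n_remaining)

# new: shuffle all candidates, then keep the first 3, which draws a
# uniform random subset of the candidates
keep_new = np.random.permutation(len(_off))[:n_remaining]
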
2 changes: 1 addition & 1 deletion pymoo/algorithms/so_global_optimization.py
@@ -131,7 +131,7 @@ def _next(self):
if not current_best and (has_finished or too_close_to_others):
# find a suitable x0 which is far from the others or has good expectations
self.sampling.criterion = lambda X: vectorized_cdist(X, _X).min()
X = self.sampling.do(self.problem, Population(), self.n_initial_samples).get("X")
X = self.sampling.do(self.problem, self.n_initial_samples).get("X")

# distance in x space to other existing points
x_dist = vectorized_cdist(X, _X, func_dist=norm_euclidean_distance(self.problem)).min(axis=1)
40 changes: 10 additions & 30 deletions pymoo/decision_making/high_tradeoff.py
@@ -1,7 +1,7 @@
import numpy as np
from scipy.spatial import cKDTree

from pymoo.model.decision_making import DecisionMaking, normalize
from pymoo.model.decision_making import DecisionMaking, normalize, find_outliers_upper_tail, NeighborFinder


class HighTradeoffPoints(DecisionMaking):
@@ -11,52 +11,32 @@ def __init__(self, epsilon=0.125, **kwargs) -> None:
self.epsilon = epsilon

def _do(self, F, **kwargs):

n, m = F.shape

if self.normalize:
F = normalize(F, self.ideal_point, self.nadir_point, estimate_bounds_if_none=True)

tree = cKDTree(F)
neighbors_finder = NeighborFinder(F, epsilon=self.epsilon, n_min_neigbors="auto", consider_2d=False)

mu = np.full(n, - np.inf)

# for each solution in the set, calculate the least improvement per unit of deterioration
for i in range(n):

# for each neighbour in a specific radius of that solution
neighbours = tree.query_ball_point([F[i]], self.epsilon).tolist()[0]

# consider at least m+1 neighbours - if not found force it
if len(neighbours) < 2 * m + 1:
neighbours = tree.query([F[i]], k=m + 1)[1].tolist()[0]
neighbors = neighbors_finder.find(i)

# calculate the trade-off to all neighbours
diff = F[neighbours] - F[i]
diff = F[neighbors] - F[i]

# calculate sacrifice and gain
sacrifice = np.maximum(0, diff).sum(axis=1)
gain = np.maximum(0, -diff).sum(axis=1)

np.warnings.filterwarnings('ignore')
tradeoff = np.maximum(0, diff).sum(axis=1) / np.maximum(0, -diff).sum(axis=1)
tradeoff = sacrifice / gain

# take the smallest trade-off among the neighbors (ignoring NaNs)
mu[i] = np.nanmin(tradeoff)

# remove values that are nan
I = np.where(np.logical_not(np.isnan(mu)))[0]
mu = mu[I]

# calculate mean and sigma
mean, sigma = mu.mean(), mu.std()

# calculate the deviation in terms of sigma
deviation = (mu - mean) / sigma

# 2 * sigma is considered as an outlier
S = I[np.where(deviation >= 2)[0]]

if len(S) == 0 and deviation.max() > 1:
S = I[np.argmax(mu)]

if len(S) == 0:
return None
else:
return S
return find_outliers_upper_tail(mu)
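
The sacrifice/gain decomposition above measures, for every point, the least improvement obtained per unit of deterioration relative to its neighbors. A worked toy example for two minimization objectives (values are illustrative, not from the commit):

import numpy as np

F = np.array([[2.0, 13.0], [3.0, 11.0], [7.0, 9.0]])   # small toy front
i = 1                                                   # the middle point
diff = F - F[i]                 # [[-1, 2], [0, 0], [4, -2]]

sacrifice = np.maximum(0, diff).sum(axis=1)   # deterioration: [2, 0, 4]
gain = np.maximum(0, -diff).sum(axis=1)       # improvement:   [1, 0, 2]

# the point itself yields 0/0 = nan, which nanmin skips
with np.errstate(invalid='ignore'):
    tradeoff = sacrifice / gain               # [2.0, nan, 2.0]

mu_i = np.nanmin(tradeoff)                    # 2.0 units sacrificed per unit gained
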
47 changes: 47 additions & 0 deletions pymoo/decision_making/high_tradeoff_inverted.py
@@ -0,0 +1,47 @@
import numpy as np

from pymoo.model.decision_making import DecisionMaking, normalize, NeighborFinder, find_outliers_upper_tail


class HighTradeoffPointsInverted(DecisionMaking):

def __init__(self, **kwargs) -> None:
super().__init__(**kwargs)

def _do(self, F, **kwargs):

n, m = F.shape

if self.normalize:
F = normalize(F, self.ideal_point, self.nadir_point, estimate_bounds_if_none=True)

neighbors_finder = NeighborFinder(F, n_min_neigbors="auto")

mu = np.full(n, - np.inf)

# for each solution in the set, calculate the mean improvement per unit of deterioration
for i in range(n):

# neighbors to the current point
neighbors = neighbors_finder.find(i)

# calculate the trade-off to all neighbours
diff = F[neighbors] - F[i]

# calculate sacrifice and gain
sacrifice = np.maximum(0, diff).sum(axis=1)
gain = np.maximum(0, -diff).sum(axis=1)

np.warnings.filterwarnings('ignore')
tradeoff = sacrifice / gain

# take the mean trade-off over the neighbors (ignoring NaNs)
mu[i] = np.nanmean(tradeoff)

return find_outliers_upper_tail(mu)

if __name__ == '__main__':

F = np.array([[2, 13], [3, 11], [7, 9], [9, 4], [12, 3]])
#_, w = PseudoWeights(np.array([0.5, 0.5])).do(F, return_pseudo_weights=True)
HighTradeoffPointsInverted(normalize=False).do(F)
8 changes: 3 additions & 5 deletions pymoo/interface.py
@@ -8,7 +8,6 @@

import numpy as np

from pymoo.algorithms.nsga2 import NSGA2
from pymoo.model.algorithm import filter_optimum
from pymoo.model.individual import Individual
from pymoo.model.population import Population
@@ -24,13 +23,12 @@ def get_problem_func(n_var, xl, xu, type_var):
class P(Problem):
def __init__(self) -> None:
super().__init__(n_var=n_var, n_obj=1, n_constr=0, xl=xl, xu=xu, type_var=type_var)

return P


def sample(sampling, n_samples, n_var, xl=0, xu=1, type_var=np.double, **kwargs):
problem = get_problem_func(n_var, xl, xu, type_var)(**kwargs)
return sampling.do(problem, Population(), n_samples, **kwargs).get("X")
def sample(sampling, n_samples, n_var, xl=0, xu=1, **kwargs):
problem = get_problem_func(n_var, xl, xu, None)(**kwargs)
return sampling.do(problem, n_samples, pop=None, **kwargs)


def crossover(crossover, a, b, c=None, xl=0, xu=1, type_var=np.double, **kwargs):
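With this change, sample returns the raw design matrix directly, since pop=None is passed through to Sampling.do; the earlier .get("X") unwrapping is no longer needed. A minimal usage sketch (the LatinHypercubeSampling import path is taken from the file changed further below; its constructor defaults are assumed):

from pymoo.interface import sample
from pymoo.operators.sampling.latin_hypercube_sampling import LatinHypercubeSampling

# a plain (50, 3) numpy array scaled to [0, 1] - no Population wrapper
X = sample(LatinHypercubeSampling(), 50, 3, xl=0, xu=1)
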
2 changes: 1 addition & 1 deletion pymoo/model/algorithm.py
@@ -210,7 +210,7 @@ def _solve(self, problem):
# finalize the algorithm and do postprocessing if desired
self.finalize()

# method that is called each iteration to call some methods regularly
# method that is called each iteration to call some algorithms regularly
def _each_iteration(self, D, first=False, **kwargs):

# display the output if defined by the algorithm
91 changes: 90 additions & 1 deletion pymoo/model/decision_making.py
@@ -1,4 +1,5 @@
import numpy as np
from scipy.spatial.ckdtree import cKDTree


class DecisionMaking:
@@ -13,7 +14,7 @@ def do(self, F, *args, **kwargs):


def normalize(F, ideal_point=None, nadir_point=None, estimate_bounds_if_none=True, return_bounds=False):
N = np.copy(F)
N = F.astype(np.float)

if estimate_bounds_if_none:
if ideal_point is None:
@@ -43,3 +44,91 @@ def normalize(F, ideal_point=None, nadir_point=None, estimate_bounds_if_none=True, return_bounds=False):
return N, norm, ideal_point, nadir_point
else:
return N


class NeighborFinder:

def __init__(self, N,
epsilon=0.125,
n_neighbors=None,
n_min_neigbors=None,
consider_2d=True):

super().__init__()
self.N = N
self.consider_2d = consider_2d

_, n_dim = N.shape

# find at least 2 * n_dim neighbors - if enabled
if n_min_neigbors == "auto":
self.n_min_neigbors = 2 * n_dim

# disable the minimum neighbor variable
else:
self.n_min_neigbors = np.inf

# either choose epsilon
self.epsilon = epsilon

# if none choose the number of neighbors
self.n_neighbors = n_neighbors

if self.N.shape[1] == 1:
raise Exception("At least 2 objectives must be provided.")

elif self.consider_2d and self.N.shape[1] == 2:
self.min, self.max = N.min(), N.max()
self.rank = np.argsort(N[:, 0])
self.pos_in_rank = np.argsort(self.rank)

else:
self.tree = cKDTree(N)

def find(self, i):

if self.consider_2d and self.N.shape[1] == 2:
neighbours = []

pos = self.pos_in_rank[i]
if pos > 0:
neighbours.append(self.rank[pos - 1])
if pos < len(self.N) - 1:
neighbours.append(self.rank[pos + 1])

else:

# for each neighbour in a specific radius of that solution
if self.epsilon is not None:
neighbours = self.tree.query_ball_point([self.N[i]], self.epsilon).tolist()[0]
elif self.n_neighbors is not None:
neighbours = self.tree.query([self.N[i]], k=self.n_neighbors + 1)[1].tolist()[0]
else:
raise Exception("Either define epsilon or number of neighbors.")

# in case n_min_neigbors is enabled
if len(neighbours) < self.n_min_neigbors:
neighbours = self.tree.query([self.N[i]], k=self.n_min_neigbors + 1)[1].tolist()[0]

return neighbours


def find_outliers_upper_tail(mu):

# remove values that are nan
I = np.where(np.logical_and(np.logical_not(np.isnan(mu)), np.logical_not(np.isinf(mu))))[0]
mu = mu[I]

# calculate mean and sigma
mean, sigma = mu.mean(), mu.std()

# calculate the deviation in terms of sigma
deviation = (mu - mean) / sigma

# 2 * sigma is considered as an outlier
S = I[np.where(deviation >= 2)[0]]

if len(S) == 0 and deviation.max() > 1:
S = I[[np.argmax(mu)]]

return S if len(S) > 0 else None
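
find_outliers_upper_tail filters out NaN and inf entries, standardizes the remaining values, and returns the indices that lie at least two sigma above the mean, falling back to the argmax when its deviation exceeds one sigma. A quick sketch with made-up trade-off values:

import numpy as np
from pymoo.model.decision_making import find_outliers_upper_tail

# nine ordinary values, one clear outlier, one NaN that gets filtered out
mu = np.array([1.0, 1.1, 0.9, 1.05, 0.95, 1.0, 1.1, 0.9, 1.05, 6.0, np.nan])

S = find_outliers_upper_tail(mu)   # indices into mu, here array([9])
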
11 changes: 8 additions & 3 deletions pymoo/model/problem.py
@@ -146,9 +146,14 @@ def pareto_front(self, *args, use_cache=True, exception_if_failing=True, **kwargs):
"""
if not use_cache or self._pareto_front is None:
try:
self._pareto_front = at_least_2d_array(self._calc_pareto_front(*args, **kwargs))
self._ideal_point = np.min(self._pareto_front, axis=0)
self._nadir_point = np.max(self._pareto_front, axis=0)
pf = self._calc_pareto_front(*args, **kwargs)
if pf is not None:
self._pareto_front = at_least_2d_array(pf)
self._ideal_point = np.min(self._pareto_front, axis=0)
self._nadir_point = np.max(self._pareto_front, axis=0)
else:
self._pareto_front, self._ideal_point, self._nadir_point = None, None, None

except Exception as e:
if exception_if_failing:
raise e
Expand Down
14 changes: 13 additions & 1 deletion pymoo/model/sampling.py
@@ -1,5 +1,7 @@
from abc import abstractmethod

from pymoo.model.population import Population


class Sampling:
"""
@@ -9,12 +11,13 @@ class Sampling:
"""

def do(self, problem, pop, n_samples, **kwargs):
def do(self, problem, n_samples, pop=Population(), **kwargs):
"""
Sample new points with problem information if necessary.
Parameters
----------
problem : Problem
The problem for which the points should be sampled (provides lower and upper bounds, variable types: discrete, binary, ...).
@@ -24,13 +27,22 @@ def do(self, problem, pop, n_samples, **kwargs):
kwargs : dict
Any additional data that might be necessary, e.g. constants of the algorithm.
pop : Population
The population the sampling results are stored in; the template of the population can be changed.
If None, a plain numpy array is returned instead.
Returns
-------
X : np.ndarray
Sampled points in a two-dimensional array.
"""
val = self._do(problem, n_samples, **kwargs)

if pop is None:
return val

return pop.new("X", val)

@abstractmethod
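The new contract: _do produces the raw design matrix, and the base class either wraps it into the given Population template or hands it back unwrapped when pop=None. The mutable default pop=Population() appears safe here because new() returns a fresh population rather than mutating the template. A short sketch (the toy problem is illustrative; LatinHypercubeSampling defaults are assumed):

import numpy as np
from pymoo.model.problem import Problem
from pymoo.operators.sampling.latin_hypercube_sampling import LatinHypercubeSampling

class Sphere(Problem):

    def __init__(self):
        super().__init__(n_var=3, n_obj=1, n_constr=0, xl=0, xu=1)

    def _evaluate(self, X, out, *args, **kwargs):
        out["F"] = np.sum(X ** 2, axis=1, keepdims=True)

problem, sampling = Sphere(), LatinHypercubeSampling()

pop = sampling.do(problem, 100)             # default: a Population with "X" set
X = sampling.do(problem, 100, pop=None)     # the raw (100, 3) numpy array
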
4 changes: 2 additions & 2 deletions pymoo/operators/sampling/latin_hypercube_sampling.py
@@ -53,7 +53,7 @@ def _calc_score(self, X):
else:
raise Exception("Either provide a str or a function as a criterion!")

def do(self, problem, pop, n_samples, **kwargs):
def _do(self, problem, n_samples, **kwargs):

# sample for the first time -
X = self._sample(n_samples, problem.n_var)
@@ -76,4 +76,4 @@ def do(self, problem, pop, n_samples, **kwargs):
for i in range(problem.n_var):
X[:, i] = X[:, i] * (problem.xu[i] - problem.xl[i]) + problem.xl[i]

return pop.new("X", X)
return X