Showing 4 changed files with 654 additions and 0 deletions.
@@ -0,0 +1,135 @@
import json
import time
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from pymoo.algorithms.moo.dnsga2 import DNSGA2
from pymoo.core.callback import CallbackCollection, Callback
from pymoo.optimize import minimize
from pymoo.problems import get_problem
from pymoo.problems.dyn import TimeSimulation
from pymoo.termination import get_termination
from pymoo.indicators.igd import IGD
from pymoo.indicators.hv import Hypervolume
from statistics import mean
from pymoo.algorithms.moo.kgb import KGB

# Experimental Settings
n_var = 5
change_frequency = 10
change_severity = 1
pop_size = 100
max_n_gen = 30 * change_frequency
termination = get_termination("n_gen", max_n_gen)
problem_string = "df1"
verbose = False
seed = 1

# Metric Vars / Callbacks
po_gen = []
igds = []
hvs = []
pof = []
pos = []

def reset_metrics():
    global po_gen, igds, hvs, igds_monitor, hvs_monitor, pof, pos
    po_gen = []
    igds = []
    hvs = []
    igds_monitor = []
    hvs_monitor = []
    pof = []
    pos = []

def update_metrics(algorithm):

    # Evaluate the current non-dominated set against the problem's current true Pareto front
    _F = algorithm.opt.get("F")
    PF = algorithm.problem._calc_pareto_front()
    igd = IGD(PF).do(_F)
    hv = Hypervolume(pf=PF).do(_F)

    pos.append(algorithm.opt.get("X"))
    igds.append(igd)
    hvs.append(hv)

    po_gen.append(algorithm.opt)

    pof.append(PF)

class DefaultDynCallback(Callback):

    def _update(self, algorithm):

        update_metrics(algorithm)

# Function to run an algorithm and return the results
def run_algorithm(problem, algorithm, termination, seed, verbose):
    reset_metrics()
    # TimeSimulation advances the dynamic problem's time each generation;
    # DefaultDynCallback records IGD and hypervolume after every iteration.
    simulation = TimeSimulation()
    callback = CallbackCollection(DefaultDynCallback(), simulation)
    res = minimize(problem, algorithm, termination=termination, callback=callback, seed=seed, verbose=verbose)
    return res, igds, hvs

# Function to plot metrics on an axis
def plot_metrics(ax, data, ylabel, label=None):
    ax.set_xlabel("Generation")
    ax.set_ylabel(ylabel)
    ax.plot(data, label=label)

def main():
    # DNSGA2
    problem = get_problem(problem_string, taut=change_frequency, nt=change_severity, n_var=n_var)
    algorithm = DNSGA2(pop_size=pop_size)
    start = time.time()
    res, igds, hvs = run_algorithm(problem, algorithm, termination, seed, verbose)
    print("DNSGA2 Performance")
    print(f'Time: {time.time() - start}')
    print("MIGDS", mean(igds))
    print("MHV", mean(hvs))

    fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))
    plot_metrics(ax1, hvs, "Hypervolume", label="DNSGA2")
    plot_metrics(ax2, igds, "IGD", label="DNSGA2")

    # KGB-DMOEA
    problem = get_problem(problem_string, taut=change_frequency, nt=change_severity, n_var=n_var)
    algorithm = KGB(pop_size=pop_size, save_ps=True)
    start = time.time()
    res, igds, hvs = run_algorithm(problem, algorithm, termination, seed, verbose)

    print("KGB-DMOEA Performance")
    print(f'Time: {time.time() - start}')
    print("MIGDS", mean(igds))
    print("MHV", mean(hvs))

    plot_metrics(ax1, hvs, "Hypervolume", label="KGB-DMOEA")
    plot_metrics(ax2, igds, "IGD", label="KGB-DMOEA")

    # KGB-DMOEA with PS Init: load an archive of historical Pareto sets.
    # NOTE: 'ps.json' must already exist, e.g. exported from the save_ps data
    # of an earlier KGB run.
    with open('ps.json', 'r') as f:
        ps = json.load(f)

    problem = get_problem(problem_string, taut=change_frequency, nt=change_severity, n_var=n_var)
    algorithm = KGB(pop_size=pop_size, ps=ps, save_ps=True)
    start = time.time()
    res, igds, hvs = run_algorithm(problem, algorithm, termination, seed, verbose)

    print("KGB-DMOEA with PS Init Performance")
    print(f'Time: {time.time() - start}')
    print("MIGDS", mean(igds))
    print("MHV", mean(hvs))

    plot_metrics(ax1, hvs, "Hypervolume", label="KGB-DMOEA with PS Init")
    plot_metrics(ax2, igds, "IGD", label="KGB-DMOEA with PS Init")

    ax1.legend()
    ax2.legend()

    plt.tight_layout()
    plt.savefig('output_plot.png')

if __name__ == '__main__':
    main()
@@ -0,0 +1,75 @@
KGB-DMOEA (Knowledge-Guided Bayesian Dynamic Multi-Objective Evolutionary Algorithm)

Overview

KGB-DMOEA is an evolutionary algorithm for dynamic multi-objective optimization problems (DMOPs). It uses a knowledge-guided Bayesian classification approach to adapt to changing Pareto-optimal solutions in dynamic environments. The algorithm reuses past search experience, labelling it as beneficial or non-beneficial, to direct the search after each environmental change.

Key Features

• Knowledge Reconstruction-Examination (KRE): Dynamically re-evaluates historical optimal solutions based on their relevance and utility in the current environment.
• Bayesian Classification: Employs a Naive Bayesian Classifier to predict high-quality initial populations for new environments (see the sketch after this list).
• Adaptive Strategy: Incorporates dynamic parameter adjustment for consistent performance across varying dynamic contexts.
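
The classification step can be pictured with a minimal sketch. This is illustrative only: pymoo's KGB trains its own naive Bayesian classifier internally (see naive_bayesian_classifier below), and sklearn's GaussianNB plus the random data here are stand-ins rather than part of the library.

import numpy as np
from sklearn.naive_bayes import GaussianNB

# Hypothetical training data: historical decision vectors that the KRE step
# labelled as still useful (1) or no longer useful (0) in the new environment.
X_useful = np.random.rand(50, 5)
X_useless = np.random.rand(50, 5)
X_train = np.vstack([X_useful, X_useless])
y_train = np.hstack([np.ones(len(X_useful)), np.zeros(len(X_useless))])

clf = GaussianNB().fit(X_train, y_train)

# Score fresh candidates and keep those predicted useful as seeds for the
# initial population of the changed problem.
candidates = np.random.rand(200, 5)
seeds = candidates[clf.predict(candidates) == 1]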
Parameters

• perc_detect_change (float, optional): Proportion of the population used to detect environmental changes. Default: 0.1.
• perc_diversity (float, optional): Proportion of the population allocated for introducing diversity. Default: 0.3.
• c_size (int, optional): Cluster size. Default: 13.
• eps (float, optional): Threshold for detecting changes. Default: 0.0.
• ps (dict, optional): Record of historical Pareto sets (see the example after this list). Default: {}.
• pertub_dev (float, optional): Deviation for perturbation in diversity introduction. Default: 0.1.
• save_ps (bool, optional): Option to save Pareto set data. Default: False.
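
As a concrete illustration of the ps and save_ps parameters, the benchmark script in this commit warm-starts a run from a previously saved Pareto-set archive; the file name ps.json is that script's choice, not something KGB requires.

import json
from pymoo.algorithms.moo.kgb import KGB

# Load a historical Pareto-set archive saved from an earlier run and pass it
# to KGB so the new run starts from that knowledge base.
with open('ps.json', 'r') as f:
    ps = json.load(f)

algorithm = KGB(pop_size=100, ps=ps, save_ps=True)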
Methods

• __init__(**kwargs): Initializes the KGB-DMOEA algorithm with the provided parameters.
• knowledge_reconstruction_examination(): Implements the KRE strategy.
• naive_bayesian_classifier(pop_useful, pop_useless): Trains the Naive Bayesian Classifier using useful and useless populations.
• add_to_ps(): Incorporates the current Pareto-optimal set into the historical Pareto set.
• predicted_population(X_test, Y_test): Constructs a predicted population based on classifier outcomes.
• calculate_cluster_centroid(solution_cluster): Calculates the centroid of a specified solution cluster.
• check_boundaries(pop): Ensures all population solutions stay within the defined problem boundaries.
• random_strategy(N_r): Generates a random population within the bounds of the problem.
• diversify_population(pop): Introduces diversity into the population.
• _advance(**kwargs): Advances the optimization algorithm by one iteration.
Usage Example

from pymoo.algorithms.moo.kgb import KGB
from pymoo.optimize import minimize

# Define your problem
problem = ...

# Initialize KGB-DMOEA with specific parameters
algorithm = KGB(
    perc_detect_change=0.1,
    perc_diversity=0.3,
    c_size=13,
    eps=0.0,
    ps={},
    pertub_dev=0.1,
    save_ps=False,
)

# Execute the optimization
res = minimize(problem, algorithm, ...)
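
For an end-to-end run on a dynamic test problem, the following sketch mirrors the benchmark script in this commit; the DF1 problem, the 300-generation budget, and the other settings are that script's choices and can be adjusted freely.

from pymoo.algorithms.moo.kgb import KGB
from pymoo.optimize import minimize
from pymoo.problems import get_problem
from pymoo.problems.dyn import TimeSimulation
from pymoo.termination import get_termination

# DF1 with a change every 10 generations (taut=10) and change severity 1 (nt=1)
problem = get_problem("df1", taut=10, nt=1, n_var=5)
algorithm = KGB(pop_size=100, save_ps=True)

# TimeSimulation advances the problem's time each generation, which is what
# makes the problem change during the run.
res = minimize(problem,
               algorithm,
               termination=get_termination("n_gen", 300),
               callback=TimeSimulation(),
               seed=1,
               verbose=False)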
References

1. Yulong Ye, Lingjie Li, Qiuzhen Lin, Ka-Chun Wong, Jianqiang Li, Zhong Ming. "A knowledge guided Bayesian classification for dynamic multi-objective optimization". Knowledge-Based Systems, Volume 251, 2022.