From 013aea43347afabe20bb4dda7d827ec1b4594d0a Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Wed, 2 Jul 2025 18:00:02 +0000 Subject: [PATCH 01/18] initial pygad optimizer --- pyproject.toml | 1 + src/optimagic/algorithms.py | 33 +++ src/optimagic/config.py | 8 + src/optimagic/optimizers/pygad_optimizer.py | 210 ++++++++++++++++++++ 4 files changed, 252 insertions(+) create mode 100644 src/optimagic/optimizers/pygad_optimizer.py diff --git a/pyproject.toml b/pyproject.toml index ce6707e6e..46c9eea56 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -381,5 +381,6 @@ module = [ "pdbp", "iminuit", "nevergrad", + "pygad", ] ignore_missing_imports = true diff --git a/src/optimagic/algorithms.py b/src/optimagic/algorithms.py index 588514e95..69a1f2745 100644 --- a/src/optimagic/algorithms.py +++ b/src/optimagic/algorithms.py @@ -38,6 +38,7 @@ NloptVAR, ) from optimagic.optimizers.pounders import Pounders +from optimagic.optimizers.pygad_optimizer import Pygad from optimagic.optimizers.pygmo_optimizers import ( PygmoBeeColony, PygmoCmaes, @@ -173,6 +174,7 @@ def Scalar( @dataclass(frozen=True) class BoundedGlobalGradientFreeParallelScalarAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -371,6 +373,7 @@ class BoundedGlobalGradientFreeScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -407,6 +410,7 @@ def Parallel(self) -> BoundedGlobalGradientFreeParallelScalarAlgorithms: @dataclass(frozen=True) class BoundedGlobalGradientFreeParallelAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -463,6 +467,7 @@ def Scalar(self) -> GlobalGradientFreeNonlinearConstrainedParallelScalarAlgorith @dataclass(frozen=True) class GlobalGradientFreeParallelScalarAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -611,6 +616,7 @@ def Scalar(self) -> BoundedGradientFreeNonlinearConstrainedParallelScalarAlgorit @dataclass(frozen=True) class BoundedGradientFreeParallelScalarAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -706,6 +712,7 @@ def Scalar(self) -> BoundedGlobalNonlinearConstrainedParallelScalarAlgorithms: @dataclass(frozen=True) class BoundedGlobalParallelScalarAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1037,6 +1044,7 @@ class BoundedGlobalGradientFreeAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect 
nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1101,6 +1109,7 @@ class GlobalGradientFreeScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1141,6 +1150,7 @@ def Parallel(self) -> GlobalGradientFreeParallelScalarAlgorithms: @dataclass(frozen=True) class GlobalGradientFreeParallelAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1316,6 +1326,7 @@ class BoundedGradientFreeScalarAlgorithms(AlgoSelection): nlopt_newuoa: Type[NloptNEWUOA] = NloptNEWUOA nlopt_neldermead: Type[NloptNelderMead] = NloptNelderMead nlopt_sbplx: Type[NloptSbplx] = NloptSbplx + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1380,6 +1391,7 @@ def Parallel(self) -> BoundedGradientFreeLeastSquaresParallelAlgorithms: class BoundedGradientFreeParallelAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1461,6 +1473,7 @@ def Scalar(self) -> GradientFreeNonlinearConstrainedParallelScalarAlgorithms: class GradientFreeParallelScalarAlgorithms(AlgoSelection): neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1534,6 +1547,7 @@ class BoundedGlobalScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -1579,6 +1593,7 @@ def Parallel(self) -> BoundedGlobalParallelScalarAlgorithms: @dataclass(frozen=True) class BoundedGlobalParallelAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1648,6 +1663,7 @@ def Scalar(self) -> GlobalNonlinearConstrainedParallelScalarAlgorithms: @dataclass(frozen=True) class GlobalParallelScalarAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -1883,6 +1899,7 @@ def Scalar(self) -> 
BoundedNonlinearConstrainedParallelScalarAlgorithms: @dataclass(frozen=True) class BoundedParallelScalarAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -2146,6 +2163,7 @@ class GlobalGradientFreeAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2240,6 +2258,7 @@ class BoundedGradientFreeAlgorithms(AlgoSelection): nlopt_neldermead: Type[NloptNelderMead] = NloptNelderMead nlopt_sbplx: Type[NloptSbplx] = NloptSbplx pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2337,6 +2356,7 @@ class GradientFreeScalarAlgorithms(AlgoSelection): nlopt_neldermead: Type[NloptNelderMead] = NloptNelderMead nlopt_praxis: Type[NloptPRAXIS] = NloptPRAXIS nlopt_sbplx: Type[NloptSbplx] = NloptSbplx + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2409,6 +2429,7 @@ class GradientFreeParallelAlgorithms(AlgoSelection): neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel nevergrad_pso: Type[NevergradPSO] = NevergradPSO pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -2452,6 +2473,7 @@ class BoundedGlobalAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2534,6 +2556,7 @@ class GlobalScalarAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2583,6 +2606,7 @@ def Parallel(self) -> GlobalParallelScalarAlgorithms: @dataclass(frozen=True) class GlobalParallelAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -2863,6 +2887,7 @@ class BoundedScalarAlgorithms(AlgoSelection): nlopt_sbplx: Type[NloptSbplx] = NloptSbplx nlopt_tnewton: Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -2950,6 +2975,7 @@ def Parallel(self) -> 
BoundedLeastSquaresParallelAlgorithms: class BoundedParallelAlgorithms(AlgoSelection): nevergrad_pso: Type[NevergradPSO] = NevergradPSO pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -3051,6 +3077,7 @@ def Scalar(self) -> NonlinearConstrainedParallelScalarAlgorithms: class ParallelScalarAlgorithms(AlgoSelection): neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel nevergrad_pso: Type[NevergradPSO] = NevergradPSO + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -3170,6 +3197,7 @@ class GradientFreeAlgorithms(AlgoSelection): nlopt_praxis: Type[NloptPRAXIS] = NloptPRAXIS nlopt_sbplx: Type[NloptSbplx] = NloptSbplx pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3234,6 +3262,7 @@ class GlobalAlgorithms(AlgoSelection): nlopt_direct: Type[NloptDirect] = NloptDirect nlopt_esch: Type[NloptESCH] = NloptESCH nlopt_isres: Type[NloptISRES] = NloptISRES + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3380,6 +3409,7 @@ class BoundedAlgorithms(AlgoSelection): nlopt_tnewton: Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3517,6 +3547,7 @@ class ScalarAlgorithms(AlgoSelection): nlopt_sbplx: Type[NloptSbplx] = NloptSbplx nlopt_tnewton: Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch @@ -3631,6 +3662,7 @@ class ParallelAlgorithms(AlgoSelection): neldermead_parallel: Type[NelderMeadParallel] = NelderMeadParallel nevergrad_pso: Type[NevergradPSO] = NevergradPSO pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_gaco: Type[PygmoGaco] = PygmoGaco pygmo_pso_gen: Type[PygmoPsoGen] = PygmoPsoGen scipy_brute: Type[ScipyBrute] = ScipyBrute @@ -3696,6 +3728,7 @@ class Algorithms(AlgoSelection): nlopt_tnewton: Type[NloptTNewton] = NloptTNewton nlopt_var: Type[NloptVAR] = NloptVAR pounders: Type[Pounders] = Pounders + pygad: Type[Pygad] = Pygad pygmo_bee_colony: Type[PygmoBeeColony] = PygmoBeeColony pygmo_cmaes: Type[PygmoCmaes] = PygmoCmaes pygmo_compass_search: Type[PygmoCompassSearch] = PygmoCompassSearch diff --git a/src/optimagic/config.py b/src/optimagic/config.py index 643a6f663..13ce4fee9 100644 --- a/src/optimagic/config.py +++ b/src/optimagic/config.py @@ -108,6 +108,14 @@ IS_NEVERGRAD_INSTALLED = True +try: + import pygad # noqa: F401 +except ImportError: + IS_PYGAD_INSTALLED = False +else: + IS_PYGAD_INSTALLED = True + + # ====================================================================================== # Check if pandas version is newer or equal to version 2.1.0 # 
====================================================================================== diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py new file mode 100644 index 000000000..dc1a6fbfd --- /dev/null +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -0,0 +1,210 @@ +from dataclasses import dataclass +from typing import Any, Literal, Union + +import numpy as np +from numpy.typing import NDArray + +from optimagic import mark +from optimagic.config import IS_PYGAD_INSTALLED +from optimagic.exceptions import NotInstalledError +from optimagic.optimization.algo_options import get_population_size +from optimagic.optimization.algorithm import Algorithm, InternalOptimizeResult +from optimagic.optimization.internal_optimization_problem import ( + InternalOptimizationProblem, +) +from optimagic.typing import ( + AggregationLevel, + NonNegativeFloat, + PositiveFloat, + PositiveInt, +) + +if IS_PYGAD_INSTALLED: + import pygad + + +@mark.minimizer( + name="pygad", + solver_type=AggregationLevel.SCALAR, + is_available=IS_PYGAD_INSTALLED, + is_global=True, + needs_jac=False, + needs_hess=False, + supports_parallelism=True, + supports_bounds=True, + supports_linear_constraints=False, + supports_nonlinear_constraints=False, + disable_history=False, +) +@dataclass(frozen=True) +class Pygad(Algorithm): + population_size: PositiveInt | None = None + num_parents_mating: PositiveInt = 10 + num_generations: PositiveInt = 100 + + initial_population: NDArray[np.float64] | list[list[float]] | None = None + gene_type: ( + type[int] + | type[float] + | type[np.int8] + | type[np.int16] + | type[np.int32] + | type[np.int64] + | type[np.uint] + | type[np.uint8] + | type[np.uint16] + | type[np.uint32] + | type[np.uint64] + | type[np.float16] + | type[np.float32] + | type[np.float64] + | list[type] + | list[list[type | None]] + ) = float + + parent_selection_type: Literal[ + "sss", "rws", "sus", "rank", "random", "tournament" + ] = "sss" + keep_parents: int = -1 + keep_elitism: PositiveInt = 1 + K_tournament: PositiveInt = 3 + + crossover_type: ( + Literal["single_point", "two_points", "uniform", "scattered"] | None + ) = "single_point" + crossover_probability: NonNegativeFloat | None = None + + mutation_type: ( + Literal["random", "swap", "inversion", "scramble", "adaptive"] | None + ) = "random" + mutation_probability: ( + NonNegativeFloat + | list[NonNegativeFloat] + | tuple[NonNegativeFloat, NonNegativeFloat] + | NDArray[np.float64] + | None + ) = None + mutation_percent_genes: ( + PositiveFloat + | str + | list[PositiveFloat] + | tuple[PositiveFloat, PositiveFloat] + | NDArray[np.float64] + ) = "default" + mutation_num_genes: ( + PositiveInt + | list[PositiveInt] + | tuple[PositiveInt, PositiveInt] + | NDArray[np.int_] + | None + ) = None + mutation_by_replacement: bool = False + random_mutation_min_val: float | list[float] | NDArray[np.float64] = -1.0 + random_mutation_max_val: float | list[float] | NDArray[np.float64] = 1.0 + + allow_duplicate_genes: bool = True + + fitness_batch_size: PositiveInt | None = None + save_best_solutions: bool = False + save_solutions: bool = False + stop_criteria: str | list[str] | None = None + + random_seed: int | None = None + parallel_processing: ( + int + | tuple[Literal["process", "thread"], int | None] + | list[Union[Literal["process", "thread"], int | None]] + | None + ) = None + suppress_warnings: bool = True + + def _solve_internal_problem( + self, problem: InternalOptimizationProblem, x0: NDArray[np.float64] + ) -> 
InternalOptimizeResult: + if not IS_PYGAD_INSTALLED: + raise NotInstalledError( + "The 'pygad_pygad' algorithm requires the pygad package to be " + "installed. You can install it with 'pip install pygad'." + ) + + if ( + problem.bounds.lower is None + or problem.bounds.upper is None + or not np.isfinite(problem.bounds.lower).all() + or not np.isfinite(problem.bounds.upper).all() + ): + raise ValueError("pygad_pygad requires finite bounds for all parameters.") + + def fitness_func( + ga_instance: Any, solution: NDArray[np.float64], solution_idx: int + ) -> float: + return -float(problem.fun(solution)) + + if self.initial_population is not None: + initial_population = self.initial_population + population_size = len(initial_population) + else: + population_size = get_population_size( + population_size=self.population_size, x=x0 + ) + initial_population = np.random.uniform( + low=problem.bounds.lower, + high=problem.bounds.upper, + size=(population_size, len(x0)), + ) + initial_population[0] = x0 + + gene_space = [ + {"low": problem.bounds.lower[i], "high": problem.bounds.upper[i]} + for i in range(len(x0)) + ] + + ga_instance = pygad.GA( + num_generations=self.num_generations, + num_parents_mating=self.num_parents_mating, + fitness_func=fitness_func, + sol_per_pop=population_size, + num_genes=len(x0), + initial_population=initial_population, + init_range_low=problem.bounds.lower, + init_range_high=problem.bounds.upper, + gene_space=gene_space, + gene_type=self.gene_type, + parent_selection_type=self.parent_selection_type, + keep_parents=self.keep_parents, + keep_elitism=self.keep_elitism, + K_tournament=self.K_tournament, + crossover_type=self.crossover_type, + crossover_probability=self.crossover_probability, + mutation_type=self.mutation_type, + mutation_probability=self.mutation_probability, + mutation_by_replacement=self.mutation_by_replacement, + mutation_percent_genes=self.mutation_percent_genes, + mutation_num_genes=self.mutation_num_genes, + random_mutation_min_val=self.random_mutation_min_val, + random_mutation_max_val=self.random_mutation_max_val, + allow_duplicate_genes=self.allow_duplicate_genes, + save_best_solutions=self.save_best_solutions, + save_solutions=self.save_solutions, + suppress_warnings=self.suppress_warnings, + stop_criteria=self.stop_criteria, + parallel_processing=self.parallel_processing, + random_seed=self.random_seed, + ) + + ga_instance.run() + + solution, solution_fitness, solution_idx = ga_instance.best_solution() + + res = InternalOptimizeResult( + x=solution, + fun=-solution_fitness, + success=True, + message=( + f"Optimization terminated successfully after " + f"{ga_instance.generations_completed} generations." 
+ ), + n_fun_evals=ga_instance.generations_completed * population_size, + ) + + return res From 217a73802ac47a542edf7a6f1bf6b24587716272 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 10:29:41 +0000 Subject: [PATCH 02/18] add pygad to environment --- .tools/envs/testenv-linux.yml | 1 + .tools/envs/testenv-numpy.yml | 1 + .tools/envs/testenv-others.yml | 1 + .tools/envs/testenv-pandas.yml | 1 + environment.yml | 1 + 5 files changed, 5 insertions(+) diff --git a/.tools/envs/testenv-linux.yml b/.tools/envs/testenv-linux.yml index ec4b969f9..8f15a402c 100644 --- a/.tools/envs/testenv-linux.yml +++ b/.tools/envs/testenv-linux.yml @@ -28,6 +28,7 @@ dependencies: - jinja2 # dev, tests - annotated-types # dev, tests - iminuit # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - nevergrad # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/.tools/envs/testenv-numpy.yml b/.tools/envs/testenv-numpy.yml index 9f9fa7d0f..bd0b8710d 100644 --- a/.tools/envs/testenv-numpy.yml +++ b/.tools/envs/testenv-numpy.yml @@ -26,6 +26,7 @@ dependencies: - jinja2 # dev, tests - annotated-types # dev, tests - iminuit # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - nevergrad # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/.tools/envs/testenv-others.yml b/.tools/envs/testenv-others.yml index ce9490b7f..982bd14cb 100644 --- a/.tools/envs/testenv-others.yml +++ b/.tools/envs/testenv-others.yml @@ -26,6 +26,7 @@ dependencies: - jinja2 # dev, tests - annotated-types # dev, tests - iminuit # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - nevergrad # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/.tools/envs/testenv-pandas.yml b/.tools/envs/testenv-pandas.yml index 7b342240b..da95ae87f 100644 --- a/.tools/envs/testenv-pandas.yml +++ b/.tools/envs/testenv-pandas.yml @@ -26,6 +26,7 @@ dependencies: - jinja2 # dev, tests - annotated-types # dev, tests - iminuit # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - nevergrad # dev, tests - DFO-LS>=1.5.3 # dev, tests diff --git a/environment.yml b/environment.yml index 80435b8d7..b733472cc 100644 --- a/environment.yml +++ b/environment.yml @@ -38,6 +38,7 @@ dependencies: - furo # dev, docs - annotated-types # dev, tests - iminuit # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - nevergrad # dev, tests - DFO-LS>=1.5.3 # dev, tests From c07ae373eecb3f1b16f950ece750e9e689a0270b Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 20:24:28 +0000 Subject: [PATCH 03/18] remove unused parameter --- src/optimagic/optimizers/pygad_optimizer.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index dc1a6fbfd..ae62652f7 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -164,10 +164,7 @@ def fitness_func( num_parents_mating=self.num_parents_mating, fitness_func=fitness_func, sol_per_pop=population_size, - num_genes=len(x0), initial_population=initial_population, - init_range_low=problem.bounds.lower, - init_range_high=problem.bounds.upper, gene_space=gene_space, gene_type=self.gene_type, parent_selection_type=self.parent_selection_type, From 9861eb8c47b791f56afd080f8559fd4bbd289c81 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 20:42:48 +0000 Subject: [PATCH 04/18] batch 
evaluator for fitness function --- src/optimagic/optimizers/pygad_optimizer.py | 24 +++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index ae62652f7..8d2ef54ac 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -135,10 +135,26 @@ def _solve_internal_problem( ): raise ValueError("pygad_pygad requires finite bounds for all parameters.") - def fitness_func( - ga_instance: Any, solution: NDArray[np.float64], solution_idx: int - ) -> float: - return -float(problem.fun(solution)) + if self.fitness_batch_size is not None and self.fitness_batch_size > 1: + + def fitness_function( + _ga_instance: Any, + batch_solutions: NDArray[np.float64], + _batch_indices: list[int] | NDArray[np.int_], + ) -> list[float]: + solution_list = [ + batch_solutions[i] for i in range(batch_solutions.shape[0]) + ] + + batch_results = problem.batch_fun(solution_list, n_cores=1) + + return [-float(result) for result in batch_results] + else: + + def fitness_function( + _ga_instance: Any, solution: NDArray[np.float64], _solution_idx: int + ) -> float: + return -float(problem.fun(solution)) if self.initial_population is not None: initial_population = self.initial_population From 80e6caa57996d0d6044b64d52c1105806bf03f6c Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 20:44:16 +0000 Subject: [PATCH 05/18] remove save parameters from pygad --- src/optimagic/optimizers/pygad_optimizer.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index 8d2ef54ac..08e3a819b 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -105,8 +105,6 @@ class Pygad(Algorithm): allow_duplicate_genes: bool = True fitness_batch_size: PositiveInt | None = None - save_best_solutions: bool = False - save_solutions: bool = False stop_criteria: str | list[str] | None = None random_seed: int | None = None @@ -197,8 +195,6 @@ def fitness_function( random_mutation_min_val=self.random_mutation_min_val, random_mutation_max_val=self.random_mutation_max_val, allow_duplicate_genes=self.allow_duplicate_genes, - save_best_solutions=self.save_best_solutions, - save_solutions=self.save_solutions, suppress_warnings=self.suppress_warnings, stop_criteria=self.stop_criteria, parallel_processing=self.parallel_processing, From 1eb2ba8071de8bd38598b606b894c0d8b96c0462 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 20:59:29 +0000 Subject: [PATCH 06/18] remove gene_type --- src/optimagic/optimizers/pygad_optimizer.py | 21 +-------------------- 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index 08e3a819b..a54f0c71f 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -43,24 +43,6 @@ class Pygad(Algorithm): num_generations: PositiveInt = 100 initial_population: NDArray[np.float64] | list[list[float]] | None = None - gene_type: ( - type[int] - | type[float] - | type[np.int8] - | type[np.int16] - | type[np.int32] - | type[np.int64] - | type[np.uint] - | type[np.uint8] - | type[np.uint16] - | type[np.uint32] - | type[np.uint64] - | type[np.float16] - | type[np.float32] - 
| type[np.float64] - | list[type] - | list[list[type | None]] - ) = float parent_selection_type: Literal[ "sss", "rws", "sus", "rank", "random", "tournament" @@ -176,11 +158,10 @@ def fitness_function( ga_instance = pygad.GA( num_generations=self.num_generations, num_parents_mating=self.num_parents_mating, - fitness_func=fitness_func, + fitness_func=fitness_function, sol_per_pop=population_size, initial_population=initial_population, gene_space=gene_space, - gene_type=self.gene_type, parent_selection_type=self.parent_selection_type, keep_parents=self.keep_parents, keep_elitism=self.keep_elitism, From b70589fa7c797ad5eede8c3ce952a9f6d6990dd7 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 21:33:21 +0000 Subject: [PATCH 07/18] fix batch processing --- src/optimagic/optimizers/pygad_optimizer.py | 73 +++++++++++++-------- 1 file changed, 47 insertions(+), 26 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index a54f0c71f..6f935e1dc 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -1,5 +1,6 @@ +import warnings from dataclasses import dataclass -from typing import Any, Literal, Union +from typing import Any, Literal import numpy as np from numpy.typing import NDArray @@ -39,8 +40,8 @@ @dataclass(frozen=True) class Pygad(Algorithm): population_size: PositiveInt | None = None - num_parents_mating: PositiveInt = 10 - num_generations: PositiveInt = 100 + num_parents_mating: PositiveInt | None = None + num_generations: PositiveInt | None = None initial_population: NDArray[np.float64] | list[list[float]] | None = None @@ -89,14 +90,8 @@ class Pygad(Algorithm): fitness_batch_size: PositiveInt | None = None stop_criteria: str | list[str] | None = None + n_cores: PositiveInt = 1 random_seed: int | None = None - parallel_processing: ( - int - | tuple[Literal["process", "thread"], int | None] - | list[Union[Literal["process", "thread"], int | None]] - | None - ) = None - suppress_warnings: bool = True def _solve_internal_problem( self, problem: InternalOptimizationProblem, x0: NDArray[np.float64] @@ -115,39 +110,49 @@ def _solve_internal_problem( ): raise ValueError("pygad_pygad requires finite bounds for all parameters.") - if self.fitness_batch_size is not None and self.fitness_batch_size > 1: + # Determine effective fitness_batch_size for parallel processing + effective_fitness_batch_size = determine_effective_batch_size( + self.fitness_batch_size, self.n_cores + ) + if ( + effective_fitness_batch_size is not None + and effective_fitness_batch_size > 1 + and self.n_cores > 1 + ): def fitness_function( _ga_instance: Any, batch_solutions: NDArray[np.float64], _batch_indices: list[int] | NDArray[np.int_], ) -> list[float]: - solution_list = [ - batch_solutions[i] for i in range(batch_solutions.shape[0]) - ] + solution_list = [solution for solution in batch_solutions] - batch_results = problem.batch_fun(solution_list, n_cores=1) + batch_results = problem.batch_fun(solution_list, n_cores=self.n_cores) return [-float(result) for result in batch_results] else: - def fitness_function( _ga_instance: Any, solution: NDArray[np.float64], _solution_idx: int ) -> float: return -float(problem.fun(solution)) + population_size = get_population_size( + population_size=self.population_size, x=x0, lower_bound=10 + ) + if self.initial_population is not None: - initial_population = self.initial_population + initial_population = 
np.array(self.initial_population) population_size = len(initial_population) + num_genes = len(initial_population[0]) else: - population_size = get_population_size( - population_size=self.population_size, x=x0 - ) + num_genes = len(x0) + initial_population = np.random.uniform( - low=problem.bounds.lower, - high=problem.bounds.upper, - size=(population_size, len(x0)), + problem.bounds.lower, + problem.bounds.upper, + size=(population_size, num_genes), ) + initial_population[0] = x0 gene_space = [ @@ -159,7 +164,7 @@ def fitness_function( num_generations=self.num_generations, num_parents_mating=self.num_parents_mating, fitness_func=fitness_function, - sol_per_pop=population_size, + fitness_batch_size=effective_fitness_batch_size, initial_population=initial_population, gene_space=gene_space, parent_selection_type=self.parent_selection_type, @@ -176,9 +181,8 @@ def fitness_function( random_mutation_min_val=self.random_mutation_min_val, random_mutation_max_val=self.random_mutation_max_val, allow_duplicate_genes=self.allow_duplicate_genes, - suppress_warnings=self.suppress_warnings, stop_criteria=self.stop_criteria, - parallel_processing=self.parallel_processing, + parallel_processing=None, random_seed=self.random_seed, ) @@ -198,3 +202,20 @@ def fitness_function( ) return res + + +def determine_effective_batch_size( + fitness_batch_size: int | None, n_cores: int +) -> int | None: + if fitness_batch_size is not None: + if fitness_batch_size < n_cores: + warnings.warn( + f"fitness_batch_size ({fitness_batch_size}) is smaller than " + f"n_cores ({n_cores}). This may reduce parallel efficiency. " + f"Consider setting fitness_batch_size >= n_cores." + ) + return fitness_batch_size + elif n_cores > 1: + return n_cores + else: + return None From ac288ca1b4ba7fd4021e9eb44f2cd30552336613 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 22:09:15 +0000 Subject: [PATCH 08/18] fix: fitness function --- src/optimagic/optimizers/pygad_optimizer.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index 6f935e1dc..98b7d6ea3 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -120,30 +120,34 @@ def _solve_internal_problem( and effective_fitness_batch_size > 1 and self.n_cores > 1 ): - def fitness_function( + + def _fitness_func_batch( _ga_instance: Any, batch_solutions: NDArray[np.float64], _batch_indices: list[int] | NDArray[np.int_], ) -> list[float]: - solution_list = [solution for solution in batch_solutions] - - batch_results = problem.batch_fun(solution_list, n_cores=self.n_cores) + batch_results = problem.batch_fun( + batch_solutions.tolist(), n_cores=self.n_cores + ) return [-float(result) for result in batch_results] + + fitness_function: Any = _fitness_func_batch else: - def fitness_function( + + def _fitness_func_single( _ga_instance: Any, solution: NDArray[np.float64], _solution_idx: int ) -> float: return -float(problem.fun(solution)) + fitness_function = _fitness_func_single + population_size = get_population_size( population_size=self.population_size, x=x0, lower_bound=10 ) if self.initial_population is not None: initial_population = np.array(self.initial_population) - population_size = len(initial_population) - num_genes = len(initial_population[0]) else: num_genes = len(x0) From bccb162adaba277531e1129ab1f34d277500ba32 Mon Sep 17 00:00:00 2001 From: 
spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Mon, 14 Jul 2025 22:29:20 +0000 Subject: [PATCH 09/18] add protocol for user-defined functions --- src/optimagic/typing.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/optimagic/typing.py b/src/optimagic/typing.py index 2b400ecd9..b0e11568d 100644 --- a/src/optimagic/typing.py +++ b/src/optimagic/typing.py @@ -156,3 +156,24 @@ class MultiStartIterationHistory(TupleLikeAccess): history: IterationHistory local_histories: list[IterationHistory] | None = None exploration: IterationHistory | None = None + + +class ParentSelectionFunction(Protocol): + def __call__( + self, fitness: NDArray[np.float64], num_parents: int, ga_instance: Any + ) -> tuple[NDArray[np.float64], NDArray[np.int_]]: ... + + +class CrossoverFunction(Protocol): + def __call__( + self, + parents: NDArray[np.float64], + offspring_size: tuple[int, int], + ga_instance: Any, + ) -> NDArray[np.float64]: ... + + +class MutationFunction(Protocol): + def __call__( + self, offspring: NDArray[np.float64], ga_instance: Any + ) -> NDArray[np.float64]: ... From abee7231e81749ab14ea58a5de8503aafd5ab041 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Tue, 15 Jul 2025 00:20:16 +0000 Subject: [PATCH 10/18] add user-defined GA operator Protocol types --- src/optimagic/optimizers/pygad_optimizer.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index 98b7d6ea3..9973a281f 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -15,7 +15,10 @@ ) from optimagic.typing import ( AggregationLevel, + CrossoverFunction, + MutationFunction, NonNegativeFloat, + ParentSelectionFunction, PositiveFloat, PositiveInt, ) @@ -45,20 +48,25 @@ class Pygad(Algorithm): initial_population: NDArray[np.float64] | list[list[float]] | None = None - parent_selection_type: Literal[ - "sss", "rws", "sus", "rank", "random", "tournament" - ] = "sss" + parent_selection_type: ( + Literal["sss", "rws", "sus", "rank", "random", "tournament"] + | ParentSelectionFunction + ) = "sss" keep_parents: int = -1 keep_elitism: PositiveInt = 1 K_tournament: PositiveInt = 3 crossover_type: ( - Literal["single_point", "two_points", "uniform", "scattered"] | None + Literal["single_point", "two_points", "uniform", "scattered"] + | CrossoverFunction + | None ) = "single_point" crossover_probability: NonNegativeFloat | None = None mutation_type: ( - Literal["random", "swap", "inversion", "scramble", "adaptive"] | None + Literal["random", "swap", "inversion", "scramble", "adaptive"] + | MutationFunction + | None ) = "random" mutation_probability: ( NonNegativeFloat From 2d1ec8f5895f791492dafb4bd4dd4dc9b6d9d369 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Tue, 15 Jul 2025 00:52:23 +0000 Subject: [PATCH 11/18] add docstring --- src/optimagic/optimizers/pygad_optimizer.py | 18 ++++++++++ src/optimagic/typing.py | 38 +++++++++++++++++++++ 2 files changed, 56 insertions(+) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index 9973a281f..42140b6b1 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -219,6 +219,24 @@ def _fitness_func_single( def determine_effective_batch_size( fitness_batch_size: int | None, n_cores: int ) -> 
int | None: + """Determine the effective fitness_batch_size for parallel processing. + + Behavior: + - If `fitness_batch_size` is explicitly provided: + - The value is returned unchanged. + - A warning is issued if it is less than `n_cores`, as this may + underutilize available cores. + - If `fitness_batch_size` is `None`: + - If `n_cores` > 1, defaults to `n_cores`. + - Otherwise, returns None (i.e., single-threaded evaluation). + Args: + fitness_batch_size: User-specified batch size or None + n_cores: Number of cores for parallel processing + + Returns: + Effective batch size for PyGAD, or None for single-threaded processing + + """ if fitness_batch_size is not None: if fitness_batch_size < n_cores: warnings.warn( diff --git a/src/optimagic/typing.py b/src/optimagic/typing.py index b0e11568d..2a20f65af 100644 --- a/src/optimagic/typing.py +++ b/src/optimagic/typing.py @@ -159,12 +159,39 @@ class MultiStartIterationHistory(TupleLikeAccess): class ParentSelectionFunction(Protocol): + """Protocol for user-defined parent selection functions. + + Args: + fitness: Array of fitness values for all solutions in the population. + num_parents: Number of parents to select. + ga_instance: The PyGAD GA instance. + + Returns: + Tuple of (selected_parents, parent_indices) where: + - selected_parents: 2D array of selected parent solutions + - parent_indices: 1D array of indices of selected parents + + """ + def __call__( self, fitness: NDArray[np.float64], num_parents: int, ga_instance: Any ) -> tuple[NDArray[np.float64], NDArray[np.int_]]: ... class CrossoverFunction(Protocol): + """Protocol for user-defined crossover functions. + + Args: + parents: 2D array of parent solutions selected for mating. + offspring_size: Tuple (num_offspring, num_genes) specifying + offspring size. + ga_instance: The PyGAD GA instance. + + Returns: + 2D array of offspring solutions. + + """ + def __call__( self, parents: NDArray[np.float64], @@ -174,6 +201,17 @@ def __call__( class MutationFunction(Protocol): + """Protocol for user-defined mutation functions. + + Args: + offspring: 2D array of offspring solutions to be mutated. + ga_instance: The PyGAD GA instance. + + Returns: + 2D array of mutated offspring solutions. + + """ + def __call__( self, offspring: NDArray[np.float64], ga_instance: Any ) -> NDArray[np.float64]: ... 
From 2e28118a4bc61a6e82704a2cad80cf6fac577f53 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Tue, 15 Jul 2025 02:31:23 +0000 Subject: [PATCH 12/18] fix: make tests pass --- src/optimagic/optimizers/pygad_optimizer.py | 22 ++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index 42140b6b1..3fba6a039 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -1,6 +1,6 @@ import warnings from dataclasses import dataclass -from typing import Any, Literal +from typing import Any, List, Literal import numpy as np from numpy.typing import NDArray @@ -43,8 +43,8 @@ @dataclass(frozen=True) class Pygad(Algorithm): population_size: PositiveInt | None = None - num_parents_mating: PositiveInt | None = None - num_generations: PositiveInt | None = None + num_parents_mating: PositiveInt | None = 10 + num_generations: PositiveInt | None = 50 initial_population: NDArray[np.float64] | list[list[float]] | None = None @@ -134,9 +134,11 @@ def _fitness_func_batch( batch_solutions: NDArray[np.float64], _batch_indices: list[int] | NDArray[np.int_], ) -> list[float]: - batch_results = problem.batch_fun( - batch_solutions.tolist(), n_cores=self.n_cores - ) + solutions_list: List[NDArray[np.float64]] = [ + np.asarray(batch_solutions[i]) + for i in range(batch_solutions.shape[0]) + ] + batch_results = problem.batch_fun(solutions_list, n_cores=self.n_cores) return [-float(result) for result in batch_results] @@ -154,6 +156,12 @@ def _fitness_func_single( population_size=self.population_size, x=x0, lower_bound=10 ) + num_parents_mating = ( + self.num_parents_mating + if self.num_parents_mating is not None + else max(2, population_size // 2) + ) + if self.initial_population is not None: initial_population = np.array(self.initial_population) else: @@ -174,7 +182,7 @@ def _fitness_func_single( ga_instance = pygad.GA( num_generations=self.num_generations, - num_parents_mating=self.num_parents_mating, + num_parents_mating=num_parents_mating, fitness_func=fitness_function, fitness_batch_size=effective_fitness_batch_size, initial_population=initial_population, From 726bd133d95b1e2b67ffb4f24f15bfb727f3cd99 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Tue, 15 Jul 2025 02:46:53 +0000 Subject: [PATCH 13/18] fix: typo in error message --- src/optimagic/optimizers/pygad_optimizer.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index 3fba6a039..f8dd30448 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -1,6 +1,6 @@ import warnings from dataclasses import dataclass -from typing import Any, List, Literal +from typing import Any, Literal import numpy as np from numpy.typing import NDArray @@ -106,7 +106,7 @@ def _solve_internal_problem( ) -> InternalOptimizeResult: if not IS_PYGAD_INSTALLED: raise NotInstalledError( - "The 'pygad_pygad' algorithm requires the pygad package to be " + "The 'pygad' algorithm requires the pygad package to be " "installed. You can install it with 'pip install pygad'." 
) @@ -116,7 +116,7 @@ def _solve_internal_problem( or not np.isfinite(problem.bounds.lower).all() or not np.isfinite(problem.bounds.upper).all() ): - raise ValueError("pygad_pygad requires finite bounds for all parameters.") + raise ValueError("pygad requires finite bounds for all parameters.") # Determine effective fitness_batch_size for parallel processing effective_fitness_batch_size = determine_effective_batch_size( @@ -134,7 +134,7 @@ def _fitness_func_batch( batch_solutions: NDArray[np.float64], _batch_indices: list[int] | NDArray[np.int_], ) -> list[float]: - solutions_list: List[NDArray[np.float64]] = [ + solutions_list: list[NDArray[np.float64]] = [ np.asarray(batch_solutions[i]) for i in range(batch_solutions.shape[0]) ] From 170335634404ca4be531ffd4ef6d7306126cae7e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Jul 2025 02:52:03 +0000 Subject: [PATCH 14/18] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .tools/envs/testenv-plotly.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.tools/envs/testenv-plotly.yml b/.tools/envs/testenv-plotly.yml index 27504174b..6f7f47001 100644 --- a/.tools/envs/testenv-plotly.yml +++ b/.tools/envs/testenv-plotly.yml @@ -26,6 +26,7 @@ dependencies: - jinja2 # dev, tests - annotated-types # dev, tests - iminuit # dev, tests + - pygad # dev, tests - pip: # dev, tests, docs - nevergrad # dev, tests - DFO-LS>=1.5.3 # dev, tests From 7e1bd67f50c0b9d20727bdeb0279e1334ac64a44 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Tue, 15 Jul 2025 17:00:00 +0000 Subject: [PATCH 15/18] refactor result processing --- src/optimagic/optimizers/pygad_optimizer.py | 42 +++++++++++++++------ 1 file changed, 30 insertions(+), 12 deletions(-) diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py index f8dd30448..6771ab56e 100644 --- a/src/optimagic/optimizers/pygad_optimizer.py +++ b/src/optimagic/optimizers/pygad_optimizer.py @@ -208,18 +208,7 @@ def _fitness_func_single( ga_instance.run() - solution, solution_fitness, solution_idx = ga_instance.best_solution() - - res = InternalOptimizeResult( - x=solution, - fun=-solution_fitness, - success=True, - message=( - f"Optimization terminated successfully after " - f"{ga_instance.generations_completed} generations." - ), - n_fun_evals=ga_instance.generations_completed * population_size, - ) + res = _process_pygad_result(ga_instance) return res @@ -257,3 +246,32 @@ def determine_effective_batch_size( return n_cores else: return None + + +def _process_pygad_result(ga_instance: Any) -> InternalOptimizeResult: + """Process PyGAD result into InternalOptimizeResult. + + Args: + ga_instance: The PyGAD instance after running the optimization + + Returns: + InternalOptimizeResult: Processed optimization results + + """ + best_solution, best_fitness, _ = ga_instance.best_solution() + + best_criterion = -best_fitness + + success = ga_instance.run_completed + if success: + message = f"Optimization terminated successfully after {ga_instance.generations_completed} generations." + else: + message = f"Optimization failed to complete. Only {ga_instance.generations_completed} generations completed." 
+
+    return InternalOptimizeResult(
+        x=best_solution,
+        fun=best_criterion,
+        success=success,
+        message=message,
+        n_fun_evals=ga_instance.generations_completed * ga_instance.pop_size[0],
+    )

From cbb72f406bf4158a3b8dfa3dc88c0ff55bdbc2ba Mon Sep 17 00:00:00 2001
From: spline2hg <181270613+spline2hg@users.noreply.github.com>
Date: Tue, 15 Jul 2025 17:32:52 +0000
Subject: [PATCH 16/18] fix: ruff

---
 src/optimagic/optimizers/pygad_optimizer.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/src/optimagic/optimizers/pygad_optimizer.py b/src/optimagic/optimizers/pygad_optimizer.py
index 6771ab56e..030508221 100644
--- a/src/optimagic/optimizers/pygad_optimizer.py
+++ b/src/optimagic/optimizers/pygad_optimizer.py
@@ -262,11 +262,18 @@ def _process_pygad_result(ga_instance: Any) -> InternalOptimizeResult:
 
     best_criterion = -best_fitness
 
+    completed_generations = ga_instance.generations_completed
     success = ga_instance.run_completed
     if success:
-        message = f"Optimization terminated successfully after {ga_instance.generations_completed} generations."
+        message = (
+            "Optimization terminated successfully.\n"
+            f"Generations completed: {completed_generations}"
+        )
     else:
-        message = f"Optimization failed to complete. Only {ga_instance.generations_completed} generations completed."
+        message = (
+            "Optimization failed to complete.\n"
+            f"Generations completed: {completed_generations}"
+        )
 
     return InternalOptimizeResult(
         x=best_solution,
         fun=best_criterion,
         success=success,
         message=message,
         n_fun_evals=ga_instance.generations_completed * ga_instance.pop_size[0],
     )

From f28ac40805cb76db4d6ed42be7ca84366f17cd1b Mon Sep 17 00:00:00 2001
From: spline2hg <181270613+spline2hg@users.noreply.github.com>
Date: Tue, 15 Jul 2025 17:56:19 +0000
Subject: [PATCH 17/18] add: docs

---
 docs/source/algorithms.md | 52 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 52 insertions(+)

diff --git a/docs/source/algorithms.md b/docs/source/algorithms.md
index d9e43a004..dff093e5b 100644
--- a/docs/source/algorithms.md
+++ b/docs/source/algorithms.md
@@ -4043,6 +4043,58 @@ these optimizers, you need to have
     initialization for speed. Default is False.
 
 ```
+## PyGAD Optimizer
+
+optimagic supports the [PyGAD](https://github.com/ahmedfgad/GeneticAlgorithmPython)
+genetic algorithm optimizer. To use PyGAD, you need to have
+[the pygad package](https://github.com/ahmedfgad/GeneticAlgorithmPython) installed
+(`pip install pygad`).
+
+```{eval-rst}
+.. dropdown:: pygad
+
+    .. code-block::
+
+        "pygad"
+
+    Minimize a scalar function using the PyGAD genetic algorithm.
+
+    PyGAD is a Python library for building genetic algorithms and training machine learning algorithms.
+    Genetic algorithms are metaheuristics inspired by the process of natural selection that belong to
+    the larger class of evolutionary algorithms.
+
+    The algorithm supports the following options:
+
+    - **population_size** (int): Number of solutions in each generation. Default is None, in which case the population size is derived from the length of the start parameters, with a lower bound of 10.
+    - **num_parents_mating** (int): Number of parents selected for mating in each generation. Default is 10.
+    - **num_generations** (int): Number of generations. Default is 50.
+    - **initial_population** (array-like): The initial population as a 2D array where
+      each row represents a solution and each column represents a parameter (gene) value.
+      The number of rows determines the population size, and the number of columns must
+      match the length of the initial parameters (x0).
+      When None, the population is randomly generated within the parameter bounds using
+      the specified population_size and the dimensionality from x0.
+ - **parent_selection_type** (str or callable): Method for selecting parents. Can be a string ("sss", "rws", "sus", "rank", "random", "tournament") or a custom function with signature ``parent_selection_func(fitness, num_parents, ga_instance) -> tuple[NDArray, NDArray]``. Default is "sss". + - **keep_parents** (int): Number of best parents to keep in the next generation. Only has effect when keep_elitism is 0. Default is -1. + - **keep_elitism** (int): Number of best solutions to preserve across generations. If non-zero, keep_parents has no effect. Default is 1. + - **K_tournament** (int): Tournament size for tournament selection. Only used when parent_selection_type is "tournament". Default is 3. + - **crossover_type** (str, callable, or None): Crossover method. Can be a string ("single_point", "two_points", "uniform", "scattered"), a custom function with signature ``crossover_func(parents, offspring_size, ga_instance) -> NDArray``, or None to disable crossover. Default is "single_point". + - **crossover_probability** (float): Probability of applying crossover. Range [0, 1]. Default is None. + - **mutation_type** (str, callable, or None): Mutation method. Can be a string ("random", "swap", "inversion", "scramble", "adaptive"), a custom function with signature ``mutation_func(offspring, ga_instance) -> NDArray``, or None to disable mutation. Default is "random". + - **mutation_probability** (float/list/tuple/array): Probability of mutation. Range [0, 1]. If specified, mutation_percent_genes and mutation_num_genes are ignored. Default is None. + - **mutation_percent_genes** (float/str/list/tuple/array): Percentage of genes to mutate. Default is "default" (equivalent to 10%). Ignored if mutation_probability or mutation_num_genes are specified. + - **mutation_num_genes** (int/list/tuple/array): Exact number of genes to mutate. Ignored if mutation_probability is specified. Default is None. + - **mutation_by_replacement** (bool): Whether to replace gene values during mutation. Only works with mutation_type="random". Default is False. + - **random_mutation_min_val** (float/list/array): Minimum value for random mutation. Only used with mutation_type="random". Default is -1.0. + - **random_mutation_max_val** (float/list/array): Maximum value for random mutation. Only used with mutation_type="random". Default is 1.0. + - **allow_duplicate_genes** (bool): Whether to allow duplicate gene values within a solution. Default is True. + - **fitness_batch_size** (int): Number of solutions to evaluate in parallel batches. When None and n_cores > 1, automatically set to n_cores for optimal parallelization. Default is None. + - **stop_criteria** (str/list): Early stopping criteria. Format: "reach_value" or "saturate_N". Default is None. + - **n_cores** (int): Number of cores for parallel fitness evaluation. Default is 1. + - **random_seed** (int): Random seed for reproducibility. Default is None. + +``` + ## References ```{eval-rst} From f0432e17cceff47275758e9631e6593981799285 Mon Sep 17 00:00:00 2001 From: spline2hg <181270613+spline2hg@users.noreply.github.com> Date: Tue, 15 Jul 2025 18:13:55 +0000 Subject: [PATCH 18/18] improve docs --- docs/source/algorithms.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/source/algorithms.md b/docs/source/algorithms.md index dff093e5b..b1b2c5371 100644 --- a/docs/source/algorithms.md +++ b/docs/source/algorithms.md @@ -4061,7 +4061,12 @@ genetic algorithm optimizer. 
To use PyGAD, you need to have
 
     PyGAD is a Python library for building genetic algorithms and training machine learning algorithms.
     Genetic algorithms are metaheuristics inspired by the process of natural selection that belong to
-    the larger class of evolutionary algorithms.
+    the larger class of evolutionary algorithms. These algorithms apply biologically inspired
+    operators such as mutation, crossover, and selection to optimization problems.
+
+    The algorithm maintains a population of candidate solutions and iteratively improves them
+    through genetic operations, making it well suited to global optimization problems with complex
+    search spaces that may contain multiple local optima.
 
     The algorithm supports the following options:
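A minimal usage sketch of the optimizer added in this series, assuming only optimagic's public `minimize` interface and the `pygad` algorithm name registered above; the objective function, bounds, and option values are illustrative:

```python
import numpy as np
import optimagic as om


def sphere(x):
    # Illustrative objective with its global minimum at the origin.
    return np.sum(x**2)


res = om.minimize(
    fun=sphere,
    params=np.array([2.0, -1.5, 0.5]),
    algorithm="pygad",
    # pygad requires finite bounds for all parameters.
    bounds=om.Bounds(lower=np.full(3, -5.0), upper=np.full(3, 5.0)),
    algo_options={
        "population_size": 20,
        "num_generations": 50,
        "random_seed": 0,  # for reproducibility
    },
)
print(res.params)
```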
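And a sketch of a user-defined operator matching the `MutationFunction` protocol from this series. The Gaussian-noise scheme and the clipping bounds are illustrative assumptions; PyGAD passes the offspring array and the GA instance and expects the mutated array back, and a custom mutation function is itself responsible for keeping genes within bounds:

```python
import numpy as np
import optimagic as om


def gaussian_mutation(offspring, ga_instance):
    # Perturb every gene with Gaussian noise, then clip to the same
    # finite bounds that are passed to the optimizer below (illustrative
    # values; a custom operator must enforce feasibility itself).
    mutated = offspring + np.random.normal(scale=0.1, size=offspring.shape)
    return np.clip(mutated, -5.0, 5.0)


res = om.minimize(
    fun=lambda x: np.sum(x**2),
    params=np.zeros(3),
    algorithm="pygad",
    bounds=om.Bounds(lower=np.full(3, -5.0), upper=np.full(3, 5.0)),
    algo_options={"mutation_type": gaussian_mutation},
)
```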