Align plotting from logs and results #553

Open · wants to merge 5 commits into base: main
82 changes: 75 additions & 7 deletions src/optimagic/optimization/algorithm.py
@@ -10,12 +10,16 @@

from optimagic.exceptions import InvalidAlgoInfoError, InvalidAlgoOptionError
from optimagic.logging.types import StepStatus
from optimagic.optimization.convergence_report import get_convergence_report
from optimagic.optimization.history import History
from optimagic.optimization.internal_optimization_problem import (
InternalOptimizationProblem,
)
from optimagic.optimization.optimize_result import OptimizeResult
from optimagic.parameters.conversion import Converter
from optimagic.type_conversion import TYPE_CONVERTERS
from optimagic.typing import AggregationLevel
from optimagic.typing import AggregationLevel, Direction, ExtraResultFields
from optimagic.utilities import isscalar


@dataclass(frozen=True)
@@ -82,7 +86,6 @@ class InternalOptimizeResult:
max_constraint_violation: float | None = None
info: dict[str, typing.Any] | None = None
history: History | None = None
multistart_info: dict[str, typing.Any] | None = None

def __post_init__(self) -> None:
report: list[str] = []
@@ -142,6 +145,56 @@ def __post_init__(self) -> None:
)
raise TypeError(msg)

def create_optimize_result(
self,
converter: Converter,
solver_type: AggregationLevel,
extra_fields: ExtraResultFields,
) -> OptimizeResult:
"""Process an internal optimizer result."""
params = converter.params_from_internal(self.x)
if isscalar(self.fun):
fun = float(self.fun)
elif solver_type == AggregationLevel.LIKELIHOOD:
fun = float(np.sum(self.fun))
elif solver_type == AggregationLevel.LEAST_SQUARES:
fun = np.dot(self.fun, self.fun)

if extra_fields.direction == Direction.MAXIMIZE:
fun = -fun

if self.history is not None:
conv_report = get_convergence_report(
history=self.history, direction=extra_fields.direction
)
else:
conv_report = None

out = OptimizeResult(
params=params,
fun=fun,
start_fun=extra_fields.start_fun,
start_params=extra_fields.start_params,
algorithm=extra_fields.algorithm,
direction=extra_fields.direction.value,
n_free=extra_fields.n_free,
message=self.message,
success=self.success,
n_fun_evals=self.n_fun_evals,
n_jac_evals=self.n_jac_evals,
n_hess_evals=self.n_hess_evals,
n_iterations=self.n_iterations,
status=self.status,
jac=self.jac,
hess=self.hess,
hess_inv=self.hess_inv,
max_constraint_violation=self.max_constraint_violation,
history=self.history,
algorithm_output=self.info,
convergence_report=conv_report,
)
return out


class AlgorithmMeta(ABCMeta):
"""Metaclass to get repr, algo_info and name for classes, not just instances."""
@@ -234,25 +287,40 @@ def solve_internal_problem(
problem: InternalOptimizationProblem,
x0: NDArray[np.float64],
step_id: int,
) -> InternalOptimizeResult:
) -> OptimizeResult:
problem = problem.with_new_history().with_step_id(step_id)

if problem.logger:
problem.logger.step_store.update(
step_id, {"status": str(StepStatus.RUNNING.value)}
)

result = self._solve_internal_problem(problem, x0)
raw_res = self._solve_internal_problem(problem, x0)

if (not self.algo_info.disable_history) and (result.history is None):
result = replace(result, history=problem.history)
if (not self.algo_info.disable_history) and (raw_res.history is None):
raw_res = replace(raw_res, history=problem.history)

if problem.logger:
problem.logger.step_store.update(
step_id, {"status": str(StepStatus.COMPLETE.value)}
)

return result
# make sure the start params provided in static_result_fields are the same as x0
extra_fields = problem.static_result_fields
x0_problem = problem.converter.params_to_internal(extra_fields.start_params)
if not np.allclose(x0_problem, x0):
start_params = problem.converter.params_from_internal(x0)
extra_fields = replace(
extra_fields, start_params=start_params, start_fun=None
)

res = raw_res.create_optimize_result(
converter=problem.converter,
solver_type=self.algo_info.solver_type,
extra_fields=extra_fields,
)

return res

def with_option_if_applicable(self, **kwargs: Any) -> Self:
"""Call with_option only with applicable keyword arguments."""
11 changes: 11 additions & 0 deletions src/optimagic/optimization/internal_optimization_problem.py
@@ -23,6 +23,7 @@
Direction,
ErrorHandling,
EvalTask,
ExtraResultFields,
PyTree,
)

@@ -55,6 +56,7 @@ def __init__(
linear_constraints: list[dict[str, Any]] | None,
nonlinear_constraints: list[dict[str, Any]] | None,
logger: LogStore[Any, Any] | None,
static_result_fields: ExtraResultFields,
# TODO: add hess and hessp
):
self._fun = fun
@@ -73,6 +75,7 @@ def __init__(
self._nonlinear_constraints = nonlinear_constraints
self._logger = logger
self._step_id: int | None = None
self._static_result_fields = static_result_fields

# ==================================================================================
# Public methods used by optimizers
@@ -218,6 +221,14 @@ def bounds(self) -> InternalBounds:
def logger(self) -> LogStore[Any, Any] | None:
return self._logger

@property
def converter(self) -> Converter:
return self._converter

@property
def static_result_fields(self) -> ExtraResultFields:
return self._static_result_fields

# ==================================================================================
# Implementation of the public functions; The main difference is that the lower-
# level implementations return a history entry instead of adding it to the history
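
The two new read-only properties expose data that previously had to be threaded through `_optimize` by hand. A hypothetical consumer (the function name and return shape here are illustrative, not optimagic API) might look like:

```python
def summarize_candidate(problem, x):
    """Map an internal parameter vector to user-facing info.

    `problem` is an InternalOptimizationProblem; the converter maps between
    the internal flat vector and the user-facing pytree representation.
    """
    params = problem.converter.params_from_internal(x)
    fields = problem.static_result_fields
    return {
        "params": params,
        "algorithm": fields.algorithm,
        "direction": fields.direction.value,
        "n_free": fields.n_free,
    }
```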
39 changes: 26 additions & 13 deletions src/optimagic/optimization/multistart.py
@@ -12,7 +12,6 @@
"""

import warnings
from dataclasses import replace
from typing import Literal

import numpy as np
@@ -21,7 +20,7 @@

from optimagic.logging.logger import LogStore
from optimagic.logging.types import StepStatus
from optimagic.optimization.algorithm import Algorithm, InternalOptimizeResult
from optimagic.optimization.algorithm import Algorithm
from optimagic.optimization.internal_optimization_problem import (
InternalBounds,
InternalOptimizationProblem,
@@ -30,6 +29,8 @@
from optimagic.optimization.optimization_logging import (
log_scheduled_steps_and_get_ids,
)
from optimagic.optimization.optimize_result import OptimizeResult
from optimagic.optimization.process_multistart_result import process_multistart_result
from optimagic.typing import AggregationLevel, ErrorHandling
from optimagic.utilities import get_rng

@@ -42,7 +43,7 @@ def run_multistart_optimization(
options: InternalMultistartOptions,
logger: LogStore | None,
error_handling: ErrorHandling,
) -> InternalOptimizeResult:
) -> OptimizeResult:
steps = determine_steps(options.n_samples, stopping_maxopt=options.stopping_maxopt)

scheduled_steps = log_scheduled_steps_and_get_ids(
@@ -159,6 +160,7 @@ def single_optimization(x0, step_id):
results=batch_results,
convergence_criteria=convergence_criteria,
solver_type=local_algorithm.algo_info.solver_type,
converter=internal_problem.converter,
)
opt_counter += len(batch)
if is_converged:
@@ -168,15 +170,20 @@ def single_optimization(x0, step_id):
logger.step_store.update(step, {"status": new_status})
break

multistart_info = {
"start_parameters": state["start_history"],
"local_optima": state["result_history"],
"exploration_sample": sorted_sample,
"exploration_results": exploration_res["sorted_values"],
}

raw_res = state["best_res"]
res = replace(raw_res, multistart_info=multistart_info)

expl_sample = [
internal_problem.converter.params_from_internal(s) for s in sorted_sample
]
expl_res = list(exploration_res["sorted_values"])

res = process_multistart_result(
raw_res=raw_res,
extra_fields=internal_problem.static_result_fields,
local_optima=state["result_history"],
exploration_sample=expl_sample,
exploration_results=expl_res,
)

return res

@@ -371,7 +378,12 @@ def get_batched_optimization_sample(sorted_sample, stopping_maxopt, batch_size):


def update_convergence_state(
current_state, starts, results, convergence_criteria, solver_type
current_state,
starts,
results,
convergence_criteria,
solver_type,
converter,
):
"""Update the state of all quantities related to convergence.

@@ -389,6 +401,7 @@ def update_convergence_state(
convergence_criteria (dict): Dict with the entries "xtol" and "max_discoveries"
solver_type: The aggregation level of the local optimizer. Needed to
interpret the output of the internal criterion function.
converter: The converter to map between internal and external parameter spaces.


Returns:
@@ -422,7 +435,7 @@ def update_convergence_state(
# ==================================================================================
valid_results = [results[i] for i in valid_indices]
valid_starts = [starts[i] for i in valid_indices]
valid_new_x = [res.x for res in valid_results]
valid_new_x = [converter.params_to_internal(res.params) for res in valid_results]
valid_new_y = []

# make the criterion output scalar if a least squares optimizer returns an
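
Because local results are now full `OptimizeResult` objects carrying external (pytree) params, the convergence check maps them back into the internal space before comparing against known optima. A simplified sketch of that round-trip (`IdentityConverter` and `is_new_optimum` are illustrative helpers, not the actual `update_convergence_state` logic):

```python
import numpy as np

class IdentityConverter:
    """Stand-in for optimagic's Converter when params are already flat arrays."""

    def params_to_internal(self, params):
        return np.asarray(params, dtype=float)

def is_new_optimum(result_params, known_x, converter, xtol):
    """True if the optimum is farther than xtol from every known optimum."""
    x = converter.params_to_internal(result_params)
    return all(np.linalg.norm(x - old) > xtol for old in known_x)

assert is_new_optimum([0.0, 1.0], [np.array([5.0, 5.0])], IdentityConverter(), 1e-3)
```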
77 changes: 30 additions & 47 deletions src/optimagic/optimization/optimize.py
@@ -48,11 +48,6 @@
)
from optimagic.optimization.optimization_logging import log_scheduled_steps_and_get_ids
from optimagic.optimization.optimize_result import OptimizeResult
from optimagic.optimization.process_results import (
ExtraResultFields,
process_multistart_result,
process_single_result,
)
from optimagic.parameters.bounds import Bounds
from optimagic.parameters.conversion import (
get_converter,
@@ -64,6 +59,7 @@
Direction,
ErrorHandling,
ErrorHandlingLiteral,
ExtraResultFields,
NonNegativeFloat,
PyTree,
)
@@ -543,18 +539,6 @@ def _optimize(problem: OptimizationProblem) -> OptimizeResult:
add_soft_bounds=problem.multistart is not None,
)

# ==================================================================================
# initialize the log database
# ==================================================================================
logger: LogStore[Any, Any] | None

if problem.logging:
logger = LogStore.from_options(problem.logging)
problem_data = ProblemInitialization(problem.direction, problem.params)
logger.problem_store.insert(problem_data)
else:
logger = None

# ==================================================================================
# Do some things that require internal parameters or bounds
# ==================================================================================
@@ -583,12 +567,37 @@ def _optimize(problem: OptimizationProblem) -> OptimizeResult:
numdiff_options=problem.numdiff_options,
skip_checks=problem.skip_checks,
)
# Define static information that will be added to the OptimizeResult
_scalar_start_criterion = cast(
float, first_crit_eval.internal_value(AggregationLevel.SCALAR)
)
extra_fields = ExtraResultFields(
start_fun=_scalar_start_criterion,
start_params=problem.params,
algorithm=problem.algorithm.algo_info.name,
direction=problem.direction,
n_free=internal_params.free_mask.sum(),
)

# create x and internal_bounds
x = internal_params.values
internal_bounds = InternalBounds(
lower=internal_params.lower_bounds,
upper=internal_params.upper_bounds,
)

# ==================================================================================
# initialize the log database
# ==================================================================================
logger: LogStore[Any, Any] | None

if problem.logging:
logger = LogStore.from_options(problem.logging)
problem_data = ProblemInitialization(problem.direction, problem.params)
logger.problem_store.insert(problem_data)
else:
logger = None

# ==================================================================================
# Create a batch evaluator
# ==================================================================================
@@ -616,6 +625,7 @@ def _optimize(problem: OptimizationProblem) -> OptimizeResult:
linear_constraints=None,
nonlinear_constraints=internal_nonlinear_constraints,
logger=logger,
static_result_fields=extra_fields,
)

# ==================================================================================
@@ -630,7 +640,7 @@ def _optimize(problem: OptimizationProblem) -> OptimizeResult:
logger=logger,
)[0]

raw_res = problem.algorithm.solve_internal_problem(internal_problem, x, step_id)
res = problem.algorithm.solve_internal_problem(internal_problem, x, step_id)

else:
multistart_options = get_internal_multistart_options_from_public(
@@ -644,7 +654,7 @@ def _optimize(problem: OptimizationProblem) -> OptimizeResult:
upper=internal_params.soft_upper_bounds,
)

raw_res = run_multistart_optimization(
res = run_multistart_optimization(
local_algorithm=problem.algorithm,
internal_problem=internal_problem,
x=x,
@@ -655,37 +665,10 @@ def _optimize(problem: OptimizationProblem) -> OptimizeResult:
)

# ==================================================================================
# Process the result
# Add the log reader to the result
# ==================================================================================

_scalar_start_criterion = cast(
float, first_crit_eval.internal_value(AggregationLevel.SCALAR)
)
log_reader: LogReader[Any] | None

extra_fields = ExtraResultFields(
start_fun=_scalar_start_criterion,
start_params=problem.params,
algorithm=problem.algorithm.algo_info.name,
direction=problem.direction,
n_free=internal_params.free_mask.sum(),
)

if problem.multistart is None:
res = process_single_result(
raw_res=raw_res,
converter=converter,
solver_type=problem.algorithm.algo_info.solver_type,
extra_fields=extra_fields,
)
else:
res = process_multistart_result(
raw_res=raw_res,
converter=converter,
solver_type=problem.algorithm.algo_info.solver_type,
extra_fields=extra_fields,
)

if logger is not None:
assert problem.logging is not None
log_reader = LogReader.from_options(problem.logging)
4 changes: 2 additions & 2 deletions src/optimagic/optimization/optimize_result.py
@@ -41,7 +41,7 @@ class OptimizeResult:

params: Any
fun: float
start_fun: float
start_fun: float | None
start_params: Any
algorithm: str
direction: str
@@ -78,7 +78,7 @@ def criterion(self) -> float:
return self.fun

@property
def start_criterion(self) -> float:
def start_criterion(self) -> float | None:
msg = (
"The start_criterion attribute is deprecated. Use the start_fun attribute "
"instead."
84 changes: 84 additions & 0 deletions src/optimagic/optimization/process_multistart_result.py
@@ -0,0 +1,84 @@
import numpy as np
from numpy.typing import NDArray

from optimagic.optimization.convergence_report import get_convergence_report
from optimagic.optimization.optimize_result import MultistartInfo, OptimizeResult
from optimagic.typing import Direction, ExtraResultFields


def process_multistart_result(
raw_res: OptimizeResult,
extra_fields: ExtraResultFields,
local_optima: list[OptimizeResult],
exploration_sample: list[NDArray[np.float64]],
exploration_results: list[float],
) -> OptimizeResult:
"""Process results of internal optimizers."""

if isinstance(raw_res, str):
res = _dummy_result_from_traceback(raw_res, extra_fields)

else:
res = raw_res
if extra_fields.direction == Direction.MAXIMIZE:
exploration_results = [-res for res in exploration_results]

info = MultistartInfo(
start_parameters=[opt.start_params for opt in local_optima],
local_optima=local_optima,
exploration_sample=exploration_sample,
exploration_results=exploration_results,
)

# ==============================================================================
# create a convergence report for the multistart optimization; this is not
# the same as the convergence report for the individual local optimizations.
# ==============================================================================
crit_hist = [opt.fun for opt in info.local_optima]
params_hist = [opt.params for opt in info.local_optima]
time_hist = [np.nan for opt in info.local_optima]
hist = {"criterion": crit_hist, "params": params_hist, "runtime": time_hist}

conv_report = get_convergence_report(
history=hist,
direction=extra_fields.direction,
)

res.convergence_report = conv_report

res.algorithm = f"multistart_{res.algorithm}"
res.n_iterations = _sum_or_none([opt.n_iterations for opt in info.local_optima])

res.n_fun_evals = _sum_or_none([opt.n_fun_evals for opt in info.local_optima])
res.n_jac_evals = _sum_or_none([opt.n_jac_evals for opt in info.local_optima])

res.multistart_info = info
return res


def _dummy_result_from_traceback(
candidate: str, extra_fields: ExtraResultFields
) -> OptimizeResult:
if extra_fields.start_fun is None:
start_fun = np.inf

else:
start_fun = extra_fields.start_fun


out = OptimizeResult(

params=extra_fields.start_params,
fun=start_fun,
start_fun=start_fun,
start_params=extra_fields.start_params,
algorithm=extra_fields.algorithm,
direction=extra_fields.direction.value,
n_free=extra_fields.n_free,
message=candidate,
)
return out



def _sum_or_none(summands: list[int | None | float]) -> int | None:
if any(s is None for s in summands):
out = None
else:
out = int(np.array(summands).sum())
return out
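
`_sum_or_none` keeps aggregated counters honest: if any local optimizer failed to report a count, the total is `None` rather than a misleading partial sum. For example:

```python
assert _sum_or_none([3, 4, 5]) == 12
assert _sum_or_none([3, None, 5]) is None
```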
193 changes: 0 additions & 193 deletions src/optimagic/optimization/process_results.py

This file was deleted.

11 changes: 11 additions & 0 deletions src/optimagic/typing.py
@@ -156,3 +156,14 @@ class MultiStartIterationHistory(TupleLikeAccess):
history: IterationHistory
local_histories: list[IterationHistory] | None = None
exploration: IterationHistory | None = None


@dataclass(frozen=True)
class ExtraResultFields:
"""Fields for OptimizeResult that are not part of InternalOptimizeResult."""

start_fun: float | None
start_params: PyTree
algorithm: str
direction: Direction
n_free: int
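
A construction example for the new frozen dataclass (the values and algorithm name are illustrative):

```python
import numpy as np

from optimagic.typing import Direction, ExtraResultFields

fields = ExtraResultFields(
    start_fun=12.5,
    start_params=np.array([1.0, 2.0, 3.0]),
    algorithm="scipy_lbfgsb",
    direction=Direction.MINIMIZE,
    n_free=3,
)
# frozen=True makes instances immutable; use dataclasses.replace to derive
# modified copies, as solve_internal_problem does for start_params.
```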
2 changes: 1 addition & 1 deletion src/optimagic/visualization/history_plots.py
@@ -344,7 +344,7 @@ def _extract_plotting_data_from_results_object(
res.multistart_info.exploration_sample[::-1] + stacked["params"]
)
stacked["criterion"] = (
res.multistart_info.exploration_results.tolist()[::-1]
list(res.multistart_info.exploration_results)[::-1]
+ stacked["criterion"]
)
else:
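
`exploration_results` is now a plain Python list (see the multistart change above), and lists have no `.tolist()` method; `list(...)` handles both containers:

```python
import numpy as np

for results in (np.array([1.0, 2.0, 3.0]), [1.0, 2.0, 3.0]):
    assert list(results)[::-1] == [3.0, 2.0, 1.0]  # works for arrays and lists
```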
29 changes: 25 additions & 4 deletions tests/optimagic/optimization/test_internal_optimization_problem.py
@@ -18,11 +18,29 @@
InternalOptimizationProblem,
)
from optimagic.parameters.conversion import Converter
from optimagic.typing import AggregationLevel, Direction, ErrorHandling, EvalTask
from optimagic.typing import (
AggregationLevel,
Direction,
ErrorHandling,
EvalTask,
ExtraResultFields,
)


@pytest.fixture
def extra_fields():
out = ExtraResultFields(
start_fun=100,
start_params=np.arange(3),
algorithm="bla",
direction=Direction.MINIMIZE,
n_free=3,
)
return out


@pytest.fixture
def base_problem():
def base_problem(extra_fields):
"""Set up a basic InternalOptimizationProblem that can be modified for tests."""

def fun(params):
@@ -72,6 +90,7 @@ def fun_and_jac(params):
linear_constraints=linear_constraints,
nonlinear_constraints=nonlinear_constraints,
logger=None,
static_result_fields=extra_fields,
)

return problem
@@ -413,7 +432,7 @@ def test_max_problem_exploration_fun(max_problem):


@pytest.fixture
def pytree_problem(base_problem):
def pytree_problem(extra_fields):
def fun(params):
assert isinstance(params, dict)
return LeastSquaresFunctionValue(value=params)
@@ -479,6 +498,7 @@ def derivative_flatten(tree, x):
linear_constraints=linear_constraints,
nonlinear_constraints=nonlinear_constraints,
logger=None,
static_result_fields=extra_fields,
)

return problem
@@ -543,7 +563,7 @@ def test_numerical_fun_and_jac_for_pytree_problem(pytree_problem):


@pytest.fixture
def error_min_problem():
def error_min_problem(extra_fields):
"""Set up a basic InternalOptimizationProblem that can be modified for tests."""

def fun(params):
@@ -603,6 +623,7 @@ def fun_and_jac(params):
linear_constraints=linear_constraints,
nonlinear_constraints=nonlinear_constraints,
logger=None,
static_result_fields=extra_fields,
)

return problem
18 changes: 15 additions & 3 deletions tests/optimagic/optimization/test_multistart.py
@@ -6,13 +6,13 @@
import pytest
from numpy.testing import assert_array_almost_equal as aaae

from optimagic.optimization.algorithm import InternalOptimizeResult
from optimagic.optimization.multistart import (
_draw_exploration_sample,
get_batched_optimization_sample,
run_explorations,
update_convergence_state,
)
from optimagic.optimization.optimize_result import OptimizeResult


@pytest.fixture()
@@ -129,13 +129,23 @@ def starts():

@pytest.fixture()
def results():
res = InternalOptimizeResult(
x=np.arange(3) + 1e-10,
res = OptimizeResult(
params=np.arange(3) + 1e-10,
fun=4,
start_fun=5,
start_params=np.arange(3),
algorithm="bla",
direction="minimize",
n_free=3,
)
return [res]


class DummyConverter:
def params_to_internal(self, params):
return params


def test_update_state_converged(current_state, starts, results):
criteria = {
"xtol": 1e-3,
@@ -148,6 +158,7 @@ def test_update_state_converged(current_state, starts, results):
results=results,
convergence_criteria=criteria,
solver_type="value",
converter=DummyConverter(),
)

aaae(new_state["best_x"], np.arange(3))
@@ -171,6 +182,7 @@ def test_update_state_not_converged(current_state, starts, results):
results=results,
convergence_criteria=criteria,
solver_type="value",
converter=DummyConverter(),
)

assert not is_converged
@@ -1,4 +1,4 @@
from optimagic.optimization.process_results import _sum_or_none
from optimagic.optimization.process_multistart_result import _sum_or_none


def test_sum_or_none():