Commit: refactors
AlkiviadisAleiferis committed Dec 22, 2023
1 parent 3e8a973 commit 1cbc00a
Showing 17 changed files with 256 additions and 192 deletions.
5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -7,10 +7,13 @@ with advance notice in the Deprecations section of releases.

---------------------------

# [1.1.1] - CHECK DATE BEFORE PUBLISHING
# [1.2.0] - CHECK DATE BEFORE PUBLISHING

### Changes
Internal structure changes have been implemented for greater scalability and maintainability.
Accordingly, the library gets a new MINOR semantic version.

Changes implemented:
- Added the ``generate_problem_data`` function to ``hyperpack.utils`` for quickstarting (see the sketch after this list).
- Refactored with mixins/abstractions for better SOLID compliance and maintainability.
- Refactored many internal workings for better readability and functionality.
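
A hypothetical quickstart sketch for the new utility; the keyword arguments and the returned dictionary keys below are assumptions for illustration, not the documented signature:

```python
# Hypothetical sketch: the signature of generate_problem_data and the shape
# of its return value are assumptions, not the documented API.
from hyperpack.heuristics import HyperPack
from hyperpack.utils import generate_problem_data

problem_data = generate_problem_data(containers_num=2, items_num=30)  # assumed kwargs
problem = HyperPack(
    containers=problem_data["containers"],  # assumed key
    items=problem_data["items"],            # assumed key
)
problem.solve()
print(problem.log_solution())
```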
72 changes: 46 additions & 26 deletions hyperpack/abstract.py
@@ -1,9 +1,10 @@
from .loggers import hyperLogger
from abc import ABC, abstractmethod
from itertools import combinations
from time import time


class AbstractLocalSearch:
class AbstractLocalSearch(ABC):
"""
An abstraction for implementing an N-opt local search,
either 'greedy-descent' or 'hill-climbing'.
@@ -17,14 +18,21 @@ class AbstractLocalSearch:
OPT_NUM = 2
logger = hyperLogger

@abstractmethod
def get_init_solution(self):
raise NotImplementedError
pass

@abstractmethod
def calculate_obj_value(self):
raise NotImplementedError
pass

@abstractmethod
def get_solution(self):
raise NotImplementedError
pass

@abstractmethod
def evaluate_node(self, sequence):
pass
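
# --- Illustrative sketch (not part of this commit) ---------------------------
# A minimal, hypothetical subclass showing what a concrete implementation of
# the four abstract hooks above must supply (e.g. the ``LocalSearchMixin``
# used by ``HyperPack`` in heuristics.py). The toy state and objective below
# are invented purely for illustration.
from hyperpack.abstract import AbstractLocalSearch


class ToyLocalSearch(AbstractLocalSearch):
    OPTIMIZATION = "MAX"  # consulted by compare_node / get_optimum_objective_val

    def __init__(self, values):
        self.values = list(values)
        self.sequence = list(range(len(values)))

    def get_init_solution(self):
        # the initial retained solution
        return list(self.sequence)

    def evaluate_node(self, sequence):
        # update instance state for the candidate 2-opt neighbor
        self.sequence = list(sequence)

    def calculate_obj_value(self):
        # toy objective: reward placing larger values earlier in the sequence
        n = len(self.sequence)
        return sum((n - pos) * self.values[i] for pos, i in enumerate(self.sequence))

    def get_solution(self):
        # return a copy so the retained solution isn't mutated by later evaluations
        return list(self.sequence)

# Calling ToyLocalSearch([...]).local_search(...) would then drive the 2-opt
# loop defined further down in this class.
# -----------------------------------------------------------------------------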

def get_max_neighbors_num(self, throttle, seq_length):
max_constant = getattr(self, "MAX_NEIGHBORS_THROTTLE", float("inf"))
@@ -40,24 +48,23 @@ def get_optimum_objective_val(self):
"""
if getattr(self, "OPTIMIZATION") == "MAX":
return float("inf")
elif getattr(self, "OPTIMIZATION") == "MIN":
return float("-inf")
else:
return -float("inf")
raise ValueError

def global_check(self, value: float, optimum_value: float):
if getattr(self, "OPTIMIZATION") == "MAX":
return value >= optimum_value
else:
return value <= optimum_value

def node_check(self, new_obj_value, best_obj_value):
def compare_node(self, new_obj_value, best_obj_value):
if getattr(self, "OPTIMIZATION") == "MAX":
return new_obj_value > best_obj_value
else:
return new_obj_value < best_obj_value

def evaluate(self, sequence):
raise NotImplementedError

def debug_local_search(self, **kwargs):
"""
Debug logging after operation. Default implementation.
@@ -93,20 +100,27 @@ def local_search(
debug=True,
):
# initial data
retain_solution = self.get_init_solution()
retained_solution = self.get_init_solution()
best_obj_value = self.calculate_obj_value()
optimum_obj_value = self.get_optimum_objective_val()

node_seq = init_sequence
node_sequence = init_sequence
node_num = 0
seq_length = len(node_seq)
swaps = list(combinations(range(seq_length), self.OPT_NUM))
seq_length = len(node_sequence)

# index_swaps is the list of all possible two-tuples
# each containing the 2-opt index swap of
# the sequence list
index_swaps = list(combinations(range(seq_length), self.OPT_NUM))

max_neighbors_num = self.get_max_neighbors_num(throttle, seq_length)

if hasattr(self, "init_operations"):
self.init_operations()

continue_criterion = True

# START of local search
while continue_criterion:
node_num += 1
out_of_time, neighbor_found, global_optima = (
@@ -116,28 +130,32 @@
)
processed_neighbors = 0

# start of neighborhood search
# traverse each neighbor of node
for swap in swaps:
# create new sequence
current_seq = [el for el in node_seq]
# start of node search
for swap in index_swaps:
i, j = swap
current_seq[i], current_seq[j] = current_seq[j], current_seq[i]

# create new sequence
current_sequence = [el for el in node_sequence]
current_sequence[i], current_sequence[j] = (
current_sequence[j],
current_sequence[i],
)

# should update `self.solution` instance attribute
# or objective value related attributes and instance state
self.evaluate(sequence=current_seq)
self.evaluate_node(sequence=current_sequence)
new_obj_value = self.calculate_obj_value()

processed_neighbors += 1

if self.node_check(new_obj_value, best_obj_value):
if self.compare_node(new_obj_value, best_obj_value):
# set new node
node_seq = [el for el in current_seq]
node_sequence = [el for el in current_sequence]
best_obj_value = new_obj_value

# possible deepcopying mechanism to retain solution state
retain_solution = self.get_solution()
# possible deepcopying mechanism to
# retain solution integrity
retained_solution = self.get_solution()

if hasattr(self, "extra_node_operations"):
self.extra_node_operations()
@@ -152,7 +170,8 @@

if out_of_time or neighbor_found or global_optima or max_neighbors:
break
# end of neighborhood search

# end of node search

if debug:
self.debug_local_search(
@@ -166,5 +185,6 @@

# update continue criterion
continue_criterion = neighbor_found and not out_of_time and not global_optima

# END of local search
return retain_solution
return retained_solution
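
# --- Illustrative sketch (not part of this commit) ---------------------------
# How ``index_swaps`` drives the 2-opt neighborhood in ``local_search`` above:
# with OPT_NUM = 2, every pair of positions becomes one candidate swap, so a
# node of length 4 has C(4, 2) = 6 neighbors. The sequence values are made up.
from itertools import combinations

node_sequence = ["A", "B", "C", "D"]
index_swaps = list(combinations(range(len(node_sequence)), 2))
# -> [(0, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 3)]

for i, j in index_swaps:
    current_sequence = list(node_sequence)
    current_sequence[i], current_sequence[j] = current_sequence[j], current_sequence[i]
    print(current_sequence)  # one 2-opt neighbor of the node per swap
# -----------------------------------------------------------------------------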
157 changes: 18 additions & 139 deletions hyperpack/heuristics.py
@@ -33,11 +33,13 @@ class PointGenPack(
mixins.PropertiesMixin,
):
"""
Base class for initiating and validating the
attributes of the problem to be solved:
- Items
- Containers
- Settings
Base class for initiating and
validating/managing the
attributes/parameters of the problem:
- items
- containers
- settings
- solution
Each of the mixins used is responsible
for its corresponding functionality.
@@ -101,7 +103,8 @@ def __init__(
self._workers_num = None
self._rotation = None
self._settings = settings or {}
self._check_plotly_kaleido()
self._check_plotly_kaleido_versions()

self.validate_settings()

# it's the strategy used for the instance. It can be
@@ -124,7 +127,7 @@ def _check_strip_pack(self, strip_pack_width) -> None:
``compare_solution``: Makes comparison check if all items are in solution.
``_get_height``: Branches method to return solution height \
``_containers._get_height``: Branches method to return solution height \
or a minimum.
``_container_height``: is the actual container's height used
@@ -134,7 +137,7 @@ def _check_strip_pack(self, strip_pack_width) -> None:
``_container_min_height``: is the minimum height that the container \
can get (not the solution height!).
``containers``: with container with preset height for strip packing mode.
``containers``: single container with preset height for strip packing mode.
"""
self._container_min_height = None

@@ -157,7 +160,7 @@ def _check_strip_pack(self, strip_pack_width) -> None:
}
self._containers = Containers(containers, self)

def _check_plotly_kaleido(self) -> None:
def _check_plotly_kaleido_versions(self) -> None:
self._plotly_installed = False
self._plotly_ver_ok = False
self._kaleido_installed = False
@@ -205,7 +208,7 @@ def validate_settings(self) -> None:
"""

# % ----------------------------------------------------------------------------
# SETTINGS VALIDATION
# SETTINGS FORMAT VALIDATION
settings = self._settings
if not isinstance(settings, dict):
raise SettingsError(SettingsError.TYPE)
@@ -396,136 +399,12 @@ def log_solution(self) -> str:
return output_log


class LocalSearch(AbstractLocalSearch):
def evaluate(self, sequence):
self.solve(sequence=sequence, debug=False)

def get_solution(self):
return (
self._deepcopy_solution(),
self._copy_objective_val_per_container(),
)

def calculate_obj_value(self):
"""
Calculates the objective value
using the ``obj_val_per_container`` attribute.
Returns a float (total utilization).
If more than 1 bin is used, the last bin's
utilization is down-weighted, pushing the
first bins towards maximum utilization.
"""
containers_obj_vals = tuple(self.obj_val_per_container.values())
if self._containers_num == 1:
return sum([u for u in containers_obj_vals])
else:
return (
sum([u for u in containers_obj_vals[:-1]]) + 0.7 * containers_obj_vals[-1]
)
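
# --- Illustrative sketch (not part of this commit) ---------------------------
# Worked example of the weighting above, with made-up per-container
# utilizations for a two-bin solution:
obj_val_per_container = {"cont-a": 0.90, "cont-b": 0.40}
vals = tuple(obj_val_per_container.values())
# the last bin's utilization is weighted by 0.7, so the objective rewards
# packing the earlier bins as fully as possible
print(sum(vals[:-1]) + 0.7 * vals[-1])  # 0.9 + 0.7 * 0.4 ≈ 1.18
# -----------------------------------------------------------------------------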

def get_init_solution(self):
self.solve(debug=False)
# deepcopying solution
best_solution = self._deepcopy_solution()
best_obj_val_per_container = self._copy_objective_val_per_container()
return best_solution, best_obj_val_per_container

def extra_node_operations(self, **kwargs):
if self._strip_pack:
# new height is used for the container
# for neighbors of new node
self._containers._set_height()
self._heights_history.append(self._container_height)

def node_check(self, new_obj_value, best_obj_value):
"""
Used in local_search.
Compares the new solution's value to the best one, to decide whether to accept
the new node. It's the mechanism for moving towards better solutions/nodes.
In bin-packing mode, a simple comparison using solution_operator is made.
In strip-packing mode, extra conditions are tested:
- If ``self._container_min_height`` is ``None``:
All the items must be in the solution. \
If not, the solution is rejected.
- If ``self._container_min_height`` is not ``None``:
The number of items in the solution doesn't affect \
the solution choice.
"""
better_solution = new_obj_value > best_obj_value

if not self._strip_pack:
return better_solution

if self._container_min_height is None:
extra_cond = len(self.solution[self.STRIP_PACK_CONT_ID]) == len(self._items)
else:
extra_cond = True

return extra_cond and better_solution

def local_search(
self, *, throttle: bool = True, _hypersearch: bool = False, debug: bool = False
) -> None:
"""
Method for deploying a hill-climbing local search operation, using the
default potential points strategy. Solves against the ``self.items`` and
the ``self.containers`` attributes.
**OPERATION**
Updates ``self.solution`` with the best solution found.
Updates ``self.obj_val_per_container`` with the best values found.
**PARAMETERS**
``throttle``: limits how many neighbors are parsed before accepting that
no better node can be found. Aims at containing the total execution time
for big problem instances. Corresponds to an instance of ~72 items
(max 2500 neighbors).
``_hypersearch``: Either standalone (False), or part of a
superset search (used by hypersearch).
``debug``: enables debug logging (for development).
**RETURNS**
``None``
"""

if not _hypersearch:
start_time = time.time()
else:
start_time = self.start_time
hyperLogger.debug(
"\t\tCURRENT POTENTIAL POINTS STRATEGY:"
f" {self._potential_points_strategy}"
)

if self._strip_pack:
self._heights_history = [self._container_height]

# after local search has ended, restore optimum values
# retain_solution = (solution, obj_val_per_container)
retained_solution = super().local_search(
list(self._items),
throttle,
start_time,
self._max_time_in_seconds,
debug=debug,
)
self.solution, self.obj_val_per_container = retained_solution


class HyperPack(PointGenPack, LocalSearch):
class HyperPack(PointGenPack, mixins.LocalSearchMixin):
"""
This class extends ``PointGenPack`` and ``LocalSearch``,
utilizing their solving functionalities by implementing
a hypersearch hyper-heuristic.
This class extends ``PointGenPack``,
utilizing its solving functionalities
by implementing a hypersearch hyper-heuristic
using the ``LocalSearchMixin`` mixin.
"""

# Potential points strategies constant suffix