From be07485d31255a022934f4268c11fbbd66f4188b Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Fri, 27 Jan 2023 01:58:06 -0800 Subject: [PATCH 01/10] OptimizationSolver interface update --- .../optimization/solvers/generic/solver.py | 326 +++++------------- .../generic/test_solver_cpu_backend.py | 47 +-- .../optimization/utils/generators/test_mis.py | 28 +- 3 files changed, 125 insertions(+), 276 deletions(-) diff --git a/src/lava/lib/optimization/solvers/generic/solver.py b/src/lava/lib/optimization/solvers/generic/solver.py index 0792f735..de6922ce 100644 --- a/src/lava/lib/optimization/solvers/generic/solver.py +++ b/src/lava/lib/optimization/solvers/generic/solver.py @@ -2,6 +2,7 @@ # SPDX-License-Identifier: BSD-3-Clause # See: https://spdx.org/licenses/ import typing as ty +from dataclasses import dataclass import numpy.typing as npt import numpy as np @@ -47,11 +48,39 @@ lava.magma.core.resources""" +@dataclass +class SolverConfig: + """Dataclass to store and validate OptimizationSolver configurations.""" + + timeout: int = 1e3 + target_cost: int = 0 + backend: BACKENDS = CPU + hyperparameters: dict = None + probe_time: bool = False + probe_energy: bool = False + log_level: int = 40 + + # TODO: Validation Rules + # timeout > 0 + # target_cost is an integer + # probe_time/probe_energy cannot be True if backend is not in NEUROCORES + + # TODO: Hyperparameters validation + + +@dataclass(frozen=True) +class SolverReport: + best_cost: int = None + best_state: np.ndarray = None + best_timestep: int = None + solver_config: SolverConfig = None + + def solve( - problem: OptimizationProblem, - timeout: int, - target_cost: int = None, - backend: BACKENDS = Loihi2NeuroCore, + problem: OptimizationProblem, + timeout: int, + target_cost: int = None, + backend: BACKENDS = Loihi2NeuroCore, ) -> npt.ArrayLike: """Create solver from problem spec and run until target_cost or timeout. @@ -76,10 +105,14 @@ def solve( Candidate solution to the input optimization problem. """ solver = OptimizationSolver(problem) - solution = solver.solve( - timeout=timeout, target_cost=target_cost, backend=backend + report = solver.solve( + config=SolverConfig( + timeout=timeout, + target_cost=target_cost, + backend=backend + ) ) - return solution + return report.best_state class OptimizationSolver: @@ -95,7 +128,7 @@ class OptimizationSolver: reports it to the user. """ - def __init__(self, problem: OptimizationProblem, run_cfg=None): + def __init__(self, problem: OptimizationProblem): """ Constructor for the OptimizationSolver class. @@ -103,260 +136,73 @@ def __init__(self, problem: OptimizationProblem, run_cfg=None): ---------- problem: OptimizationProblem Optimization problem to be solved. - run_cfg: Any - Run configuration for the OptimizationSolverProcess. 
""" self.problem = problem - self._run_cfg = run_cfg self._process_builder = SolverProcessBuilder() self.solver_process = None self.solver_model = None - self._hyperparameters = dict(temperature=10, - refract=1) - self._report = dict(solved=None, - best_state=None, - cost=None, - target_cost=None, - steps_to_solution=None, - time_to_solution=None) - self._profiler = None - - @property - def run_cfg(self): - """Run configuration for process model selection.""" - return self._run_cfg - - @run_cfg.setter - def run_cfg(self, value): - self._run_cfg = value - - @property - def hyperparameters(self): - return self._hyperparameters - - @hyperparameters.setter - def hyperparameters(self, - value: ty.Dict[str, ty.Union[int, npt.ArrayLike]]): - self._hyperparameters = value - - @property - def last_run_report(self): - return self._report - - def solve(self, - timeout: int, - target_cost: int = 0, - backend: BACKENDS = CPU, - hyperparameters: ty.Dict[ - str, ty.Union[int, npt.ArrayLike]] = None) \ - -> npt.ArrayLike: + + def solve(self, config: SolverConfig = SolverConfig()) -> SolverReport: """ Create solver from problem spec and run until target_cost or timeout. Parameters ---------- - timeout: int - Maximum number of iterations (timesteps) to be run. If set to -1 - then the solver will run continuously in non-blocking mode until a - solution is found. - target_cost: int, optional - A cost value provided by the user as a target for the solution to be - found by the solver, when a solution with such cost is found and - read, execution ends. - backend: BACKENDS, optional - Specifies the backend where the main solver network will be - deployed. - hyperparameters: ty.Dict[str, ty.Union[int, npt.ArrayLike]], optional - A dictionary specifying values for steps_to_fire, noise_amplitude, - step_size and init_value. All but the last are integers, the initial - value is an array-like of initial values for the variables defining - the problem. + config: SolverConfig, optional - Returns - ---------- - solution: npt.ArrayLike - Candidate solution to the input optimization problem. - """ - if timeout < 0: - raise NotImplementedError("The timeout must be > 0.") - target_cost = self._validated_cost(target_cost) - hyperparameters = hyperparameters or self.hyperparameters - self._create_solver_process(self.problem, - target_cost, - backend, - hyperparameters) - run_cfg = self._get_run_config(backend) - run_condition = self._get_run_condition(timeout) - self.solver_process._log_config.level = 20 - self.solver_process.run(condition=run_condition, run_cfg=run_cfg) - self._update_report(target_cost=target_cost) - self.solver_process.stop() - return self._report["best_state"] - - def measure_time_to_solution( - self, - timeout: int, - target_cost: int, - backend: BACKENDS, - hyperparameters: ty.Dict[str, ty.Union[int, npt.ArrayLike]] = None, - ): - """ - Run solver until target_cost or timeout and returns total time to - solution. - - Parameters - ---------- - timeout: int - Maximum number of iterations (timesteps) to be run. If set to -1 - then the solver will run continuously in non-blocking mode until a - solution is found. - target_cost: int, optional - A cost value provided by the user as a target for the solution to be - found by the solver, when a solution with such cost is found and - read, execution ends. - backend: BACKENDS - At the moment, only the Loihi2 backend can be used. 
- hyperparameters: ty.Dict[str, ty.Union[int, npt.ArrayLike]], optional - A dictionary specifying values for steps_to_fire, noise_amplitude, - step_size and init_value. All but the last are integers, the initial - value is an array-like of initial values for the variables defining - the problem. Returns ---------- - time_to_solution: npt.ArrayLike - Total time to solution in seconds. + report: SolverReport + An object containing all the data geenrated by the execution. """ - if timeout < 0: - raise NotImplementedError("The timeout must be > 0.") - if backend not in NEUROCORES: - raise ValueError(f"Time measurement can only be performed on " - f"Loihi2 backend, got {backend}.") - - target_cost = self._validated_cost(target_cost) - hyperparameters = hyperparameters or self.hyperparameters - self._create_solver_process(self.problem, - target_cost, - backend, - hyperparameters) - run_cfg = self._get_run_config(backend) - run_condition = self._get_run_condition(timeout) - - from lava.utils.profiler import Profiler - self._profiler = Profiler.init(run_cfg) - self._profiler.execution_time_probe(num_steps=timeout + 1) + self._create_solver_process(config=config) + run_cfg = self._get_run_config(backend=config.backend) + run_condition = RunSteps(num_steps=config.timeout) + + # TODO: Enable profiling with new interface + # from lava.utils.profiler import Profiler + # self._profiler = Profiler.init(run_cfg) + # self._profiler.execution_time_probe(num_steps=timeout) + # self._profiler.energy_probe(num_steps=timeout) self.solver_process.run(condition=run_condition, run_cfg=run_cfg) - self._update_report(target_cost=target_cost) + best_state, best_cost, best_timestep = self._get_results() self.solver_process.stop() - return self._profiler.execution_time - - def measure_energy_to_solution( - self, - timeout: int, - target_cost: int, - backend: BACKENDS, - hyperparameters: ty.Dict[str, ty.Union[int, npt.ArrayLike]] = None, - ): - """ - Run solver until target_cost or timeout and returns energy to solution. - Parameters - ---------- - timeout: int - Maximum number of iterations (timesteps) to be run. If set to -1 - then the solver will run continuously in non-blocking mode until a - solution is found. - target_cost: int, optional - A cost value provided by the user as a target for the solution to be - found by the solver, when a solution with such cost is found and - read, execution ends. - backend: BACKENDS - At the moment, only the Loihi2 backend can be used. - hyperparameters: ty.Dict[str, ty.Union[int, npt.ArrayLike]], optional - A dictionary specifying values for steps_to_fire, noise_amplitude, - step_size and init_value. All but the last are integers, the initial - value is an array-like of initial values for the variables defining - the problem. - - Returns - ---------- - energy_to_solution: npt.ArrayLike - Total energy to solution in Joule. 
- """ - if timeout < 0: - raise NotImplementedError("The timeout must be > 0.") - if backend not in NEUROCORES: - raise ValueError(f"Enegy measurement can only be performed on " - f"Loihi2 backend, got {backend}.") - - target_cost = self._validated_cost(target_cost) - hyperparameters = hyperparameters or self.hyperparameters - self._create_solver_process(self.problem, - target_cost, - backend, - hyperparameters) - run_cfg = self._get_run_config(backend) - run_condition = self._get_run_condition(timeout) - - from lava.utils.profiler import Profiler - self._profiler = Profiler.init(run_cfg) - self._profiler.execution_time_probe(num_steps=timeout + 1) - self._profiler.energy_probe(num_steps=timeout + 1) + report = SolverReport( + best_cost=best_cost, + best_state=best_state, + best_timestep=best_timestep, + solver_config=config + ) - self.solver_process.run(condition=run_condition, run_cfg=run_cfg) - self._update_report(target_cost=target_cost) - self.solver_process.stop() - return self._profiler.energy + return report - def _update_report(self, target_cost=None, - time_to_solution=None, - energy_to_solution=None): - self._report["target_cost"] = target_cost - best_state = self.solver_process.variable_assignment.aliased_var.get() - self._report["best_state"] = best_state - raw_cost = self.solver_process.optimality.aliased_var.get() - cost = (raw_cost.astype(np.int32) << 8) >> 8 - self._report["cost"] = cost - self._report["solved"] = cost == target_cost - steps_to_solution = self.solver_process.solution_step.aliased_var.get() - self._report["steps_to_solution"] = steps_to_solution - self._report["time_to_solution"] = time_to_solution - self._report["energy_to_solution"] = energy_to_solution - print(self._report) - - def _create_solver_process(self, - problem: OptimizationProblem, - target_cost: ty.Optional[int] = None, - backend: BACKENDS = None, - hyperparameters: ty.Dict[ - str, ty.Union[int, npt.ArrayLike]] = None): + def _create_solver_process(self, config: SolverConfig) -> None: """ Create process and model class as solver for the given problem. Parameters ---------- - problem: OptimizationProblem - Optimization problem defined by cost and constraints which will be - used to build the process and its model. - target_cost: int, optional - A cost value provided by the user as a target for the solution to be - found by the solver, when a solution with such cost is found and - read, execution ends. - backend: BACKENDS, optional - Specifies the backend where the main solver network will be - deployed. - hyperparameters: ty.Dict[str, ty.Union[int, npt.ArrayLike]] + config: SolverConfig + """ - requirements, protocol = self._get_requirements_and_protocol(backend) + requirements, protocol = self._get_requirements_and_protocol( + backend=config.backend + ) self._process_builder.create_solver_process( - problem, hyperparameters or dict() + problem=self.problem, + hyperparameters=config.hyperparameters or dict() ) self._process_builder.create_solver_model( - target_cost, requirements, protocol + target_cost=config.target_cost, + requirements=requirements, + protocol=protocol ) self.solver_process = self._process_builder.solver_process self.solver_model = self._process_builder.solver_model + self.solver_process._log_config.level = config.log_level def _get_requirements_and_protocol( self, backend: BACKENDS @@ -369,13 +215,7 @@ def _get_requirements_and_protocol( Specifies the backend for which requirements and protocol classes will be returned. 
""" - protocol = LoihiProtocol - if backend in CPUS: - return [CPU], protocol - elif backend in NEUROCORES: - return [Loihi2NeuroCore], protocol - else: - raise NotImplementedError(str(backend) + BACKEND_MSG) + return [CPU] if backend in CPUS else [Loihi2NeuroCore], LoihiProtocol def _get_run_config(self, backend): if backend in CPUS: @@ -399,11 +239,9 @@ def _get_run_config(self, backend): raise NotImplementedError(str(backend) + BACKEND_MSG) return run_cfg - def _validated_cost(self, target_cost): - if target_cost != int(target_cost): - raise ValueError(f"target_cost has to be an integer, received " - f"{target_cost}") - return int(target_cost) - - def _get_run_condition(self, timeout): - return RunSteps(num_steps=timeout + 1) + def _get_results(self): + best_state = self.solver_process.variable_assignment.aliased_var.get() + best_cost = self.solver_process.optimality.aliased_var.get() + best_cost = (best_cost.astype(np.int32) << 8) >> 8 + best_timestep = self.solver_process.solution_step.aliased_var.get() + return best_state, int(best_cost), int(best_timestep) diff --git a/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py b/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py index d1ea6c54..8b7b7ef8 100644 --- a/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py +++ b/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py @@ -11,7 +11,9 @@ from lava.lib.optimization.solvers.generic.hierarchical_processes import ( CostConvergenceChecker, ) -from lava.lib.optimization.solvers.generic.solver import OptimizationSolver +from lava.lib.optimization.solvers.generic.solver import ( + OptimizationSolver, SolverConfig +) from lava.lib.optimization.solvers.generic.read_gate.process import ReadGate from lava.lib.optimization.solvers.generic.monitoring_processes \ .solution_readout.process import SolutionReadout @@ -35,25 +37,27 @@ def test_create_obj(self): def test_solution_has_expected_shape(self): print("test_solution_has_expected_shape") - solution = self.solver.solve(timeout=3000, backend="CPU") - self.assertEqual(solution.shape, self.solution.shape) + report = self.solver.solve(config=SolverConfig(timeout=3000)) + self.assertEqual(report.best_state.shape, self.solution.shape) def test_solve_method(self): print("test_solve_method") np.random.seed(2) - solution = self.solver.solve(timeout=200, target_cost=-11, - backend="CPU") - print(solution) - self.assertTrue((solution == self.solution).all()) + report = self.solver.solve(config=SolverConfig( + timeout=200, + target_cost=-11 + )) + print(report) + self.assertTrue((report.best_state == self.solution).all()) def test_solver_creates_optimizationsolver_process(self): - self.solver._create_solver_process(self.problem, backend="CPU") + self.solver._create_solver_process(config=SolverConfig(backend="CPU")) class_name = type(self.solver.solver_process).__name__ self.assertEqual(class_name, "OptimizationSolverProcess") def test_solves_creates_macrostate_reader_processes(self): self.assertIsNone(self.solver.solver_process) - self.solver.solve(timeout=1) + self.solver.solve(config=SolverConfig(timeout=1)) mr = self.solver.solver_process.model_class( self.solver.solver_process ).macrostate_reader @@ -67,7 +71,7 @@ def test_solves_creates_macrostate_reader_processes(self): def test_macrostate_reader_processes_connections(self): self.assertIsNone(self.solver.solver_process) - self.solver.solve(timeout=1) + self.solver.solve(config=SolverConfig(timeout=1)) mr = 
self.solver.solver_process.model_class( self.solver.solver_process ).macrostate_reader @@ -90,7 +94,7 @@ def test_macrostate_reader_processes_connections(self): def test_cost_checker_is_connected_to_variables_population(self): self.assertIsNone(self.solver.solver_process) - self.solver.solve(timeout=1) + self.solver.solve(config=SolverConfig(timeout=1)) pm = self.solver.solver_process.model_class( self.solver.solver_process ) @@ -101,7 +105,7 @@ def test_cost_checker_is_connected_to_variables_population(self): ) def test_qubo_cost_defines_weights(self): - self.solver.solve(timeout=1) + self.solver.solve(config=SolverConfig(timeout=1)) pm = self.solver.solver_process.model_class( self.solver.solver_process ) @@ -112,7 +116,7 @@ def test_qubo_cost_defines_weights(self): self.assertTrue(condition) def test_qubo_cost_defines_biases(self): - self.solver.solve(timeout=1) + self.solver.solve(config=SolverConfig(timeout=1)) pm = self.solver.solver_process.model_class( self.solver.solver_process ) @@ -121,7 +125,7 @@ def test_qubo_cost_defines_biases(self): self.assertTrue(condition) def test_qubo_cost_defines_num_vars_in_discrete_variables_process(self): - self.solver.solve(timeout=1) + self.solver.solve(config=SolverConfig(timeout=1)) pm = self.solver.solver_process.model_class( self.solver.solver_process ) @@ -140,12 +144,15 @@ def solve_workload(q, reference_solution, noise_precision=3): problem = QUBO(q) np.random.seed(2) solver = OptimizationSolver(problem) - solution = solver.solve(timeout=20000, - target_cost=expected_cost, - hyperparameters={'noise_precision': noise_precision} - ) - cost = solution @ q @ solution - return solution, cost, expected_cost + report = solver.solve(config=SolverConfig( + timeout=20000, + target_cost=expected_cost, + hyperparameters={ + 'noise_precision': noise_precision + } + )) + cost = report.best_state @ q @ report.best_state + return report.best_state, cost, expected_cost class TestWorkloads(unittest.TestCase): diff --git a/tests/lava/lib/optimization/utils/generators/test_mis.py b/tests/lava/lib/optimization/utils/generators/test_mis.py index 939b9967..fa6d59a7 100644 --- a/tests/lava/lib/optimization/utils/generators/test_mis.py +++ b/tests/lava/lib/optimization/utils/generators/test_mis.py @@ -5,7 +5,9 @@ import unittest import numpy as np -from lava.lib.optimization.solvers.generic.solver import OptimizationSolver +from lava.lib.optimization.solvers.generic.solver import ( + OptimizationSolver, SolverConfig +) from lava.lib.optimization.utils.generators.mis import MISProblem @@ -113,19 +115,21 @@ def test_qubo_solution(self): optimal_cost = -2 qubo = self.problem.get_as_qubo(w_diag=1, w_off=4) - params = {"timeout": 1000, - "target_cost": optimal_cost, - "backend": "CPU", - "hyperparameters": { - "steps_to_fire": 11, - "noise_amplitude": 1, - "noise_precision": 4, - "step_size": 11, - }} + config = SolverConfig( + timeout=1000, + target_cost=optimal_cost, + backend="CPU", + hyperparameters={ + "steps_to_fire": 11, + "noise_amplitude": 1, + "noise_precision": 4, + "step_size": 11, + } + ) solver = OptimizationSolver(qubo) - solution = solver.solve(**params) - self.assertEqual(qubo.evaluate_cost(solution), optimal_cost) + report = solver.solve(config=config) + self.assertEqual(qubo.evaluate_cost(report.best_state), optimal_cost) if __name__ == "__main__": From 7a6fff8a4f6155b84bbb80e236da70fc4b59940d Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Tue, 31 Jan 2023 09:58:25 -0800 Subject: [PATCH 02/10] Integrate profiler --- 
.../optimization/solvers/generic/solver.py | 95 ++++++++----------- 1 file changed, 39 insertions(+), 56 deletions(-) diff --git a/src/lava/lib/optimization/solvers/generic/solver.py b/src/lava/lib/optimization/solvers/generic/solver.py index de6922ce..4b7d0643 100644 --- a/src/lava/lib/optimization/solvers/generic/solver.py +++ b/src/lava/lib/optimization/solvers/generic/solver.py @@ -60,13 +60,6 @@ class SolverConfig: probe_energy: bool = False log_level: int = 40 - # TODO: Validation Rules - # timeout > 0 - # target_cost is an integer - # probe_time/probe_energy cannot be True if backend is not in NEUROCORES - - # TODO: Hyperparameters validation - @dataclass(frozen=True) class SolverReport: @@ -74,49 +67,29 @@ class SolverReport: best_state: np.ndarray = None best_timestep: int = None solver_config: SolverConfig = None + profiler = None -def solve( - problem: OptimizationProblem, - timeout: int, - target_cost: int = None, - backend: BACKENDS = Loihi2NeuroCore, -) -> npt.ArrayLike: - """Create solver from problem spec and run until target_cost or timeout. +def solve(problem: OptimizationProblem, + config: SolverConfig = SolverConfig()) -> np.ndarray: + """ + Solve the given optimization problem using the passed configuration, and + returns the best candidate solution. Parameters ---------- problem: OptimizationProblem Optimization problem to be solved. - timeout: int - Maximum number of iterations (timesteps) to be run. If set to -1 then - the solver will run continuously in non-blocking mode until a solution - is found. - target_cost: int, optional - A cost value provided by the user as a target for the solution to be - found by the solver, when a solution with such cost is found and read, - execution ends. - backend: BACKENDS, optional - Specifies the backend where the main solver network will be deployed. - - Returns - ---------- - solution: npt.ArrayLike - Candidate solution to the input optimization problem. + config: SolverConfig, optional """ solver = OptimizationSolver(problem) - report = solver.solve( - config=SolverConfig( - timeout=timeout, - target_cost=target_cost, - backend=backend - ) - ) + report = solver.solve(config=config) return report.best_state class OptimizationSolver: - """Generic solver for constrained optimization problems defined by + """ + Generic solver for constrained optimization problems defined by variables, cost and constraints. The problem should behave according to the OptimizationProblem's @@ -141,6 +114,7 @@ def __init__(self, problem: OptimizationProblem): self._process_builder = SolverProcessBuilder() self.solver_process = None self.solver_model = None + self._profiler = None def solve(self, config: SolverConfig = SolverConfig()) -> SolverReport: """ @@ -156,29 +130,29 @@ def solve(self, config: SolverConfig = SolverConfig()) -> SolverReport: report: SolverReport An object containing all the data geenrated by the execution. 
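 
         Examples
         --------
         A minimal sketch, assuming ``problem`` is a previously constructed
         ``OptimizationProblem``:
 
         >>> solver = OptimizationSolver(problem)
         >>> report = solver.solve(config=SolverConfig(timeout=1000,
         ...                                           target_cost=-11))
         >>> report.best_state, report.best_cost, report.best_timestep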
""" - self._create_solver_process(config=config) - run_cfg = self._get_run_config(backend=config.backend) - run_condition = RunSteps(num_steps=config.timeout) - - # TODO: Enable profiling with new interface - # from lava.utils.profiler import Profiler - # self._profiler = Profiler.init(run_cfg) - # self._profiler.execution_time_probe(num_steps=timeout) - # self._profiler.energy_probe(num_steps=timeout) - + run_condition, run_cfg = self._prepare_solver(config) self.solver_process.run(condition=run_condition, run_cfg=run_cfg) - best_state, best_cost, best_timestep = self._get_results() self.solver_process.stop() + best_state, best_cost, best_timestep = self._get_results() + report = SolverReport( best_cost=best_cost, best_state=best_state, best_timestep=best_timestep, - solver_config=config + solver_config=config, + profiler=self._profiler ) return report + def _prepare_solver(self, config: SolverConfig): + self._create_solver_process(config=config) + run_cfg = self._get_run_config(backend=config.backend) + run_condition = RunSteps(num_steps=config.timeout) + self._prepare_profiler(config=config, run_cfg=run_cfg) + return run_condition, run_cfg + def _create_solver_process(self, config: SolverConfig) -> None: """ Create process and model class as solver for the given problem. @@ -207,7 +181,8 @@ def _create_solver_process(self, config: SolverConfig) -> None: def _get_requirements_and_protocol( self, backend: BACKENDS ) -> ty.Tuple[AbstractComputeResource, AbstractSyncProtocol]: - """Figure out requirements and protocol for a given backend. + """ + Figure out requirements and protocol for a given backend. Parameters ---------- @@ -217,7 +192,7 @@ def _get_requirements_and_protocol( """ return [CPU] if backend in CPUS else [Loihi2NeuroCore], LoihiProtocol - def _get_run_config(self, backend): + def _get_run_config(self, backend: BACKENDS): if backend in CPUS: pdict = {self.solver_process: self.solver_model, ReadGate: ReadGatePyModel, @@ -226,18 +201,26 @@ def _get_run_config(self, backend): BoltzmannAbstractModel, Boltzmann: BoltzmannFixed } - run_cfg = Loihi1SimCfg(exception_proc_model_map=pdict, - select_sub_proc_model=True) + return Loihi1SimCfg(exception_proc_model_map=pdict, + select_sub_proc_model=True) elif backend in NEUROCORES: pdict = {self.solver_process: self.solver_model, BoltzmannAbstract: BoltzmannAbstractModel, } - run_cfg = Loihi2HwCfg(exception_proc_model_map=pdict, - select_sub_proc_model=True) + return Loihi2HwCfg(exception_proc_model_map=pdict, + select_sub_proc_model=True) else: raise NotImplementedError(str(backend) + BACKEND_MSG) - return run_cfg + + def _prepare_profiler(self, config: SolverConfig, run_cfg) -> None: + if config.probe_time or config.probe_energy: + from lava.utils.profiler import Profiler + self._profiler = Profiler.init(run_cfg) + if config.probe_time: + self._profiler.execution_time_probe(num_steps=config.timeout) + if config.probe_energy: + self._profiler.energy_probe(num_steps=config.timeout) def _get_results(self): best_state = self.solver_process.variable_assignment.aliased_var.get() From 22c56f5548d4d9104c8b755f170d9d3d6c537138 Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Tue, 31 Jan 2023 10:07:40 -0800 Subject: [PATCH 03/10] Fix linting issues --- .../solvers/generic/test_solver_cpu_backend.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py b/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py index 
c9bc1be0..742793d6 100644 --- a/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py +++ b/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py @@ -54,7 +54,7 @@ def test_solve_method_scif(self): timeout=200, target_cost=-11, hyperparameters={"neuron_model": "scif", - "noise_precision": 5}) + "noise_precision": 5} )) print(report) self.assertTrue((report.best_state == self.solution).all()) @@ -158,10 +158,10 @@ def solve_workload(q, reference_solution, noise_precision=3, timeout=20000, target_cost=expected_cost, hyperparameters={ - 'neuron_model': 'scif', - 'noise_amplitude': noise_amplitude, - 'noise_precision': noise_precision, - 'sustained_on_tau': on_tau + 'neuron_model': 'scif', + 'noise_amplitude': noise_amplitude, + 'noise_precision': noise_precision, + 'sustained_on_tau': on_tau } )) cost = report.best_state @ q @ report.best_state From ba437524e3af7869c1992eb2319a163081a1ca8f Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Tue, 31 Jan 2023 10:22:31 -0800 Subject: [PATCH 04/10] Update tQUBO utoorial to match new API --- .../optimization/solvers/generic/solver.py | 7 ++---- tutorials/tutorial_02_solving_qubos.ipynb | 24 +++++++++++++------ 2 files changed, 19 insertions(+), 12 deletions(-) diff --git a/src/lava/lib/optimization/solvers/generic/solver.py b/src/lava/lib/optimization/solvers/generic/solver.py index 5e021d05..a36f86e6 100644 --- a/src/lava/lib/optimization/solvers/generic/solver.py +++ b/src/lava/lib/optimization/solvers/generic/solver.py @@ -69,7 +69,6 @@ class SolverReport: best_state: np.ndarray = None best_timestep: int = None solver_config: SolverConfig = None - profiler = None def solve(problem: OptimizationProblem, @@ -134,16 +133,14 @@ def solve(self, config: SolverConfig = SolverConfig()) -> SolverReport: """ run_condition, run_cfg = self._prepare_solver(config) self.solver_process.run(condition=run_condition, run_cfg=run_cfg) - self.solver_process.stop() - best_state, best_cost, best_timestep = self._get_results() + self.solver_process.stop() report = SolverReport( best_cost=best_cost, best_state=best_state, best_timestep=best_timestep, - solver_config=config, - profiler=self._profiler + solver_config=config ) return report diff --git a/tutorials/tutorial_02_solving_qubos.ipynb b/tutorials/tutorial_02_solving_qubos.ipynb index d72d2664..843ac6c8 100644 --- a/tutorials/tutorial_02_solving_qubos.ipynb +++ b/tutorials/tutorial_02_solving_qubos.ipynb @@ -56,7 +56,7 @@ "# Interface for QUBO problems\n", "from lava.lib.optimization.problems.problems import QUBO\n", "# Generic optimization solver\n", - "from lava.lib.optimization.solvers.generic.solver import OptimizationSolver, solve" + "from lava.lib.optimization.solvers.generic.solver import OptimizationSolver, solve, SolverConfig" ] }, { @@ -299,11 +299,16 @@ "\n", "# Solve the QUBO using Lava's OptimizationSolver on CPU\n", "# Change \"backend='Loihi2'\" if your system has physical access to this chip\n", - "solution_loihi = solver.solve(timeout=10000,\n", - " hyperparameters=hyperparameters,\n", - " target_cost=int(-5),\n", - " backend=backend)\n", - "\n", + "solver_report = solver.solve(\n", + " config=SolverConfig(\n", + " timeout=10000,\n", + " hyperparameters=hyperparameters,\n", + " target_cost=int(-5),\n", + " backend=backend\n", + " )\n", + ")\n", + "\n", + "solution_loihi = solver_report.best_state\n", "print(f'\\nSolution of the provided QUBO: {np.where(solution_loihi == 1.)[0]}.')" ] }, @@ -405,7 +410,7 @@ ], "metadata": { "kernelspec": { - "display_name": 
"Python 3 (ipykernel)", + "display_name": ".venv", "language": "python", "name": "python3" }, @@ -420,6 +425,11 @@ "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.10" + }, + "vscode": { + "interpreter": { + "hash": "0285f4d976e7d5c77fbc79f3712c5561c504e187cba953573dd269218b32f868" + } } }, "nbformat": 4, From 10af5dd78e4de70fc1dcc5c0fece431e43b6a000 Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Wed, 1 Feb 2023 00:37:59 -0800 Subject: [PATCH 05/10] Adapt SolverTuner to new API --- .../lib/optimization/utils/solver_tuner.py | 32 +++++++++++-------- .../optimization/utils/test_solver_tuner.py | 28 ++++++++++------ 2 files changed, 36 insertions(+), 24 deletions(-) diff --git a/src/lava/lib/optimization/utils/solver_tuner.py b/src/lava/lib/optimization/utils/solver_tuner.py index 52d2aa13..61b9b07f 100644 --- a/src/lava/lib/optimization/utils/solver_tuner.py +++ b/src/lava/lib/optimization/utils/solver_tuner.py @@ -2,6 +2,9 @@ # SPDX-License-Identifier: BSD-3-Clause # See: https://spdx.org/licenses/ +from lava.lib.optimization.solvers.generic.solver import ( + OptimizationSolver, SolverConfig, SolverReport +) import itertools as it import typing as ty @@ -51,10 +54,10 @@ def __init__(self, len(self._search_space), dtype=self._store_dtype) def tune(self, - solver, - solver_params: ty.Dict, - fitness_fn: ty.Callable[[float, int], float], + solver: OptimizationSolver, + fitness_fn: ty.Callable[[SolverReport], float], fitness_target: float = None, + config: SolverConfig = SolverConfig() ): """ Perform random search to optimize solver hyper-parameters based on a @@ -64,8 +67,6 @@ def tune(self, ---------- solver: OptimizationSolver Optimization solver to use for solving the problem. - solver_params: ty.Dict - Parameters for the solver. fitness_fn: ty.Callable[[float, int], float] Fitness function to evaluate a given set of hyper-parameters, taking as input the current cost and number of steps to solution. @@ -73,6 +74,8 @@ def tune(self, fitness_target: float, optional Fitness target to reach. If this is not passed, the full grid is explored before stopping search. + config: SolverConfig, optional + Solver configuration to be used. Returns ------- @@ -82,7 +85,6 @@ def tune(self, Flag signaling if the fitness_target has been reached. If no fitness_target is passed, the flag is True. 
""" - # TODO : Check that hyperparams are arguments for solver self._stored_rows = 0 if self._store.shape[0] < len(self._search_space): self._store = np.zeros( @@ -94,15 +96,17 @@ def tune(self, for params in self._search_space: np.random.seed(self._seed) hyperparams = dict(zip(self._params_names, params)) - solver_params["hyperparameters"] = hyperparams - solver.solve(**solver_params) - cost = solver.last_run_report["cost"] - step_to_sol = solver.last_run_report["steps_to_solution"] - fitness = fitness_fn(cost, step_to_sol) - self._store_trial(hyperparams, cost, step_to_sol, fitness) - if fitness > best_fitness: + config.hyperparameters = hyperparams + report = solver.solve(config=config) + self._store_trial( + params=hyperparams, + cost=report.best_cost, + step_to_sol=report.best_timestep, + fitness=fitness_fn(report) + ) + if fitness_fn(report) > best_fitness: best_hyperparams = hyperparams - best_fitness = fitness + best_fitness = fitness_fn(report) print( f"Better hyperparameters configuration found!\n" f"Hyperparameters: {best_hyperparams}" diff --git a/tests/lava/lib/optimization/utils/test_solver_tuner.py b/tests/lava/lib/optimization/utils/test_solver_tuner.py index d04ba822..0a3c7032 100644 --- a/tests/lava/lib/optimization/utils/test_solver_tuner.py +++ b/tests/lava/lib/optimization/utils/test_solver_tuner.py @@ -7,11 +7,13 @@ import numpy as np from lava.lib.optimization.problems.problems import QUBO -from lava.lib.optimization.solvers.generic.solver import OptimizationSolver +from lava.lib.optimization.solvers.generic.solver import ( + OptimizationSolver, SolverConfig, SolverReport +) from lava.lib.optimization.utils.solver_tuner import SolverTuner -def prepare_solver_and_params(): +def prepare_solver_and_config(): """Generate an example QUBO workload.""" q = np.asarray([[-5, 2, 4, 0], [2, -3, 1, 0], @@ -19,9 +21,13 @@ def prepare_solver_and_params(): qubo_problem = QUBO(q=q) solver = OptimizationSolver(qubo_problem) - solver_params = {"timeout": 1000, "target_cost": -11, "backend": "CPU"} + config = SolverConfig( + timeout=1000, + target_cost=-11, + backend="CPU" + ) - return solver, solver_params + return solver, config class TestSolverTuner(unittest.TestCase): @@ -100,19 +106,21 @@ def test_tune_success(self): """Tests the correct set of hyper-parameters is found, for a known problem.""" - solver, solver_params = prepare_solver_and_params() + solver, config = prepare_solver_and_config() - def fitness(cost, step_to_sol): - return - step_to_sol if cost <= solver_params['target_cost'] \ - else - float("inf") + def fitness(report: SolverReport) -> float: + if report.best_cost <= config.target_cost: + return - report.best_timestep + else: + return - float("inf") fitness_target = -21 hyperparams, success = self.solver_tuner.tune( solver=solver, - solver_params=solver_params, fitness_fn=fitness, - fitness_target=fitness_target + fitness_target=fitness_target, + config=config ) print(hyperparams) From 0fcd1836019c829bd2ac840f52fb7c55f0ee097c Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Wed, 1 Feb 2023 00:43:39 -0800 Subject: [PATCH 06/10] Update unittests for new API --- .../solvers/generic/test_solver_cpu_backend.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py b/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py index 742793d6..1a58abe4 100644 --- a/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py +++ 
b/tests/lava/lib/optimization/solvers/generic/test_solver_cpu_backend.py @@ -41,11 +41,15 @@ def test_solution_has_expected_shape(self): def test_solve_method_nebm(self): print("test_solve_method") np.random.seed(2) - solution = self.solver.solve(timeout=200, target_cost=-11, - backend="CPU", - hyperparameters={"neuron_model": "nebm"}) - print(solution) - self.assertTrue((solution == self.solution).all()) + config = SolverConfig( + timeout=200, + target_cost=-11, + backend="CPU", + hyperparameters={"neuron_model": "nebm"} + ) + report = self.solver.solve(config=config) + print(report) + self.assertTrue((report.best_state == self.solution).all()) def test_solve_method_scif(self): print("test_solve_method") From 567e1b2c59170bdd6126814ce31ec6e2e507e390 Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Wed, 1 Feb 2023 01:08:41 -0800 Subject: [PATCH 07/10] Upddate docstrings --- .../optimization/solvers/generic/solver.py | 49 +++++++++++++++++-- 1 file changed, 46 insertions(+), 3 deletions(-) diff --git a/src/lava/lib/optimization/solvers/generic/solver.py b/src/lava/lib/optimization/solvers/generic/solver.py index a36f86e6..6bba4be3 100644 --- a/src/lava/lib/optimization/solvers/generic/solver.py +++ b/src/lava/lib/optimization/solvers/generic/solver.py @@ -52,8 +52,36 @@ @dataclass class SolverConfig: - """Dataclass to store and validate OptimizationSolver configurations.""" + """ + Dataclass to store and validate OptimizationSolver configurations. + Parameters + ---------- + timeout: int + Maximum number of iterations (timesteps) to be run. If set to -1 + then the solver will run continuously in non-blocking mode until a + solution is found. + target_cost: int, optional + A cost value provided by the user as a target for the solution to be + found by the solver, when a solution with such cost is found and + read, execution ends. + backend: BACKENDS, optional + Specifies the backend where the main solver network will be + deployed. + hyperparameters: ty.Dict[str, ty.Union[int, npt.ArrayLike]], optional + A dictionary specifying values for steps_to_fire, noise_amplitude, + step_size and init_value. All but the last are integers, the initial + value is an array-like of initial values for the variables defining + the problem. + probe_time: bool + A boolean flag to request time profiling, available only on "Loihi2" + backend. + probe_energy: bool + A boolean flag to request time profiling, available only on "Loihi2" + backend. + log_level: int + Select log verbosity (40: default, 20: verbose). + """ timeout: int = 1e3 target_cost: int = 0 backend: BACKENDS = CPU @@ -65,6 +93,20 @@ class SolverConfig: @dataclass(frozen=True) class SolverReport: + """ + Dataclass to store OptimizationSolver results. + + Parameters + ---------- + best_cost: int + Best cost found during the execution. + best_state: np.ndarray + Candidate solution associated to the best cost. + best_timestep: int + Execution timestep during which the best solution was found. + solver_config: SolverConfig + Solver configuraiton used. Refers to SolverConfig documentation. + """ best_cost: int = None best_state: np.ndarray = None best_timestep: int = None @@ -82,6 +124,7 @@ def solve(problem: OptimizationProblem, problem: OptimizationProblem Optimization problem to be solved. config: SolverConfig, optional + Solver configuraiton used. Refers to SolverConfig documentation. 
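+
+    Examples
+    --------
+    A minimal sketch, assuming ``problem`` is a previously constructed
+    ``OptimizationProblem``:
+
+    >>> best_state = solve(problem, config=SolverConfig(timeout=1000))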
""" solver = OptimizationSolver(problem) report = solver.solve(config=config) @@ -124,7 +167,7 @@ def solve(self, config: SolverConfig = SolverConfig()) -> SolverReport: Parameters ---------- config: SolverConfig, optional - + Solver configuraiton used. Refers to SolverConfig documentation. Returns ---------- @@ -159,7 +202,7 @@ def _create_solver_process(self, config: SolverConfig) -> None: Parameters ---------- config: SolverConfig - + Solver configuraiton used. Refers to SolverConfig documentation. """ requirements, protocol = self._get_requirements_and_protocol( backend=config.backend From b5d1af6a4573392727e4346dd9aca594f39423ff Mon Sep 17 00:00:00 2001 From: "Pierro, Alessandro" Date: Wed, 1 Feb 2023 01:19:59 -0800 Subject: [PATCH 08/10] Add profilerr to SolverReport --- src/lava/lib/optimization/solvers/generic/solver.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/lava/lib/optimization/solvers/generic/solver.py b/src/lava/lib/optimization/solvers/generic/solver.py index 6bba4be3..5d8bb75b 100644 --- a/src/lava/lib/optimization/solvers/generic/solver.py +++ b/src/lava/lib/optimization/solvers/generic/solver.py @@ -3,6 +3,7 @@ # See: https://spdx.org/licenses/ import typing as ty from dataclasses import dataclass +from lava.utils.profiler import Profiler import numpy.typing as npt import numpy as np @@ -106,11 +107,14 @@ class SolverReport: Execution timestep during which the best solution was found. solver_config: SolverConfig Solver configuraiton used. Refers to SolverConfig documentation. + profiler: Profiler + Profiler instance containing time, energy and activity measurements. """ best_cost: int = None best_state: np.ndarray = None best_timestep: int = None solver_config: SolverConfig = None + profiler: Profiler = None def solve(problem: OptimizationProblem, @@ -183,7 +187,8 @@ def solve(self, config: SolverConfig = SolverConfig()) -> SolverReport: best_cost=best_cost, best_state=best_state, best_timestep=best_timestep, - solver_config=config + solver_config=config, + profiler=self._profiler ) return report @@ -258,12 +263,13 @@ def _get_run_config(self, backend: BACKENDS): def _prepare_profiler(self, config: SolverConfig, run_cfg) -> None: if config.probe_time or config.probe_energy: - from lava.utils.profiler import Profiler self._profiler = Profiler.init(run_cfg) if config.probe_time: self._profiler.execution_time_probe(num_steps=config.timeout) if config.probe_energy: self._profiler.energy_probe(num_steps=config.timeout) + else: + self._profiler = None def _get_results(self): best_state = self.solver_process.variable_assignment.aliased_var.get() From d421b5a5bf3ddd4f4d45721aac992a62474be43f Mon Sep 17 00:00:00 2001 From: Alessandro Pierro Date: Wed, 1 Feb 2023 11:14:47 +0100 Subject: [PATCH 09/10] Update SolverTuner docstring --- src/lava/lib/optimization/utils/solver_tuner.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/lava/lib/optimization/utils/solver_tuner.py b/src/lava/lib/optimization/utils/solver_tuner.py index 61b9b07f..7301c635 100644 --- a/src/lava/lib/optimization/utils/solver_tuner.py +++ b/src/lava/lib/optimization/utils/solver_tuner.py @@ -67,10 +67,11 @@ def tune(self, ---------- solver: OptimizationSolver Optimization solver to use for solving the problem. - fitness_fn: ty.Callable[[float, int], float] + fitness_fn: ty.Callable[[SolverReport], float] Fitness function to evaluate a given set of hyper-parameters, - taking as input the current cost and number of steps to solution. 
- This is the function that is maximized by the SolverTuner. + taking as input a SolverReport instance (refers to its documentation + for the available parameters). This is the function that is + maximized by the SolverTuner. fitness_target: float, optional Fitness target to reach. If this is not passed, the full grid is explored before stopping search. From df471292c0e80b4162df2d2209b2dfde5616f23e Mon Sep 17 00:00:00 2001 From: Alessandro Pierro Date: Wed, 1 Feb 2023 11:17:31 +0100 Subject: [PATCH 10/10] Fix linting --- src/lava/lib/optimization/utils/solver_tuner.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lava/lib/optimization/utils/solver_tuner.py b/src/lava/lib/optimization/utils/solver_tuner.py index 7301c635..cba1b6d5 100644 --- a/src/lava/lib/optimization/utils/solver_tuner.py +++ b/src/lava/lib/optimization/utils/solver_tuner.py @@ -70,7 +70,7 @@ def tune(self, fitness_fn: ty.Callable[[SolverReport], float] Fitness function to evaluate a given set of hyper-parameters, taking as input a SolverReport instance (refers to its documentation - for the available parameters). This is the function that is + for the available parameters). This is the function that is maximized by the SolverTuner. fitness_target: float, optional Fitness target to reach. If this is not passed, the full grid is