From 17b24280fde7b16cef021e36f6f1f4cd047a9c6f Mon Sep 17 00:00:00 2001 From: Alessandro Pierro Date: Fri, 20 Oct 2023 18:33:24 +0200 Subject: [PATCH] Add reference --- paper.bib | 17 +++++++++++++++++ paper.md | 3 +-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/paper.bib b/paper.bib index e69de29b..cd0c4e00 100644 --- a/paper.bib +++ b/paper.bib @@ -0,0 +1,17 @@ +@inproceedings{10.1145/3589737.3605998, +author = {Snyder, Shay and Risbud, Sumedh R. and Parsa, Maryam}, +title = {Neuromorphic {Bayesian} Optimization in {Lava}}, +year = {2023}, +isbn = {9798400701757}, +publisher = {Association for Computing Machinery}, +address = {New York, NY, USA}, +url = {https://doi.org/10.1145/3589737.3605998}, +doi = {10.1145/3589737.3605998}, +abstract = {The ever-increasing demands of computationally expensive and high-dimensional problems require novel optimization methods to find near-optimal solutions in a reasonable amount of time. Bayesian Optimization (BO) stands as one of the best methodologies for learning the underlying relationships within multi-variate problems. This allows users to optimize time consuming and computationally expensive black-box functions in feasible time frames. Existing BO implementations use traditional von-Neumann architectures, in which data and memory are separate. In this work, we introduce Lava Bayesian Optimization (LavaBO) as a contribution to the open-source Lava Software Framework. LavaBO is the first step towards developing a BO system compatible with heterogeneous, fine-grained parallel, in-memory neuromorphic computing architectures (e.g., Intel's Loihi platform). We evaluate the algorithmic performance of the LavaBO system on multiple problems such as training state-of-the-art spiking neural networks through back-propagation and evolutionary learning. 
Compared to traditional algorithms (such as grid and random search), we highlight the ability of LavaBO to explore the parameter search space with fewer expensive function evaluations, while discovering the optimal solutions.}, +booktitle = {Proceedings of the 2023 International Conference on Neuromorphic Systems}, +articleno = {9}, +numpages = {5}, +keywords = {bayesian optimization, neuromorphic computing, asynchronous computing}, +location = {Santa Fe, NM, USA}, +series = {ICONS '23} +} \ No newline at end of file diff --git a/paper.md b/paper.md index b34f67c8..852c8a75 100644 --- a/paper.md +++ b/paper.md @@ -52,7 +52,6 @@ As this short list shows, JOSS papers are only expected to contain a limited set # Summary -A summary describing the high-level functionality and purpose of the software for a diverse, non-specialist audience. - Challenges of optimization and opportunities of neuromorphic computing - Scalability, low latency, optimality, energy @@ -61,7 +60,7 @@ A summary describing the high-level functionality and purpose of the software fo - `Lava Optimization` increases productivity on developing and testing novel neuromorphic algorithms and applications - The library abstracts away the neuromoprhic aspect of the backend, exposing an API typical of constrained optimization (variables, constraints, cost, etc.) - Supports the community in developing algorithms that are iterative, discrete, and distributed -- We leveraged the library architecture to develop multi-backend QUBO and QP solvers, and received contirbutions from the community for a Bayesian and LCA solvers +- We leveraged the library architecture to develop multi-backend QUBO and QP solvers, and received contributions from the community for Bayesian [@10.1145/3589737.3605998] and LCA solvers # Statement of need