author = {Fraccaroli, Michele
and Lamma, Evelina
and Riguzzi, Fabrizio},
  editor = {Nicosia, Giuseppe
and Ojha, Varun
and La Malfa, Emanuele
and Jansen, Giorgio
and Sciacca, Vincenzo
and Pardalos, Panos
and Giuffrida, Giovanni
and Umeton, Renato},
  title = {Automatic Setting of {DNN} Hyper-Parameters by Mixing {Bayesian Optimization} and Tuning Rules},
  booktitle = {Machine Learning, Optimization, and Data Science, 6th International Conference, LOD 2020, Siena, Italy, July 19--23, 2020, Revised Selected Papers, Part I},
  year = {2020},
  publisher = {Springer International Publishing},
  address = {Cham},
  pages = {477--488},
  abstract = {Deep learning techniques play an increasingly important role in industrial and research environments due to their outstanding results. However, the large number of hyper-parameters to be set may lead to errors if they are set manually. The state-of-the-art hyper-parameters tuning methods are grid search, random search, and Bayesian Optimization. The first two methods are expensive because they try, respectively, all possible combinations and random combinations of hyper-parameters. Bayesian Optimization, instead, builds a surrogate model of the objective function, quantifies the uncertainty in the surrogate using Gaussian Process Regression and uses an acquisition function to decide where to sample the new set of hyper-parameters. This work faces the field of Hyper-Parameters Optimization (HPO). The aim is to improve Bayesian Optimization applied to Deep Neural Networks. For this goal, we build a new algorithm for evaluating and analyzing the results of the network on the training and validation sets and use a set of tuning rules to add new hyper-parameters and/or to reduce the hyper-parameter search space to select a better combination.},
  isbn = {978-3-030-64583-0},
  doi = {10.1007/978-3-030-64583-0_43},
  note = {The final publication is available at Springer via \url{https://doi.org/10.1007/978-3-030-64583-0_43}},
  copyright = {Springer},
  pdf = {},
  series = {Lecture Notes in Computer Science},
  volume = {12565}
}

This file was generated by bibtex2html 1.98.