@inproceedings{AzzBizzFracBertLam23-CSCI-IC,
  author    = {Azzolini, Damiano and Bizzarri, Alice and Fraccaroli, Michele and Bertasi, Francesco and Lamma, Evelina},
  booktitle = {2023 International Conference on Computational Science and Computational Intelligence (CSCI)},
  title     = {A Machine Learning Pipeline to Analyse Multispectral and Hyperspectral Images: Full/Regular Research Paper (CSCI-RTHI)},
  year      = {2023},
  pages     = {1306--1311},
  doi       = {10.1109/CSCI62032.2023.00216}
}
@article{BizFraLam24-FAI-IJ,
  author   = {Bizzarri, Alice and Fraccaroli, Michele and Lamma, Evelina and Riguzzi, Fabrizio},
  title    = {Integration between constrained optimization and deep networks: a survey},
  journal  = {Frontiers in Artificial Intelligence},
  volume   = {7},
  year     = {2024},
  url      = {https://www.frontiersin.org/articles/10.3389/frai.2024.1414707},
  doi      = {10.3389/frai.2024.1414707},
  issn     = {2624-8212},
  abstract = {Integration between constrained optimization and deep networks has garnered significant interest from both research and industrial laboratories. Optimization techniques can be employed to optimize the choice of network structure based not only on loss and accuracy but also on physical constraints. Additionally, constraints can be imposed during training to enhance the performance of networks in specific contexts. This study surveys the literature on the integration of constrained optimization with deep networks. Specifically, we examine the integration of hyper-parameter tuning with physical constraints, such as the number of FLOPS (FLoating point Operations Per Second), a measure of computational capacity, latency, and other factors. This study also considers the use of context-specific knowledge constraints to improve network performance. We discuss the integration of constraints in neural architecture search (NAS), considering the problem as both a multi-objective optimization (MOO) challenge and through the imposition of penalties in the loss function. Furthermore, we explore various approaches that integrate logic with deep neural networks (DNNs). In particular, we examine logic-neural integration through constrained optimization applied during the training of NNs and the use of semantic loss, which employs the probabilistic output of the networks to enforce constraints on the output.}
}
@article{FerZanFraBizLam24-SSRN-IJ,
  author  = {Ferrari, Niccolò and Zanarini, Nicola and Fraccaroli, Michele and Bizzarri, Alice and Lamma, Evelina},
  title   = {Integration of Deep Generative Anomaly Detection Algorithm in High-Speed Industrial Line},
  journal = {SSRN Electronic Journal},
  year    = {2024},
  url     = {https://papers.ssrn.com/sol3/papers.cfm?abstract_id=485866}
}