latest.bib

@article{RocChiNal2022-APPSCI-IJ,
  author         = {Rocchi, Alessandro and Chiozzi, Andrea and Nale, Marco and Nikolic, Zeljana and Riguzzi, Fabrizio and Mantovan, Luana and Gilli, Alessandro and Benvenuti, Elena},
  title          = {A Machine Learning Framework for Multi-Hazard Risk Assessment at the Regional Scale in Earthquake and Flood-Prone Areas},
  journal        = {Applied Sciences},
  year           = {2022},
  volume         = {12},
  number         = {2},
  article-number = {583},
  issn           = {2076-3417},
  doi            = {10.3390/app12020583},
  url            = {https://www.mdpi.com/2076-3417/12/2/583},
  abstract       = {Communities are confronted with the rapidly growing impact of disasters, due to many factors that cause an increase in the vulnerability of society combined with an increase in hazardous events such as earthquakes and floods. The possible impacts of such events are large, also in developed countries, and governments and stakeholders must adopt risk reduction strategies at different levels of management stages of the communities. This study is aimed at proposing a sound qualitative multi-hazard risk analysis methodology for the assessment of combined seismic and hydraulic risk at the regional scale, which can assist governments and stakeholders in decision making and prioritization of interventions. The method is based on the use of machine learning techniques to aggregate large datasets made of many variables different in nature each of which carries information related to specific risk components and clusterize observations. The framework is applied to the case study of the Emilia Romagna region, for which the different municipalities are grouped into four homogeneous clusters ranked in terms of relative levels of combined risk. The proposed approach proves to be robust and delivers a very useful tool for hazard management and disaster mitigation, particularly for multi-hazard modeling at the regional scale.}
}
@article{AzzBellFer2022-IJAR-IJ,
  author = {Damiano Azzolini and Elena Bellodi and Stefano Ferilli and Fabrizio Riguzzi and Riccardo Zese},
  title = {Abduction with probabilistic logic programming under the distribution semantics},
  journal = {International Journal of Approximate Reasoning},
  volume = {142},
  pages = {41--63},
  year = {2022},
  issn = {0888-613X},
  doi = {10.1016/j.ijar.2021.11.003},
  url = {https://www.sciencedirect.com/science/article/pii/S0888613X2100181X},
  keywords = {Abduction, Distribution semantics, Probabilistic logic programming, Statistical relational artificial intelligence},
  abstract = {In Probabilistic Abductive Logic Programming we are given a probabilistic logic program, a set of abducible facts, and a set of constraints. Inference in probabilistic abductive logic programs aims to find a subset of the abducible facts that is compatible with the constraints and that maximizes the joint probability of the query and the constraints. In this paper, we extend the PITA reasoner with an algorithm to perform abduction on probabilistic abductive logic programs exploiting Binary Decision Diagrams. Tests on several synthetic datasets show the effectiveness of our approach.},
  scopus = {2-s2.0-85119493622}
}
@article{FraLamRig2022-SwX-IJ,
  author   = {Michele Fraccaroli and Evelina Lamma and Fabrizio Riguzzi},
  title    = {Symbolic {DNN-Tuner}: A {Python} and {ProbLog}-based system for optimizing Deep Neural Networks hyperparameters},
  journal  = {SoftwareX},
  year     = {2022},
  volume   = {17},
  pages    = {100957},
  issn     = {2352-7110},
  doi      = {10.1016/j.softx.2021.100957},
  url      = {https://www.sciencedirect.com/science/article/pii/S2352711021001825},
  keywords = {Deep learning, Probabilistic Logic Programming, Hyper-parameters tuning, Neural-symbolic integration},
  abstract = {The application of deep learning models to increasingly complex contexts has led to a rise in the complexity of the models themselves. Due to this, there is an increase in the number of hyper-parameters (HPs) to be set and Hyper-Parameter Optimization (HPO) algorithms occupy a fundamental role in deep learning. Bayesian Optimization (BO) is the state-of-the-art of HPO for deep learning models. BO keeps track of past results and uses them to build a probabilistic model, building a probability density of HPs. This work aims to improve BO applied to Deep Neural Networks (DNNs) by an analysis of the results of the network on training and validation sets. This analysis is obtained by applying symbolic tuning rules, implemented in Probabilistic Logic Programming (PLP). The resulting system, called Symbolic DNN-Tuner, logically evaluates the results obtained from the training and the validation phase and, by applying symbolic tuning rules, fixes the network architecture, and its HPs, leading to improved performance. In this paper, we present the general system and its implementation. We also show its graphical interface and a simple example of execution.}
}
@article{LosVen22-JEGTP-IJ,
  author = {Losi, Enzo and Venturini, Mauro and Manservigi, Lucrezia and Ceschini, Giuseppe Fabio and Bechini, Giovanni and Cota, Giuseppe and Riguzzi, Fabrizio},
  title = {Prediction of Gas Turbine Trip: A Novel Methodology Based on Random Forest Models},
  journal = {Journal of Engineering for Gas Turbines and Power},
  volume = {144},
  number = {3},
  year = {2022},
  issn = {0742-4795},
  doi = {10.1115/1.4053194},
  publisher = {ASME},
  note = {{GTP-21-1324}}
}
@inproceedings{AzzRigBelLam22-BSCT-IW,
  title = {A Probabilistic Logic Model of Lightning Network},
  author = {Azzolini, Damiano and Riguzzi, Fabrizio and Bellodi, Elena and Lamma, Evelina},
  booktitle = {Business Information Systems Workshops},
  year = {2022},
  editor = {Abramowicz, Witold and Auer, S{\"o}ren and Str{\'o}{\.{z}}yna, Milena},
  pages = {321--333},
  series = {Lecture Notes in Business Information Processing (LNBIP)},
  publisher = {Springer International Publishing},
  address = {Cham, Switzerland},
  eventdate = {2021-06-14/2021-06-17},
  doi = {10.1007/978-3-031-04216-4_28},
  url = {https://link.springer.com/chapter/10.1007/978-3-031-04216-4_28},
  pdf = {http://ml.unife.it/wp-content/uploads/Papers/AzzRigBelLam22-BSCT-IW.pdf}
}
@inbook{ZesBelFraRigLam22-MLNVM-BC,
  author = {Zese, Riccardo and Bellodi, Elena and Fraccaroli, Michele and Riguzzi, Fabrizio and Lamma, Evelina},
  editor = {Micheloni, Rino and Zambelli, Cristian},
  title = {Neural Networks and Deep Learning Fundamentals},
  booktitle = {Machine Learning and Non-volatile Memories},
  year = {2022},
  publisher = {Springer International Publishing},
  address = {Cham},
  pages = {23--42},
  abstract = {In the last decade, Neural Networks (NNs) have come to the fore as one of the most powerful and versatile approaches to many machine learning tasks. Deep Learning (DL), the latest incarnation of NNs, is nowadays applied in every scenario that needs models able to predict or classify data. From computer vision to speech-to-text, DL techniques are able to achieve super-human performance in many cases. This chapter is devoted to give a (not comprehensive) introduction to the field, describing the main branches and model architectures, in order to try to give a roadmap of this area to the reader.},
  isbn = {978-3-031-03841-9},
  doi = {10.1007/978-3-031-03841-9_2},
  url = {https://doi.org/10.1007/978-3-031-03841-9_2}
}
@article{AzzRig2022-CRYPT-IJ,
  author         = {Azzolini, Damiano and Riguzzi, Fabrizio},
  title          = {Probabilistic Logic Models for the Lightning Network},
  journal        = {Cryptography},
  year           = {2022},
  volume         = {6},
  number         = {2},
  article-number = {29},
  issn           = {2410-387X},
  doi            = {10.3390/cryptography6020029},
  url            = {https://www.mdpi.com/2410-387X/6/2/29},
  pdf            = {https://www.mdpi.com/2410-387X/6/2/29/pdf?version=1655360685}
}
@article{NguRigBerTru2021-BioDM-IJ,
  author = {Nguembang Fadja, Arnaud and Riguzzi, Fabrizio and Bertorelle, Giorgio and Trucchi, Emiliano},
  title = {Identification of natural selection in genomic data with deep convolutional neural network},
  journal = {BioData Mining},
  volume = {14},
  number = {1},
  pages = {51},
  year = {2021},
  issn = {1756-0381},
  doi = {10.1186/s13040-021-00280-9},
  abstract = {With the increase in the size of genomic datasets describing variability in populations, extracting relevant information becomes increasingly useful as well as complex. Recently, computational methodologies such as Supervised Machine Learning and specifically Convolutional Neural Networks have been proposed to make inferences on demographic and adaptive processes using genomic data. Even though it was already shown to be powerful and efficient in different fields of investigation, Supervised Machine Learning has still to be explored as to unfold its enormous potential in evolutionary genomics.}
}
@article{FraLamRig21-ML-IJ,
  author = {Fraccaroli, Michele and Lamma, Evelina and Riguzzi, Fabrizio},
  title = {Symbolic {DNN-Tuner}},
  journal = {Machine Learning},
  publisher = {Springer},
  copyright = {Springer},
  year = {2021},
  issn = {1573-0565},
  doi = {10.1007/s10994-021-06097-1},
  keywords = {Deep Learning, Hyper-Parameter Optimization, Probabilistic Logic Programming},
  abstract = {Hyper-Parameter Optimization (HPO) occupies a fundamental role in Deep Learning systems due to the number of hyper-parameters (HPs) to be set. The state-of-the-art of HPO methods are Grid Search, Random Search and Bayesian Optimization. The first two methods try all possible combinations and random combination of the HPs values, respectively. This is performed in a blind manner, without any information for choosing the new set of HPs values. Bayesian Optimization (BO), instead, keeps track of past results and uses them to build a probabilistic model mapping HPs into a probability density of the objective function. Bayesian Optimization builds a surrogate probabilistic model of the objective function, finds the HPs values that perform best on the surrogate model and updates it with new results. In this paper, we improve BO applied to Deep Neural Network (DNN) by adding an analysis of the results of the network on training and validation sets. This analysis is performed by exploiting rule-based programming, and in particular by using Probabilistic Logic Programming. The resulting system, called Symbolic DNN-Tuner, logically evaluates the results obtained from the training and the validation phase and, by applying symbolic tuning rules, fixes the network architecture, and its HPs, therefore improving performance. We also show the effectiveness of the proposed approach, by an experimental evaluation on literature and real-life datasets.}
}
@article{NguRigLam21-ML-IJ,
  author = {Nguembang Fadja, Arnaud and Riguzzi, Fabrizio and Lamma, Evelina},
  title = {Learning Hierarchical Probabilistic Logic Programs},
  journal = {Machine Learning},
  publisher = {Springer},
  copyright = {Springer},
  address = {Berlin, Germany},
  year = {2021},
  volume = {110},
  number = {7},
  pages = {1637--1693},
  issn = {1573-0565},
  doi = {10.1007/s10994-021-06016-4},
  url = {https://link.springer.com/content/pdf/10.1007/s10994-021-06016-4.pdf},
  keywords = {Probabilistic Logic Programming, Distribution Semantics, Arithmetic Circuits, Gradient Descent, Back-propagation},
  scopus = {2-s2.0-85107994928},
  abstract = {Probabilistic logic programming (PLP) combines logic programs and probabilities. Due to its expressiveness and simplicity, it has been considered as a powerful tool for learning and reasoning in relational domains characterized by uncertainty. Still, learning the parameter and the structure of general PLP is computationally expensive due to the inference cost. We have recently proposed a restriction of the general PLP language called hierarchical PLP (HPLP) in which clauses and predicates are hierarchically organized. HPLPs can be converted into arithmetic circuits or deep neural networks and inference is much cheaper than for general PLP. In this paper we present algorithms for learning both the parameters and the structure of HPLPs from data. We first present an algorithm, called parameter learning for hierarchical probabilistic logic programs (PHIL) which performs parameter estimation of HPLPs using gradient descent and expectation maximization. We also propose structure learning of hierarchical probabilistic logic programming (SLEAHP), that learns both the structure and the parameters of HPLPs from data. Experiments were performed comparing PHIL and SLEAHP with PLP and Markov Logic Networks state-of-the art systems for parameter and structure learning respectively. PHIL was compared with EMBLEM, ProbLog2 and Tuffy and SLEAHP with SLIPCOVER, PROBFOIL+, MLB-BC, MLN-BT and RDN-B. The experiments on five well known datasets show that our algorithms achieve similar and often better accuracies but in a shorter time.}
}
@inproceedings{AzzRig2021syntreq-ICLP-IC,
  author = {Damiano Azzolini and Fabrizio Riguzzi},
  title = {Syntactic Requirements for Well-defined Hybrid Probabilistic Logic Programs},
  booktitle = {Proceedings 37th International Conference on Logic Programming (Technical Communications)},
  editor = {Andrea Formisano and Yanhong Annie Liu and Bart Bogaerts and Alex Brik and Veronica Dahl and Carmine Dodaro and Paul Fodor and Gian Luca Pozzato and Joost Vennekens and Neng-Fa Zhou},
  year = {2021},
  publisher = {Open Publishing Association},
  address = {Waterloo, Australia},
  issn = {2075-2180},
  venue = {Porto, Portugal},
  eventdate = {2021-09-20/2021-09-27},
  copyright = {by the authors},
  url = {http://eptcs.web.cse.unsw.edu.au/paper.cgi?ICLP2021.12},
  pdf = {http://eptcs.web.cse.unsw.edu.au/paper.cgi?ICLP2021.12.pdf},
  doi = {10.4204/EPTCS.345},
  pages = {14--26}
}
@inproceedings{AzzRigLam2021summary-ICLP-IC,
  author = {Damiano Azzolini and Fabrizio Riguzzi and Evelina Lamma},
  title = {Summary of semantics for hybrid probabilistic logic programs with function symbols},
  booktitle = {Proceedings 37th International Conference on Logic Programming (Technical Communications)},
  editor = {Andrea Formisano and Yanhong Annie Liu and Bart Bogaerts and Alex Brik and Veronica Dahl and Carmine Dodaro and Paul Fodor and Gian Luca Pozzato and Joost Vennekens and Neng-Fa Zhou},
  year = {2021},
  publisher = {Open Publishing Association},
  address = {Waterloo, Australia},
  issn = {2075-2180},
  venue = {Porto, Portugal},
  eventdate = {2021-09-20/2021-09-27},
  copyright = {by the authors},
  url = {http://eptcs.web.cse.unsw.edu.au/paper.cgi?ICLP2021.37},
  pdf = {http://eptcs.web.cse.unsw.edu.au/paper.cgi?ICLP2021.37.pdf},
  doi = {10.4204/EPTCS.345},
  pages = {234--235}
}
@article{AzzRig21-ICLP-IJ,
  author    = {Azzolini, Damiano and Riguzzi, Fabrizio},
  title     = {Optimizing Probabilities in Probabilistic Logic Programs},
  journal   = {Theory and Practice of Logic Programming},
  publisher = {Cambridge University Press},
  copyright = {Cambridge University Press},
  year      = {2021},
  volume    = {21},
  number    = {5},
  pages     = {543--556},
  doi       = {10.1017/S1471068421000260},
  url       = {https://arxiv.org/pdf/2108.03095},
  pdf       = {https://arxiv.org/pdf/2108.03095.pdf}
}
@inproceedings{AzzRig21-RuleML-IC,
  author    = {Azzolini, Damiano and Riguzzi, Fabrizio},
  title     = {Reducing Probabilistic Logic Programs},
  booktitle = {Proceedings of the 15th International Rule Challenge, 7th Industry Track, and 5th Doctoral Consortium at RuleML+RR 2021 co-located with 17th Reasoning Web Summer School (RW 2021) and 13th DecisionCAMP 2021 as part of Declarative AI 2021},
  editor    = {Ahmet Soylu and Alireza Tamaddoni Nezhad and Nikolay Nikolov and Ioan Toma and Anna Fensel and Joost Vennekens},
  series    = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  address   = {Aachen, Germany},
  venue     = {Leuven, Belgium},
  year      = {2021},
  pages     = {1--13},
  issn      = {1613-0073},
  copyright = {By the authors},
  url       = {http://ceur-ws.org/Vol-2956/paper5.pdf},
  pdf       = {http://ceur-ws.org/Vol-2956/paper5.pdf}
}
@article{AzzRigLam21-AIJ-IJ,
  author    = {Azzolini, Damiano and Riguzzi, Fabrizio and Lamma, Evelina},
  title     = {A Semantics for Hybrid Probabilistic Logic Programs with Function Symbols},
  journal   = {Artificial Intelligence},
  year      = {2021},
  volume    = {294},
  pages     = {103452},
  issn      = {0004-3702},
  copyright = {Elsevier},
  doi       = {10.1016/j.artint.2021.103452},
  url       = {http://ml.unife.it/wp-content/uploads/Papers/AzzRigLam21-AIJ-IJ.pdf},
  note      = {The final publication is available at Elsevier via \url{https://doi.org/10.1016/j.artint.2021.103452} }
}
@article{LosVen21-JEGTP-IJ,
  author = {Losi, Enzo and Venturini, Mauro and Manservigi, Lucrezia and Ceschini, Giuseppe Fabio and Bechini, Giovanni and Cota, Giuseppe and Riguzzi, Fabrizio},
  title = {Structured Methodology for Clustering Gas Turbine Transients by means of Multi-variate Time Series},
  year = {2021},
  publisher = {ASME},
  journal = {Journal of Engineering for Gas Turbines and Power},
  volume = {143},
  number = {3},
  pages = {031014},
  note = {Paper No. 031014, 13 pages},
  doi = {10.1115/1.4049503}
}
@inproceedings{LosVen21Data-TurboExpo-IC,
  title = {Data Selection and Feature Engineering for the Application of Machine Learning to the Prediction of Gas Turbine Trip},
  author = {Losi, Enzo and Venturini, Mauro and Manservigi, Lucrezia and Ceschini, Giuseppe Fabio and Bechini, Giovanni and Cota, Giuseppe and Riguzzi, Fabrizio},
  booktitle = {Proceedings of the ASME Turbo Expo 2021: Turbomachinery Technical Conference and Exposition, June 7--11, 2021 Virtual, Online},
  year = {2021},
  doi = {10.1115/GT2021-58914},
  volume = {8},
  publisher = {ASME},
  pages = {V008T20A004}
}
@inproceedings{LosVen21Trip-TurboExpo-IC,
  title = {Prediction of Gas Turbine Trip: a Novel Methodology Based on Random Forest Models},
  author = {Losi, Enzo and Venturini, Mauro and Manservigi, Lucrezia and Ceschini, Giuseppe Fabio and Bechini, Giovanni and Cota, Giuseppe and Riguzzi, Fabrizio},
  booktitle = {Proceedings of the ASME Turbo Expo 2021: Turbomachinery Technical Conference and Exposition, June 7--11, 2021 Virtual, Online},
  year = {2021},
  publisher = {ASME},
  pages = {V008T20A005},
  volume = {8},
  doi = {10.1115/GT2021-58916}
}
@article{RigBelZesAlbLam21-ML-IJ,
  author = {Riguzzi, Fabrizio and Bellodi, Elena and Zese, Riccardo and Alberti, Marco and Lamma, Evelina},
  title = {Probabilistic inductive constraint logic},
  journal = {Machine Learning},
  year = {2021},
  volume = {110},
  number = {4},
  pages = {723--754},
  doi = {10.1007/s10994-020-05911-6},
  pdf = {https://link.springer.com/content/pdf/10.1007/s10994-020-05911-6.pdf},
  publisher = {Springer},
  issn = {0885-6125},
  abstract = {Probabilistic logical models deal effectively with uncertain relations and entities typical of many real world domains. In the field of probabilistic logic programming usually the aim is to learn these kinds of models to predict specific atoms or predicates of the domain, called target atoms/predicates. However, it might also be useful to learn classifiers for interpretations as a whole: to this end, we consider the models produced by the inductive constraint logic system, represented by sets of integrity constraints, and we propose a probabilistic version of them. Each integrity constraint is annotated with a probability, and the resulting probabilistic logical constraint model assigns a probability of being positive to interpretations. To learn both the structure and the parameters of such probabilistic models we propose the system PASCAL for {``probabilistic inductive constraint logic''}. Parameter learning can be performed using gradient descent or L-BFGS. PASCAL has been tested on 11 datasets and compared with a few statistical relational systems and a system that builds relational decision trees (TILDE): we demonstrate that this system achieves better or comparable results in terms of area under the precision--recall and receiver operating characteristic curves, in a comparable execution time.}
}
@article{BelAlbRig21-TPLP-IJ,
  author    = {Elena Bellodi and Marco Gavanelli and Riccardo Zese and Evelina Lamma and Fabrizio Riguzzi},
  title     = {Nonground Abductive Logic Programming with Probabilistic Integrity Constraints},
  journal   = {Theory and Practice of Logic Programming},
  publisher = {Cambridge University Press},
  copyright = {Cambridge University Press},
  year      = {2021},
  volume    = {21},
  number    = {5},
  pages     = {557--574},
  doi       = {10.1017/S1471068421000417},
  url       = {https://arxiv.org/abs/2108.03033},
  pdf       = {https://arxiv.org/pdf/2108.03033.pdf}
}

This file was generated by bibtex2html 1.98.