2021.bib

@article{FraLamRig21-ML-IJ,
  title = {Symbolic {DNN-Tuner}},
  author = {Michele Fraccaroli and
               Evelina Lamma and
               Fabrizio Riguzzi},
  journal = {Machine Learning},
  publisher = {Springer},
  copyright = {Springer},
  year = {2021},
  abstract = {Hyper-Parameter Optimization (HPO) plays a fundamental role
in Deep Learning systems due to the number of hyper-parameters (HPs) to be
set. The state-of-the-art HPO methods are Grid Search, Random Search and
Bayesian Optimization. The first two methods try all possible combinations
and random combinations of the HPs values, respectively. This is performed
in a blind manner, without any information for choosing the new set of HPs
values. Bayesian Optimization (BO), instead, keeps track of past results and
uses them to build a probabilistic model mapping HPs into a probability
density of the objective function. BO builds a surrogate probabilistic model
of the objective function, finds the HPs values that perform best on the
surrogate model and updates it with new results. In this paper, we improve
BO applied to Deep Neural Networks (DNNs) by adding an analysis of the
results of the network on the training and validation sets. This analysis is
performed by exploiting rule-based programming, and in particular
Probabilistic Logic Programming. The resulting system, called Symbolic
DNN-Tuner, logically evaluates the results obtained from the training and
validation phases and, by applying symbolic tuning rules, fixes the network
architecture and its HPs, therefore improving performance. We also show the
effectiveness of the proposed approach through an experimental evaluation on
literature and real-life datasets.},
  keywords = {Deep Learning, Hyper-Parameter Optimization, Probabilistic
Logic Programming},
  doi = {10.1007/s10994-021-06097-1},
  issn = {1573-0565}
}
@article{NguRigLam21-ML-IJ,
  author = {Nguembang Fadja, Arnaud and Riguzzi, Fabrizio and Lamma, Evelina},
  title = {Learning Hierarchical Probabilistic Logic Programs},
  journal = {Machine Learning},
  publisher = {Springer},
  copyright = {Springer},
  year = {2021},
  doi = {10.1007/s10994-021-06016-4},
  url = {https://link.springer.com/content/pdf/10.1007/s10994-021-06016-4.pdf},
  abstract = {
Probabilistic logic programming (PLP) combines logic programs and probabilities. Due to its expressiveness and simplicity, it has been considered a powerful tool for learning and reasoning in relational domains characterized by uncertainty. Still, learning the parameters and the structure of general PLPs is computationally expensive due to the inference cost. We have recently proposed a restriction of the general PLP language called hierarchical PLP (HPLP) in which clauses and predicates are hierarchically organized. HPLPs can be converted into arithmetic circuits or deep neural networks, and inference is much cheaper than for general PLP. In this paper we present algorithms for learning both the parameters and the structure of HPLPs from data. We first present an algorithm, called parameter learning for hierarchical probabilistic logic programs (PHIL), which performs parameter estimation of HPLPs using gradient descent and expectation maximization. We also propose structure learning of hierarchical probabilistic logic programs (SLEAHP), which learns both the structure and the parameters of HPLPs from data. Experiments were performed comparing PHIL and SLEAHP with state-of-the-art PLP and Markov Logic Network systems for parameter and structure learning, respectively. PHIL was compared with EMBLEM, ProbLog2 and Tuffy, and SLEAHP with SLIPCOVER, PROBFOIL+, MLB-BC, MLN-BT and RDN-B. The experiments on five well-known datasets show that our algorithms achieve similar and often better accuracies but in a shorter time.
},
  keywords = {Probabilistic Logic Programming, Distribution Semantics, Arithmetic Circuits, Gradient Descent, Back-propagation},
  address = {Berlin, Germany},
  scopus = {2-s2.0-85107994928},
  volume = {110},
  number = {7},
  pages = {1637--1693},
  issn = {1573-0565}
}
@inproceedings{AzzRigLam2021summary-ICLP-IC,
  author = {Damiano Azzolini and Fabrizio Riguzzi and Evelina Lamma},
  title = {Summary of semantics for hybrid probabilistic logic programs with function symbols},
  booktitle = {Proceedings 37th International Conference on Logic Programming (Technical Communications)},
  editor = {Andrea Formisano and Yanhong Annie Liu and Bart Bogaerts and Alex Brik and Veronica Dahl and Carmine Dodaro and Paul Fodor and Gian Luca Pozzato and Joost Vennekens and Neng-Fa Zhou},
  year = {2021},
  publisher = {Open Publishing Association},
  address = {Waterloo, Australia},
  issn = {2075-2180},
  venue = {Porto, Portugal},
  eventdate = {September 20--27, 2021},
  copyright = {by the authors},
  url = {http://eptcs.web.cse.unsw.edu.au/paper.cgi?ICLP2021.37},
  pdf = {http://eptcs.web.cse.unsw.edu.au/paper.cgi?ICLP2021.37.pdf},
  doi = {10.4204/EPTCS.345},
  pages = {234--235}
}
@article{AzzRigLam21-AIJ-IJ,
  title = {A Semantics for Hybrid Probabilistic Logic Programs with Function Symbols},
  author = {Azzolini, Damiano and Riguzzi, Fabrizio and Lamma, Evelina},
  journal = {Artificial Intelligence},
  year = {2021},
  copyright = {Elsevier},
  issn = {0004-3702},
  url = {http://ml.unife.it/wp-content/uploads/Papers/AzzRigLam21-AIJ-IJ.pdf},
  doi = {10.1016/j.artint.2021.103452},
  note = {The final publication is available at Elsevier via \url{https://doi.org/10.1016/j.artint.2021.103452} },
  volume = {294},
  pages = {103452}
}
@article{RigBelZesAlbLam21-ML-IJ,
  author = {Riguzzi, Fabrizio and Bellodi, Elena and Zese, Riccardo and Alberti, Marco and Lamma, Evelina},
  title = {Probabilistic inductive constraint logic},
  journal = {Machine Learning},
  year = {2021},
  volume = {110},
  number = {4},
  pages = {723--754},
  doi = {10.1007/s10994-020-05911-6},
  pdf = {https://link.springer.com/content/pdf/10.1007/s10994-020-05911-6.pdf},
  publisher = {Springer},
  issn = {0885-6125},
  abstract = {Probabilistic logical models deal effectively with uncertain relations and entities typical of many real-world domains. In the field of probabilistic logic programming, the aim is usually to learn these kinds of models to predict specific atoms or predicates of the domain, called target atoms/predicates. However, it might also be useful to learn classifiers for interpretations as a whole: to this end, we consider the models produced by the inductive constraint logic system, represented by sets of integrity constraints, and we propose a probabilistic version of them. Each integrity constraint is annotated with a probability, and the resulting probabilistic logical constraint model assigns a probability of being positive to interpretations. To learn both the structure and the parameters of such probabilistic models we propose the system PASCAL for “probabilistic inductive constraint logic”. Parameter learning can be performed using gradient descent or L-BFGS. PASCAL has been tested on 11 datasets and compared with a few statistical relational systems and a system that builds relational decision trees (TILDE): we demonstrate that PASCAL achieves better or comparable results in terms of area under the precision–recall and receiver operating characteristic curves, in a comparable execution time.}
}
@article{BelAlbRig21-TPLP-IJ,
  author = {Elena Bellodi and
               Marco Gavanelli and
               Riccardo Zese and
               Evelina Lamma and
               Fabrizio Riguzzi},
  title = {Nonground Abductive Logic Programming with Probabilistic Integrity Constraints},
  journal = {Theory and Practice of Logic Programming},
  publisher = {Cambridge University Press},
  copyright = {Cambridge University Press},
  year = {2021},
  url = {https://arxiv.org/abs/2108.03033},
  volume = {21},
  doi = {10.1017/S1471068421000417},
  pdf = {https://arxiv.org/pdf/2108.03033.pdf},
  number = {5},
  pages = {557--574}
}

This file was generated by bibtex2html 1.98.