workshops.bib

@inproceedings{CotZes15-AIIADC-IW,
  title = {Learning Probabilistic Ontologies with Distributed Parameter Learning},
  author = {Giuseppe Cota and Riccardo Zese and Elena Bellodi and Evelina Lamma and Fabrizio Riguzzi},
  pages = {7--12},
  pdf = {http://ceur-ws.org/Vol-1485/paper2.pdf},
  booktitle = {Proceedings of the Doctoral Consortium (DC) co-located with the 14th Conference of the Italian Association for Artificial Intelligence ({AI*IA} 2015)},
  year = 2015,
  editor = {Elena Bellodi and Alessio Bonfietti},
  volume = 1485,
  series = {CEUR Workshop Proceedings},
  address = {Aachen, Germany},
  issn = {1613-0073},
  venue = {Ferrara, Italy},
  eventdate = {2015-09-23/2015-09-24},
  publisher = {Sun {SITE} Central Europe},
  copyright = {by the authors},
  abstract = {
We consider the problem of learning both the structure and
the parameters of Probabilistic Description Logics under DISPONTE.
DISPONTE ("DIstribution Semantics for Probabilistic ONTologiEs")
adapts the distribution semantics for Probabilistic Logic Programming
to Description Logics. The system LEAP for "LEArning Probabilistic
description logics" learns both the structure and the parameters of
DISPONTE knowledge bases (KBs) by exploiting the algorithms CELOE
and EDGE. The former stands for "Class Expression Learning for Ontology
Engineering" and it is used to generate good candidate axioms
to add to the KB, while the latter learns the probabilistic parameters
and evaluates the KB. EDGE for "Em over bDds for description loGics
paramEter learning" is an algorithm for learning the parameters of probabilistic
ontologies from data. In order to contain the computational cost,
a distributed version of EDGE called EDGEMR was developed. EDGEMR
exploits the MapReduce (MR) strategy by means of the Message Passing
Interface. In this paper we propose the system LEAPMR. It is a
re-engineered version of LEAP which is able to use distributed parallel
parameter learning algorithms such as EDGEMR.
},
  keywords = {Probabilistic Description Logics, Structure Learning,
Parameter Learning, MapReduce, Message Passing Interface
}
}
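
The abstract above describes EDGEMR's MapReduce-style parameter learning over the Message Passing Interface. The following is a minimal, illustrative sketch of that master-worker pattern in Python with mpi4py; the expected_counts helper is a hypothetical stand-in for EDGE's per-example E-step over BDDs, and none of this is the authors' implementation.

  # One EM round in a master-worker, MapReduce style over MPI.
  # Assumes mpi4py; expected_counts() is a hypothetical placeholder for the
  # per-example E-step (in EDGE it is computed over BDDs).
  from mpi4py import MPI

  def expected_counts(example, params):
      # Placeholder: return, per parameter i, the expected counts (n1_i, n0_i)
      # contributed by this example. A real system derives these from BDDs.
      return {i: (0.0, 0.0) for i in params}

  def em_round(examples, params):
      comm = MPI.COMM_WORLD
      rank, size = comm.Get_rank(), comm.Get_size()
      params = comm.bcast(params, root=0)              # master shares current parameters
      chunks = [examples[r::size] for r in range(size)] if rank == 0 else None
      mine = comm.scatter(chunks, root=0)              # "map": distribute the examples
      partial = [expected_counts(e, params) for e in mine]
      gathered = comm.gather(partial, root=0)          # "reduce": collect expected counts
      if rank == 0:
          totals = {i: [0.0, 0.0] for i in params}
          for worker in gathered:
              for counts in worker:
                  for i, (n1, n0) in counts.items():
                      totals[i][0] += n1
                      totals[i][1] += n0
          # M-step: each probability becomes expected true count / total count.
          params = {i: (n1 / (n1 + n0) if n1 + n0 > 0 else params[i])
                    for i, (n1, n0) in totals.items()}
      return comm.bcast(params, root=0)                # all ranks receive the update

Run with, e.g., mpiexec -n 4: rank 0 plays the master, scattering examples and reducing the gathered counts into the M-step update. The actual EDGEMR may differ in details such as work scheduling; only the map-then-reduce communication pattern described in the abstract is shown here.
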
@inproceedings{CotZesBel15-ECMLDC-IW,
  year = {2015},
  booktitle = {Doctoral Consortium of the European Conference on Machine Learning and Principles and Practice of Knowledge Discovery in Databases},
  editor = {Jaakko Hollm{\'e}n and Panagiotis Papapetrou},
  title = {Structure Learning with Distributed Parameter
Learning for Probabilistic Ontologies},
  author = {Giuseppe Cota and Riccardo Zese and Elena Bellodi and Evelina Lamma and Fabrizio Riguzzi},
  pages = {75--84},
  copyright = {by the authors},
  url = {http://urn.fi/URN:ISBN:978-952-60-6443-7},
  pdf = {https://aaltodoc.aalto.fi/bitstream/handle/123456789/18224/isbn9789526064437.pdf#page=79},
  isbn = {978-952-60-6443-7},
  issn = {1799-490X, 1799-4896},
  abstract = {We consider the problem of learning both the structure and
the parameters of Probabilistic Description Logics under DISPONTE.
DISPONTE ("DIstribution Semantics for Probabilistic ONTologiEs")
adapts the distribution semantics for Probabilistic Logic Programming
to Description Logics. The system LEAP for "LEArning Probabilistic
description logics" learns both the structure and the parameters of
DISPONTE knowledge bases (KBs) by exploiting the algorithms CELOE
and EDGE. The former stands for "Class Expression Learning for Ontology
Engineering" and it is used to generate good candidate axioms
to add to the KB, while the latter learns the probabilistic parameters
and evaluates the KB. EDGE for "Em over bDds for description loGics
paramEter learning" is an algorithm for learning the parameters of probabilistic
ontologies from data. In order to contain the computational cost,
a distributed version of EDGE called EDGEMR was developed. EDGEMR
exploits the MapReduce (MR) strategy by means of the Message Passing
Interface. In this paper we propose the system LEAPMR. It is a
re-engineered version of LEAP which is able to use distributed parallel
parameter learning algorithms such as EDGEMR.},
  keywords = {Probabilistic Description Logics, Structure Learning,
Parameter Learning, MapReduce, Message Passing Interface}
}
@inproceedings{AlbBelCot16-PLP-IW,
  title = {Probabilistic Constraint Logic Theories},
  author = {Marco Alberti and Elena Bellodi and Giuseppe Cota and Evelina Lamma and Fabrizio Riguzzi and Riccardo Zese},
  pages = {15--28},
  url = {http://ceur-ws.org/Vol-1661/#paper-02},
  pdf = {http://ceur-ws.org/Vol-1661/paper-02.pdf},
  booktitle = {Proceedings of the 3rd International Workshop on Probabilistic Logic Programming ({PLP})},
  year = 2016,
  editor = {Arjen Hommersom and
Samer Abdallah},
  volume = 1661,
  series = {CEUR Workshop Proceedings},
  address = {Aachen, Germany},
  issn = {1613-0073},
  venue = {London, UK},
  eventdate = {2016-09-03},
  publisher = {Sun {SITE} Central Europe},
  copyright = {by the authors},
  abstract = {Probabilistic logic models are used ever more often to deal with
the uncertain relations typical of the real world.
However, these models usually require expensive inference procedures. Very recently the problem of identifying tractable
languages has come to the fore.
In this paper we consider the models used by the learning from interpretations
ILP setting, namely
sets of integrity constraints, and propose a probabilistic version
of them. A semantics in the style of the distribution semantics is adopted, where each integrity constraint is annotated with a probability.
These probabilistic constraint logic models assign a probability of being positive to interpretations. This probability can be computed
in a time that is logarithmic in the
number of ground instantiations of violated constraints.
This formalism can be used as the target language in learning systems and
for declaratively specifying the behavior of a system.
In the latter case, inference corresponds to computing the probability of compliance
of a system's behavior to the model.
},
  keywords = {
Probabilistic Logic Programming, Distribution Semantics, Constraint Logic
Theories},
  scopus = {2-s2.0-84987763948}
}
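
The abstract above notes that the probability of an interpretation being positive can be computed in time logarithmic in the number of ground instantiations of violated constraints. As an illustration only (our notation, a sketch of one distribution-semantics-style reading, not necessarily the paper's exact definitions): if each integrity constraint C_i carries probability p_i and has m_i violated ground instantiations in an interpretation I, then a probability of the form P(\mathit{positive} \mid I) = \prod_{i=1}^{n} (1 - p_i)^{m_i} can be evaluated by computing every factor (1 - p_i)^{m_i} with exponentiation by squaring, i.e. in O(\log m_i) multiplications per constraint, which is where a logarithmic dependence on the number of violated ground instantiations comes from.
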
@inproceedings{RigLamAlb17-URANIA-IW,
  title = {Probabilistic Logic Programming for Natural Language Processing},
  author = {Fabrizio Riguzzi and Evelina Lamma and Marco Alberti and Elena Bellodi and Riccardo Zese and Giuseppe Cota},
  pages = {30--37},
  url = {http://ceur-ws.org/Vol-1802/},
  pdf = {http://ceur-ws.org/Vol-1802/paper4.pdf},
  booktitle = {{URANIA} 2016,
Deep Understanding and Reasoning: A Challenge for Next-generation Intelligent Agents,
Proceedings of the {AI*IA} Workshop on Deep Understanding and Reasoning: A Challenge for Next-generation Intelligent Agents 2016
co-located with the 15th International Conference of the Italian Association for Artificial Intelligence ({AI*IA} 2016)},
  year = 2017,
  editor = {Federico Chesani and Paola Mello and Michela Milano},
  volume = 1802,
  series = {CEUR Workshop Proceedings},
  address = {Aachen, Germany},
  issn = {1613-0073},
  venue = {Genova, Italy},
  eventdate = {2016-11-28},
  publisher = {Sun {SITE} Central Europe},
  copyright = {by the authors},
  abstract = {The ambition of Artificial Intelligence is to solve problems without human intervention. Often the problem description is given in human (natural) language. Therefore it is crucial to find an automatic way to understand a text written by a human. The research field concerned with the interactions between computers and natural languages is known under the name of Natural Language Processing (NLP), one of the most studied fields of Artificial Intelligence.

In this paper we show that Probabilistic Logic Programming (PLP) is a suitable approach for NLP in various scenarios. For this purpose we use \texttt{cplint} on SWISH, a web application for Probabilistic Logic Programming. \texttt{cplint} on SWISH allows users to perform inference and learning with the framework \texttt{cplint} using just a web browser, with the computation performed on the server.},
  keywords = {Probabilistic Logic Programming, Probabilistic Logical Inference, Natural Language Processing},
  scopus = {2-s2.0-85015943369}
}
