2012.bib

@inproceedings{RigZelILP2012-CEUR-EB,
  title = {Preface},
  booktitle = {Late Breaking Papers of the 22nd International Conference on Inductive Logic Programming (ILP)},
  year = 2012,
  author = {Fabrizio Riguzzi and Filip \v{Z}elezn\'{y}},
  editor = {Fabrizio Riguzzi and Filip \v{Z}elezn\'{y}},
  volume = {975},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  address = {Aachen, Germany},
  issn = {1613-0073},
  url = {http://ceur-ws.org/Vol-975/preface.pdf},
  venue = {Dubrovnik, Croatia},
  eventdate = {September 17-19, 2012},
  copyright = {by the authors},
  scopus = {2-s2.0-84922654847},
  pages = {3--4}
}
@inproceedings{RigBelLamZes12-URSW12-IW,
  author = {Fabrizio Riguzzi and Elena Bellodi and Evelina Lamma and Riccardo Zese},
  title = {Epistemic and Statistical Probabilistic Ontologies},
  booktitle = {Proceedings of the 8th International Workshop on Uncertain Reasoning for the Semantic Web (URSW2012), Boston, USA, 11 November 2012},
  year = {2012},
  editor = {Fernando Bobillo and
Rommel Carvalho and
da Costa, Paulo C. G. and
Nicola Fanizzi and
Laskey, Kathryn B. and
Laskey, Kenneth J. and
Thomas Lukasiewicz and
Trevor Martin and
Matthias Nickles and
Michael Pool},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  issn = {1613-0073},
  address = {Aachen, Germany},
  volume = {900},
  pages = {3-14},
  pdf = {http://ceur-ws.org/Vol-900/paper1.pdf},
  abstract = {We present DISPONTE, a semantics for probabilistic ontologies that is based on the distribution semantics for probabilistic logic programs. In DISPONTE the axioms of a probabilistic ontology can be
annotated with an epistemic or a statistical probability. The epistemic probability represents a degree of confidence in the axiom, while the statistical probability considers the populations to which the axiom is applied.}
}
@inproceedings{BelRig12-AIIADC12-IW,
  title = {Parameter and Structure Learning Algorithms for Statistical Relational Learning},
  pages = {5-9},
  author = {Elena Bellodi and Fabrizio Riguzzi },
  editor = {Paolo Liberatore and
Michele Lombardi and
Floriano Scioscia},
  booktitle = {Proceedings of the Doctoral Consortium of the 12th Symposium of the Italian Association for Artificial Intelligence (AI*IA 2012), Rome, Italy, June 15, 2012},
  copyright = {by the authors},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  issn = {1613-0073},
  address = {Aachen, Germany},
  volume = {926},
  year = {2012},
  pdf = {http://ceur-ws.org/Vol-926/paper1.pdf},
  scopus = {2-s2.0-84891770795},
  abstract = {My research activity focuses on the field of Machine Learning. Two key challenges
in most machine learning applications are uncertainty and complexity.
The standard framework for handling uncertainty is probability; for complexity it is
first-order logic. Thus we would like to be able to learn and perform inference
in representation languages that combine the two. This is the focus of the field
of Statistical Relational Learning.},
  keywords = {Statistical relational learning, machine learning, first order logic}
}
@inproceedings{RigBelLamZese12-PAI12-NW,
  title = {Semantics and Inference for Probabilistic Ontologies},
  pages = {41-46},
  author = {Fabrizio Riguzzi and Evelina Lamma and Elena Bellodi and Riccardo Zese},
  editor = {Matteo Baldoni and Federico Chesani and Bernardo Magnini and Paola Mello and Marco Montali},
  booktitle = {Popularize Artificial Intelligence. Proceedings of the AI*IA Workshop and Prize for Celebrating 100th Anniversary of Alan Turing's Birth ({PAI 2012}), Rome, Italy, June 15, 2012},
  copyright = {by the authors},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  issn = {1613-0073},
  address = {Aachen, Germany},
  volume = {860},
  year = {2012},
  pdf = {http://ceur-ws.org/Vol-860/paper3.pdf}
}
@inproceedings{BufLamRigFor12-PAI12-NW,
  title = {Un sistema di Vision Inspection basato su reti neurali},
  author = {Ludovico Buffon and Evelina Lamma and Fabrizio Riguzzi and Davide Formenti},
  pages = {1-6},
  editor = {Matteo Baldoni and Federico Chesani and Bernardo Magnini and Paola Mello and Marco Montali},
  booktitle = {Popularize Artificial Intelligence. Proceedings of the AI*IA Workshop and Prize for Celebrating 100th Anniversary of Alan Turing's Birth ({PAI 2012}), Rome, Italy, June 15, 2012},
  copyright = {by the authors},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  issn = {1613-0073},
  address = {Aachen, Germany},
  volume = {860},
  year = {2012},
  pdf = {http://ceur-ws.org/Vol-860/paper9.pdf}
}
@inproceedings{RigBelLam12-DL12-IW,
  author = {Fabrizio Riguzzi and Elena Bellodi and Evelina Lamma},
  title = {Probabilistic {Datalog+/-} under the Distribution Semantics},
  booktitle = {Proceedings of the 25th International Workshop on Description Logics ({DL2012}),
Roma, Italy, 7-10 June 2012},
  editor = {Yevgeny Kazakov and Domenico Lembo and Frank Wolter},
  year = {2012},
  abstract = {We apply the distribution semantics for probabilistic ontologies (named DISPONTE) to the Datalog+/- language.
In DISPONTE the formulas of a probabilistic ontology can be annotated with an epistemic or a statistical probability.  The epistemic probability represents a degree of confidence in the formula, while the statistical probability considers the populations to which the formula is applied.
The probability of a query is defined in terms of a finite set of finite explanations for the query, where an explanation is a set of possibly instantiated formulas that is sufficient for entailing the query.
The probability of a query is computed from the set of explanations by making them mutually exclusive.
We also compare the DISPONTE approach for Datalog+/- ontologies with that of Probabilistic Datalog+/-, where an ontology is composed of a Datalog+/- theory whose formulas are associated with an assignment of values for the random variables of a companion Markov Logic Network.
},
  copyright = {by the authors},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  issn = {1613-0073},
  address = {Aachen, Germany},
  url = {http://mcs.unife.it/~friguzzi/Papers/RigBelLam12-DL12.pdf},
  pdf = {http://ceur-ws.org/Vol-846/paper_25.pdf},
  volume = {846},
  pages = {519-529}
}
@inproceedings{RigBelLam12-CILC12-NC,
  author = {Fabrizio Riguzzi and Elena Bellodi and Evelina Lamma},
  title = {Probabilistic Ontologies in {Datalog+/-}},
  booktitle = {Proceedings of the 27th Italian Conference on Computational Logic ({CILC2012}),
Roma, Italy, 6-7 June 2012},
  year = {2012},
  abstract = {In logic programming the distribution semantics is one of the most popular approaches for dealing with uncertain information. In this paper
we apply the distribution semantics to the Datalog+/- language that is grounded in logic programming and allows tractable ontology querying. In the resulting semantics, called DISPONTE, formulas of a probabilistic ontology can be annotated with an epistemic or a statistical probability. The epistemic probability represents a degree of confidence in the formula, while the statistical probability considers the populations to which the formula is applied.
The probability of a query is defined in terms of a finite set of finite explanations for the query.
We also compare the DISPONTE approach for Datalog+/- ontologies with that of Probabilistic Datalog+/-, where an ontology is composed of a Datalog+/- theory whose formulas are associated with an assignment of values for the random variables of a companion Markov Logic Network.
},
  copyright = {by the authors},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  issn = {1613-0073},
  volume = {857},
  address = {Aachen, Germany},
  url = {http://mcs.unife.it/~friguzzi/Papers/RigBelLam12-CILC12.pdf},
  pdf = {http://ceur-ws.org/Vol-857/paper_f16.pdf},
  pages = {221-235}
}
@inproceedings{Rig12-CILC12-NC,
  author = {Fabrizio Riguzzi},
  title = {Optimizing Inference for Probabilistic Logic Programs Exploiting Independence and Exclusiveness},
  booktitle = {Proceedings of the 27th Italian Conference on Computational Logic ({CILC2012}),
Roma, Italy, 6-7 June 2012},
  year = {2012},
  abstract = {Probabilistic Logic Programming (PLP) is gaining popularity due to its many applications, in particular in Machine Learning. An important problem in PLP is how to compute the probability of queries.
PITA is an algorithm for solving this problem that exploits tabling, answer subsumption and Binary Decision Diagrams (BDDs). PITA does not impose any restriction on the programs. Other algorithms, such as PRISM, achieve higher speed by imposing two restrictions on the program, namely that subgoals are independent and that clause bodies are mutually exclusive. Another assumption that simplifies inference is that clause bodies are independent. In this paper we present the algorithms PITA(IND,IND) and PITA(OPT). PITA(IND,IND) assumes that subgoals and clause bodies are independent. PITA(OPT) instead first checks whether these assumptions hold for subprograms and subgoals: if they hold, PITA(OPT) uses a simplified calculation, otherwise it resorts to BDDs. Experiments on a number of benchmark datasets show that PITA(IND,IND) is the fastest on datasets respecting the assumptions, while PITA(OPT) is a good option when nothing is known about a dataset.
},
  copyright = {by the authors},
  series = {CEUR Workshop Proceedings},
  publisher = {Sun {SITE} Central Europe},
  issn = {1613-0073},
  address = {Aachen, Germany},
  url = {http://mcs.unife.it/~friguzzi/Papers/Rig12-CILC12.pdf},
  pdf = {http://ceur-ws.org/Vol-857/paper_f15.pdf},
  volume = {857},
  pages = {206-220}
}
@inproceedings{GavRiguMilCag12-ISAIM12-IC,
  author = {Marco Gavanelli and
               Fabrizio Riguzzi and
               Michela Milano and
               Paolo Cagnoli},
  title = {Constraint and Optimization techniques for supporting Policy
               Making},
  booktitle = {International Symposium on Artificial Intelligence and Mathematics
               (ISAIM 2012), Fort Lauderdale, Florida, USA, January 9-11,
               2012},
  year = {2012},
  pdf = {http://www.cs.uic.edu/pub/Isaim2012/WebPreferences/ISAIM2012_Gavanelli_etal.pdf}
}
@inproceedings{BelRig12-ILP11-IC,
  author = {Elena Bellodi and Fabrizio Riguzzi},
  title = {Learning the Structure of Probabilistic Logic Programs},
  booktitle = {Inductive Logic Programming: 21st International Conference, ILP 2011, London, UK, July 31 - August 3, 2011, Revised Papers},
  year = {2012},
  editor = {Muggleton, Stephen H. and Tamaddoni-Nezhad, Alireza and Lisi, Francesca A.},
  doi = {10.1007/978-3-642-31951-8_10},
  series = {LNCS},
  volume = {7207},
  publisher = {Springer},
  address = {Heidelberg, Germany},
  pages = {61-75},
  note = {The original publication is available at \url{http://www.springerlink.com}},
  pdf = {http://mcs.unife.it/~friguzzi/Papers/BelRig12-ILP11-IC.pdf},
  keywords = {Probabilistic Inductive Logic Programming, Logic Programs with Annotated Disjunctions, ProbLog},
  copyright = {Springer},
  abstract = {There is a growing interest in the field of
Probabilistic Inductive Logic Programming, which uses languages that
integrate logic programming and probability.
Many of these languages are based on the distribution semantics and recently various authors have proposed systems for learning the parameters (PRISM, LeProbLog, LFI-ProbLog
and EMBLEM) or both the structure and the parameters (SEM-CP-logic) of these languages.
EMBLEM, for example, uses an Expectation Maximization approach in which the expectations are computed on Binary Decision Diagrams.
In this paper we present the algorithm SLIPCASE for ``Structure LearnIng of ProbabilistiC logic progrAmS with Em over bdds''. It performs a beam search in the space of the language of Logic Programs with Annotated Disjunctions (LPAD) using the log likelihood of the data as the guiding heuristic. To estimate the log likelihood of theory refinements it performs a limited number of Expectation Maximization iterations of EMBLEM.
SLIPCASE has been tested on three real world datasets and compared with SEM-CP-logic and  Learning using Structural Motifs, an algorithm for Markov Logic Networks. The results show that SLIPCASE achieves higher areas under the precision-recall and ROC curves and is more scalable.
}
}
@article{BelRig12-IA-IJ,
  author = {Elena Bellodi and Fabrizio Riguzzi},
  title = {Experimentation of an Expectation Maximization Algorithm for Probabilistic Logic Programs},
  year = {2012},
  journal = {Intelligenza Artificiale},
  publisher = {IOS Press},
  copyright = {IOS Press},
  pdf = {http://mcs.unife.it/~friguzzi/Papers/BelRig12-IA-IJ.pdf},
  abstract = {Statistical Relational Learning and Probabilistic Inductive Logic Programming are two emerging fields that use representation languages able to combine logic and probability. In the field of Logic Programming, the distribution semantics is one of the prominent approaches for representing uncertainty and underlies many languages such as ICL, PRISM, ProbLog and LPADs.
Learning the parameters for such languages requires an Expectation Maximization algorithm since their equivalent Bayesian networks contain hidden variables.
EMBLEM (EM over BDDs for probabilistic Logic programs Efficient Mining) is an EM algorithm for languages following the distribution semantics that computes expectations directly on the Binary Decision Diagrams that are built for inference.
In this paper we present experiments comparing EMBLEM with LeProbLog, Alchemy, CEM, RIB and LFI-ProbLog on six real world datasets. The results show that EMBLEM is able to solve problems on which the other systems fail and it often achieves significantly higher areas under the Precision Recall and the ROC curves in a similar time.},
  keywords = {Statistical Relational Learning, Probabilistic Inductive Logic Programming, Probabilistic Logic Programming,  Expectation Maximization, Binary Decision Diagrams,
Logic Programs with Annotated Disjunctions
},
  volume = {8},
  number = {1},
  pages = {3-18},
  doi = {10.3233/IA-2012-0027}
}
@article{RigDiM12-ML-IJ,
  author = {Fabrizio Riguzzi and Di Mauro, Nicola},
  title = {Applying the Information Bottleneck to Statistical Relational Learning},
  year = {2012},
  journal = {Machine Learning},
  volume = {86},
  number = {1},
  pages = {89--114},
  note = {The original publication is available at \url{http://www.springerlink.com}},
  pdf = {http://mcs.unife.it/~friguzzi/Papers/RigDiM11-ML-IJ.pdf},
  doi = {10.1007/s10994-011-5247-6},
  publisher = {Springer},
  copyright = {Springer},
  address = {Heidelberg, Germany},
  abstract = {In this paper we propose to apply the Information Bottleneck (IB) approach to the sub-class of
Statistical Relational Learning (SRL) languages that are reducible to Bayesian networks. When the
resulting networks involve hidden variables, learning these languages requires the use of techniques
for learning from incomplete data such as the Expectation Maximization (EM) algorithm.
Recently, the IB approach was shown to be able to avoid some of the local maxima in which EM can get
trapped when learning with hidden variables. Here we present the algorithm Relational Information Bottleneck (RIB)
that learns the parameters of SRL languages reducible to Bayesian networks.
In particular, we present the specialization of RIB to a language belonging to the family of languages based on the distribution semantics, Logic Programs with Annotated Disjunctions (LPADs). This language is prototypical for such a family and its equivalent Bayesian networks contain hidden variables. RIB is evaluated on the IMDB, Cora and artificial datasets and compared with LeProbLog, EM, Alchemy and PRISM.
The experimental results show that RIB has good performance, especially when some logical atoms are unobserved.
Moreover, it is particularly suitable when learning from interpretations that share the same Herbrand base.},
  keywords = {Statistical Relational Learning, Probabilistic Inductive Logic Programming, Probabilistic Logic Programming,  Information Bottleneck,
Logic Programs with Annotated Disjunctions
}
}
