@article{LamMelRig03-NGC-IJ, author = {Evelina Lamma and Fabrizio Riguzzi and Sergio Storari and Paola Mello and Annamaria Nanetti}, title = {Discovering Validation Rules from Micro-biological Data}, journal = {New Generation Computing}, year = {2003}, volume = {21}, number = {2}, pages = {123--134}, publisher = {Ohmsha, Ltd. and Springer}, address = {Tokyo, Japan}, month = feb, pdf = {http://ml.unife.it/wp-content/uploads/Papers/LamRigStoMelNan-NGC03.pdf}, http = {http://www.springerlink.com/content/b816tm18j5715810}, doi = {10.1007/BF03037630}, copyright = {Ohmsha, Ltd. and Springer}, abstract = {A huge amount of data is collected daily from clinical microbiology laboratories. These data concern the resistance or susceptibility of bacteria to tested antibiotics. Almost all microbiology laboratories follow standard antibiotic testing guidelines, which suggest antibiotic test execution methods and result interpretation and validation (among them, those annually published by NCCLS). Guidelines basically specify, for each species, the antibiotics to be tested, how to interpret the results of tests, and a list of exceptions regarding particular antibiotic test results. Even though these standards are well established, they do not consider peculiar features of a given hospital laboratory, which may influence the antimicrobial test results and the subsequent validation process. In order to improve and better tailor the validation process, we have applied knowledge discovery techniques, and data mining in particular, to microbiological data with the purpose of discovering new validation rules, not yet included in NCCLS guidelines, but considered plausible and correct by interviewed experts. In particular, we applied the knowledge discovery process in order to find (association) rules that relate to one another the susceptibilities or resistances of a bacterium to different antibiotics. This approach is not antithetic, but complementary to that based on NCCLS rules: it proved very effective in validating some of them, and also in extending that compendium. In this respect, the newly discovered knowledge has led microbiologists to become aware of new correlations among some antimicrobial test results, which were previously unnoticed. Last but not least, the newly discovered rules, taking into account the history of the considered laboratory, are better tailored to the hospital situation, and this is very important since some resistances to antibiotics are specific to particular, local hospital environments.}, keywords = {Knowledge Discovery and Data Mining, Microbiology, Knowledge Based Systems, Knowledge Elicitation} }
@article{LamRigPer03-NGC-IJ, author = {Evelina Lamma and Fabrizio Riguzzi and Lu\'\i{}s Moniz Pereira}, title = {Belief Revision via {L}amarckian Evolution}, journal = {New Generation Computing}, abstract = {We present a system for performing belief revision in a multi-agent environment. The system is called GBR (Genetic Belief Revisor) and it is based on a genetic algorithm. In this setting, different individuals are exposed to different experiences. This may happen because the world surrounding an agent changes over time or because we allow agents to explore different parts of the world. The algorithm permits the exchange of chromosomes between different agents and combines two different evolution strategies, one based on Darwin's and the other on Lamarck's evolutionary theory. The algorithm therefore also includes a Lamarckian operator that changes the memes of an agent in order to improve their fitness. The operator is implemented by means of a belief revision procedure that, by tracing logical derivations, identifies the memes leading to contradiction. Moreover, the algorithm comprises a special crossover mechanism for memes in which a meme can be acquired from another agent only if the other agent has ``accessed'' the meme, i.e., if an application of the Lamarckian operator has read or modified the meme. Experiments have been performed on the $n$-queen problem and on a problem of digital circuit diagnosis. In the case of the $n$-queen problem, the addition of the Lamarckian operator in the single-agent case improves the fitness of the best solution. In both cases the experiments show that the distribution of constraints, even if it may lead to a reduction of the fitness of the best solution, does not produce a significant reduction.}, publisher = {Ohmsha, Ltd. and Springer}, address = {Tokyo, Japan}, keywords = {Genetic_Algorithms,Theory_Revision}, year = {2003}, volume = {21}, number = {3}, month = aug, pages = {247--275}, pdf = {http://ml.unife.it/wp-content/uploads/Papers/LamRigPer-NGC03.pdf}, http = {http://www.springerlink.com/content/063764w6n3847825/}, doi = {10.1007/BF03037475}, copyright = {Ohmsha, Ltd. and Springer} }
@inproceedings{LamRigSta03-AI*IA03-IC, author = {Evelina Lamma and Fabrizio Riguzzi and Andrea Stambazzi and Sergio Storari}, title = {Improving the {SLA} algorithm using association rules}, booktitle = {{AI*IA} 2003: Advances in Artificial Intelligence: 8th Congress of the Italian Association for Artificial Intelligence, Pisa, Italy, September 23--26, 2003, Proceedings}, editor = {Amedeo Cappelli and Franco Turini}, abstract = {A Bayesian network is an appropriate tool for working with the uncertainty and probability that are typical of real-life applications. In the literature we find different approaches to Bayesian network learning. Some of them are based on a search-and-score methodology, while the others follow an information-theory-based approach. One of the best-known algorithms for learning Bayesian networks is the SLA algorithm. This algorithm constructs a Bayesian network by analyzing conditional independence relationships among nodes. The SLA algorithm has three phases: drafting, thickening and thinning. In this work, we propose an alternative method for performing the drafting phase. This new methodology uses data mining techniques, and in particular the computation of a number of parameters usually defined in relation to association rules, in order to learn an initial structure of a Bayesian network. In this paper, we present the BNL-rules algorithm (Bayesian Network Learner with association rules), which exploits a number of association rule parameters to infer the structure of a Bayesian network. We also present comparisons between the SLA and BNL-rules algorithms on learning four Bayesian networks.}, year = {2003}, month = sep, publisher = {Springer Verlag}, address = {Heidelberg, Germany}, series = {Lecture Notes in Artificial Intelligence}, volume = {2829}, note = {The original publication is available at \url{http://www.springerlink.com}}, pages = {165--175}, keywords = {Bayesian Networks Learning}, issn = {0302-9743}, doi = {10.1007/b13658}, isbn = {3-540-20119-X}, http = {http://dx.medra.org/10.1007/b13658}, pdf = {http://ml.unife.it/wp-content/uploads/Papers/LamRigSta-AIIA03.pdf}, copyright = {Springer} }
@inproceedings{AlbCiaGav03-ceemas-IC, author = {Marco Alberti and Anna Ciampolini and Marco Gavanelli and Evelina Lamma and Paola Mello and Paolo Torroni}, title = {A Social {ACL} Semantics by Deontic Constraints}, booktitle = {Proceedings of the 3rd International Central and Eastern European Conference on Multi-Agent Systems}, editor = {V. Marik and J. Muller and M. Pechoucek}, year = {2003}, address = {Prague, Czech Republic}, month = jun, series = {Lecture Notes in Artificial Intelligence}, volume = {2691}, pages = {204--213}, issn = {0302-9743}, isbn = {3-540-40450-3}, abstract = {In most proposals for multi-agent systems, an Agent Communication Language (ACL) is the formalism designed to express knowledge exchange among agents. However, a universally accepted standard for ACLs is still missing. Among the different approaches to the definition of ACL semantics, the \textit{social} approach seems the most appropriate to express the semantics of communication in open societies of autonomous and heterogeneous agents. In this paper we propose a formalism (\textit{deontic constraints}) to express social ACL semantics, which can be grounded on a computational logic framework, thus allowing automatic verification of compliance by means of appropriate proof procedures. We also show how several common communication performatives can be defined by means of deontic constraints.} }
@inproceedings{AlbGavLam03-AIIA-IC, author = {Marco Alberti and Marco Gavanelli and Evelina Lamma and Paola Mello and Paolo Torroni}, title = {An Abductive Interpretation for Open Agent Societies}, booktitle = {{AI*IA} 2003: Advances in Artificial Intelligence: 8th Congress of the Italian Association for Artificial Intelligence, Pisa, Italy, September 23--26, 2003, Proceedings}, year = {2003}, volume = {2829}, series = {Lecture Notes in Artificial Intelligence}, address = {Pisa, Italy}, month = sep # { 23--26}, publisher = {Springer Verlag}, pages = {287--299}, url = {http://springerlink.metapress.com/openurl.asp?genre=article&issn=0302-9743&volume=2829&spage=287}, pdf = {http://springerlink.metapress.com/media/3pvxje0ttl7xrg84qwtm/contributions/l/d/8/p/ld8p2ja8d94wkv6m_html/BodyRef/PDF/558_10956106_Chapter_24.pdf}, editor = {Amedeo Cappelli and Franco Turini}, issn = {0302-9743}, isbn = {3-540-20119-X}, abstract = {The focus of this work is on the interactions among (possibly heterogeneous) agents that form an open society, and on the definition of a computational logic-based architecture for agent interaction. We propose a model where the society defines the allowed interaction protocols, which determine the ``socially'' allowed agent interaction patterns. The semantics of protocols can be defined by means of social integrity constraints. The main advantages of this approach are in the design of societies of agents, and in the possibility of detecting undesirable behavior. In the paper, we present the model for societies ruled by protocols expressed as integrity constraints, and its declarative semantics. A sketch of the operational counterpart is also given.} }