BibTeX format
@inproceedings{Proietti:2023,
author = {Proietti, M and Toni, F},
pages = {1--8},
publisher = {CEUR Workshop Proceedings},
title = {A roadmap for neuro-argumentative learning},
url = {https://ceur-ws.org/Vol-3432/},
year = {2023}
}
RIS format
TY - CPAPER
AB - Computational argumentation (CA) has emerged, in recent decades, as a powerful formalism for knowledge representation and reasoning in the presence of conflicting information, notably when reasoning non-monotonically with rules and exceptions. Much existing work in CA has focused, to date, on reasoning with given argumentation frameworks (AFs) or, more recently, on using AFs, possibly automatically drawn from other systems, for supporting forms of XAI. In this short paper we focus instead on the problem of learning AFs from data, with a focus on neuro-symbolic approaches. Specifically, we overview existing forms of neuro-argumentative (machine) learning, resulting from a combination of neural machine learning mechanisms and argumentative (symbolic) reasoning. We include in our overview neuro-symbolic paradigms that integrate reasoners with a natural understanding in argumentative terms, notably those capturing forms of non-monotonic reasoning in logic programming. We also outline avenues and challenges for future work in this spectrum.
AU - Proietti,M
AU - Toni,F
EP - 8
PB - CEUR Workshop Proceedings
PY - 2023///
SN - 1613-0073
SP - 1
TI - A roadmap for neuro-argumentative learning
UR - https://ceur-ws.org/Vol-3432/
UR - http://hdl.handle.net/10044/1/104353
ER -