@inproceedings{bc2572033bd74c67ad8325b581afbe25,
title = "Measuring unfairness through game-theoretic interpretability",
abstract = "One often finds in the literature connections between measures of fairness and measures of feature importance employed to interpret trained classifiers. However, there seems to be no study that compares fairness measures and feature importance measures. In this paper we propose ways to evaluate and compare such measures. We focus in particular on SHAP, a game-theoretic measure of feature importance; we present results for a number of unfairness-prone datasets.",
keywords = "Feature importance, Group and individual fairness, Interpretability, Shapley value",
author = "Juliana Cesaro and {Gagliardi Cozman}, Fabio",
note = "Publisher Copyright: {\textcopyright} Springer Nature Switzerland AG 2020.; null ; Conference date: 16-09-2019 Through 20-09-2019",
year = "2020",
doi = "10.1007/978-3-030-43823-4_22",
language = "Ingl{\'e}s",
isbn = "9783030438227",
series = "Communications in Computer and Information Science",
publisher = "Springer Verlag",
pages = "253--264",
editor = "Peggy Cellier and Kurt Driessens",
booktitle = "Machine Learning and Knowledge Discovery in Databases - International Workshops of ECML PKDD 2019, Proceedings",
address = "Alemania",
}