@inbook{319e735509334fe6a57c025d74b6b40c,
title = "Why unbiased computational processes can lead to discriminative decision procedures",
chapter = "3",
abstract = "Nowadays, more and more decision procedures are supported or even guided by automated processes. An important technique in this automation is data mining. In this chapter we study how such automatically generated decision support models may exhibit discriminatory behavior towards certain groups, based, e.g., on gender or ethnicity. Surprisingly, such behavior may be observed even when sensitive information is removed or suppressed and the whole procedure is guided only by neutral criteria such as predictive accuracy. The reason for this phenomenon is that most data mining methods rest on assumptions that are not always satisfied in reality, namely that the data are correct and represent the population well. In this chapter we discuss the implicit modeling assumptions made by most data mining algorithms and show situations in which they are not satisfied. We then outline three realistic scenarios in which an unbiased process can lead to discriminatory models, and illustrate by example the effects of these assumptions not being fulfilled. The chapter concludes with an outline of the main challenges and problems that remain to be solved.",
author = "T.G.K. Calders and I. {\v{Z}}liobait{\.e}",
year = "2013",
doi = "10.1007/978-3-642-30487-3_3",
language = "English",
isbn = "978-3-642-30486-6",
series = "Studies in Applied Philosophy, Epistemology and Rational Ethics",
publisher = "Springer",
pages = "43--57",
editor = "B.H.M. Custers and T.G.K. Calders and B.W. Schermer and T.Z. Zarsky",
booktitle = "Discrimination and Privacy in the Information Society: Data Mining and Profiling in Large Databases",
}
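
The abstract's central mechanism (a model trained without the sensitive attribute can still
discriminate through correlated proxy features when the historical labels are biased) can be
made concrete. Below is a minimal synthetic sketch in Python, not taken from the chapter: the
group sizes, the 30% label-bias rate, and the 90% proxy correlation are illustrative
assumptions. It trains a logistic regression purely for predictive accuracy, with the
sensitive attribute withheld, and still obtains different acceptance rates for the two groups.

import numpy as np

rng = np.random.default_rng(0)
n = 20000

# Sensitive attribute (e.g., group membership); never shown to the model.
s = rng.integers(0, 2, n)

# A "neutral" proxy feature correlated with s (e.g., a postcode indicator);
# hypothetical construction, 90% agreement with s.
proxy = np.where(rng.random(n) < 0.9, s, 1 - s).astype(float)

# Genuine qualification, independent of the sensitive attribute.
skill = rng.normal(0.0, 1.0, n)
qualified = (skill > 0).astype(float)

# Historically biased labels: qualified members of group s=1 are rejected
# 30% of the time, so the training data is incorrect for that group.
flipped = (s == 1) & (rng.random(n) < 0.3)
y = np.where(flipped, 0.0, qualified)

# Logistic regression on (skill, proxy, intercept) only, fitted by
# gradient descent on the mean log-loss; the sensitive attribute is removed.
X = np.column_stack([skill, proxy, np.ones(n)])
w = np.zeros(3)
for _ in range(2000):
    p = 1.0 / (1.0 + np.exp(-X @ w))
    w -= 0.1 * X.T @ (p - y) / n

# Acceptance rates per group: despite the "unbiased" training procedure,
# the model penalizes the proxy and thus group s=1.
accept = (X @ w) > 0
for g in (0, 1):
    print(f"group s={g}: acceptance rate {accept[s == g].mean():.2f}")

With the sensitive attribute itself unavailable, the learner shifts the penalty for the biased
labels onto the correlated proxy, an effect often called redlining in the discrimination-aware
data mining literature that this chapter belongs to.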