@inproceedings{f4d73167de014857afaf7013cd102221,
  title     = {Bias disparity in collaborative recommendation: algorithmic evaluation and comparison},
  abstract  = {Research on fairness in machine learning has been recently extended to recommender systems. One of the factors that may impact fairness is bias disparity, the degree to which a group's preferences on various item categories fail to be reflected in the recommendations they receive. In some cases biases in the original data may be amplified or reversed by the underlying recommendation algorithm. In this paper, we explore how different recommendation algorithms reflect the tradeoff between ranking quality and bias disparity. Our experiments include neighborhood-based, model-based, and trust-aware recommendation algorithms.},
  keywords  = {Bias disparity, Fairness, Recommender systems, Trust ratings},
  author    = {Masoud Mansoury and Bamshad Mobasher and Robin Burke and Mykola Pechenizkiy},
  year      = {2019},
  month     = jan,
  day       = {1},
  language  = {English},
  series    = {{CEUR} Workshop Proceedings},
  publisher = {{CEUR-WS}.org},
  editor    = {Robin Burke and Himan Abdollahpouri and Edward Malthouse},
  booktitle = {Proceedings of the Workshop on Recommendation in Multi-stakeholder Environments co-located with the 13th {ACM} Conference on Recommender Systems ({RecSys} 2019)},
  note      = {2019 Workshop on Recommendation in Multi-Stakeholder Environments, {RMSE} 2019 ; Conference date: 20-09-2019},
}