@inproceedings{1c052b226118423b9e002d3c0e103e93,
title = "Evaluating crowdsourced relevance assessments using self-reported traits and task speed",
abstract = "Relevance is the strength of the relationship between a user's perceived information need and an information object. Systems designed to help users identify relevant information often rely on high-quality labelled datasets. However, the subjective and personal nature of relevance means that establishing ground truth labels is difficult. In this work, we conduct a user study on text documents to crowdsource relevance assessments against four topics. Workers' self-reported measures and task completion speed are used to calculate a range of ground truth measures against which classification performance can be assessed. Our results indicate that average subjective relevance and confidence-weighted measures are on par with annotations from an expert panel. Further work is planned to expand these findings.",
keywords = "Crowdsourcing, Ground truth, Relevance assessments, Subjective relevance",
author = "Christopher Chow and Tom Gedeon",
note = "Publisher Copyright: {\textcopyright} 2017 Association for Computing Machinery. All rights reserved.; 29th Australian Computer-Human Interaction Conference, OzCHI 2017; Conference date: 28-11-2017 through 01-12-2017",
year = "2017",
month = nov,
day = "28",
doi = "10.1145/3152771.3156146",
language = "English",
series = "ACM International Conference Proceeding Series",
publisher = "Association for Computing Machinery",
pages = "407--411",
editor = "Margot Brereton and Dhaval Vyas and Alessandro Soro and Bernd Ploderer and Jenny Waycott and Ann Morrison",
booktitle = "Proceedings of the 29th Australian Computer-Human Interaction Conference",
}