@inproceedings{b8ee601922b7413997f2b5e8d97df374,
  author    = {{Zakir Hossain}, Md and Gedeon, Tom and Islam, Atiqul},
  title     = {Understanding Two Graphical Visualizations from Observer's Pupillary Responses and Neural Network},
  abstract  = {This paper investigates observers' pupillary responses while they viewed two graphical visualizations (circular and organizational). The graphical visualizations are snapshots of the kind of data used in checking the degree of compliance with corporate governance best practice. Six very similar questions were asked from 24 observers for each visualization. In particular, we developed a neural network based classification model to understand these two visualizations from temporal features of observers' pupillary responses. We predicted that whether each observer is more accurate in understanding the two visualizations from their unconscious pupillary responses or conscious verbal responses, by answering relevant questions. We found that observers were physiologically 96.5\% and 95.1\% accurate, and verbally 80.6\% and 81.3\% accurate, for the circular and organizational visualizations, respectively.},
  booktitle = {Proceedings of the 30th Australian Computer-Human Interaction Conference, OzCHI 2018},
  editor    = {Morrison, Ann and Buchanan, George and Waycott, Jenny and Billinghurst, Mark and Stevenson, Duncan and Choi, J. H.-J. and Kelly, Ryan and McKay, Dana and Lugmayr, Artur},
  series    = {ACM International Conference Proceeding Series},
  publisher = {Association for Computing Machinery},
  pages     = {215--218},
  year      = {2018},
  month     = dec,
  day       = {4},
  doi       = {10.1145/3292147.3292187},
  keywords  = {Information visualizations, Neural network, Pupillary responses},
  language  = {English},
  note      = {Publisher Copyright: {\textcopyright} 2018 Association for Computing Machinery.; 30th Australian Conference on Computer-Human Interaction, OzCHI 2018 ; Conference date: 04-12-2018 Through 07-12-2018},
}