We have developed a collection of stimuli (with accompanying comprehension questions and subjective-evaluation questions) that can be used to evaluate the perception and understanding of facial expressions in ASL animations or videos. The stimuli have been designed as part of our laboratory's ongoing research on synthesizing ASL facial expressions such as Topic, Negation, Yes/No Questions, WH-questions, and RH-questions. This paper announces the release of this resource, describes the collection and its creation, and provides sufficient details to enable researchers to determine if it would benefit their work. Using this collection of stimuli and questions, we are seeking to evaluate computational models of ASL animations with linguistically meaningful facial expressions, which have accessibility applications for deaf users.
@inproceedings{huenerfauth:14010:sign-lang:lrec,
  author    = {Huenerfauth, Matt and Kacorri, Hernisa},
  title     = {Release of Experimental Stimuli and Questions for Evaluating Facial Expressions in Animations of {American Sign Language}},
  pages     = {71--76},
  editor    = {Crasborn, Onno and Efthimiou, Eleni and Fotinea, Stavroula-Evita and Hanke, Thomas and Hochgesang, Julie A. and Kristoffersen, Jette and Mesch, Johanna},
  booktitle = {Proceedings of the {LREC2014} 6th Workshop on the Representation and Processing of Sign Languages: Beyond the Manual Channel},
  maintitle = {9th International Conference on Language Resources and Evaluation ({LREC} 2014)},
  publisher = {European Language Resources Association (ELRA)},
  address   = {Reykjavik, Iceland},
  day       = {31},
  month     = may,
  year      = {2014},
  language  = {english},
  url       = {https://www.sign-lang.uni-hamburg.de/lrec/pub/14010.pdf}
}