The process of transcribing and annotating non-manual features presents challenges for sign language researchers. This paper describes the approach used by our research team to integrate the Facial Action Coding System (FACS) with the EUDICO Linguistic Annotator (ELAN) program to allow us to more accurately and efficiently code non-manual features. Preliminary findings are presented which demonstrate that this approach is useful for a fuller description of facial expressions.
@inproceedings{mulrooney:14020:sign-lang:lrec,
  author    = {Mulrooney, Kristin and Hochgesang, Julie A. and Morris, Carla and Lee, Katie},
  title     = {The ``how-to'' of integrating {FACS} and {ELAN} for analysis of non-manual features in {ASL}},
  pages     = {123--126},
  editor    = {Crasborn, Onno and Efthimiou, Eleni and Fotinea, Stavroula-Evita and Hanke, Thomas and Kristoffersen, Jette and Mesch, Johanna},
  booktitle = {Proceedings of the {LREC2014} 6th Workshop on the Representation and Processing of Sign Languages: Beyond the Manual Channel},
  maintitle = {9th International Conference on Language Resources and Evaluation ({LREC} 2014)},
  publisher = {{European Language Resources Association (ELRA)}},
  address   = {Reykjav{\'\i}k, Iceland},
  day       = {31},
  month     = may,
  year      = {2014},
  language  = {english},
  url       = {https://www.sign-lang.uni-hamburg.de/lrec/pub/14020.pdf},
}