@inproceedings{lunajimenez:26008:sign-lang:lrec,
  author    = {Luna-Jimenez, Cristina and Eing, Lennart and Esteban Romero, Sergio and Schneeberger, Tanja and Gebhard, Patrick and Nunnari, Fabrizio and Andr{\'e}, Elisabeth},
  title     = {Emotion Recognition in {German Sign Language} with Facial {Action Units}},
  pages     = {297--305},
  editor    = {Efthimiou, Eleni and Fotinea, Stavroula-Evita and Hanke, Thomas and Hochgesang, Julie A. and Mesch, Johanna and Schulder, Marc},
  booktitle = {Proceedings of the {LREC2026} 12th Workshop on the Representation and Processing of Sign Languages: Language in Motion},
  maintitle = {15th International Conference on Language Resources and Evaluation ({LREC} 2026)},
  publisher = {European Language Resources Association (ELRA)},
  address   = {Palma, Mallorca, Spain},
  day       = {16},
  month     = may,
  year      = {2026},
  isbn      = {978-2-493814-82-1},
  language  = {english},
  url       = {https://www.sign-lang.uni-hamburg.de/lrec/pub/26008.html},
  abstract  = {Emotion Recognition research in Sign Languages is still in its infancy. Still today, there exists a lack of knowledge about appropriate annotation guidelines and the impact that facial expressions, body postures and head positions have in recognizing emotions while signing, considering that sign language encompasses manual and non-manual cues with linguistic purposes. In this article, we present an acquisition protocol to record acted emotions in German Sign Language under four scenarios (High-Valence and High-Arousal, High-Valence and Low Arousal, Low-Valence and High-Arousal, and Low-Valence and Low-Arousal). The goal is to provide a reference dataset to explore the use of machine learning techniques for an automated classification of emotions in sign language utterances. As a baseline reference, we trained static models with features extracted from the facial muscle activations. The best model achieved an accuracy of 68.84{\%} and a F1 of 67.96{\%} with a random forest trained on the statistics extracted from Action Units. These results highlight the importance of facial expression in sign language, not only for carrying linguistic information but also for transmitting emotions. Results also indicate challenges in detecting emotions in the High-Valence and Low Arousal scenario, which suggests future investigation lines to explore.}
}

