From e2b38dc743a67a2325b8e48a17a3a2fb4f33f5ff Mon Sep 17 00:00:00 2001 From: Leonhard Hennig Date: Thu, 17 Aug 2023 10:05:51 +0200 Subject: [PATCH] updated paper --- content/publication/acl2023-zhu-sign/cite.bib | 15 +++++++++++++++ content/publication/acl2023-zhu-sign/index.md | 4 ++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/content/publication/acl2023-zhu-sign/cite.bib b/content/publication/acl2023-zhu-sign/cite.bib index e69de29b..ccff6462 100644 --- a/content/publication/acl2023-zhu-sign/cite.bib +++ b/content/publication/acl2023-zhu-sign/cite.bib @@ -0,0 +1,15 @@ +@inproceedings{zhu-etal-2023-neural, + title = "Neural Machine Translation Methods for Translating Text to Sign Language Glosses", + author = "Zhu, Dele and + Czehmann, Vera and + Avramidis, Eleftherios", + booktitle = "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)", + month = jul, + year = "2023", + address = "Toronto, Canada", + publisher = "Association for Computational Linguistics", + url = "https://aclanthology.org/2023.acl-long.700", + doi = "10.18653/v1/2023.acl-long.700", + pages = "12523--12541", + abstract = "State-of-the-art techniques common to low resource Machine Translation (MT) are applied to improve MT of spoken language text to Sign Language (SL) glosses. In our experiments, we improve the performance of the transformer-based models via (1) data augmentation, (2) semi-supervised Neural Machine Translation (NMT), (3) transfer learning and (4) multilingual NMT. The proposed methods are implemented progressively on two German SL corpora containing gloss annotations. Multilingual NMT combined with data augmentation appear to be the most successful setting, yielding statistically significant improvements as measured by three automatic metrics (up to over 6 points BLEU), and confirmed via human evaluation. Our best setting outperforms all previous work that report on the same test-set and is also confirmed on a corpus of the American Sign Language (ASL).", +} diff --git a/content/publication/acl2023-zhu-sign/index.md b/content/publication/acl2023-zhu-sign/index.md index 7ab1dbb8..d8d88e08 100644 --- a/content/publication/acl2023-zhu-sign/index.md +++ b/content/publication/acl2023-zhu-sign/index.md @@ -19,7 +19,7 @@ publication_types: ["1"] publication: "Proceedings of the 61st Annual Meeting of the Association for Computational Linguistics (Volume 1: Long Papers)" publication_short: "ACL 2023" -abstract: "" +abstract: "State-of-the-art techniques common to low resource Machine Translation (MT) are applied to improve MT of spoken language text to Sign Language (SL) glosses. In our experiments, we improve the performance of the transformer-based models via (1) data augmentation, (2) semi-supervised Neural Machine Translation (NMT), (3) transfer learning and (4) multilingual NMT. The proposed methods are implemented progressively on two German SL corpora containing gloss annotations. Multilingual NMT combined with data augmentation appear to be the most successful setting, yielding statistically significant improvements as measured by three automatic metrics (up to over 6 points BLEU), and confirmed via human evaluation. Our best setting outperforms all previous work that report on the same test-set and is also confirmed on a corpus of the American Sign Language (ASL)." # Summary. An optional shortened abstract. summary: "" @@ -35,7 +35,7 @@ featured: false # icon_pack: fab # icon: twitter -url_pdf: "" +url_pdf: "https://aclanthology.org/2023.acl-long.700.pdf" url_code: "" url_dataset: url_poster: