@inproceedings{DazaFrank2019,
  author    = {Daza, Angel and Frank, Anette},
  title     = {Translate and Label! An Encoder-Decoder Approach for Cross-Lingual Semantic Role Labeling},
  booktitle = {Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing and the 9th International Joint Conference on Natural Language Processing ({EMNLP-IJCNLP}), November 3--7, 2019, Hong Kong, China},
  editor    = {Inui, Kentaro and Jiang, Jing and Ng, Vincent and Wan, Xiaojun},
  publisher = {The Association for Computational Linguistics},
  address   = {Stroudsburg, PA, USA},
  isbn      = {978-1-950737-90-1},
  doi       = {10.18653/v1/D19-1056},
  url       = {https://nbn-resolving.org/urn:nbn:de:bsz:mh39-94395},
  pages     = {603--615},
  year      = {2019},
  abstract  = {We propose a Cross-lingual Encoder-Decoder model that simultaneously translates and generates sentences with Semantic Role Labeling annotations in a resource-poor target language. Unlike annotation projection techniques, our model does not need parallel data during inference time. Our approach can be applied in monolingual, multilingual and cross-lingual settings and is able to produce dependency-based and span-based SRL annotations. We benchmark the labeling performance of our model in different monolingual and multilingual settings using well-known SRL datasets. We then train our model in a cross-lingual setting to generate new SRL labeled data. Finally, we measure the effectiveness of our method by using the generated data to augment the training basis for resource-poor languages and perform manual evaluation to show that it produces high-quality sentences and assigns accurate semantic role annotations. Our proposed architecture offers a flexible method for leveraging SRL data in multiple languages.},
  language  = {en},
}