@inproceedings{LangeLjungloef2022,
  author    = {Lange, Herbert and Ljungl{\"o}f, Peter},
  title     = {Learning Domain-Specific Grammars from a Small Number of Examples},
  booktitle = {Proceedings of the 12th International Conference on Agents and Artificial Intelligence ({ICAART} 2020) -- Volume 1, February 22--24, 2020, Valletta, Malta},
  editor    = {Rocha, Ana and Steels, Luc and van den Herik, Jaap},
  publisher = {SciTePress},
  address   = {Set{\'u}bal},
  isbn      = {978-989-758-395-7},
  issn      = {2184-433X},
  doi       = {10.5220/0009371304220430},
  url       = {https://nbn-resolving.org/urn:nbn:de:bsz:mh39-112109},
  pages     = {422--430},
  year      = {2020},
  abstract  = {In this paper we investigate the problem of grammar inference from a different perspective. The common approach is to try to infer a grammar directly from example sentences, which either requires a large training set or suffers from bad accuracy. We instead view it as a problem of grammar restriction or sub-grammar extraction. We start from a large-scale resource grammar and a small number of examples, and find a sub-grammar that still covers all the examples. To do this we formulate the problem as a constraint satisfaction problem, and use an existing constraint solver to find the optimal grammar. We have made experiments with English, Finnish, German, Swedish and Spanish, which show that 10--20 examples are often sufficient to learn an interesting domain grammar. Possible applications include computer-assisted language learning, domain-specific dialogue systems, computer games, Q/A-systems, and others.},
  language  = {en},
}