@inproceedings{manome-etal-2018-neural,
    title     = {Neural sentence generation from formal semantics},
    author    = {Manome, Kana and
                 Yoshikawa, Masashi and
                 Yanaka, Hitomi and
                 Mart{\'\i}nez-G{\'o}mez, Pascual and
                 Mineshima, Koji and
                 Bekki, Daisuke},
    editor    = {Krahmer, Emiel and
                 Gatt, Albert and
                 Goudbeek, Martijn},
    booktitle = {Proceedings of the 11th International Conference on Natural Language Generation},
    month     = nov,
    year      = {2018},
    address   = {Tilburg University, The Netherlands},
    publisher = {Association for Computational Linguistics},
    url       = {https://aclanthology.org/W18-6549},
    doi       = {10.18653/v1/W18-6549},
    pages     = {408--414},
    abstract  = {Sequence-to-sequence models have shown strong performance in a wide range of NLP tasks, yet their applications to sentence generation from logical representations are underdeveloped. In this paper, we present a sequence-to-sequence model for generating sentences from logical meaning representations based on event semantics. We use a semantic parsing system based on Combinatory Categorial Grammar (CCG) to obtain data annotated with logical formulas. We augment our sequence-to-sequence model with masking for predicates to constrain output sentences. We also propose a novel evaluation method for generation using Recognizing Textual Entailment (RTE). Combining parsing and generation, we test whether or not the output sentence entails the original text and vice versa. Experiments showed that our model outperformed a baseline with respect to both BLEU scores and accuracies in RTE.},
}
<?xml version="1.0" encoding="UTF-8"?>
<!-- MODS v3 metadata record for ACL Anthology paper W18-6549 (Manome et al., INLG 2018).
     Mirrors the BibTeX entry above: six authors, three editors, host proceedings,
     publisher/place, abstract, citekey, DOI, URL, and page range 408-414. -->
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="manome-etal-2018-neural">
<titleInfo>
<title>Neural sentence generation from formal semantics</title>
</titleInfo>
<name type="personal">
<namePart type="given">Kana</namePart>
<namePart type="family">Manome</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Masashi</namePart>
<namePart type="family">Yoshikawa</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Hitomi</namePart>
<namePart type="family">Yanaka</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Pascual</namePart>
<namePart type="family">Martínez-Gómez</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Koji</namePart>
<namePart type="family">Mineshima</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Daisuke</namePart>
<namePart type="family">Bekki</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2018-11</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<!-- Host item: the INLG 2018 proceedings volume, with its editors and imprint. -->
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 11th International Conference on Natural Language Generation</title>
</titleInfo>
<name type="personal">
<namePart type="given">Emiel</namePart>
<namePart type="family">Krahmer</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Albert</namePart>
<namePart type="family">Gatt</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Martijn</namePart>
<namePart type="family">Goudbeek</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Tilburg University, The Netherlands</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Sequence-to-sequence models have shown strong performance in a wide range of NLP tasks, yet their applications to sentence generation from logical representations are underdeveloped. In this paper, we present a sequence-to-sequence model for generating sentences from logical meaning representations based on event semantics. We use a semantic parsing system based on Combinatory Categorial Grammar (CCG) to obtain data annotated with logical formulas. We augment our sequence-to-sequence model with masking for predicates to constrain output sentences. We also propose a novel evaluation method for generation using Recognizing Textual Entailment (RTE). Combining parsing and generation, we test whether or not the output sentence entails the original text and vice versa. Experiments showed that our model outperformed a baseline with respect to both BLEU scores and accuracies in RTE.</abstract>
<identifier type="citekey">manome-etal-2018-neural</identifier>
<identifier type="doi">10.18653/v1/W18-6549</identifier>
<location>
<url>https://aclanthology.org/W18-6549</url>
</location>
<!-- Part details: issue date and page extent within the host proceedings. -->
<part>
<date>2018-11</date>
<extent unit="page">
<start>408</start>
<end>414</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T Neural sentence generation from formal semantics
%A Manome, Kana
%A Yoshikawa, Masashi
%A Yanaka, Hitomi
%A Martínez-Gómez, Pascual
%A Mineshima, Koji
%A Bekki, Daisuke
%Y Krahmer, Emiel
%Y Gatt, Albert
%Y Goudbeek, Martijn
%S Proceedings of the 11th International Conference on Natural Language Generation
%D 2018
%8 November
%I Association for Computational Linguistics
%C Tilburg University, The Netherlands
%F manome-etal-2018-neural
%X Sequence-to-sequence models have shown strong performance in a wide range of NLP tasks, yet their applications to sentence generation from logical representations are underdeveloped. In this paper, we present a sequence-to-sequence model for generating sentences from logical meaning representations based on event semantics. We use a semantic parsing system based on Combinatory Categorial Grammar (CCG) to obtain data annotated with logical formulas. We augment our sequence-to-sequence model with masking for predicates to constrain output sentences. We also propose a novel evaluation method for generation using Recognizing Textual Entailment (RTE). Combining parsing and generation, we test whether or not the output sentence entails the original text and vice versa. Experiments showed that our model outperformed a baseline with respect to both BLEU scores and accuracies in RTE.
%R 10.18653/v1/W18-6549
%U https://aclanthology.org/W18-6549
%U https://doi.org/10.18653/v1/W18-6549
%P 408-414
Markdown (Informal)
[Neural sentence generation from formal semantics](https://aclanthology.org/W18-6549) (Manome et al., INLG 2018)
ACL
- Kana Manome, Masashi Yoshikawa, Hitomi Yanaka, Pascual Martínez-Gómez, Koji Mineshima, and Daisuke Bekki. 2018. Neural sentence generation from formal semantics. In Proceedings of the 11th International Conference on Natural Language Generation, pages 408–414, Tilburg University, The Netherlands. Association for Computational Linguistics.