@inproceedings{bb112918f8334970aa60e36fbe0914d3,
title = "A Token-Wise CNN-Based Method for Sentence Compression",
abstract = "Sentence compression is a Natural Language Processing (NLP) task aimed at shortening original sentences and preserving their key information. Its applications can benefit many fields e.g., one can build tools for language education. However, current methods are largely based on Recurrent Neural Network (RNN) models which suffer from poor processing speed. To address this issue, in this paper, we propose a token-wiseConvolutional Neural Network, a CNN-based model along with pre-trained Bidirectional Encoder Representations from Transformers (BERT) features for deletion-based sentence compression. We also compare our model with RNN-based models and fine-tuned BERT. Although one of the RNN-based models outperforms marginally other models given the same input, our CNN-based model was ten times faster than the RNN-based approach.",
keywords = "Application, NLP, Neural networks",
author = "Weiwei Hou and Hanna Suominen and Piotr Koniusz and Sabrina Caldwell and Tom Gedeon",
note = "Publisher Copyright: {\textcopyright} 2020, Springer Nature Switzerland AG.; 27th International Conference on Neural Information Processing, ICONIP 2020 ; Conference date: 18-11-2020 Through 22-11-2020",
year = "2020",
doi = "10.1007/978-3-030-63830-6_56",
language = "English",
isbn = "9783030638290",
series = "Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)",
publisher = "Springer Science and Business Media Deutschland GmbH",
pages = "668--679",
editor = "Haiqin Yang and Kitsuchart Pasupa and Leung, {Andrew Chi-Sing} and Kwok, {James T.} and Chan, {Jonathan H.} and Irwin King",
booktitle = "Neural Information Processing - 27th International Conference, ICONIP 2020, Proceedings",
address = "Germany",
}