@inproceedings{nguyen-etal-2017-mixture,
title = "A Mixture Model for Learning Multi-Sense Word Embeddings",
author = "Nguyen, Dai Quoc and
Nguyen, Dat Quoc and
Modi, Ashutosh and
Thater, Stefan and
Pinkal, Manfred",
editor = "Ide, Nancy and
Herbelot, Aur{\'e}lie and
M{\`a}rquez, Llu{\'\i}s",
booktitle = "Proceedings of the 6th Joint Conference on Lexical and Computational Semantics (*{SEM} 2017)",
month = aug,
year = "2017",
address = "Vancouver, Canada",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/S17-1015",
doi = "10.18653/v1/S17-1015",
pages = "121--127",
abstract = "Word embeddings are now a standard technique for inducing meaning representations for words. For getting good representations, it is important to take into account different senses of a word. In this paper, we propose a mixture model for learning multi-sense word embeddings. Our model generalizes the previous works in that it allows to induce different weights of different senses of a word. The experimental results show that our model outperforms previous models on standard evaluation tasks.",
}
<?xml version="1.0" encoding="UTF-8"?>
<modsCollection xmlns="http://www.loc.gov/mods/v3">
<mods ID="nguyen-etal-2017-mixture">
<titleInfo>
<title>A Mixture Model for Learning Multi-Sense Word Embeddings</title>
</titleInfo>
<name type="personal">
<namePart type="given">Dai</namePart>
<namePart type="given">Quoc</namePart>
<namePart type="family">Nguyen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Dat</namePart>
<namePart type="given">Quoc</namePart>
<namePart type="family">Nguyen</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Ashutosh</namePart>
<namePart type="family">Modi</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Stefan</namePart>
<namePart type="family">Thater</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Manfred</namePart>
<namePart type="family">Pinkal</namePart>
<role>
<roleTerm authority="marcrelator" type="text">author</roleTerm>
</role>
</name>
<originInfo>
<dateIssued>2017-08</dateIssued>
</originInfo>
<typeOfResource>text</typeOfResource>
<relatedItem type="host">
<titleInfo>
<title>Proceedings of the 6th Joint Conference on Lexical and Computational Semantics (*SEM 2017)</title>
</titleInfo>
<name type="personal">
<namePart type="given">Nancy</namePart>
<namePart type="family">Ide</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Aurélie</namePart>
<namePart type="family">Herbelot</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<name type="personal">
<namePart type="given">Lluís</namePart>
<namePart type="family">Màrquez</namePart>
<role>
<roleTerm authority="marcrelator" type="text">editor</roleTerm>
</role>
</name>
<originInfo>
<publisher>Association for Computational Linguistics</publisher>
<place>
<placeTerm type="text">Vancouver, Canada</placeTerm>
</place>
</originInfo>
<genre authority="marcgt">conference publication</genre>
</relatedItem>
<abstract>Word embeddings are now a standard technique for inducing meaning representations for words. To obtain good representations, it is important to take into account the different senses of a word. In this paper, we propose a mixture model for learning multi-sense word embeddings. Our model generalizes previous work in that it allows inducing different weights for the different senses of a word. The experimental results show that our model outperforms previous models on standard evaluation tasks.</abstract>
<identifier type="citekey">nguyen-etal-2017-mixture</identifier>
<identifier type="doi">10.18653/v1/S17-1015</identifier>
<location>
<url>https://aclanthology.org/S17-1015</url>
</location>
<part>
<date>2017-08</date>
<extent unit="page">
<start>121</start>
<end>127</end>
</extent>
</part>
</mods>
</modsCollection>
%0 Conference Proceedings
%T A Mixture Model for Learning Multi-Sense Word Embeddings
%A Nguyen, Dai Quoc
%A Nguyen, Dat Quoc
%A Modi, Ashutosh
%A Thater, Stefan
%A Pinkal, Manfred
%Y Ide, Nancy
%Y Herbelot, Aurélie
%Y Màrquez, Lluís
%S Proceedings of the 6th Joint Conference on Lexical and Computational Semantics (*SEM 2017)
%D 2017
%8 August
%I Association for Computational Linguistics
%C Vancouver, Canada
%F nguyen-etal-2017-mixture
%X Word embeddings are now a standard technique for inducing meaning representations for words. To obtain good representations, it is important to take into account the different senses of a word. In this paper, we propose a mixture model for learning multi-sense word embeddings. Our model generalizes previous work in that it allows inducing different weights for the different senses of a word. The experimental results show that our model outperforms previous models on standard evaluation tasks.
%R 10.18653/v1/S17-1015
%U https://aclanthology.org/S17-1015
%U https://doi.org/10.18653/v1/S17-1015
%P 121-127
Markdown (Informal)
[A Mixture Model for Learning Multi-Sense Word Embeddings](https://aclanthology.org/S17-1015) (Nguyen et al., *SEM 2017)
ACL
Dai Quoc Nguyen, Dat Quoc Nguyen, Ashutosh Modi, Stefan Thater, and Manfred Pinkal. 2017. A Mixture Model for Learning Multi-Sense Word Embeddings. In Proceedings of the 6th Joint Conference on Lexical and Computational Semantics (*SEM 2017), pages 121–127, Vancouver, Canada. Association for Computational Linguistics.
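
The abstract above describes a mixture model in which a word's different senses receive different, induced weights. As a rough illustrative sketch only (not the authors' actual model or training procedure), the snippet below forms a contextual word representation as a weighted mixture of per-sense embeddings, with weights obtained from a softmax over sense–context compatibility scores; all dimensions, variable names, and the softmax weighting scheme are assumptions made for the example.

```python
# Hypothetical sketch of a sense-mixture word representation.
# Not the paper's implementation; shapes and weighting are illustrative assumptions.
import numpy as np

rng = np.random.default_rng(0)

EMB_DIM = 50      # embedding dimensionality (assumed)
NUM_SENSES = 3    # number of senses for the target word (assumed)

# Hypothetical sense embeddings for one target word (e.g. "bank").
sense_vectors = rng.normal(size=(NUM_SENSES, EMB_DIM))

# A context vector, e.g. an average of the embeddings of surrounding words.
context_vector = rng.normal(size=EMB_DIM)

def softmax(x):
    x = x - x.max()          # numerical stability
    e = np.exp(x)
    return e / e.sum()

# Induce a weight for each sense from its compatibility with the context.
sense_scores = sense_vectors @ context_vector   # shape (NUM_SENSES,)
sense_weights = softmax(sense_scores)            # mixture weights, sum to 1

# The contextual representation is the weighted mixture of the sense vectors.
mixture_embedding = sense_weights @ sense_vectors   # shape (EMB_DIM,)

print("sense weights:", np.round(sense_weights, 3))
print("mixture embedding shape:", mixture_embedding.shape)
```

Softmax weighting over sense–context scores is just one plausible way to induce per-sense weights; the weighting and training objective used in the paper itself may differ.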