From 6ec0c843842dbb8d8caa1cadb73c009ad7ed965e Mon Sep 17 00:00:00 2001
From: Haibin Lin
Date: Mon, 18 Mar 2019 21:47:33 -0700
Subject: [PATCH] [DOC] Update Readme with correct mxnet version (#635)

* Update README.rst

* Update index.rst

* Update index.rst

* Update README.rst

* Update data.rst

* Update wikitext.py
---
 README.rst                            | 7 +++++--
 docs/api/modules/data.rst             | 2 ++
 docs/index.rst                        | 2 +-
 scripts/bert/index.rst                | 2 ++
 src/gluonnlp/data/corpora/wikitext.py | 4 ++--
 5 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/README.rst b/README.rst
index acc760e231..9b1dcbda3b 100644
--- a/README.rst
+++ b/README.rst
@@ -30,9 +30,12 @@ Language Processing (NLP) research.
 News
 ====
 
-- GluonNLP is featured in:
+- Tutorial proposal for GluonNLP is accepted at `EMNLP 2019 `__, Hong Kong.
+
+- GluonNLP was featured in:
 
   - **AWS re:invent 2018 in Las Vegas, 2018-11-28**! Checkout `details `_.
+  - **PyData 2018 NYC, 2018-10-18**! Checkout the `awesome talk `__ by Sneha Jha.
   - **KDD 2018 London, 2018-08-21, Apache MXNet Gluon tutorial**! Check out **https://kdd18.mxnet.io**.
 
 Installation
 ============
@@ -48,7 +51,7 @@ In particular, if you want to install the most recent ``MXNet`` release:
 
 ::
 
-    pip install --upgrade mxnet>=1.3.0
+    pip install --upgrade mxnet>=1.4.0
 
 Else, if you want to install the most recent ``MXNet`` nightly build:
 
diff --git a/docs/api/modules/data.rst b/docs/api/modules/data.rst
index 885cd0d7aa..43890492fd 100644
--- a/docs/api/modules/data.rst
+++ b/docs/api/modules/data.rst
@@ -24,6 +24,8 @@ The dataset is available under the Creative Commons Attribution-ShareAlike Licen
 
     WikiText2
     WikiText103
+    WikiText2Raw
+    WikiText103Raw
 
 Language modeling: Google 1 Billion Words
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
diff --git a/docs/index.rst b/docs/index.rst
index ef00f789b3..204a59b3de 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -51,7 +51,7 @@ command installs the latest version of MXNet.
 
 .. code-block:: console
 
-    pip install --upgrade mxnet>=1.3.0
+    pip install --upgrade mxnet>=1.4.0
 
 .. note::
diff --git a/scripts/bert/index.rst b/scripts/bert/index.rst
index 65a74dc9f6..2eef1699d5 100644
--- a/scripts/bert/index.rst
+++ b/scripts/bert/index.rst
@@ -5,6 +5,8 @@ Bidirectional Encoder Representations from Transformers
 Reference: Devlin, Jacob, et al. "`Bert: Pre-training of deep bidirectional transformers for language understanding. `_" arXiv preprint arXiv:1810.04805 (2018).
 
+Note: BERT model requires `nightly version of MXNet `__.
+
 The following pre-trained BERT models are available from the **gluonnlp.model.get_model** API:
 
 +-----------------------------+----------------+-----------------+
diff --git a/src/gluonnlp/data/corpora/wikitext.py b/src/gluonnlp/data/corpora/wikitext.py
index 393760e6bf..6501354674 100644
--- a/src/gluonnlp/data/corpora/wikitext.py
+++ b/src/gluonnlp/data/corpora/wikitext.py
@@ -79,7 +79,7 @@ class WikiText2(_WikiText):
     WikiText2 is implemented as CorpusDataset with the default flatten=True.
 
     From
-    https://einstein.ai/research/the-wikitext-long-term-dependency-language-modeling-dataset
+    https://www.salesforce.com/products/einstein/ai-research/the-wikitext-dependency-language-modeling-dataset/
 
     License: Creative Commons Attribution-ShareAlike
@@ -166,7 +166,7 @@ class WikiText103(_WikiText):
     WikiText103 is implemented as CorpusDataset with the default flatten=True.
 
     From
-    https://einstein.ai/research/the-wikitext-long-term-dependency-language-modeling-dataset
+    https://www.salesforce.com/products/einstein/ai-research/the-wikitext-dependency-language-modeling-dataset/
 
     License: Creative Commons Attribution-ShareAlike
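The two corpora added to the API listing in docs/api/modules/data.rst (WikiText2Raw and WikiText103Raw) are constructed the same way as the existing WikiText2 and WikiText103 classes. The snippet below is not part of the patch; it is a minimal sketch that assumes a GluonNLP release shipping the raw variants (0.6 or later) is installed, and only exercises the documented constructors.

.. code-block:: python

    import gluonnlp as nlp

    # Tokenized WikiText-2: a CorpusDataset with the default flatten=True,
    # i.e. the dataset is exposed as a flat stream of tokens.
    wikitext2 = nlp.data.WikiText2(segment='train')

    # Raw (untokenized) variant newly listed in the API docs by this patch.
    wikitext2_raw = nlp.data.WikiText2Raw(segment='train')

    # Sizes of the two training sets.
    print(len(wikitext2), len(wikitext2_raw))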
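For context on the BERT note added in scripts/bert/index.rst: the pre-trained checkpoints listed there are fetched through gluonnlp.model.get_model. A minimal sketch follows, assuming a nightly MXNet build and the 0.6-era GluonNLP API; the argument names come from that API, not from this diff.

.. code-block:: python

    import gluonnlp as nlp

    # Download BERT base (12 layers, 768 hidden units, 12 heads) together
    # with the vocabulary it was pre-trained on.
    bert, vocab = nlp.model.get_model(
        'bert_12_768_12',
        dataset_name='book_corpus_wiki_en_uncased',
        pretrained=True,
        use_pooler=True,       # keep the pooler output
        use_decoder=False,     # drop the masked-LM decoder head
        use_classifier=False)  # drop the next-sentence-prediction head

    print(bert)
    print(len(vocab))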