From dbe692c2bef1d4a5ece1f3dd6e9d3111d8d8d8a4 Mon Sep 17 00:00:00 2001 From: Liang Shuailong Date: Thu, 18 May 2017 09:57:15 +0800 Subject: [PATCH] Fix minor math equation error: add missing "-" sign --- beginner_source/nlp/word_embeddings_tutorial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beginner_source/nlp/word_embeddings_tutorial.py b/beginner_source/nlp/word_embeddings_tutorial.py index 373ea376f4d..4444b4fc48c 100644 --- a/beginner_source/nlp/word_embeddings_tutorial.py +++ b/beginner_source/nlp/word_embeddings_tutorial.py @@ -290,7 +290,7 @@ def forward(self, inputs): # and :math:`w_{i+1}, \dots, w_{i+N}`, referring to all context words # collectively as :math:`C`, CBOW tries to minimize # -# .. math:: -\log p(w_i | C) = \log \text{Softmax}(A(\sum_{w \in C} q_w) + b) +# .. math:: -\log p(w_i | C) = -\log \text{Softmax}(A(\sum_{w \in C} q_w) + b) # # where :math:`q_w` is the embedding for word :math:`w`. #