diff --git a/beginner_source/nlp/word_embeddings_tutorial.py b/beginner_source/nlp/word_embeddings_tutorial.py
index 373ea376f4d..4444b4fc48c 100644
--- a/beginner_source/nlp/word_embeddings_tutorial.py
+++ b/beginner_source/nlp/word_embeddings_tutorial.py
@@ -290,7 +290,7 @@ def forward(self, inputs):
 # and :math:`w_{i+1}, \dots, w_{i+N}`, referring to all context words
 # collectively as :math:`C`, CBOW tries to minimize
 #
-# .. math:: -\log p(w_i | C) = \log \text{Softmax}(A(\sum_{w \in C} q_w) + b)
+# .. math:: -\log p(w_i | C) = -\log \text{Softmax}(A(\sum_{w \in C} q_w) + b)
 #
 # where :math:`q_w` is the embedding for word :math:`w`.
 #
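
For context, the corrected equation is the standard CBOW negative log-likelihood: the model scores the target word with log Softmax, and the loss is the *negative* of that log-probability, hence the added minus sign on the right-hand side. Below is a minimal sketch of that computation in PyTorch; the class name `CBOW`, the example sizes, and the word indices are illustrative assumptions, not part of the patch or the tutorial.

import torch
import torch.nn as nn
import torch.nn.functional as F

class CBOW(nn.Module):
    def __init__(self, vocab_size, embedding_dim):
        super().__init__()
        self.embeddings = nn.Embedding(vocab_size, embedding_dim)  # q_w lookup
        self.linear = nn.Linear(embedding_dim, vocab_size)         # A and b

    def forward(self, context_idxs):
        # Sum the embeddings of the context words in C: sum_{w in C} q_w
        embeds = self.embeddings(context_idxs).sum(dim=0)
        # log Softmax(A(sum_{w in C} q_w) + b)
        return F.log_softmax(self.linear(embeds), dim=-1)

# Hypothetical usage: the loss at the target index is -log p(w_i | C),
# i.e. the negation of the log-softmax output, matching the fixed equation.
model = CBOW(vocab_size=50, embedding_dim=10)
context = torch.tensor([3, 7, 12, 21])  # indices of the words in C
log_probs = model(context)
target = torch.tensor([9])              # index of the target word w_i
loss = F.nll_loss(log_probs.unsqueeze(0), target)  # = -log p(w_i | C)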