Skip to content

Commit f0b0e3a

Browse files
committed
auto-generating sphinx docs
1 parent dc1a54a commit f0b0e3a

File tree

749 files changed

+1108
-1093
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

749 files changed

+1108
-1093
lines changed

docs/master/__config__.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@
159159

160160

161161
<div class="version">
162-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
162+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
163163
</div>
164164

165165

docs/master/_modules/index.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch.html

Lines changed: 15 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

@@ -614,7 +614,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
614614
<span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span></div>
615615

616616

617-
<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
617+
<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
618618
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Sets the default ``torch.Tensor`` type to floating point tensor type</span>
619619
<span class="sd"> ``t``. This type will also be used as default floating point type for</span>
620620
<span class="sd"> type inference in :func:`torch.tensor`.</span>
@@ -635,10 +635,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
635635
<span class="sd"> &quot;&quot;&quot;</span>
636636
<span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
637637
<span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
638-
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>
638+
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
639639

640640

641-
<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
641+
<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
642642
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Sets the default floating point dtype to :attr:`d`.</span>
643643
<span class="sd"> This dtype is:</span>
644644

@@ -666,9 +666,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
666666
<span class="sd"> torch.complex128</span>
667667

668668
<span class="sd"> &quot;&quot;&quot;</span>
669-
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
669+
<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
670670

671-
<div class="viewcode-block" id="set_deterministic"><a class="viewcode-back" href="../generated/torch.set_deterministic.html#torch.set_deterministic">[docs]</a><span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
671+
<span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
672672
<span class="sa">r</span><span class="sd">&quot;&quot;&quot; Sets whether PyTorch operations must use &quot;deterministic&quot;</span>
673673
<span class="sd"> algorithms. That is, algorithms which, given the same input, and when</span>
674674
<span class="sd"> run on the same software and hardware, always produce the same output.</span>
@@ -740,7 +740,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
740740
<span class="sd"> d (:class:`bool`): If True, force operations to be deterministic.</span>
741741
<span class="sd"> If False, allow non-deterministic operations.</span>
742742
<span class="sd"> &quot;&quot;&quot;</span>
743-
<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
743+
<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
744744

745745
<div class="viewcode-block" id="is_deterministic"><a class="viewcode-back" href="../generated/torch.is_deterministic.html#torch.is_deterministic">[docs]</a><span class="k">def</span> <span class="nf">is_deterministic</span><span class="p">():</span>
746746
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns True if the global deterministic flag is turned on. Refer to</span>
@@ -760,8 +760,8 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
760760
<span class="k">pass</span>
761761

762762

763-
<span class="k">class</span> <span class="nc">FloatStorage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">FloatStorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
764-
<span class="k">pass</span>
763+
<div class="viewcode-block" id="FloatStorage"><a class="viewcode-back" href="../storage.html#torch.FloatStorage">[docs]</a><span class="k">class</span> <span class="nc">FloatStorage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">FloatStorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
764+
<span class="k">pass</span></div>
765765

766766

767767
<span class="k">class</span> <span class="nc">HalfStorage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">HalfStorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
@@ -810,11 +810,13 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
810810
<span class="k">class</span> <span class="nc">QInt32Storage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">QInt32StorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
811811
<span class="k">pass</span>
812812

813+
<span class="k">class</span> <span class="nc">QUInt4x2Storage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">QUInt4x2StorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
814+
<span class="k">pass</span>
813815

814816
<span class="n">_storage_classes</span> <span class="o">=</span> <span class="p">{</span>
815817
<span class="n">DoubleStorage</span><span class="p">,</span> <span class="n">FloatStorage</span><span class="p">,</span> <span class="n">LongStorage</span><span class="p">,</span> <span class="n">IntStorage</span><span class="p">,</span> <span class="n">ShortStorage</span><span class="p">,</span>
816818
<span class="n">CharStorage</span><span class="p">,</span> <span class="n">ByteStorage</span><span class="p">,</span> <span class="n">HalfStorage</span><span class="p">,</span> <span class="n">BoolStorage</span><span class="p">,</span> <span class="n">QUInt8Storage</span><span class="p">,</span> <span class="n">QInt8Storage</span><span class="p">,</span>
817-
<span class="n">QInt32Storage</span><span class="p">,</span> <span class="n">BFloat16Storage</span><span class="p">,</span> <span class="n">ComplexFloatStorage</span><span class="p">,</span> <span class="n">ComplexDoubleStorage</span>
819+
<span class="n">QInt32Storage</span><span class="p">,</span> <span class="n">BFloat16Storage</span><span class="p">,</span> <span class="n">ComplexFloatStorage</span><span class="p">,</span> <span class="n">ComplexDoubleStorage</span><span class="p">,</span> <span class="n">QUInt4x2Storage</span>
818820
<span class="p">}</span>
819821

820822
<span class="c1"># The _tensor_classes set is initialized by the call to _C._initialize_tensor_type_bindings()</span>
@@ -883,6 +885,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
883885
<span class="k">del</span> <span class="n">BFloat16StorageBase</span>
884886
<span class="k">del</span> <span class="n">ComplexDoubleStorageBase</span>
885887
<span class="k">del</span> <span class="n">ComplexFloatStorageBase</span>
888+
<span class="k">del</span> <span class="n">QUInt4x2StorageBase</span>
886889

887890
<span class="c1">################################################################################</span>
888891
<span class="c1"># Import most common subpackages</span>
@@ -925,9 +928,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
925928
<span class="k">del</span> <span class="n">_torch_docs</span><span class="p">,</span> <span class="n">_tensor_docs</span><span class="p">,</span> <span class="n">_storage_docs</span>
926929

927930

928-
<div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a><span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
931+
<span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
929932
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1&quot;&quot;&quot;</span>
930-
<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span></div>
933+
<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span>
931934

932935

933936
<span class="c1"># Import the ops &quot;namespace&quot;</span>

docs/master/_modules/torch/__config__.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/_jit_internal.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/_lobpcg.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/_lowrank.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/_tensor_str.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/_utils.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/_vmap_internals.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/autograd.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/autograd/anomaly_mode.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/autograd/function.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/autograd/functional.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/autograd/grad_mode.html

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

@@ -394,7 +394,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
394394
<span class="k">raise</span> <span class="ne">NotImplementedError</span>
395395

396396

397-
<div class="viewcode-block" id="no_grad"><a class="viewcode-back" href="../../../generated/torch.no_grad.html#torch.no_grad">[docs]</a><span class="k">class</span> <span class="nc">no_grad</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
397+
<span class="k">class</span> <span class="nc">no_grad</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
398398
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Context-manager that disabled gradient calculation.</span>
399399

400400
<span class="sd"> Disabling gradient calculation is useful for inference, when you are sure</span>
@@ -434,7 +434,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
434434
<span class="n">torch</span><span class="o">.</span><span class="n">set_grad_enabled</span><span class="p">(</span><span class="kc">False</span><span class="p">)</span>
435435

436436
<span class="k">def</span> <span class="fm">__exit__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">exc_type</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">exc_value</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">traceback</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kc">None</span><span class="p">:</span>
437-
<span class="n">torch</span><span class="o">.</span><span class="n">set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span></div>
437+
<span class="n">torch</span><span class="o">.</span><span class="n">set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span>
438438

439439

440440
<div class="viewcode-block" id="enable_grad"><a class="viewcode-back" href="../../../generated/torch.enable_grad.html#torch.enable_grad">[docs]</a><span class="k">class</span> <span class="nc">enable_grad</span><span class="p">(</span><span class="n">_DecoratorContextManager</span><span class="p">):</span>
@@ -476,7 +476,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
476476
<span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span></div>
477477

478478

479-
<div class="viewcode-block" id="set_grad_enabled"><a class="viewcode-back" href="../../../generated/torch.set_grad_enabled.html#torch.set_grad_enabled">[docs]</a><span class="k">class</span> <span class="nc">set_grad_enabled</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
479+
<span class="k">class</span> <span class="nc">set_grad_enabled</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
480480
<span class="sa">r</span><span class="sd">&quot;&quot;&quot;Context-manager that sets gradient calculation to on or off.</span>
481481

482482
<span class="sd"> ``set_grad_enabled`` will enable or disable grads based on its argument :attr:`mode`.</span>
@@ -518,7 +518,7 @@ <h1>Source code for torch.autograd.grad_mode</h1><div class="highlight"><pre>
518518
<span class="k">pass</span>
519519

520520
<span class="k">def</span> <span class="fm">__exit__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">exc_type</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">exc_value</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">traceback</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kc">None</span><span class="p">:</span>
521-
<span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span></div>
521+
<span class="n">torch</span><span class="o">.</span><span class="n">_C</span><span class="o">.</span><span class="n">_set_grad_enabled</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">prev</span><span class="p">)</span>
522522
</pre></div>
523523

524524
</article>

docs/master/_modules/torch/autograd/gradcheck.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

docs/master/_modules/torch/autograd/profiler.html

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@
158158

159159

160160
<div class="version">
161-
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 &#x25BC</a>
161+
<a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf &#x25BC</a>
162162
</div>
163163

164164

0 commit comments

Comments (0)