
Commit 7132774

Generate Python docs from pytorch/pytorch@ff47dcd

1 parent 925cda1 · commit 7132774

File tree

1,941 files changed: +8,750 / -3,458 lines


docs/master/__config__.html

Lines changed: 1 addition & 1 deletion
@@ -194,7 +194,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
- <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitef07820 ) &#x25BC</a>
+ <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitff47dcd ) &#x25BC</a>
 </div>


docs/master/_images/RReLU.png

-175 Bytes

docs/master/_modules/index.html

Lines changed: 3 additions & 1 deletion
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
- <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitef07820 ) &#x25BC</a>
+ <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitff47dcd ) &#x25BC</a>
 </div>

@@ -518,6 +518,7 @@ <h1>All modules for which code is available</h1>
 <li><a href="torch/distributions/utils.html">torch.distributions.utils</a></li>
 <li><a href="torch/distributions/von_mises.html">torch.distributions.von_mises</a></li>
 <li><a href="torch/distributions/weibull.html">torch.distributions.weibull</a></li>
+<li><a href="torch/distributions/wishart.html">torch.distributions.wishart</a></li>
 <li><a href="torch/functional.html">torch.functional</a></li>
 <li><a href="torch/futures.html">torch.futures</a></li>
 <li><a href="torch/fx/_symbolic_trace.html">torch.fx._symbolic_trace</a></li>
@@ -628,6 +629,7 @@ <h1>All modules for which code is available</h1>
 <li><a href="torch/utils/benchmark/utils/valgrind_wrapper/timer_interface.html">torch.utils.benchmark.utils.valgrind_wrapper.timer_interface</a></li>
 <li><a href="torch/utils/checkpoint.html">torch.utils.checkpoint</a></li>
 <li><a href="torch/utils/cpp_extension.html">torch.utils.cpp_extension</a></li>
+<li><a href="torch/utils/data/_utils/collate.html">torch.utils.data._utils.collate</a></li>
 <li><a href="torch/utils/data/_utils/worker.html">torch.utils.data._utils.worker</a></li>
 <li><a href="torch/utils/data/dataloader.html">torch.utils.data.dataloader</a></li>
 <li><a href="torch/utils/data/dataset.html">torch.utils.data.dataset</a></li>

docs/master/_modules/torch.html

Lines changed: 63 additions & 59 deletions
Large diffs are not rendered by default.

docs/master/_modules/torch/__config__.html

Lines changed: 1 addition & 1 deletion
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
- <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitef07820 ) &#x25BC</a>
+ <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitff47dcd ) &#x25BC</a>
 </div>


docs/master/_modules/torch/_jit_internal.html

Lines changed: 9 additions & 9 deletions
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
- <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitef07820 ) &#x25BC</a>
+ <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitff47dcd ) &#x25BC</a>
 </div>

@@ -856,7 +856,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="s2">&quot;if this method is not scripted, copy the python method onto the scripted model&quot;</span>


- <span class="k">def</span> <span class="nf">export</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
+ <div class="viewcode-block" id="export"><a class="viewcode-back" href="../../jit.html#torch.jit.export">[docs]</a><span class="k">def</span> <span class="nf">export</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
 <span class="sd">&quot;&quot;&quot;</span>
 <span class="sd"> This decorator indicates that a method on an ``nn.Module`` is used as an entry point into a</span>
 <span class="sd"> :class:`ScriptModule` and should be compiled.</span>
@@ -899,10 +899,10 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="sd"> m = torch.jit.script(MyModule())</span>
 <span class="sd"> &quot;&quot;&quot;</span>
 <span class="n">fn</span><span class="o">.</span><span class="n">_torchscript_modifier</span> <span class="o">=</span> <span class="n">FunctionModifiers</span><span class="o">.</span><span class="n">EXPORT</span>
- <span class="k">return</span> <span class="n">fn</span>
+ <span class="k">return</span> <span class="n">fn</span></div>


- <div class="viewcode-block" id="unused"><a class="viewcode-back" href="../../generated/torch.jit.unused.html#torch.jit.unused">[docs]</a><span class="k">def</span> <span class="nf">unused</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
+ <span class="k">def</span> <span class="nf">unused</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
 <span class="sd">&quot;&quot;&quot;</span>
 <span class="sd"> This decorator indicates to the compiler that a function or method should</span>
 <span class="sd"> be ignored and replaced with the raising of an exception. This allows you</span>
@@ -949,7 +949,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="n">prop</span>

 <span class="n">fn</span><span class="o">.</span><span class="n">_torchscript_modifier</span> <span class="o">=</span> <span class="n">FunctionModifiers</span><span class="o">.</span><span class="n">UNUSED</span>
- <span class="k">return</span> <span class="n">fn</span></div>
+ <span class="k">return</span> <span class="n">fn</span>

 <span class="c1"># No op context manager from python side</span>
 <span class="k">class</span> <span class="nc">_IgnoreContextManager</span><span class="p">(</span><span class="n">contextlib</span><span class="o">.</span><span class="n">AbstractContextManager</span><span class="p">):</span>
@@ -959,7 +959,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="k">def</span> <span class="fm">__exit__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">exc_type</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">exc_value</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">traceback</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kc">None</span><span class="p">:</span>
 <span class="k">pass</span>

- <div class="viewcode-block" id="ignore"><a class="viewcode-back" href="../../generated/torch.jit.ignore.html#torch.jit.ignore">[docs]</a><span class="k">def</span> <span class="nf">ignore</span><span class="p">(</span><span class="n">drop</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+ <span class="k">def</span> <span class="nf">ignore</span><span class="p">(</span><span class="n">drop</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
 <span class="sd">&quot;&quot;&quot;</span>
 <span class="sd"> This decorator indicates to the compiler that a function or method should</span>
 <span class="sd"> be ignored and left as a Python function. This allows you to leave code in</span>
@@ -1050,7 +1050,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="k">else</span><span class="p">:</span>
 <span class="n">fn</span><span class="o">.</span><span class="n">_torchscript_modifier</span> <span class="o">=</span> <span class="n">FunctionModifiers</span><span class="o">.</span><span class="n">IGNORE</span>
 <span class="k">return</span> <span class="n">fn</span>
- <span class="k">return</span> <span class="n">decorator</span></div>
+ <span class="k">return</span> <span class="n">decorator</span>


 <span class="k">def</span> <span class="nf">_copy_to_script_wrapper</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
@@ -1348,7 +1348,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="nb">globals</span><span class="p">()[</span><span class="sa">f</span><span class="s2">&quot;BroadcastingList</span><span class="si">{</span><span class="n">i</span><span class="si">}</span><span class="s2">&quot;</span><span class="p">]</span> <span class="o">=</span> <span class="n">BroadcastingList1</span>


- <span class="k">def</span> <span class="nf">is_scripting</span><span class="p">()</span> <span class="o">-&gt;</span> <span class="nb">bool</span><span class="p">:</span>
+ <div class="viewcode-block" id="is_scripting"><a class="viewcode-back" href="../../jit_language_reference.html#torch.jit.is_scripting">[docs]</a><span class="k">def</span> <span class="nf">is_scripting</span><span class="p">()</span> <span class="o">-&gt;</span> <span class="nb">bool</span><span class="p">:</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>
 <span class="sd"> Function that returns True when in compilation and False otherwise. This</span>
 <span class="sd"> is useful especially with the @unused decorator to leave code in your</span>
@@ -1367,7 +1367,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="sd"> else:</span>
 <span class="sd"> return unsupported_linear_op(x)</span>
 <span class="sd"> &quot;&quot;&quot;</span>
- <span class="k">return</span> <span class="kc">False</span>
+ <span class="k">return</span> <span class="kc">False</span></div>


 <span class="c1"># Retrieves a fully-qualified name (module hierarchy + classname) for a given obj.</span>

docs/master/_modules/torch/_lobpcg.html

Lines changed: 1 addition & 1 deletion
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
- <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitef07820 ) &#x25BC</a>
+ <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitff47dcd ) &#x25BC</a>
 </div>


docs/master/_modules/torch/_lowrank.html

Lines changed: 5 additions & 5 deletions
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
- <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitef07820 ) &#x25BC</a>
+ <a href='https://pytorch.org/docs/versions.html'>master (1.11.0a0+gitff47dcd ) &#x25BC</a>
 </div>

@@ -476,7 +476,7 @@ <h1>Source code for torch._lowrank</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="n">Q</span>


- <div class="viewcode-block" id="svd_lowrank"><a class="viewcode-back" href="../../generated/torch.svd_lowrank.html#torch.svd_lowrank">[docs]</a><span class="k">def</span> <span class="nf">svd_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">:</span> <span class="n">Tensor</span><span class="p">,</span> <span class="n">q</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="mi">6</span><span class="p">,</span> <span class="n">niter</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="mi">2</span><span class="p">,</span>
+ <span class="k">def</span> <span class="nf">svd_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">:</span> <span class="n">Tensor</span><span class="p">,</span> <span class="n">q</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="mi">6</span><span class="p">,</span> <span class="n">niter</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="mi">2</span><span class="p">,</span>
 <span class="n">M</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="n">Tensor</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Tuple</span><span class="p">[</span><span class="n">Tensor</span><span class="p">,</span> <span class="n">Tensor</span><span class="p">,</span> <span class="n">Tensor</span><span class="p">]:</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Return the singular value decomposition ``(U, S, V)`` of a matrix,</span>
 <span class="sd"> batches of matrices, or a sparse matrix :math:`A` such that</span>
@@ -521,7 +521,7 @@ <h1>Source code for torch._lowrank</h1><div class="highlight"><pre>
 <span class="n">tensor_ops</span> <span class="o">=</span> <span class="p">(</span><span class="n">A</span><span class="p">,</span> <span class="n">M</span><span class="p">)</span>
 <span class="k">if</span> <span class="p">(</span><span class="ow">not</span> <span class="nb">set</span><span class="p">(</span><span class="nb">map</span><span class="p">(</span><span class="nb">type</span><span class="p">,</span> <span class="n">tensor_ops</span><span class="p">))</span><span class="o">.</span><span class="n">issubset</span><span class="p">((</span><span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">,</span> <span class="nb">type</span><span class="p">(</span><span class="kc">None</span><span class="p">)))</span> <span class="ow">and</span> <span class="n">has_torch_function</span><span class="p">(</span><span class="n">tensor_ops</span><span class="p">)):</span>
 <span class="k">return</span> <span class="n">handle_torch_function</span><span class="p">(</span><span class="n">svd_lowrank</span><span class="p">,</span> <span class="n">tensor_ops</span><span class="p">,</span> <span class="n">A</span><span class="p">,</span> <span class="n">q</span><span class="o">=</span><span class="n">q</span><span class="p">,</span> <span class="n">niter</span><span class="o">=</span><span class="n">niter</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="n">M</span><span class="p">)</span>
- <span class="k">return</span> <span class="n">_svd_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">,</span> <span class="n">q</span><span class="o">=</span><span class="n">q</span><span class="p">,</span> <span class="n">niter</span><span class="o">=</span><span class="n">niter</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="n">M</span><span class="p">)</span></div>
+ <span class="k">return</span> <span class="n">_svd_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">,</span> <span class="n">q</span><span class="o">=</span><span class="n">q</span><span class="p">,</span> <span class="n">niter</span><span class="o">=</span><span class="n">niter</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="n">M</span><span class="p">)</span>


 <span class="k">def</span> <span class="nf">_svd_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">:</span> <span class="n">Tensor</span><span class="p">,</span> <span class="n">q</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="mi">6</span><span class="p">,</span> <span class="n">niter</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="mi">2</span><span class="p">,</span>
@@ -571,7 +571,7 @@ <h1>Source code for torch._lowrank</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="n">U</span><span class="p">,</span> <span class="n">S</span><span class="p">,</span> <span class="n">V</span>


- <span class="k">def</span> <span class="nf">pca_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">:</span> <span class="n">Tensor</span><span class="p">,</span> <span class="n">q</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">,</span> <span class="n">center</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">,</span>
+ <div class="viewcode-block" id="pca_lowrank"><a class="viewcode-back" href="../../generated/torch.pca_lowrank.html#torch.pca_lowrank">[docs]</a><span class="k">def</span> <span class="nf">pca_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">:</span> <span class="n">Tensor</span><span class="p">,</span> <span class="n">q</span><span class="p">:</span> <span class="n">Optional</span><span class="p">[</span><span class="nb">int</span><span class="p">]</span> <span class="o">=</span> <span class="kc">None</span><span class="p">,</span> <span class="n">center</span><span class="p">:</span> <span class="nb">bool</span> <span class="o">=</span> <span class="kc">True</span><span class="p">,</span>
 <span class="n">niter</span><span class="p">:</span> <span class="nb">int</span> <span class="o">=</span> <span class="mi">2</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="n">Tuple</span><span class="p">[</span><span class="n">Tensor</span><span class="p">,</span> <span class="n">Tensor</span><span class="p">,</span> <span class="n">Tensor</span><span class="p">]:</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Performs linear Principal Component Analysis (PCA) on a low-rank</span>
 <span class="sd"> matrix, batches of such matrices, or sparse matrix.</span>
@@ -672,7 +672,7 @@ <h1>Source code for torch._lowrank</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="n">_svd_lowrank</span><span class="p">(</span><span class="n">A</span><span class="p">,</span> <span class="n">q</span><span class="p">,</span> <span class="n">niter</span><span class="o">=</span><span class="n">niter</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="n">M</span><span class="p">)</span>
 <span class="k">else</span><span class="p">:</span>
 <span class="n">C</span> <span class="o">=</span> <span class="n">A</span><span class="o">.</span><span class="n">mean</span><span class="p">(</span><span class="n">dim</span><span class="o">=</span><span class="p">(</span><span class="o">-</span><span class="mi">2</span><span class="p">,),</span> <span class="n">keepdim</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
- <span class="k">return</span> <span class="n">_svd_lowrank</span><span class="p">(</span><span class="n">A</span> <span class="o">-</span> <span class="n">C</span><span class="p">,</span> <span class="n">q</span><span class="p">,</span> <span class="n">niter</span><span class="o">=</span><span class="n">niter</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="kc">None</span><span class="p">)</span>
+ <span class="k">return</span> <span class="n">_svd_lowrank</span><span class="p">(</span><span class="n">A</span> <span class="o">-</span> <span class="n">C</span><span class="p">,</span> <span class="n">q</span><span class="p">,</span> <span class="n">niter</span><span class="o">=</span><span class="n">niter</span><span class="p">,</span> <span class="n">M</span><span class="o">=</span><span class="kc">None</span><span class="p">)</span></div>
 </pre></div>

 </article>
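
Here the [docs] viewcode anchor moves from torch.svd_lowrank to torch.pca_lowrank. A minimal sketch of both public functions, using the default arguments visible in the signatures above (the example is not part of the commit):

    import torch

    A = torch.randn(100, 20)

    # Low-rank SVD: returns (U, S, V) with q approximate singular triplets
    # (defaults q=6, niter=2, as in the signature above).
    U, S, V = torch.svd_lowrank(A, q=6, niter=2)
    print(U.shape, S.shape, V.shape)      # (100, 6) (6,) (20, 6)

    # Low-rank PCA: centers A internally (center=True) and returns principal
    # directions as the columns of V.
    U2, S2, V2 = torch.pca_lowrank(A, q=5, center=True, niter=2)
    projected = (A - A.mean(dim=0)) @ V2  # project onto 5 principal components
    print(projected.shape)                # (100, 5)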
