
Commit 4b6bb1b

Generate Python docs from pytorch/pytorch@701771a
1 parent c540ec0 commit 4b6bb1b


1,733 files changed: +2384, -2390 lines changed


docs/master/__config__.html

Lines changed: 1 addition & 1 deletion
@@ -194,7 +194,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git6cf311c ) &#x25BC</a>
+<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git701771a ) &#x25BC</a>
 </div>


docs/master/_modules/index.html

Lines changed: 1 addition & 1 deletion
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git6cf311c ) &#x25BC</a>
+<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git701771a ) &#x25BC</a>
 </div>


docs/master/_modules/torch.html

Lines changed: 17 additions & 17 deletions
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git6cf311c ) &#x25BC</a>
+<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git701771a ) &#x25BC</a>
 </div>


@@ -658,7 +658,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="n">module</span> <span class="o">+</span> <span class="n">class_name</span>


-<div class="viewcode-block" id="is_tensor"><a class="viewcode-back" href="../generated/torch.is_tensor.html#torch.is_tensor">[docs]</a><span class="k">def</span> <span class="nf">is_tensor</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">is_tensor</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns True if `obj` is a PyTorch tensor.</span>

 <span class="sd"> Note that this function is simply doing ``isinstance(obj, Tensor)``.</span>
@@ -675,19 +675,19 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd"> True</span>

 <span class="sd"> &quot;&quot;&quot;</span>
-<span class="k">return</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">)</span></div>
+<span class="k">return</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">obj</span><span class="p">,</span> <span class="n">torch</span><span class="o">.</span><span class="n">Tensor</span><span class="p">)</span>


-<div class="viewcode-block" id="is_storage"><a class="viewcode-back" href="../generated/torch.is_storage.html#torch.is_storage">[docs]</a><span class="k">def</span> <span class="nf">is_storage</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">is_storage</span><span class="p">(</span><span class="n">obj</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns True if `obj` is a PyTorch storage object.</span>

 <span class="sd"> Args:</span>
 <span class="sd"> obj (Object): Object to test</span>
 <span class="sd"> &quot;&quot;&quot;</span>
-<span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span></div>
+<span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span>


-<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
+<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Sets the default ``torch.Tensor`` type to floating point tensor type</span>
 <span class="sd"> ``t``. This type will also be used as default floating point type for</span>
 <span class="sd"> type inference in :func:`torch.tensor`.</span>
@@ -708,10 +708,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd"> &quot;&quot;&quot;</span>
 <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
 <span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
-<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
+<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>


-<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;</span>

 <span class="sd"> Sets the default floating point dtype to :attr:`d`. Supports torch.float32</span>
@@ -754,9 +754,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd"> torch.complex128</span>

 <span class="sd"> &quot;&quot;&quot;</span>
-<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
+<span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>

-<div class="viewcode-block" id="use_deterministic_algorithms"><a class="viewcode-back" href="../generated/torch.use_deterministic_algorithms.html#torch.use_deterministic_algorithms">[docs]</a><span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">use_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot; Sets whether PyTorch operations must use &quot;deterministic&quot;</span>
 <span class="sd"> algorithms. That is, algorithms which, given the same input, and when</span>
 <span class="sd"> run on the same software and hardware, always produce the same output.</span>
@@ -871,15 +871,15 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd"> ...</span>
 <span class="sd"> RuntimeError: index_add_cuda_ does not have a deterministic implementation...</span>
 <span class="sd"> &quot;&quot;&quot;</span>
-<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">)</span></div>
+<span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic_algorithms</span><span class="p">(</span><span class="n">mode</span><span class="p">)</span>

 <span class="k">def</span> <span class="nf">are_deterministic_algorithms_enabled</span><span class="p">():</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns True if the global deterministic flag is turned on. Refer to</span>
 <span class="sd"> :func:`torch.use_deterministic_algorithms` documentation for more details.</span>
 <span class="sd"> &quot;&quot;&quot;</span>
 <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_deterministic_algorithms</span><span class="p">()</span>

-<span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
+<div class="viewcode-block" id="set_warn_always"><a class="viewcode-back" href="../generated/torch.set_warn_always.html#torch.set_warn_always">[docs]</a><span class="k">def</span> <span class="nf">set_warn_always</span><span class="p">(</span><span class="n">b</span><span class="p">):</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;When this flag is False (default) then some PyTorch warnings may only</span>
 <span class="sd"> appear once per process. This helps avoid excessive warning information.</span>
 <span class="sd"> Setting it to True causes these warnings to always appear, which may be</span>
@@ -889,13 +889,13 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd"> b (:class:`bool`): If True, force warnings to always be emitted</span>
 <span class="sd"> If False, set to the default behaviour</span>
 <span class="sd"> &quot;&quot;&quot;</span>
-<span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span>
+<span class="n">_C</span><span class="o">.</span><span class="n">_set_warnAlways</span><span class="p">(</span><span class="n">b</span><span class="p">)</span></div>

-<div class="viewcode-block" id="is_warn_always_enabled"><a class="viewcode-back" href="../generated/torch.is_warn_always_enabled.html#torch.is_warn_always_enabled">[docs]</a><span class="k">def</span> <span class="nf">is_warn_always_enabled</span><span class="p">():</span>
+<span class="k">def</span> <span class="nf">is_warn_always_enabled</span><span class="p">():</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns True if the global warn_always flag is turned on. Refer to</span>
 <span class="sd"> :func:`torch.set_warn_always` documentation for more details.</span>
 <span class="sd"> &quot;&quot;&quot;</span>
-<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_warnAlways</span><span class="p">()</span></div>
+<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_get_warnAlways</span><span class="p">()</span>

 <span class="c1">################################################################################</span>
 <span class="c1"># Define numeric constants</span>
@@ -1120,9 +1120,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="k">del</span> <span class="n">_torch_docs</span><span class="p">,</span> <span class="n">_tensor_docs</span><span class="p">,</span> <span class="n">_storage_docs</span>


-<span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
+<div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a><span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
 <span class="sa">r</span><span class="sd">&quot;&quot;&quot;Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1&quot;&quot;&quot;</span>
-<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span>
+<span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span></div>


 <span class="c1"># Import the ops &quot;namespace&quot;</span>

docs/master/_modules/torch/__config__.html

Lines changed: 1 addition & 1 deletion
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git6cf311c ) &#x25BC</a>
+<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git701771a ) &#x25BC</a>
 </div>


docs/master/_modules/torch/_jit_internal.html

Lines changed: 5 additions & 5 deletions
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git6cf311c ) &#x25BC</a>
+<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git701771a ) &#x25BC</a>
 </div>


@@ -900,7 +900,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="n">fn</span></div>


-<span class="k">def</span> <span class="nf">unused</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
+<div class="viewcode-block" id="unused"><a class="viewcode-back" href="../../generated/torch.jit.unused.html#torch.jit.unused">[docs]</a><span class="k">def</span> <span class="nf">unused</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>
 <span class="sd">&quot;&quot;&quot;</span>
 <span class="sd"> This decorator indicates to the compiler that a function or method should</span>
 <span class="sd"> be ignored and replaced with the raising of an exception. This allows you</span>
@@ -947,7 +947,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="k">return</span> <span class="n">prop</span>

 <span class="n">fn</span><span class="o">.</span><span class="n">_torchscript_modifier</span> <span class="o">=</span> <span class="n">FunctionModifiers</span><span class="o">.</span><span class="n">UNUSED</span>
-<span class="k">return</span> <span class="n">fn</span>
+<span class="k">return</span> <span class="n">fn</span></div>

 <span class="c1"># No op context manager from python side</span>
 <span class="k">class</span> <span class="nc">_IgnoreContextManager</span><span class="p">(</span><span class="n">contextlib</span><span class="o">.</span><span class="n">AbstractContextManager</span><span class="p">):</span>
@@ -957,7 +957,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="k">def</span> <span class="fm">__exit__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">exc_type</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">exc_value</span><span class="p">:</span> <span class="n">Any</span><span class="p">,</span> <span class="n">traceback</span><span class="p">:</span> <span class="n">Any</span><span class="p">)</span> <span class="o">-&gt;</span> <span class="kc">None</span><span class="p">:</span>
 <span class="k">pass</span>

-<div class="viewcode-block" id="ignore"><a class="viewcode-back" href="../../generated/torch.jit.ignore.html#torch.jit.ignore">[docs]</a><span class="k">def</span> <span class="nf">ignore</span><span class="p">(</span><span class="n">drop</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">ignore</span><span class="p">(</span><span class="n">drop</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
 <span class="sd">&quot;&quot;&quot;</span>
 <span class="sd"> This decorator indicates to the compiler that a function or method should</span>
 <span class="sd"> be ignored and left as a Python function. This allows you to leave code in</span>
@@ -1048,7 +1048,7 @@ <h1>Source code for torch._jit_internal</h1><div class="highlight"><pre>
 <span class="k">else</span><span class="p">:</span>
 <span class="n">fn</span><span class="o">.</span><span class="n">_torchscript_modifier</span> <span class="o">=</span> <span class="n">FunctionModifiers</span><span class="o">.</span><span class="n">IGNORE</span>
 <span class="k">return</span> <span class="n">fn</span>
-<span class="k">return</span> <span class="n">decorator</span></div>
+<span class="k">return</span> <span class="n">decorator</span>


 <span class="k">def</span> <span class="nf">_copy_to_script_wrapper</span><span class="p">(</span><span class="n">fn</span><span class="p">):</span>

docs/master/_modules/torch/_lobpcg.html

Lines changed: 1 addition & 1 deletion
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git6cf311c ) &#x25BC</a>
+<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git701771a ) &#x25BC</a>
 </div>


docs/master/_modules/torch/_lowrank.html

Lines changed: 1 addition & 1 deletion
@@ -193,7 +193,7 @@
 <div class="pytorch-left-menu-search">

 <div class="version">
-<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git6cf311c ) &#x25BC</a>
+<a href='https://pytorch.org/docs/versions.html'>master (1.10.0a0+git701771a ) &#x25BC</a>
 </div>


0 commit comments