Commit 27f1ba0

Generate Python docs from pytorch/pytorch@f690a55

1 parent: a0a98de
File tree

16 files changed: +156 -81 lines

docs/master/_images/RReLU.png

49 Bytes (binary file)
docs/master/_modules/torch/_jit_internal.html

Lines changed: 8 additions & 8 deletions
@@ -866,7 +866,7 @@ <h1>Source code for torch._jit_internal</h1>
     "if this method is not scripted, copy the python method onto the scripted model"
 
 
-def export(fn):
+<div class="viewcode-block" id="export"><a class="viewcode-back" href="../../jit.html#torch.jit.export">[docs]</a>def export(fn):
     """
     This decorator indicates that a method on an ``nn.Module`` is used as an entry point into a
     :class:`ScriptModule` and should be compiled.
@@ -909,10 +909,10 @@ <h1>Source code for torch._jit_internal</h1>
         m = torch.jit.script(MyModule())
     """
     fn._torchscript_modifier = FunctionModifiers.EXPORT
-    return fn
+    return fn</div>
 
 
-<div class="viewcode-block" id="unused"><a class="viewcode-back" href="../../generated/torch.jit.unused.html#torch.jit.unused">[docs]</a>def unused(fn):
+def unused(fn):
     """
     This decorator indicates to the compiler that a function or method should
     be ignored and replaced with the raising of an exception. This allows you
@@ -959,7 +959,7 @@ <h1>Source code for torch._jit_internal</h1>
         return prop
 
     fn._torchscript_modifier = FunctionModifiers.UNUSED
-    return fn</div>
+    return fn
 
 # No op context manager from python side
 class _IgnoreContextManager(contextlib.AbstractContextManager):
@@ -969,7 +969,7 @@ <h1>Source code for torch._jit_internal</h1>
     def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
         pass
 
-<div class="viewcode-block" id="ignore"><a class="viewcode-back" href="../../generated/torch.jit.ignore.html#torch.jit.ignore">[docs]</a>def ignore(drop=False, **kwargs):
+def ignore(drop=False, **kwargs):
     """
     This decorator indicates to the compiler that a function or method should
     be ignored and left as a Python function. This allows you to leave code in
@@ -1060,7 +1060,7 @@ <h1>Source code for torch._jit_internal</h1>
     else:
         fn._torchscript_modifier = FunctionModifiers.IGNORE
         return fn
-    return decorator</div>
+    return decorator
 
 
 def _copy_to_script_wrapper(fn):
@@ -1358,7 +1358,7 @@ <h1>Source code for torch._jit_internal</h1>
     globals()[f"BroadcastingList{i}"] = BroadcastingList1
 
 
-def is_scripting() -> bool:
+<div class="viewcode-block" id="is_scripting"><a class="viewcode-back" href="../../jit_language_reference.html#torch.jit.is_scripting">[docs]</a>def is_scripting() -> bool:
     r"""
     Function that returns True when in compilation and False otherwise. This
     is useful especially with the @unused decorator to leave code in your
@@ -1377,7 +1377,7 @@ <h1>Source code for torch._jit_internal</h1>
         else:
             return unsupported_linear_op(x)
     """
-    return False
+    return False</div>
 
 
 # Retrieves a fully-qualified name (module hierarchy + classname) for a given obj.

docs/master/_modules/torch/_tensor.html

Lines changed: 3 additions & 16 deletions
@@ -931,11 +931,11 @@ <h1>Source code for torch._tensor</h1>
         else:
             return self.flip(0)
 
-    <div class="viewcode-block" id="Tensor.norm"><a class="viewcode-back" href="../../generated/torch.Tensor.norm.html#torch.Tensor.norm">[docs]</a> def norm(self, p="fro", dim=None, keepdim=False, dtype=None):
+    def norm(self, p="fro", dim=None, keepdim=False, dtype=None):
         r"""See :func:`torch.norm`"""
         if has_torch_function_unary(self):
             return handle_torch_function(Tensor.norm, (self,), self, p=p, dim=dim, keepdim=keepdim, dtype=dtype)
-        return torch.norm(self, p, dim, keepdim, dtype=dtype)</div>
+        return torch.norm(self, p, dim, keepdim, dtype=dtype)
 
     def lu(self, pivot=True, get_infos=False):
         r"""See :func:`torch.lu`"""
@@ -1471,20 +1471,7 @@ <h1>Source code for torch._tensor</h1>
             25
 
         """
-        if self.is_sparse:
-            return self
-        if self.is_sparse_csr:
-            crow_indices = self.crow_indices()
-            col_indices = self.col_indices()
-            indices = torch._convert_indices_from_csr_to_coo(crow_indices, col_indices,
-                                                             out_int32=crow_indices.dtype == torch.int32)
-            return torch.sparse_coo_tensor(indices,
-                                           self.values(),
-                                           size=self.shape,
-                                           dtype=self.dtype,
-                                           device=self.device)
-        else:
-            return self.to_sparse()
+        return self.to_sparse()
 
     def to_sparse_csr(self):
         """ Convert a tensor to compressed row storage format. Only works with 2D tensors.
