@@ -158,7 +158,7 @@


 <div class="version">
-  <a href='http://pytorch.org/docs/versions.html'>1.7.0a0+34f3207 ▼</a>
+  <a href='http://pytorch.org/docs/versions.html'>1.7.0a0+65919bf ▼</a>
 </div>


@@ -614,7 +614,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="k">return</span> <span class="nb">type</span><span class="p">(</span><span class="n">obj</span><span class="p">)</span> <span class="ow">in</span> <span class="n">_storage_classes</span></div>


-<div class="viewcode-block" id="set_default_tensor_type"><a class="viewcode-back" href="../generated/torch.set_default_tensor_type.html#torch.set_default_tensor_type">[docs]</a><span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""Sets the default ``torch.Tensor`` type to floating point tensor type</span>
 <span class="sd">    ``t``. This type will also be used as default floating point type for</span>
 <span class="sd">    type inference in :func:`torch.tensor`.</span>
@@ -635,10 +635,10 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">    """</span>
     <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">t</span><span class="p">,</span> <span class="n">_string_classes</span><span class="p">):</span>
         <span class="n">t</span> <span class="o">=</span> <span class="n">_import_dotted_name</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_tensor_type</span><span class="p">(</span><span class="n">t</span><span class="p">)</span>


-<div class="viewcode-block" id="set_default_dtype"><a class="viewcode-back" href="../generated/torch.set_default_dtype.html#torch.set_default_dtype">[docs]</a><span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">"""Sets the default floating point dtype to :attr:`d`.</span>
 <span class="sd">    This dtype is:</span>

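For context, the function unwrapped in the hunk above is the public `torch.set_default_tensor_type` API. A minimal usage sketch of the behaviour its docstring describes (dtype inference follows the default type; the commented outputs are the values expected on a stock build):

import torch

# By default, Python float literals infer torch.float32.
print(torch.tensor([1.2, 3.4]).dtype)    # torch.float32

# Switching the default tensor type also switches the inferred dtype.
torch.set_default_tensor_type(torch.DoubleTensor)
print(torch.tensor([1.2, 3.4]).dtype)    # torch.float64

# The string form handled by the _string_classes branch works as well.
torch.set_default_tensor_type('torch.FloatTensor')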
@@ -666,9 +666,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">        torch.complex128</span>

 <span class="sd">    """</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_default_dtype</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>

-<div class="viewcode-block" id="set_deterministic"><a class="viewcode-back" href="../generated/torch.set_deterministic.html#torch.set_deterministic">[docs]</a><span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
+<span class="k">def</span> <span class="nf">set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">):</span>
     <span class="sa">r</span><span class="sd">""" Sets whether PyTorch operations must use "deterministic"</span>
 <span class="sd">    algorithms. That is, algorithms which, given the same input, and when</span>
 <span class="sd">    run on the same software and hardware, always produce the same output.</span>
@@ -740,7 +740,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="sd">        d (:class:`bool`): If True, force operations to be deterministic.</span>
 <span class="sd">            If False, allow non-deterministic operations.</span>
 <span class="sd">    """</span>
-    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span></div>
+    <span class="n">_C</span><span class="o">.</span><span class="n">_set_deterministic</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>

 <div class="viewcode-block" id="is_deterministic"><a class="viewcode-back" href="../generated/torch.is_deterministic.html#torch.is_deterministic">[docs]</a><span class="k">def</span> <span class="nf">is_deterministic</span><span class="p">():</span>
     <span class="sa">r</span><span class="sd">"""Returns True if the global deterministic flag is turned on. Refer to</span>
@@ -760,8 +760,8 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
     <span class="k">pass</span>


-<span class="k">class</span> <span class="nc">FloatStorage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">FloatStorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
-    <span class="k">pass</span>
+<div class="viewcode-block" id="FloatStorage"><a class="viewcode-back" href="../storage.html#torch.FloatStorage">[docs]</a><span class="k">class</span> <span class="nc">FloatStorage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">FloatStorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
+    <span class="k">pass</span></div>


 <span class="k">class</span> <span class="nc">HalfStorage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">HalfStorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
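The hunk above gives `FloatStorage` the `[docs]` viewcode anchor. A storage is the flat, typed backing array behind a tensor; a minimal sketch of how it surfaces in the Python API (legacy constructor style, as in this 1.7-era snapshot):

import torch

s = torch.FloatStorage([1.0, 2.0, 3.0, 4.0])    # flat array of 4 floats
t = torch.FloatTensor(s)                         # tensor viewing that storage
t[0] = 9.0
print(s[0])                                      # 9.0 -- same memory
print(t.storage_type())                          # <class 'torch.FloatStorage'>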
@@ -810,11 +810,13 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="k">class</span> <span class="nc">QInt32Storage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">QInt32StorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
     <span class="k">pass</span>

+<span class="k">class</span> <span class="nc">QUInt4x2Storage</span><span class="p">(</span><span class="n">_C</span><span class="o">.</span><span class="n">QUInt4x2StorageBase</span><span class="p">,</span> <span class="n">_StorageBase</span><span class="p">):</span>
+    <span class="k">pass</span>

 <span class="n">_storage_classes</span> <span class="o">=</span> <span class="p">{</span>
     <span class="n">DoubleStorage</span><span class="p">,</span> <span class="n">FloatStorage</span><span class="p">,</span> <span class="n">LongStorage</span><span class="p">,</span> <span class="n">IntStorage</span><span class="p">,</span> <span class="n">ShortStorage</span><span class="p">,</span>
     <span class="n">CharStorage</span><span class="p">,</span> <span class="n">ByteStorage</span><span class="p">,</span> <span class="n">HalfStorage</span><span class="p">,</span> <span class="n">BoolStorage</span><span class="p">,</span> <span class="n">QUInt8Storage</span><span class="p">,</span> <span class="n">QInt8Storage</span><span class="p">,</span>
-    <span class="n">QInt32Storage</span><span class="p">,</span> <span class="n">BFloat16Storage</span><span class="p">,</span> <span class="n">ComplexFloatStorage</span><span class="p">,</span> <span class="n">ComplexDoubleStorage</span>
+    <span class="n">QInt32Storage</span><span class="p">,</span> <span class="n">BFloat16Storage</span><span class="p">,</span> <span class="n">ComplexFloatStorage</span><span class="p">,</span> <span class="n">ComplexDoubleStorage</span><span class="p">,</span> <span class="n">QUInt4x2Storage</span>
 <span class="p">}</span>

 <span class="c1"># The _tensor_classes set is initialized by the call to _C._initialize_tensor_type_bindings()</span>
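`_storage_classes` is exactly the set that `torch.is_storage` tests membership against (the `return type(obj) in _storage_classes` line in the first code hunk), so the newly added `QUInt4x2Storage` is recognized automatically. A quick sketch of that predicate:

import torch

print(torch.is_storage(torch.FloatStorage(8)))    # True
print(torch.is_storage(torch.empty(8)))           # False: a tensor, not a storage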
@@ -883,6 +885,7 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="k">del</span> <span class="n">BFloat16StorageBase</span>
 <span class="k">del</span> <span class="n">ComplexDoubleStorageBase</span>
 <span class="k">del</span> <span class="n">ComplexFloatStorageBase</span>
+<span class="k">del</span> <span class="n">QUInt4x2StorageBase</span>

 <span class="c1">################################################################################</span>
 <span class="c1"># Import most common subpackages</span>
@@ -925,9 +928,9 @@ <h1>Source code for torch</h1><div class="highlight"><pre>
 <span class="k">del</span> <span class="n">_torch_docs</span><span class="p">,</span> <span class="n">_tensor_docs</span><span class="p">,</span> <span class="n">_storage_docs</span>


-<div class="viewcode-block" id="compiled_with_cxx11_abi"><a class="viewcode-back" href="../generated/torch.compiled_with_cxx11_abi.html#torch.compiled_with_cxx11_abi">[docs]</a><span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
+<span class="k">def</span> <span class="nf">compiled_with_cxx11_abi</span><span class="p">():</span>
     <span class="sa">r</span><span class="sd">"""Returns whether PyTorch was built with _GLIBCXX_USE_CXX11_ABI=1"""</span>
-    <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span></div>
+    <span class="k">return</span> <span class="n">_C</span><span class="o">.</span><span class="n">_GLIBCXX_USE_CXX11_ABI</span>


 <span class="c1"># Import the ops "namespace"</span>
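`compiled_with_cxx11_abi` is a thin wrapper over a build-time constant; it matters mainly when compiling C++ extensions that must match libtorch's `_GLIBCXX_USE_CXX11_ABI` setting:

import torch

# True on builds compiled with the C++11 ABI; pass the matching
# -D_GLIBCXX_USE_CXX11_ABI=<0|1> flag when building C++ extensions.
print(torch.compiled_with_cxx11_abi())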