@@ -181,10 +181,10 @@ def _get_urls(data, version):


 def download_url(root, value):
-    print(value["url"])
     with url_lib.urlopen(value["url"]) as response:
         data = response.read()
-        if hashlib.sha256(data) == value["hash"]:
+        if hashlib.sha256(data).hexdigest() == value["hash"]:
+            print("Download: ", value["url"])
             with zipfile.ZipFile(io.BytesIO(data), "r") as wheel:
                 for zip_info in wheel.infolist():
                     # Ignore dist info since we are merging multiple wheels
@@ -195,7 +195,6 @@ def download_url(root, value):


 def _download_and_extract(root, url):
-    print("donload and extract")
     if "manylinux" in url or "macosx" in url or "win_amd64" in url:
         root = os.getcwd() if root is None or root == "." else root
         print(url)
@@ -206,12 +205,11 @@ def _download_and_extract(root, url):
                     # Ignore dist info since we are merging multiple wheels
                     if ".dist-info/" in zip_info.filename:
                         continue
-                    # print("\t" + zip_info.filename)
+                    print("\t" + zip_info.filename)
                     wheel.extract(zip_info.filename, root)


 def _install_package(root, package_name, version="latest"):
-    print("package")
     from packaging.version import parse as version_parser

     data = _get_pypi_package_data(package_name)
@@ -220,7 +218,7 @@ def _install_package(root, package_name, version="latest"):
         use_version = max(data["releases"].keys(), key=version_parser)
     else:
         use_version = version
-    print(_get_urls(data, use_version))
+
     for url in _get_urls(data, use_version):
         _download_and_extract(root, url)