maven_artifact: download with shutil.copyfileobj to a temp file (#44956)
* maven_artifact: replace homemade chunked download with shutil.copyfileobj * maven_artifact: copy to temp file first
This commit is contained in:
parent
b0d6867fbb
commit
5b358db83f
1 changed file with 29 additions and 45 deletions
|
@ -158,9 +158,9 @@ EXAMPLES = '''
|
|||
import hashlib
|
||||
import os
|
||||
import posixpath
|
||||
import sys
|
||||
import shutil
|
||||
import io
|
||||
import tempfile
|
||||
|
||||
try:
|
||||
from lxml import etree
|
||||
|
@ -371,56 +371,40 @@ class MavenDownloader:
|
|||
raise ValueError(failmsg + " because of " + info['msg'] + "for URL " + url_to_use)
|
||||
return None
|
||||
|
||||
def download(self, tmpdir, artifact, verify_download, filename=None):
    """Download *artifact*, staging through a temporary file in *tmpdir*.

    The artifact is first written to a temp file (local copy or HTTP
    stream), optionally MD5-verified, and only then moved to its final
    destination so a failed/corrupt download never clobbers the target.

    :param tmpdir: directory in which the staging temp file is created
    :param artifact: Artifact to fetch; "latest"/empty version is resolved
    :param verify_download: when true, check the MD5 against the remote sum
    :param filename: optional destination override passed to
        ``artifact.get_filename``
    :return: ``None`` on success, or an error-message string on failure
    """
    if not artifact.version or artifact.version == "latest":
        artifact = Artifact(artifact.group_id, artifact.artifact_id, self.find_latest_version_available(artifact),
                            artifact.classifier, artifact.extension)
    url = self.find_uri_for_artifact(artifact)
    # mkstemp returns an OPEN file descriptor; every path below must
    # either fdopen() it (which takes ownership) or os.close() it.
    tempfd, tempname = tempfile.mkstemp(dir=tmpdir)

    try:
        # copy to temp file
        if self.local:
            parsed_url = urlparse(url)
            if os.path.isfile(parsed_url.path):
                # shutil.copy2 opens tempname by path, so the mkstemp fd
                # is unused on this branch — close it to avoid an fd leak.
                os.close(tempfd)
                shutil.copy2(parsed_url.path, tempname)
            else:
                # clean up the staging file before the early error return
                os.close(tempfd)
                os.remove(tempname)
                return "Can not find local file: " + parsed_url.path
        else:
            response = self._request(url, "Failed to download artifact " + str(artifact))
            # fdopen takes ownership of tempfd; the with-block closes it
            with os.fdopen(tempfd, 'wb') as f:
                shutil.copyfileobj(response, f)

        if verify_download:
            invalid_md5 = self.is_invalid_md5(tempname, url)
            if invalid_md5:
                # if verify_change was set, the previous file would be deleted
                os.remove(tempname)
                return invalid_md5
    except Exception:
        os.remove(tempname)
        # bare raise keeps the original traceback (raise e would not)
        raise

    # all good, now copy temp file to target
    shutil.move(tempname, artifact.get_filename(filename))
    return None
|
||||
|
||||
def chunk_report(self, bytes_so_far, chunk_size, total_size):
    """Write a carriage-return progress line for an in-flight download."""
    percent = round(float(bytes_so_far) / total_size * 100, 2)
    sys.stdout.write("Downloaded %d of %d bytes (%0.2f%%)\r" %
                     (bytes_so_far, total_size, percent))
    # terminate the progress line once everything has arrived
    if bytes_so_far >= total_size:
        sys.stdout.write('\n')
|
||||
|
||||
def _write_chunks(self, response, filehandle, chunk_size=8192, report_hook=None):
|
||||
total_size = response.info().get('Content-Length').strip()
|
||||
total_size = int(total_size)
|
||||
bytes_so_far = 0
|
||||
|
||||
while True:
|
||||
chunk = response.read(chunk_size)
|
||||
bytes_so_far += len(chunk)
|
||||
|
||||
if not chunk:
|
||||
break
|
||||
|
||||
filehandle.write(chunk)
|
||||
if report_hook:
|
||||
report_hook(bytes_so_far, chunk_size, total_size)
|
||||
|
||||
return bytes_so_far
|
||||
|
||||
def is_invalid_md5(self, file, remote_url):
|
||||
if os.path.exists(file):
|
||||
local_md5 = self._local_md5(file)
|
||||
|
@ -546,7 +530,7 @@ def main():
|
|||
|
||||
if prev_state == "absent":
|
||||
try:
|
||||
download_error = downloader.download(artifact, verify_download, b_dest)
|
||||
download_error = downloader.download(module.tmpdir, artifact, verify_download, b_dest)
|
||||
if download_error is None:
|
||||
changed = True
|
||||
else:
|
||||
|
|
Loading…
Reference in a new issue