2013-03-18 05:06:52 +08:00
|
|
|
#
|
2013-10-28 04:59:47 +08:00
|
|
|
# Copyright 2006-2007, 2013 Red Hat, Inc.
|
2013-03-18 05:06:52 +08:00
|
|
|
# Daniel P. Berrange <berrange@redhat.com>
|
|
|
|
#
|
2018-04-04 21:35:41 +08:00
|
|
|
# This work is licensed under the GNU GPLv2 or later.
|
2018-03-21 03:00:02 +08:00
|
|
|
# See the COPYING file in the top-level directory.
|
2013-03-18 05:06:52 +08:00
|
|
|
|
2013-08-09 08:14:10 +08:00
|
|
|
import ftplib
|
2017-10-11 19:35:50 +08:00
|
|
|
import io
|
2013-03-18 05:06:52 +08:00
|
|
|
import logging
|
|
|
|
import os
|
2013-08-09 08:14:10 +08:00
|
|
|
import subprocess
|
|
|
|
import tempfile
|
2018-04-03 23:38:37 +08:00
|
|
|
import urllib
|
2013-08-09 08:14:10 +08:00
|
|
|
|
2016-04-19 04:42:12 +08:00
|
|
|
import requests
|
|
|
|
|
2013-04-14 02:34:52 +08:00
|
|
|
|
2018-06-13 01:49:25 +08:00
|
|
|
###########################################################################
|
|
|
|
# Backends for the various URL types we support (http, https, ftp, local) #
|
|
|
|
###########################################################################
|
2013-04-14 02:34:52 +08:00
|
|
|
|
2015-09-19 04:49:18 +08:00
|
|
|
class _URLFetcher(object):
    """
    This is a generic base class for fetching/extracting files from
    a media source, such as CD ISO, or HTTP/HTTPS/FTP server
    """
    # Chunk size used when copying data from the source to a local file
    _block_size = 16384

    def __init__(self, location, scratchdir, meter):
        """
        :param location: base URI (or local path) files are fetched from
        :param scratchdir: directory used for temporary downloads
        :param meter: progress meter object; this class calls its
            start()/update()/end() methods while downloading
        """
        self.location = location
        self.scratchdir = scratchdir
        self.meter = meter

        # Optional override for self.location used by _make_full_url;
        # subclasses/users may point it at an extracted source dir
        self._srcdir = None

        logging.debug("Using scratchdir=%s", scratchdir)


    ####################
    # Internal helpers #
    ####################

    def _make_full_url(self, filename):
        """
        Generate a full fetchable URL from the passed filename, which
        is relative to the self.location
        """
        ret = self._srcdir or self.location
        if not filename:
            return ret

        if not ret.endswith("/"):
            ret += "/"
        return ret + filename

    def _grabURL(self, filename, fileobj):
        """
        Download the filename from self.location, and write contents to
        fileobj

        :raises ValueError: if the file could not be acquired
        """
        url = self._make_full_url(filename)

        try:
            urlobj, size = self._grabber(url)
        except Exception as e:
            raise ValueError(_("Couldn't acquire file %s: %s") %
                             (url, str(e)))

        logging.debug("Fetching URI: %s", url)
        self.meter.start(
            text=_("Retrieving file %s...") % os.path.basename(filename),
            size=size)

        # BUGFIX: always close the object returned by _grabber so we
        # don't leak file handles/sockets if the copy fails midway
        try:
            total = self._write(urlobj, fileobj)
        finally:
            urlobj.close()
        self.meter.end(total)

    def _write(self, urlobj, fileobj):
        """
        Write the contents of urlobj to python file like object fileobj

        :returns: total number of bytes written
        """
        total = 0
        while True:
            buff = urlobj.read(self._block_size)
            if not buff:
                break
            fileobj.write(buff)
            total += len(buff)
            self.meter.update(total)
        return total

    def _grabber(self, url):
        """
        Returns the urlobj, size for the passed URL. urlobj is whatever
        data needs to be passed to self._write
        """
        raise NotImplementedError("must be implemented in subclass")


    ##############
    # Public API #
    ##############

    def prepareLocation(self):
        """
        Perform any necessary setup
        """
        pass

    def cleanupLocation(self):
        """
        Perform any necessary cleanup
        """
        pass

    def can_access(self):
        """
        Return True if the location URL seems to be valid
        """
        return True

    def _hasFile(self, url):
        raise NotImplementedError("Must be implemented in subclass")

    def hasFile(self, filename):
        """
        Return True if self.location has the passed filename
        """
        url = self._make_full_url(filename)
        ret = self._hasFile(url)
        logging.debug("hasFile(%s) returning %s", url, ret)
        return ret

    def acquireFile(self, filename):
        """
        Grab the passed filename from self.location and save it to
        a temporary file, returning the temp filename
        """
        prefix = "virtinst-" + os.path.basename(filename) + "."

        # pylint: disable=redefined-variable-type
        if "VIRTINST_TEST_SUITE" in os.environ:
            # Use a predictable path so the test suite can find the file
            fn = os.path.join("/tmp", prefix)
            fileobj = open(fn, "wb")
        else:
            fileobj = tempfile.NamedTemporaryFile(
                dir=self.scratchdir, prefix=prefix, delete=False)
            fn = fileobj.name

        # BUGFIX: close the temp file handle even if the download
        # raises, otherwise we leak an open fd per failed fetch
        try:
            self._grabURL(filename, fileobj)
        finally:
            fileobj.close()
        logging.debug("Saved file to %s", fn)
        return fn

    def acquireFileContent(self, filename):
        """
        Grab the passed filename from self.location and return it as a string
        """
        fileobj = io.BytesIO()
        self._grabURL(filename, fileobj)
        return fileobj.getvalue().decode("utf-8")
|
2013-08-09 08:14:10 +08:00
|
|
|
|
|
|
|
|
2015-09-19 04:49:18 +08:00
|
|
|
class _HTTPURLFetcher(_URLFetcher):
    _session = None

    def prepareLocation(self):
        # A single Session is reused for every request on this location
        self._session = requests.Session()

    def cleanupLocation(self):
        # Drop our reference first, then best-effort close the session
        session, self._session = self._session, None
        if not session:
            return
        try:
            session.close()
        except Exception:
            logging.debug("Error closing requests.session", exc_info=True)

    def can_access(self):
        # An empty filename resolves to the base location itself
        return self.hasFile("")

    def _hasFile(self, url):
        """
        We just do a HEAD request to see if the file exists
        """
        try:
            resp = self._session.head(url, allow_redirects=True)
            resp.raise_for_status()
        except Exception as err:
            logging.debug("HTTP hasFile request failed: %s", str(err))
            return False
        return True

    def _grabber(self, url):
        """
        Use requests for this
        """
        resp = self._session.get(url, stream=True)
        resp.raise_for_status()
        try:
            size = int(resp.headers.get('content-length'))
        except Exception:
            # Header missing or not numeric: size is simply unknown
            size = None
        return resp, size

    def _write(self, urlobj, fileobj):
        """
        The requests object doesn't have a file-like read() option, so
        we need to implement it ourselves
        """
        total = 0
        for chunk in urlobj.iter_content(chunk_size=self._block_size):
            fileobj.write(chunk)
            total += len(chunk)
            self.meter.update(total)
        return total
|
|
|
|
|
2013-08-09 08:14:10 +08:00
|
|
|
|
2015-09-19 04:49:18 +08:00
|
|
|
class _FTPURLFetcher(_URLFetcher):
    _ftp = None

    def prepareLocation(self):
        """
        Connect and log in to the FTP server named in self.location
        """
        if self._ftp:
            # Connection already established
            return

        try:
            url_parts = urllib.parse.urlparse(self.location)
            user = urllib.parse.unquote(url_parts.username or '')
            passwd = urllib.parse.unquote(url_parts.password or '')

            self._ftp = ftplib.FTP()
            self._ftp.connect(url_parts.hostname, url_parts.port or 0)
            self._ftp.login(user, passwd)
            # Force binary mode
            self._ftp.voidcmd("TYPE I")
        except Exception as err:
            raise ValueError(_("Opening URL %s failed: %s.") %
                             (self.location, str(err)))

    def _grabber(self, url):
        """
        Use urllib and ftplib to grab the file
        """
        urlobj = urllib.request.urlopen(urllib.request.Request(url))
        # Ask the existing FTP connection for the file size; urllib
        # doesn't report one for ftp URLs
        size = self._ftp.size(urllib.parse.urlparse(url)[2])
        return urlobj, size

    def cleanupLocation(self):
        """
        Close the FTP connection, ignoring errors from quit()
        """
        if not self._ftp:
            return

        try:
            self._ftp.quit()
        except Exception:
            logging.debug("Error quitting ftp connection", exc_info=True)

        self._ftp = None

    def _hasFile(self, url):
        path = urllib.parse.urlparse(url)[2]

        try:
            try:
                # If it's a file
                self._ftp.size(path)
            except ftplib.all_errors:
                # If it's a dir
                self._ftp.cwd(path)
        except ftplib.all_errors as err:
            logging.debug("FTP hasFile: couldn't access %s: %s",
                          url, str(err))
            return False

        return True
|
|
|
|
|
|
|
|
|
2015-09-19 04:49:18 +08:00
|
|
|
class _LocalURLFetcher(_URLFetcher):
    """
    For grabbing files from a local directory
    """
    def _hasFile(self, url):
        # The "URL" here is just a local filesystem path
        return os.path.exists(url)

    def _grabber(self, url):
        fobj = open(url, "rb")
        return fobj, os.path.getsize(url)
|
|
|
|
|
2013-08-09 08:14:10 +08:00
|
|
|
|
2017-11-24 13:11:14 +08:00
|
|
|
class _ISOURLFetcher(_URLFetcher):
    # Cached output of 'isoinfo -f' (list of bytes paths), filled lazily
    _cache_file_list = None

    def _make_full_url(self, filename):
        # Paths inside the ISO are always absolute
        return "/" + filename

    def _grabber(self, url):
        """
        Use isoinfo to grab the file
        """
        if not self._hasFile(url):
            raise RuntimeError("isoinfo didn't find file=%s" % url)

        cmd = ["isoinfo", "-J", "-i", self.location, "-x", url]
        logging.debug("Running isoinfo: %s", cmd)
        stdout = subprocess.check_output(cmd)

        # isoinfo dumps the whole file to stdout, so wrap it for _write
        return io.BytesIO(stdout), len(stdout)

    def _hasFile(self, url):
        """
        Use isoinfo to list and search for the file
        """
        if not self._cache_file_list:
            cmd = ["isoinfo", "-J", "-i", self.location, "-f"]
            logging.debug("Running isoinfo: %s", cmd)
            listing = subprocess.check_output(cmd)
            self._cache_file_list = listing.splitlines(False)

        # The listing is bytes, so encode the queried path to match
        return url.encode("ascii") in self._cache_file_list
|
2017-11-24 13:11:14 +08:00
|
|
|
|
|
|
|
|
2013-09-27 01:04:28 +08:00
|
|
|
def fetcherForURI(uri, *args, **kwargs):
    """
    Return a fetcher instance appropriate for the passed URI scheme
    """
    if uri.startswith(("http://", "https://")):
        return _HTTPURLFetcher(uri, *args, **kwargs)
    if uri.startswith("ftp://"):
        return _FTPURLFetcher(uri, *args, **kwargs)
    if os.path.isdir(uri):
        # Pointing to a local tree
        return _LocalURLFetcher(uri, *args, **kwargs)
    # Pointing to a path (e.g. iso), or a block device (e.g. /dev/cdrom)
    return _ISOURLFetcher(uri, *args, **kwargs)
|