2013-03-18 05:06:52 +08:00
|
|
|
#
|
|
|
|
# List of OS Specific data
|
|
|
|
#
|
2014-09-09 19:37:20 +08:00
|
|
|
# Copyright 2006-2008, 2013-2014 Red Hat, Inc.
|
2013-03-18 05:06:52 +08:00
|
|
|
#
|
2018-04-04 21:35:41 +08:00
|
|
|
# This work is licensed under the GNU GPLv2 or later.
|
2018-03-21 03:00:02 +08:00
|
|
|
# See the COPYING file in the top-level directory.
|
2013-03-18 05:06:52 +08:00
|
|
|
|
2018-09-30 03:22:44 +08:00
|
|
|
import datetime
|
2019-06-08 08:55:11 +08:00
|
|
|
import os
|
2014-09-23 05:20:07 +08:00
|
|
|
import re
|
|
|
|
|
2019-03-06 01:34:51 +08:00
|
|
|
from gi.repository import Libosinfo
|
2014-09-23 05:20:07 +08:00
|
|
|
|
2019-06-17 09:12:39 +08:00
|
|
|
from .logger import log
|
|
|
|
|
2013-08-11 06:48:43 +08:00
|
|
|
|
2019-06-08 08:55:11 +08:00
|
|
|
def _in_testsuite():
|
|
|
|
return "VIRTINST_TEST_SUITE" in os.environ
|
|
|
|
|
|
|
|
|
2019-08-02 07:15:08 +08:00
|
|
|
def _media_create_from_location(location):
    """
    Build a Libosinfo.Media object for `location`, preferring the
    flags-taking constructor when the installed libosinfo provides it.
    """
    if hasattr(Libosinfo.Media, "create_from_location_with_flags"):
        # We prefer this API, because by default it will not
        # reject non-bootable media, like debian s390x
        # pylint: disable=no-member
        return Libosinfo.Media.create_from_location_with_flags(
                location, None, 0)

    # Fallback for older libosinfo without the flags variant
    return Libosinfo.Media.create_from_location(  # pragma: no cover
            location, None)
|
|
|
|
|
|
|
|
|
2015-04-05 00:04:11 +08:00
|
|
|
###################
|
|
|
|
# Sorting helpers #
|
|
|
|
###################
|
2014-02-17 23:40:01 +08:00
|
|
|
|
2018-09-30 04:04:05 +08:00
|
|
|
def _sortby(osobj):
|
|
|
|
"""
|
|
|
|
Combines distro+version to make a more sort friendly string. Examples
|
|
|
|
|
|
|
|
fedora25 -> fedora-0025000000000000
|
|
|
|
ubuntu17.04 -> ubuntu-0017000400000000
|
|
|
|
win2k8r2 -> win-0006000100000000
|
|
|
|
"""
|
|
|
|
if osobj.is_generic():
|
|
|
|
# Sort generic at the end of the list
|
|
|
|
return "zzzzzz-000000000000"
|
|
|
|
|
|
|
|
version = osobj.version
|
|
|
|
try:
|
|
|
|
t = version.split(".")
|
|
|
|
t = t[:min(4, len(t))] + [0] * (4 - min(4, len(t)))
|
|
|
|
new_version = ""
|
|
|
|
for n in t:
|
|
|
|
new_version = new_version + ("%.4i" % int(n))
|
|
|
|
version = new_version
|
|
|
|
except Exception:
|
|
|
|
pass
|
|
|
|
|
|
|
|
return "%s-%s" % (osobj.distro, version)
|
|
|
|
|
2018-10-01 23:37:28 +08:00
|
|
|
|
2018-05-01 19:51:23 +08:00
|
|
|
def _sort(tosort):
    """
    Flatten a {key: osvariant} dict into a list ordered by distro name
    ascending, then version descending within each distro, with the
    'generic' entry forced to the very end.
    """
    sortby_mappings = {}
    distro_mappings = {}

    for key, osinfo in tosort.items():
        # Libosinfo has some duplicate version numbers here, so append .1
        # if there's a collision
        sortby = _sortby(osinfo)
        while sortby_mappings.get(sortby):
            sortby += ".1"
        sortby_mappings[sortby] = key

        # Group by distro first, so debian is clumped together, fedora, etc.
        # 'generic' gets an artificial distro that sorts last.
        distro = "zzzzzz" if osinfo.is_generic() else osinfo.distro
        distro_mappings.setdefault(distro, []).append(sortby)

    # We want returned lists to be sorted descending by 'distro', so we get
    # debian5, debian4, fedora14, fedora13
    # rather than
    # debian4, debian5, fedora13, fedora14
    for distro_list in distro_mappings.values():
        distro_list.sort(reverse=True)

    # Build the final list of sorted os objects
    retlist = []
    for distro in sorted(distro_mappings):
        for sortby in distro_mappings[distro]:
            retlist.append(tosort[sortby_mappings[sortby]])

    return retlist
|
|
|
|
|
2013-04-14 02:34:52 +08:00
|
|
|
|
2019-03-06 05:10:16 +08:00
|
|
|
class _OsinfoIter:
|
|
|
|
"""
|
|
|
|
Helper to turn osinfo style get_length/get_nth lists into python
|
|
|
|
iterables
|
|
|
|
"""
|
|
|
|
def __init__(self, listobj):
|
|
|
|
self.current = 0
|
|
|
|
self.listobj = listobj
|
2019-05-12 05:53:07 +08:00
|
|
|
self.high = -1
|
|
|
|
if self.listobj:
|
|
|
|
self.high = self.listobj.get_length() - 1
|
2019-03-06 05:10:16 +08:00
|
|
|
|
|
|
|
def __iter__(self):
|
|
|
|
return self
|
|
|
|
def __next__(self):
|
|
|
|
if self.current > self.high:
|
|
|
|
raise StopIteration
|
|
|
|
ret = self.listobj.get_nth(self.current)
|
|
|
|
self.current += 1
|
|
|
|
return ret
|
|
|
|
|
|
|
|
|
2015-04-05 00:04:11 +08:00
|
|
|
class _OSDB(object):
    """
    Entry point for the public API.

    Lazily loads the libosinfo database on first use and maps every OS
    record to an _OsVariant, keyed by its short-id(s). A 'generic'
    fallback entry is always present.
    """
    def __init__(self):
        # Lazily filled caches; see the _os_loader/_all_variants properties
        self.__os_loader = None
        self.__all_variants = None

    # This is only for back compatibility with pre-libosinfo support.
    # This should never change.
    _aliases = {
        "altlinux": "altlinux1.0",
        "debianetch": "debian4",
        "debianlenny": "debian5",
        "debiansqueeze": "debian6",
        "debianwheezy": "debian7",
        "freebsd10": "freebsd10.0",
        "freebsd6": "freebsd6.0",
        "freebsd7": "freebsd7.0",
        "freebsd8": "freebsd8.0",
        "freebsd9": "freebsd9.0",
        "mandriva2009": "mandriva2009.0",
        "mandriva2010": "mandriva2010.0",
        "mbs1": "mbs1.0",
        "msdos": "msdos6.22",
        "openbsd4": "openbsd4.2",
        "opensolaris": "opensolaris2009.06",
        "opensuse11": "opensuse11.4",
        "opensuse12": "opensuse12.3",
        "rhel4": "rhel4.0",
        "rhel5": "rhel5.0",
        "rhel6": "rhel6.0",
        "rhel7": "rhel7.0",
        "ubuntuhardy": "ubuntu8.04",
        "ubuntuintrepid": "ubuntu8.10",
        "ubuntujaunty": "ubuntu9.04",
        "ubuntukarmic": "ubuntu9.10",
        "ubuntulucid": "ubuntu10.04",
        "ubuntumaverick": "ubuntu10.10",
        "ubuntunatty": "ubuntu11.04",
        "ubuntuoneiric": "ubuntu11.10",
        "ubuntuprecise": "ubuntu12.04",
        "ubuntuquantal": "ubuntu12.10",
        "ubunturaring": "ubuntu13.04",
        "ubuntusaucy": "ubuntu13.10",
        "virtio26": "fedora10",
        "vista": "winvista",
        "winxp64": "winxp",

        # Old --os-type values
        "linux": "generic",
        "windows": "winxp",
        "solaris": "solaris10",
        "unix": "freebsd9.0",
        "other": "generic",
    }


    #################
    # Internal APIs #
    #################

    def _make_default_variants(self):
        """Return the initial variants dict containing only the 'generic' OS."""
        ret = {}

        # Generic variant
        v = _OsVariant(None)
        ret[v.name] = v
        return ret

    @property
    def _os_loader(self):
        # Libosinfo.Loader with the system osinfo-db processed; created on
        # first access and cached for the process lifetime.
        if not self.__os_loader:
            loader = Libosinfo.Loader()
            loader.process_default_path()

            self.__os_loader = loader
        return self.__os_loader

    @property
    def _all_variants(self):
        # {short_id: _OsVariant} covering every OS in the DB plus 'generic';
        # an OS with multiple short-ids appears under each of them.
        if not self.__all_variants:
            loader = self._os_loader
            allvariants = self._make_default_variants()
            db = loader.get_db()
            oslist = db.get_os_list()
            for o in _OsinfoIter(oslist):
                osi = _OsVariant(o)
                for name in osi.get_short_ids():
                    allvariants[name] = osi

            self.__all_variants = allvariants
        return self.__all_variants


    ###############
    # Public APIs #
    ###############

    def lookup_os_by_full_id(self, full_id, raise_error=False):
        """
        Look up an OS by its full libosinfo URL id. Returns the matching
        _OsVariant, or None when not found (or raises ValueError when
        raise_error is set).
        """
        for osobj in self._all_variants.values():
            if osobj.full_id == full_id:
                return osobj
        if raise_error:
            raise ValueError(_("Unknown libosinfo ID '%s'") % full_id)

    def lookup_os(self, key, raise_error=False):
        """
        Look up an OS by short-id, transparently resolving historical
        virt-install aliases (with a deprecation warning). Returns the
        matching _OsVariant, None when not found, or raises ValueError
        when raise_error is set.
        """
        if key not in self._all_variants and key in self._aliases:
            alias = self._aliases[key]
            # Added 2018-10-02. Maybe remove aliases in a year
            msg = (_("OS name '%(oldname)s' is deprecated, using '%(newname)s' "
                    "instead. This alias will be removed in the future.") %
                    {"oldname": key, "newname": alias})
            log.warning(msg)
            key = alias

        ret = self._all_variants.get(key)
        if ret is None and raise_error:
            raise ValueError(_("Unknown OS name '%s'. "
                "See `osinfo-query os` for valid values.") % key)
        return ret

    def guess_os_by_iso(self, location):
        """
        Ask libosinfo to identify the OS of the ISO media at `location`.
        Returns (short_id, _OsMedia) on success, None on failure.
        """
        try:
            media = _media_create_from_location(location)
        except Exception as e:
            log.debug("Error creating libosinfo media object: %s", str(e))
            return None

        if not self._os_loader.get_db().identify_media(media):
            return None  # pragma: no cover
        return media.get_os().get_short_id(), _OsMedia(media)

    def guess_os_by_tree(self, location):
        """
        Ask libosinfo to identify the OS of the install tree at `location`
        (local path or URL). Returns (short_id, _OsTree) on success,
        None on failure.
        """
        if location.startswith("/"):
            location = "file://" + location

        if _in_testsuite() and not location.startswith("file:"):
            # We have mock network tests, but we don't want to pass the
            # fake URL to libosinfo because it slows down the testcase
            return None

        try:
            tree = Libosinfo.Tree.create_from_location(location, None)
        except Exception as e:
            log.debug("Error creating libosinfo tree object for "
                "location=%s : %s", location, str(e))
            return None

        db = self._os_loader.get_db()
        if hasattr(db, "identify_tree"):
            # osinfo_db_identify_tree is part of libosinfo 1.6.0
            if not db.identify_tree(tree):
                return None  # pragma: no cover
            return tree.get_os().get_short_id(), _OsTree(tree)
        else:  # pragma: no cover
            # Older libosinfo fallback API
            osobj, treeobj = self._os_loader.get_db().guess_os_from_tree(tree)
            if not osobj:
                return None  # pragma: no cover
            return osobj.get_short_id(), _OsTree(treeobj)

    def list_os(self):
        """
        List all OSes in the DB
        """
        sortmap = {}

        for osobj in self._all_variants.values():
            sortmap[osobj.name] = osobj

        return _sort(sortmap)
|
2015-04-05 00:04:11 +08:00
|
|
|
|
|
|
|
|
2019-03-06 01:16:49 +08:00
|
|
|
# Module-level singleton; cheap to construct since the libosinfo database
# is only loaded lazily on first lookup, not at import time.
OSDB = _OSDB()
|
|
|
|
|
|
|
|
|
2019-05-12 05:53:07 +08:00
|
|
|
#####################
|
|
|
|
# OsResources class #
|
|
|
|
#####################
|
|
|
|
|
|
|
|
class _OsResources:
    """
    Wrapper around the libosinfo minimum/recommended resource lists,
    exposing per-arch RAM/CPU/storage lookups with an 'all' arch fallback.
    """
    def __init__(self, minimum, recommended):
        self._minimum = self._convert_to_dict(minimum)
        self._recommended = self._convert_to_dict(recommended)

    def _convert_to_dict(self, resources):
        """
        Convert an OsResources object to a dictionary for easier
        lookups. Layout is: {arch: {strkey: value}}
        """
        return {
            res.get_architecture(): {
                "ram": res.get_ram(),
                "n-cpus": res.get_n_cpus(),
                "storage": res.get_storage(),
            }
            for res in _OsinfoIter(resources)
        }

    def _get_key(self, resources, key, arch):
        # An arch-specific entry wins over the generic "all" entry
        for checkarch in (arch, "all"):
            archvals = resources.get(checkarch)
            if archvals and key in archvals:
                return archvals[key]

    def _get_minimum_key(self, key, arch):
        # Only positive values are meaningful
        val = self._get_key(self._minimum, key, arch)
        if val and val > 0:
            return val

    def _get_recommended_key(self, key, arch):
        val = self._get_key(self._recommended, key, arch)
        if val and val > 0:
            return val
        # If we are looking for a recommended value, but the OS
        # DB only has minimum resources tracked, double the minimum
        # value as an approximation at a 'recommended' value
        val = self._get_minimum_key(key, arch)
        if not val:
            return None
        log.debug("No recommended value found for key='%s', "
                "using minimum=%s * 2", key, val)
        return val * 2

    def get_minimum_ram(self, arch):
        """Minimum RAM in bytes for `arch`, or None if untracked."""
        return self._get_minimum_key("ram", arch)

    def get_recommended_ram(self, arch):
        """Recommended RAM in bytes for `arch`, or None if untracked."""
        return self._get_recommended_key("ram", arch)

    def get_recommended_ncpus(self, arch):
        """Recommended CPU count for `arch`, or None if untracked."""
        return self._get_recommended_key("n-cpus", arch)

    def get_recommended_storage(self, arch):
        """Recommended storage in bytes for `arch`, or None if untracked."""
        return self._get_recommended_key("storage", arch)
|
|
|
|
|
|
|
|
|
2015-04-05 00:04:11 +08:00
|
|
|
#####################
|
|
|
|
# OsVariant classes #
|
|
|
|
#####################
|
2013-08-18 05:53:17 +08:00
|
|
|
|
2015-04-05 00:37:46 +08:00
|
|
|
class _OsVariant(object):
    """
    Wrapper around a single Libosinfo.Os object (or None for the synthetic
    'generic' OS), exposing the device/resource/install-tree queries the
    rest of virtinst needs.
    """
    def __init__(self, o):
        # Underlying Libosinfo.Os, or None for the 'generic' variant
        self._os = o
        self._family = self._os and self._os.get_family() or None

        # All short-ids for this OS; newer libosinfo can report several
        self._short_ids = ["generic"]
        if self._os:
            self._short_ids = [self._os.get_short_id()]
            if hasattr(self._os, "get_short_id_list"):
                self._short_ids = self._os.get_short_id_list()
        # Primary short-id used as the canonical name
        self.name = self._short_ids[0]

        self.full_id = self._os and self._os.get_id() or None
        self.label = self._os and self._os.get_name() or "Generic default"
        self.codename = self._os and self._os.get_codename() or ""
        self.distro = self._os and self._os.get_distro() or ""
        self.version = self._os and self._os.get_version() or None

        # True if the OS is considered end-of-life, see _get_eol()
        self.eol = self._get_eol()

    def __repr__(self):
        return "<%s name=%s>" % (self.__class__.__name__, self.name)


    ########################
    # Internal helper APIs #
    ########################

    def _is_related_to(self, related_os_list, osobj=None,
            check_derives=True, check_upgrades=True, check_clones=True):
        """
        Recursively walk the DERIVES_FROM/CLONES/UPGRADES relations of
        `osobj` (default: this OS) looking for a short-id contained in
        `related_os_list`. NOTE: callers pass a plain string here too, in
        which case the `in` checks are substring matches.
        """
        osobj = osobj or self._os
        if not osobj:
            return False

        if osobj.get_short_id() in related_os_list:
            return True

        check_list = []
        def _extend(newl):
            # Accumulate without duplicates
            for obj in newl:
                if obj not in check_list:
                    check_list.append(obj)

        if check_derives:
            _extend(osobj.get_related(
                Libosinfo.ProductRelationship.DERIVES_FROM).get_elements())
        if check_clones:
            _extend(osobj.get_related(
                Libosinfo.ProductRelationship.CLONES).get_elements())
        if check_upgrades:
            _extend(osobj.get_related(
                Libosinfo.ProductRelationship.UPGRADES).get_elements())

        for checkobj in check_list:
            if (checkobj.get_short_id() in related_os_list or
                    self._is_related_to(related_os_list, osobj=checkobj,
                        check_upgrades=check_upgrades,
                        check_derives=check_derives,
                        check_clones=check_clones)):
                return True

        return False

    def _get_all_devices(self):
        """All Libosinfo device objects this OS supports ([] for generic)."""
        if not self._os:
            return []
        return list(_OsinfoIter(self._os.get_all_devices()))

    def _device_filter(self, devids=None, cls=None, extra_devs=None):
        """
        Return the names of supported devices matching the given osinfo
        device ids and/or device-class regex. `extra_devs` are additional
        device objects (e.g. from unattended drivers) matched against
        `devids` only.
        """
        ret = []
        devids = devids or []
        for dev in self._get_all_devices():
            if devids and dev.get_id() not in devids:
                continue
            if cls and not re.match(cls, dev.get_class()):
                continue
            ret.append(dev.get_name())

        extra_devs = extra_devs or []
        for dev in extra_devs:
            if dev.get_id() not in devids:
                continue
            ret.append(dev.get_name())

        return ret


    ###############
    # Cached APIs #
    ###############

    def _get_eol(self):
        """
        Decide whether this OS is end-of-life: past its explicit EOL date,
        or (when no EOL date is tracked) released more than 5 years ago.
        Rolling releases are never EOL.
        """
        eol = self._os and self._os.get_eol_date() or None
        rel = self._os and self._os.get_release_date() or None

        # We can use os.get_release_status() & osinfo.ReleaseStatus.ROLLING
        # if we require libosinfo >= 1.4.0.
        release_status = self._os and self._os.get_param_value(
            Libosinfo.OS_PROP_RELEASE_STATUS) or None

        def _glib_to_datetime(glibdate):
            # Convert a GLib.Date to datetime via year + day-of-year
            date = "%s-%s" % (glibdate.get_year(), glibdate.get_day_of_year())
            return datetime.datetime.strptime(date, "%Y-%j")

        now = datetime.datetime.today()
        if eol is not None:
            return now > _glib_to_datetime(eol)

        # Rolling distributions are never EOL.
        if release_status == "rolling":
            return False

        # If no EOL is present, assume EOL if release was > 5 years ago
        if rel is not None:
            rel5 = _glib_to_datetime(rel) + datetime.timedelta(days=365 * 5)
            return now > rel5
        return False


    ###############
    # Public APIs #
    ###############

    def get_handle(self):
        """Return the raw Libosinfo.Os handle (None for generic)."""
        return self._os

    def is_generic(self):
        """True for the synthetic 'generic' fallback OS."""
        return self._os is None

    def is_windows(self):
        """True when the osinfo family marks this as a Windows variant."""
        return self._family in ['win9x', 'winnt', 'win16']

    def get_short_ids(self):
        """Copy of all short-ids for this OS."""
        return self._short_ids[:]

    def broken_uefi_with_hyperv(self):
        # Some windows versions are broken with hyperv enlightenments + UEFI
        # https://bugzilla.redhat.com/show_bug.cgi?id=1185253
        # https://bugs.launchpad.net/qemu/+bug/1593605
        return self.name in ("win2k8r2", "win7")

    def get_clock(self):
        """Preferred guest clock offset: 'localtime' or 'utc'."""
        if self.is_windows() or self._family in ['solaris']:
            return "localtime"
        return "utc"

    def supported_netmodels(self):
        """Names of all network device models this OS supports."""
        return self._device_filter(cls="net")

    def supports_usbtablet(self, extra_devs=None):
        # If no OS specified, still default to tablet
        if not self._os:
            return True

        devids = ["http://usb.org/usb/80ee/0021"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtiodisk(self, extra_devs=None):
        # virtio-block and virtio1.0-block
        devids = ["http://pcisig.com/pci/1af4/1001",
                  "http://pcisig.com/pci/1af4/1042"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtioscsi(self, extra_devs=None):
        # virtio-scsi and virtio1.0-scsi
        devids = ["http://pcisig.com/pci/1af4/1004",
                  "http://pcisig.com/pci/1af4/1048"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtionet(self, extra_devs=None):
        # virtio-net and virtio1.0-net
        devids = ["http://pcisig.com/pci/1af4/1000",
                  "http://pcisig.com/pci/1af4/1041"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtiorng(self, extra_devs=None):
        # virtio-rng and virtio1.0-rng
        devids = ["http://pcisig.com/pci/1af4/1005",
                  "http://pcisig.com/pci/1af4/1044"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtioballoon(self, extra_devs=None):
        # virtio-balloon and virtio1.0-balloon
        devids = ["http://pcisig.com/pci/1af4/1002",
                  "http://pcisig.com/pci/1af4/1045"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtioserial(self, extra_devs=None):
        # virtio-serial and virtio1.0-serial
        devids = ["http://pcisig.com/pci/1af4/1003",
                  "http://pcisig.com/pci/1af4/1043"]
        if self._device_filter(devids=devids, extra_devs=extra_devs):
            return True
        # osinfo data was wrong for RHEL/centos here until Oct 2018
        # Remove this hack after 6 months or so
        return self._is_related_to("rhel6.0")

    def supports_virtioinput(self, extra_devs=None):
        # virtio1.0-input
        devids = ["http://pcisig.com/pci/1af4/1052"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_usb3(self, extra_devs=None):
        # qemu-xhci
        devids = ["http://pcisig.com/pci/1b36/0004"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtio1(self, extra_devs=None):
        # Use virtio1.0-net device as a proxy for virtio1.0 as a whole
        devids = ["http://pcisig.com/pci/1af4/1041"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_chipset_q35(self, extra_devs=None):
        # For our purposes, check for the union of q35 + virtio1.0 support
        if (self.supports_virtionet(extra_devs=extra_devs) and
            not self.supports_virtio1(extra_devs=extra_devs)):
            return False
        devids = ["http://qemu.org/chipset/x86/q35"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def get_recommended_resources(self):
        """Return an _OsResources wrapping this OS's resource lists."""
        minimum = self._os and self._os.get_minimum_resources() or None
        recommended = self._os and self._os.get_recommended_resources() or None
        return _OsResources(minimum, recommended)

    def get_network_install_required_ram(self, guest):
        """
        RAM required for a network install on the guest's arch, or None
        when the DB/libosinfo version doesn't track it.
        """
        if hasattr(self._os, "get_network_install_resources"):
            resources = self._os.get_network_install_resources()
            for r in _OsinfoIter(resources):
                arch = r.get_architecture()
                if arch == guest.os.arch or arch == "all":
                    return r.get_ram()

    def get_kernel_url_arg(self):
        """
        Kernel argument name the distro's installer uses to reference
        a network source, possibly bypassing some installer prompts
        """
        if not self._os:
            return None

        # SUSE distros
        if self.distro in ["caasp", "sle", "sled", "sles", "opensuse"]:
            return "install"

        if self.distro not in ["centos", "rhel", "fedora"]:
            return None

        # Red Hat distros
        try:
            if re.match(r"[0-9]+-unknown", self.version):
                version = float(self.version.split("-")[0])
            else:
                version = float(self.version)
        except Exception:
            # Can hit this for -rawhide or -unknown
            version = 999

        if self.distro in ["centos", "rhel"] and version < 7:
            return "method"

        if self.distro in ["fedora"] and version < 19:
            return "method"

        return "inst.repo"

    def _get_generic_location(self, treelist, arch, profile):
        """
        Pick a tree URL for `arch`, preferring a variant whose name
        matches `profile`, otherwise falling back to any arch match.
        """
        if not hasattr(Libosinfo.Tree, "get_os_variants"):  # pragma: no cover
            # Old libosinfo: no variant info, take the first arch match
            for tree in treelist:
                if tree.get_architecture() == arch:
                    return tree.get_url()
            return None

        fallback_tree = None
        # Map our CLI profile names onto osinfo variant name fragments
        if profile == "jeos":
            profile = "Server"
        elif profile == "desktop":
            profile = "Workstation"
        elif not profile:
            profile = "Everything"

        for tree in treelist:
            if tree.get_architecture() != arch:
                continue

            variant_list = tree.get_os_variants()
            fallback_tree = tree
            for variant in _OsinfoIter(variant_list):
                if profile in variant.get_name():
                    return tree.get_url()

        if fallback_tree:
            return fallback_tree.get_url()
        return None

    def get_location(self, arch, profile=None):
        """
        Return an install tree URL for `arch` (and optional profile).
        Raises RuntimeError when the OS has no usable tree.
        """
        treelist = []
        if self._os:
            treelist = list(_OsinfoIter(self._os.get_tree_list()))

        if not treelist:
            raise RuntimeError(
                _("OS '%s' does not have a URL location") % self.name)

        # Some distros have more than one URL for a specific architecture,
        # which is the case for Fedora and different variants (Server,
        # Workstation). Later on, we'll have to differentiate that and return
        # the right one. However, for now, let's just rely on returning the
        # most generic tree possible.
        location = self._get_generic_location(treelist, arch, profile)
        if location:
            return location

        raise RuntimeError(
            _("OS '%(osname)s' does not have a URL location "
              "for the architecture '%(archname)s'") %
            {"osname": self.name, "archname": arch})

    def get_install_script_list(self):
        """All unattended install scripts for this OS ([] for generic)."""
        if not self._os:
            return []  # pragma: no cover
        return list(_OsinfoIter(self._os.get_install_script_list()))

    def _get_installable_drivers(self, arch):
        """Device drivers for `arch` (or arch == 'all' for everything)."""
        if not self._os:
            return []

        installable_drivers = []
        device_drivers = list(_OsinfoIter(self._os.get_device_drivers()))
        for device_driver in device_drivers:
            if arch != "all" and device_driver.get_architecture() != arch:
                continue

            installable_drivers.append(device_driver)
        return installable_drivers

    def _get_pre_installable_drivers(self, arch):
        """Drivers that can be injected before/at install time."""
        installable_drivers = self._get_installable_drivers(arch)
        pre_inst_drivers = []
        for driver in installable_drivers:
            if not driver.get_pre_installable():
                continue

            pre_inst_drivers.append(driver)
        return pre_inst_drivers

    def _get_post_installable_drivers(self, arch):
        """Drivers/agents installed after the OS install finishes."""
        installable_drivers = self._get_installable_drivers(arch)
        post_inst_drivers = []
        for driver in installable_drivers:
            if driver.get_pre_installable():
                continue

            post_inst_drivers.append(driver)
        return post_inst_drivers

    def _get_drivers_location(self, drivers):
        """Full URLs (location + filename) for every file of `drivers`."""
        locations = []
        for driver in drivers:
            filenames = driver.get_files()
            for filename in filenames:
                location = os.path.join(driver.get_location(), filename)
                locations.append(location)
        return locations

    def get_pre_installable_drivers_location(self, arch):
        """File URLs for all pre-installable drivers for `arch`."""
        pre_inst_drivers = self._get_pre_installable_drivers(arch)

        return self._get_drivers_location(pre_inst_drivers)

    def get_post_installable_drivers_location(self, arch):
        """File URLs for all post-installable drivers for `arch`."""
        post_inst_drivers = self._get_post_installable_drivers(arch)

        return self._get_drivers_location(post_inst_drivers)

    def get_pre_installable_devices(self, arch):
        """Device objects covered by the pre-installable drivers."""
        drivers = self._get_pre_installable_drivers(arch)
        devices = []
        for driver in drivers:
            devices += list(_OsinfoIter(driver.get_devices()))
        return devices

    def supports_unattended_drivers(self, arch):
        """True when pre-installable (install-time) drivers exist for `arch`."""
        if self._get_pre_installable_drivers(arch):
            return True
        return False

    def supports_unattended_agents(self, arch):
        """True when post-installable drivers/agents exist for `arch`."""
        if self._get_post_installable_drivers(arch):
            return True
        return False
|
|
|
|
|
2019-03-22 23:23:36 +08:00
|
|
|
|
2019-06-09 23:26:28 +08:00
|
|
|
class _OsMedia(object):
|
2019-03-22 23:23:36 +08:00
|
|
|
def __init__(self, osinfo_media):
|
2019-06-09 23:26:28 +08:00
|
|
|
self._media = osinfo_media
|
|
|
|
|
|
|
|
def get_kernel_path(self):
|
|
|
|
return self._media.get_kernel_path()
|
|
|
|
def get_initrd_path(self):
|
|
|
|
return self._media.get_initrd_path()
|
|
|
|
def supports_installer_script(self):
|
|
|
|
return self._media.supports_installer_script()
|
|
|
|
|
|
|
|
def is_netinst(self):
|
|
|
|
variants = list(_OsinfoIter(self._media.get_os_variants()))
|
|
|
|
for variant in variants:
|
|
|
|
if "netinst" in variant.get_id():
|
|
|
|
return True
|
2020-01-27 23:13:03 +08:00
|
|
|
return False # pragma: no cover
|
2019-06-09 23:26:28 +08:00
|
|
|
|
|
|
|
def get_install_script_list(self):
|
|
|
|
return list(_OsinfoIter(self._media.get_install_script_list()))
|
2019-07-12 21:02:20 +08:00
|
|
|
|
|
|
|
def get_osinfo_media(self):
|
|
|
|
return self._media
|
2019-07-16 23:14:24 +08:00
|
|
|
|
|
|
|
|
|
|
|
class _OsTree(object):
|
|
|
|
def __init__(self, osinfo_tree):
|
|
|
|
self._tree = osinfo_tree
|
|
|
|
|
|
|
|
def get_osinfo_tree(self):
|
|
|
|
return self._tree
|