2013-03-18 05:06:52 +08:00
|
|
|
#
|
|
|
|
# List of OS Specific data
|
|
|
|
#
|
2014-09-09 19:37:20 +08:00
|
|
|
# Copyright 2006-2008, 2013-2014 Red Hat, Inc.
|
2013-03-18 05:06:52 +08:00
|
|
|
#
|
2018-04-04 21:35:41 +08:00
|
|
|
# This work is licensed under the GNU GPLv2 or later.
|
2018-03-21 03:00:02 +08:00
|
|
|
# See the COPYING file in the top-level directory.
|
2013-03-18 05:06:52 +08:00
|
|
|
|
2018-09-30 03:22:44 +08:00
|
|
|
import datetime
|
2019-06-08 08:55:11 +08:00
|
|
|
import os
|
2014-09-23 05:20:07 +08:00
|
|
|
import re
|
|
|
|
|
2019-03-06 01:34:51 +08:00
|
|
|
from gi.repository import Libosinfo
|
2014-09-23 05:20:07 +08:00
|
|
|
|
2020-07-18 06:46:54 +08:00
|
|
|
from . import xmlutil
|
2019-06-17 09:12:39 +08:00
|
|
|
from .logger import log
|
|
|
|
|
2013-08-11 06:48:43 +08:00
|
|
|
|
2019-08-02 07:15:08 +08:00
|
|
|
def _media_create_from_location(location):
    """
    Build a Libosinfo.Media for `location`, preferring the flags
    variant of the API when the installed libosinfo provides it.
    """
    has_flags_api = hasattr(
            Libosinfo.Media, "create_from_location_with_flags")

    if has_flags_api:
        # We prefer this API, because by default it will not
        # reject non-bootable media, like debian s390x
        # pylint: disable=no-member
        return Libosinfo.Media.create_from_location_with_flags(
                location, None, 0)

    return Libosinfo.Media.create_from_location(  # pragma: no cover
            location, None)
|
|
|
|
|
|
|
|
|
2019-03-06 05:10:16 +08:00
|
|
|
class _OsinfoIter:
|
|
|
|
"""
|
|
|
|
Helper to turn osinfo style get_length/get_nth lists into python
|
|
|
|
iterables
|
|
|
|
"""
|
|
|
|
def __init__(self, listobj):
|
|
|
|
self.current = 0
|
|
|
|
self.listobj = listobj
|
2019-05-12 05:53:07 +08:00
|
|
|
self.high = -1
|
|
|
|
if self.listobj:
|
|
|
|
self.high = self.listobj.get_length() - 1
|
2019-03-06 05:10:16 +08:00
|
|
|
|
|
|
|
def __iter__(self):
|
|
|
|
return self
|
|
|
|
def __next__(self):
|
|
|
|
if self.current > self.high:
|
|
|
|
raise StopIteration
|
|
|
|
ret = self.listobj.get_nth(self.current)
|
|
|
|
self.current += 1
|
|
|
|
return ret
|
|
|
|
|
|
|
|
|
2015-04-05 00:04:11 +08:00
|
|
|
class _OSDB(object):
    """
    Entry point for the public API
    """
    def __init__(self):
        # Both caches are populated lazily by the properties below
        self.__os_loader = None
        self.__os_generic = None

    #################
    # Internal APIs #
    #################

    @property
    def _os_generic(self):
        """
        Lazily built _OsVariant for our custom 'generic' entry, which
        is not part of the osinfo-db data.
        """
        if not self.__os_generic:
            # Add our custom generic variant
            o = Libosinfo.Os()
            o.set_param("short-id", "generic")
            o.set_param("name",
                    _("Generic or unknown OS. Usage is not recommended."))
            self.__os_generic = _OsVariant(o)
        return self.__os_generic

    @property
    def _os_loader(self):
        """
        Lazily created Libosinfo.Loader with the default osinfo-db
        paths processed.
        """
        if not self.__os_loader:
            loader = Libosinfo.Loader()
            loader.process_default_path()

            self.__os_loader = loader
        return self.__os_loader

    @property
    def _os_db(self):
        # The underlying Libosinfo.Db from the cached loader
        return self._os_loader.get_db()

    ###############
    # Public APIs #
    ###############

    def lookup_os_by_full_id(self, full_id, raise_error=False):
        """
        Look up an OS by its full libosinfo ID. Returns an _OsVariant,
        or None when not found; raises ValueError instead when
        raise_error is set.
        """
        osobj = self._os_db.get_os(full_id)
        if osobj is None:
            if raise_error:
                raise ValueError(_("Unknown libosinfo ID '%s'") % full_id)
            return None
        return _OsVariant(osobj)

    def lookup_os(self, key, raise_error=False):
        """
        Look up an OS by short-id. Returns an _OsVariant, or None when
        not found; raises ValueError instead when raise_error is set.
        """
        # Our custom 'generic' entry is not in the osinfo DB
        if key == self._os_generic.name:
            return self._os_generic

        flt = Libosinfo.Filter()
        flt.add_constraint(Libosinfo.PRODUCT_PROP_SHORT_ID,
                key)
        oslist = self._os_db.get_os_list().new_filtered(flt).get_elements()
        if len(oslist) == 0:
            if raise_error:
                raise ValueError(_("Unknown OS name '%s'. "
                    "See `--osinfo list` for valid values.") % key)
            return None
        return _OsVariant(oslist[0])

    def guess_os_by_iso(self, location):
        """
        Run libosinfo media detection on `location`. Returns a
        (short_id, _OsMedia) tuple on a match, None otherwise.
        """
        try:
            media = _media_create_from_location(location)
        except Exception as e:
            log.debug("Error creating libosinfo media object: %s", str(e))
            return None

        if not self._os_db.identify_media(media):
            return None
        return media.get_os().get_short_id(), _OsMedia(media)

    def guess_os_by_tree(self, location):
        """
        Run libosinfo tree detection on `location` (URL or local path).
        Returns a (short_id, _OsTree) tuple on a match, None otherwise.
        """
        if location.startswith("/"):
            location = "file://" + location

        if xmlutil.in_testsuite() and not location.startswith("file:"):
            # We have mock network tests, but we don't want to pass the
            # fake URL to libosinfo because it slows down the testcase
            return None

        try:
            tree = Libosinfo.Tree.create_from_location(location, None)
        except Exception as e:
            log.debug("Error creating libosinfo tree object for "
                    "location=%s : %s", location, str(e))
            return None

        if hasattr(self._os_db, "identify_tree"):
            # osinfo_db_identify_tree is part of libosinfo 1.6.0
            if not self._os_db.identify_tree(tree):
                return None  # pragma: no cover
            return tree.get_os().get_short_id(), _OsTree(tree)
        else:  # pragma: no cover
            osobj, treeobj = self._os_db.guess_os_from_tree(tree)
            if not osobj:
                return None  # pragma: no cover
            return osobj.get_short_id(), _OsTree(treeobj)

    def list_os(self, sortkey="name"):
        """
        List all OSes in the DB, sorting by the passed _OsVariant attribute
        """
        oslist = [_OsVariant(osent) for osent in
                  self._os_db.get_os_list().get_elements()]
        # Our custom 'generic' entry is appended manually since it is
        # not in the osinfo DB
        oslist.append(self._os_generic)

        # human/natural sort, but with reverse sorted numbers
        def to_int(text):
            return (int(text) * -1) if text.isdigit() else text.lower()
        def alphanum_key(obj):
            val = getattr(obj, sortkey)
            return [to_int(c) for c in re.split('([0-9]+)', val)]
        return list(sorted(oslist, key=alphanum_key))
|
2015-04-05 00:04:11 +08:00
|
|
|
|
|
|
|
|
2019-03-06 01:16:49 +08:00
|
|
|
# Module-level singleton; the libosinfo DB itself is only loaded
# lazily on first use (see _OSDB._os_loader)
OSDB = _OSDB()
|
|
|
|
|
|
|
|
|
2019-05-12 05:53:07 +08:00
|
|
|
#####################
|
|
|
|
# OsResources class #
|
|
|
|
#####################
|
|
|
|
|
|
|
|
class _OsResources:
|
|
|
|
def __init__(self, minimum, recommended):
|
|
|
|
self._minimum = self._convert_to_dict(minimum)
|
|
|
|
self._recommended = self._convert_to_dict(recommended)
|
|
|
|
|
|
|
|
def _convert_to_dict(self, resources):
|
|
|
|
"""
|
|
|
|
Convert an OsResources object to a dictionary for easier
|
|
|
|
lookups. Layout is: {arch: {strkey: value}}
|
|
|
|
"""
|
|
|
|
ret = {}
|
|
|
|
for r in _OsinfoIter(resources):
|
|
|
|
vals = {}
|
|
|
|
vals["ram"] = r.get_ram()
|
|
|
|
vals["n-cpus"] = r.get_n_cpus()
|
|
|
|
vals["storage"] = r.get_storage()
|
|
|
|
ret[r.get_architecture()] = vals
|
|
|
|
return ret
|
|
|
|
|
|
|
|
def _get_key(self, resources, key, arch):
|
|
|
|
for checkarch in [arch, "all"]:
|
2020-08-12 04:50:50 +08:00
|
|
|
val = resources.get(checkarch, {}).get(key, -1)
|
|
|
|
if val != -1:
|
|
|
|
return val
|
2019-05-12 05:53:07 +08:00
|
|
|
|
2019-06-14 23:24:10 +08:00
|
|
|
def _get_minimum_key(self, key, arch):
|
|
|
|
val = self._get_key(self._minimum, key, arch)
|
|
|
|
if val and val > 0:
|
|
|
|
return val
|
|
|
|
|
2019-05-12 05:53:07 +08:00
|
|
|
def _get_recommended_key(self, key, arch):
|
|
|
|
val = self._get_key(self._recommended, key, arch)
|
|
|
|
if val and val > 0:
|
|
|
|
return val
|
|
|
|
# If we are looking for a recommended value, but the OS
|
|
|
|
# DB only has minimum resources tracked, double the minimum
|
|
|
|
# value as an approximation at a 'recommended' value
|
2019-06-14 23:24:10 +08:00
|
|
|
val = self._get_minimum_key(key, arch)
|
|
|
|
if val:
|
2019-06-17 09:12:39 +08:00
|
|
|
log.debug("No recommended value found for key='%s', "
|
2019-06-14 23:24:10 +08:00
|
|
|
"using minimum=%s * 2", key, val)
|
2019-05-12 05:53:07 +08:00
|
|
|
return val * 2
|
|
|
|
return None
|
|
|
|
|
2019-06-14 23:24:10 +08:00
|
|
|
def get_minimum_ram(self, arch):
|
|
|
|
return self._get_minimum_key("ram", arch)
|
|
|
|
|
2019-05-12 05:53:07 +08:00
|
|
|
def get_recommended_ram(self, arch):
|
|
|
|
return self._get_recommended_key("ram", arch)
|
|
|
|
|
|
|
|
def get_recommended_ncpus(self, arch):
|
|
|
|
return self._get_recommended_key("n-cpus", arch)
|
|
|
|
|
|
|
|
def get_recommended_storage(self, arch):
|
|
|
|
return self._get_recommended_key("storage", arch)
|
|
|
|
|
|
|
|
|
2015-04-05 00:04:11 +08:00
|
|
|
#####################
|
|
|
|
# OsVariant classes #
|
|
|
|
#####################
|
2013-08-18 05:53:17 +08:00
|
|
|
|
2015-04-05 00:37:46 +08:00
|
|
|
class _OsVariant(object):
    """
    Wrapper around a single Libosinfo.Os entry, exposing the distro
    metadata plus the device/driver/tree queries the rest of the
    code uses.
    """
    def __init__(self, o):
        self._os = o

        # Prefer the full short-id list when the installed libosinfo
        # provides the API; fall back to the single short-id otherwise
        self._short_ids = [self._os.get_short_id()]
        if hasattr(self._os, "get_short_id_list"):
            self._short_ids = self._os.get_short_id_list()
        self.name = self._short_ids[0]
        self.all_names = list(sorted(set(self._short_ids)))

        self._family = self._os.get_family()
        self.full_id = self._os.get_id()
        self.label = self._os.get_name()
        self.codename = self._os.get_codename() or ""
        self.distro = self._os.get_distro() or ""
        self.version = self._os.get_version()

        # True if the OS is considered end-of-life, see _get_eol
        self.eol = self._get_eol()

    def __repr__(self):
        return "<%s name=%s>" % (self.__class__.__name__, self.name)


    ########################
    # Internal helper APIs #
    ########################

    def _is_related_to(self, related_os_list, osobj=None,
            check_derives=True, check_upgrades=True, check_clones=True):
        """
        Return True if this OS (or `osobj`) has a short-id in
        `related_os_list`, directly or through any chain of
        derives-from/clones/upgrades relationships (recursive).
        """
        osobj = osobj or self._os
        if osobj.get_short_id() in related_os_list:
            return True

        check_list = []
        def _extend(newl):
            # Accumulate without duplicates
            for obj in newl:
                if obj not in check_list:
                    check_list.append(obj)

        if check_derives:
            _extend(osobj.get_related(
                Libosinfo.ProductRelationship.DERIVES_FROM).get_elements())
        if check_clones:
            _extend(osobj.get_related(
                Libosinfo.ProductRelationship.CLONES).get_elements())
        if check_upgrades:
            _extend(osobj.get_related(
                Libosinfo.ProductRelationship.UPGRADES).get_elements())

        for checkobj in check_list:
            if (checkobj.get_short_id() in related_os_list or
                self._is_related_to(related_os_list, osobj=checkobj,
                    check_upgrades=check_upgrades,
                    check_derives=check_derives,
                    check_clones=check_clones)):
                return True

        return False

    def _get_all_devices(self):
        # All devices libosinfo reports for this OS
        return list(_OsinfoIter(self._os.get_all_devices()))

    def _device_filter(self, devids=None, cls=None, extra_devs=None):
        """
        Return names of this OS's devices matching the passed device
        IDs and/or device class regex. `extra_devs` are additional
        device objects (not from this OS) matched against `devids`.
        """
        ret = []
        devids = devids or []
        for dev in self._get_all_devices():
            if devids and dev.get_id() not in devids:
                continue
            if cls and not re.match(cls, dev.get_class()):
                continue
            ret.append(dev.get_name())

        extra_devs = extra_devs or []
        for dev in extra_devs:
            if dev.get_id() not in devids:
                continue
            ret.append(dev.get_name())

        return ret


    ###############
    # Cached APIs #
    ###############

    def _get_eol(self):
        """
        Return True if this OS should be considered end-of-life:
        either its EOL date has passed, or (lacking an EOL date and
        not being a rolling release) it was released > 10 years ago.
        """
        eol = self._os.get_eol_date()
        rel = self._os.get_release_date()

        # We can use os.get_release_status() & osinfo.ReleaseStatus.ROLLING
        # if we require libosinfo >= 1.4.0.
        release_status = self._os.get_param_value(
                Libosinfo.OS_PROP_RELEASE_STATUS) or None

        def _glib_to_datetime(glibdate):
            # Convert a GLib.Date to datetime via year + day-of-year
            date = "%s-%s" % (glibdate.get_year(), glibdate.get_day_of_year())
            return datetime.datetime.strptime(date, "%Y-%j")

        now = datetime.datetime.today()
        if eol is not None:
            return now > _glib_to_datetime(eol)

        # Rolling distributions are never EOL.
        if release_status == "rolling":
            return False

        # If no EOL is present, assume EOL if release was > 10 years ago
        if rel is not None:
            rel5 = _glib_to_datetime(rel) + datetime.timedelta(days=365 * 10)
            return now > rel5
        return False


    ###############
    # Public APIs #
    ###############

    def get_handle(self):
        # The raw Libosinfo.Os object
        return self._os

    def is_generic(self):
        # True for our custom catch-all 'generic' entry
        return self.name == "generic"

    def is_linux_generic(self):
        # Matches osinfo-db's linuxYYYY style generic linux entries.
        # NOTE: returns the re.Match object (truthy), not a bool
        return re.match(r"linux\d\d\d\d", self.name)

    def is_windows(self):
        return self._family in ['win9x', 'winnt', 'win16']

    def get_clock(self):
        # Windows and solaris expect the hardware clock in localtime
        if self.is_windows() or self._family in ['solaris']:
            return "localtime"
        return "utc"

    def supported_netmodels(self):
        # Names of all network class devices this OS supports
        return self._device_filter(cls="net")

    def supports_virtiodisk(self, extra_devs=None):
        # virtio-block and virtio1.0-block
        devids = ["http://pcisig.com/pci/1af4/1001",
                  "http://pcisig.com/pci/1af4/1042"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtioscsi(self, extra_devs=None):
        # virtio-scsi and virtio1.0-scsi
        devids = ["http://pcisig.com/pci/1af4/1004",
                  "http://pcisig.com/pci/1af4/1048"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtionet(self, extra_devs=None):
        # virtio-net and virtio1.0-net
        devids = ["http://pcisig.com/pci/1af4/1000",
                  "http://pcisig.com/pci/1af4/1041"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtiorng(self, extra_devs=None):
        # virtio-rng and virtio1.0-rng
        devids = ["http://pcisig.com/pci/1af4/1005",
                  "http://pcisig.com/pci/1af4/1044"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtiogpu(self, extra_devs=None):
        # virtio1.0-gpu and virtio1.0
        devids = ["http://pcisig.com/pci/1af4/1050"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtioballoon(self, extra_devs=None):
        # virtio-balloon and virtio1.0-balloon
        devids = ["http://pcisig.com/pci/1af4/1002",
                  "http://pcisig.com/pci/1af4/1045"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtioserial(self, extra_devs=None):
        devids = ["http://pcisig.com/pci/1af4/1003",
                  "http://pcisig.com/pci/1af4/1043"]
        if self._device_filter(devids=devids, extra_devs=extra_devs):
            return True
        # osinfo data was wrong for RHEL/centos here until Oct 2018
        # Remove this hack after 6 months or so
        return self._is_related_to("rhel6.0")

    def supports_virtioinput(self, extra_devs=None):
        # virtio1.0-input
        devids = ["http://pcisig.com/pci/1af4/1052"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_usb3(self, extra_devs=None):
        # qemu-xhci
        devids = ["http://pcisig.com/pci/1b36/0004"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_virtio1(self, extra_devs=None):
        # Use virtio1.0-net device as a proxy for virtio1.0 as a whole
        devids = ["http://pcisig.com/pci/1af4/1041"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def supports_chipset_q35(self, extra_devs=None):
        # For our purposes, check for the union of q35 + virtio1.0 support
        if (self.supports_virtionet(extra_devs=extra_devs) and
            not self.supports_virtio1(extra_devs=extra_devs)):
            return False
        devids = ["http://qemu.org/chipset/x86/q35"]
        return bool(self._device_filter(devids=devids, extra_devs=extra_devs))

    def _get_firmware_list(self):
        # Firmware info requires a libosinfo with the
        # get_complete_firmware_list API; empty list otherwise
        if hasattr(self._os, "get_complete_firmware_list"):  # pragma: no cover
            return self._os.get_complete_firmware_list().get_elements()
        return []

    def _supports_firmware_type(self, name, arch, default):
        """
        Return whether firmware `name` ('efi'/'bios') is supported for
        `arch` per the osinfo data; `default` when no data matches.
        """
        firmwares = self._get_firmware_list()

        for firmware in firmwares:  # pragma: no cover
            if firmware.get_architecture() != arch:
                continue
            if firmware.get_firmware_type() == name:
                return firmware.is_supported()

        return default

    def requires_firmware_efi(self, arch):
        """
        Return True if the OS requires EFI firmware (supports EFI but
        not BIOS) on `arch`. Errors are logged and treated as False.
        """
        ret = False
        try:
            supports_efi = self._supports_firmware_type("efi", arch, False)
            supports_bios = self._supports_firmware_type("bios", arch, True)
            ret = supports_efi and not supports_bios
        except Exception:  # pragma: no cover
            log.debug("Error checking osinfo firmware support", exc_info=True)

        if self.name == "win11":  # pragma: no cover
            # 2022-03 the libosinfo APIs for firmware haven't landed, and
            # there's no osinfo-db entry for win11. But we know win11 requires
            # UEFI. Hardcode it for now, so the next virt-install release has
            # a better chance of doing the right thing for win11 when
            # it pops up in a osinfo-db release.
            ret = True
        return ret

    def get_recommended_resources(self):
        # Wrap the libosinfo minimum+recommended resource lists
        minimum = self._os.get_minimum_resources()
        recommended = self._os.get_recommended_resources()
        return _OsResources(minimum, recommended)

    def get_network_install_required_ram(self, guest):
        """
        RAM required for a network install on the guest's arch per
        osinfo data, or None if the data/API is unavailable.
        """
        if hasattr(self._os, "get_network_install_resources"):
            resources = self._os.get_network_install_resources()
            for r in _OsinfoIter(resources):
                arch = r.get_architecture()
                if arch == guest.os.arch or arch == "all":
                    return r.get_ram()

    def get_kernel_url_arg(self):
        """
        Kernel argument name the distro's installer uses to reference
        a network source, possibly bypassing some installer prompts
        """
        # Let's ask the OS for its kernel argument for the source
        if hasattr(self._os, "get_kernel_url_argument"):
            osarg = self._os.get_kernel_url_argument()
            if osarg is not None:
                return osarg

        # SUSE distros
        if self.distro in ["caasp", "sle", "sled", "sles", "opensuse"]:
            return "install"

        if self.distro not in ["centos", "rhel", "fedora"]:
            return None

        # Default for RH distros, in case libosinfo data isn't complete
        return "inst.repo"  # pragma: no cover

    def _get_generic_location(self, treelist, arch, profile):
        """
        Pick an install tree URL for `arch` from `treelist`, preferring
        a tree whose variant name matches `profile` (jeos->Server,
        desktop->Workstation, default Everything).
        """
        if not hasattr(Libosinfo.Tree, "get_os_variants"):  # pragma: no cover
            # Old libosinfo: just take the first arch match
            for tree in treelist:
                if tree.get_architecture() == arch:
                    return tree.get_url()
            return None

        fallback_tree = None
        if profile == "jeos":
            profile = "Server"
        elif profile == "desktop":
            profile = "Workstation"
        elif not profile:
            profile = "Everything"

        for tree in treelist:
            if tree.get_architecture() != arch:
                continue

            variant_list = tree.get_os_variants()
            fallback_tree = tree
            for variant in _OsinfoIter(variant_list):
                if profile in variant.get_name():
                    return tree.get_url()

        # No variant name matched; fall back to the last arch match
        if fallback_tree:
            return fallback_tree.get_url()
        return None

    def get_location(self, arch, profile=None):
        """
        Return an install tree URL for `arch` (+ optional profile).
        Raises RuntimeError when the OS has no usable tree.
        """
        treelist = list(_OsinfoIter(self._os.get_tree_list()))

        if not treelist:
            raise RuntimeError(
                _("OS '%s' does not have a URL location") % self.name)

        # Some distros have more than one URL for a specific architecture,
        # which is the case for Fedora and different variants (Server,
        # Workstation). Later on, we'll have to differentiate that and return
        # the right one. However, for now, let's just rely on returning the
        # most generic tree possible.
        location = self._get_generic_location(treelist, arch, profile)
        if location:
            return location

        raise RuntimeError(
            _("OS '%(osname)s' does not have a URL location "
              "for the architecture '%(archname)s'") %
            {"osname": self.name, "archname": arch})

    def get_install_script_list(self):
        # Unattended install scripts attached to this OS
        return list(_OsinfoIter(self._os.get_install_script_list()))

    def _get_installable_drivers(self, arch):
        """Device drivers for `arch` ('all' matches every arch)."""
        installable_drivers = []
        device_drivers = list(_OsinfoIter(self._os.get_device_drivers()))
        for device_driver in device_drivers:
            if arch != "all" and device_driver.get_architecture() != arch:
                continue

            installable_drivers.append(device_driver)
        return installable_drivers

    def _get_pre_installable_drivers(self, arch):
        # Subset of drivers flagged as usable at pre-install time
        installable_drivers = self._get_installable_drivers(arch)
        pre_inst_drivers = []
        for driver in installable_drivers:
            if driver.get_pre_installable():
                pre_inst_drivers.append(driver)
        return pre_inst_drivers

    def _get_drivers_location(self, drivers):
        # Full URLs: driver base location joined with each driver file
        locations = []
        for driver in drivers:
            filenames = driver.get_files()
            for filename in filenames:
                location = os.path.join(driver.get_location(), filename)
                locations.append(location)
        return locations

    def get_pre_installable_drivers_location(self, arch):
        pre_inst_drivers = self._get_pre_installable_drivers(arch)

        return self._get_drivers_location(pre_inst_drivers)

    def get_pre_installable_devices(self, arch):
        # Devices covered by the pre-installable drivers for `arch`
        drivers = self._get_pre_installable_drivers(arch)
        devices = []
        for driver in drivers:
            devices += list(_OsinfoIter(driver.get_devices()))
        return devices

    def supports_unattended_drivers(self, arch):
        if self._get_pre_installable_drivers(arch):
            return True
        return False
|
|
|
|
|
2019-03-22 23:23:36 +08:00
|
|
|
|
2019-06-09 23:26:28 +08:00
|
|
|
class _OsMedia(object):
|
2019-03-22 23:23:36 +08:00
|
|
|
def __init__(self, osinfo_media):
|
2019-06-09 23:26:28 +08:00
|
|
|
self._media = osinfo_media
|
|
|
|
|
|
|
|
def get_kernel_path(self):
|
|
|
|
return self._media.get_kernel_path()
|
|
|
|
def get_initrd_path(self):
|
|
|
|
return self._media.get_initrd_path()
|
|
|
|
def supports_installer_script(self):
|
|
|
|
return self._media.supports_installer_script()
|
|
|
|
|
|
|
|
def is_netinst(self):
|
|
|
|
variants = list(_OsinfoIter(self._media.get_os_variants()))
|
|
|
|
for variant in variants:
|
|
|
|
if "netinst" in variant.get_id():
|
|
|
|
return True
|
2020-01-27 23:13:03 +08:00
|
|
|
return False # pragma: no cover
|
2019-06-09 23:26:28 +08:00
|
|
|
|
|
|
|
def get_install_script_list(self):
|
|
|
|
return list(_OsinfoIter(self._media.get_install_script_list()))
|
2019-07-12 21:02:20 +08:00
|
|
|
|
|
|
|
def get_osinfo_media(self):
|
|
|
|
return self._media
|
2019-07-16 23:14:24 +08:00
|
|
|
|
|
|
|
|
|
|
|
class _OsTree(object):
|
|
|
|
def __init__(self, osinfo_tree):
|
|
|
|
self._tree = osinfo_tree
|
|
|
|
|
|
|
|
def get_osinfo_tree(self):
|
|
|
|
return self._tree
|