Merge branch 'uu_test' into 'backend_uu'

fix uu problems

See merge request kylinos-src/update-manager-group/kylin-system-updater!513

commit a1bec222ab
@@ -101,12 +101,19 @@ import time
import subprocess
import json

SOURCESLIST = "/etc/apt/sources.list"
RELEASEOFFSET = 1
ORIGINOFFSET = 2
HTTPTYPE = "HTTP"
FTPTYPE = "FTP"
ARCHITECTUREMAP = ['arm64','amd64','armhf','i386','loongarch64','mips64el','sw64']

KYLIN_VERSION_FILE = "/etc/kylin-version/kylin-system-version.conf"
OTA_RESULT_FILE_PATH="/opt/apt_result/"
OTA_RESULT_FILE="/opt/apt_result/ota_result"
SYSTEM_UPDATER_CORE_LIB_PATH="/usr/share/kylin-system-updater/SystemUpdater/Core"
sys.path.append(SYSTEM_UPDATER_CORE_LIB_PATH)
from OriginFilter import UnattendUpgradeFilter
# sys.path.append(SYSTEM_UPDATER_CORE_LIB_PATH)
# from OriginFilter import UnattendUpgradeFilter
CONFIG_FILE_ROOT_PATH="/var/lib/unattended-upgrades"
UNATTENDED_UPGRADE_CONFIG_FILE_PATH="/var/lib/unattended-upgrades/unattended-upgrade.conf"
WHITE_LIST_FILE_PATH="/var/lib/kylin-system-updater/system-updater.conf"
@@ -527,8 +534,241 @@ class ConfigFileManager:
        else:
            logging.error("no config file")
        return True

def not_empty(s):
    return s and s.strip()

class OriginProperty():

    def __init__(self):
        # all local sources (http & ftp)
        self.local_sourcelist = {"http":[],"ftp":[]}
        # parsed local sources, with all distribution properties
        self.local_origin = {"http":[],"ftp":[]}
        # list of allowed sources
        self.allow_sources = []
        # allowed sources plus their properties
        self.allow_origin = {"http":[],"ftp":[]}
        # load all local sources
        self.init_local_origin()
        # parse their properties
        self.analytic_properties(self.local_sourcelist)

    def init_local_origin(self):
        http_origin = {}
        ftp_orgin = {}
        # apt policy
        sh_retval = os.popen("apt-cache policy").read().split("\n")
        # policy = [ rv for rv in sh_retval if "http" in rv or "ftp" in rv or "release" in rv or "origin" in rv]
        for rv in sh_retval:
            if "http" in rv:
                http_origin['sources'] = rv
                http_origin['release'] = sh_retval[sh_retval.index(rv) + RELEASEOFFSET]
                http_origin['origin'] = sh_retval[sh_retval.index(rv) + ORIGINOFFSET]
                self.local_sourcelist['http'].append(http_origin.copy())
            elif "ftp" in rv:
                ftp_orgin['sources'] = rv
                ftp_orgin['release'] = sh_retval[sh_retval.index(rv) + RELEASEOFFSET]
                ftp_orgin['origin'] = sh_retval[sh_retval.index(rv) + ORIGINOFFSET]
                self.local_sourcelist['ftp'].append(ftp_orgin.copy())

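For reference, a minimal sketch of the `apt-cache policy` output shape that RELEASEOFFSET and ORIGINOFFSET assume: each source URI line is followed by a "release" line one line below and an "origin" line two lines below. The mirror URL and suite names here are hypothetical:

    # hypothetical excerpt of `apt-cache policy` output
    sample = [
        " 500 http://archive.example.org/kylin/KYLIN-ALL 10.1/main arm64 Packages",
        "     release v=10.1,o=Kylin,a=10.1,n=10.1,l=Kylin,c=main,b=arm64",
        "     origin archive.example.org",
    ]
    uri_line = sample[0]
    release_line = sample[sample.index(uri_line) + RELEASEOFFSET]  # one line below the URI
    origin_line = sample[sample.index(uri_line) + ORIGINOFFSET]    # two lines below the URI
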
    def merge_origin(self, source_type, source_origin):
        is_append = True
        if source_type == HTTPTYPE:
            if self.local_origin['http']:
                for lo in self.local_origin['http']:
                    if lo['origin_source'] == source_origin['origin_source'] and lo['dist'] == source_origin['dist']:
                        lo['component'] = list(set(lo['component']).union(set(source_origin['component'])))
                        is_append = False
                if is_append:
                    self.local_origin['http'].append(source_origin.copy())
            else:
                self.local_origin['http'].append(source_origin.copy())
        elif source_type == FTPTYPE:
            if self.local_origin['ftp']:
                for lo in self.local_origin['ftp']:
                    if lo['origin_source'] == source_origin['origin_source'] and lo['dist'] == source_origin['dist']:
                        lo['component'] = list(set(lo['component']).union(set(source_origin['component'])))
                        is_append = False
                if is_append:
                    self.local_origin['ftp'].append(source_origin.copy())
            else:
                self.local_origin['ftp'].append(source_origin.copy())

    def analytic_properties(self, local_sourcelist):
        http_origin = {"component":[],"release":{}}
        ftp_orgin = {"component":[],"release":{}}
        dist_list = []
        # walk the parsed local sources and collect every distribution property
        for ls in local_sourcelist['http']:
            for item in filter(not_empty, ls['sources'].split(' ')):
                if item.isdigit():
                    http_origin['policy_priority'] = item
                elif "http" in item:
                    http_origin['origin_source'] = item
                elif "/" in item:
                    dist_list = item.split("/")
                    dist_list.pop()
                    http_origin['dist'] = "/".join(dist_list)
                    http_origin['component'].append(item.split("/")[1])
                elif item not in ARCHITECTUREMAP and item != "Packages":
                    http_origin['component'].append(item)
            release_list = ls['release'].split(',')
            release_list = [ rl.strip() for rl in release_list ]
            if "release" in release_list[0]:
                release_list[0] = release_list[0].lstrip("release").strip()
            for rl in release_list:
                if "=" in rl:
                    self.generate_dict(http_origin['release'], rl)
            for item in filter(not_empty, ls['origin'].split(' ')):
                if "origin" not in ls['origin']:
                    break
                elif "origin" != item:
                    http_origin['origin'] = item
            self.merge_origin(HTTPTYPE, http_origin)
            http_origin = {"component":[],"release":{}}

        for ls in local_sourcelist['ftp']:
            for item in filter(not_empty, ls['sources'].split(' ')):
                if item.isdigit():
                    ftp_orgin['policy_priority'] = item
                elif "ftp" in item:
                    ftp_orgin['origin_source'] = item
                elif "/" in item:
                    ftp_orgin['dist'] = item.split("/")[0]
                    ftp_orgin['component'].append(item.split("/")[1])
                elif item not in ARCHITECTUREMAP and item != "Packages":
                    ftp_orgin['component'].append(item)
            release_list = ls['release'].split(',')
            if "release " in release_list[0]:
                release_list[0] = release_list[0].lstrip("release ")
            for rl in release_list:
                if "=" in rl:
                    self.generate_dict(ftp_orgin['release'], rl)
            for item in filter(not_empty, ls['origin'].split(' ')):
                if "origin" not in ls['origin']:
                    break
                elif "origin" != item:
                    ftp_orgin['origin'] = item
            self.merge_origin(FTPTYPE, ftp_orgin)
            ftp_orgin = {"component":[],"release":{}}

    def generate_dict(self, dict, item):
        item = item.strip()
        if item == "":
            logging.warning("empty match string matches nothing")
            return False
        (what, value) = [ s for s in item.split("=")]
        if what in ('o', 'origin'):
            dict['origin'] = value
        elif what in ("l", "label"):
            dict['label'] = value
        elif what in ("a", "suite", "archive"):
            dict['archive'] = value
        elif what in ("c", "component"):
            dict['component'] = value
        elif what in ("site",):
            dict['site'] = value
        elif what in ("n", "codename",):
            dict['codename'] = value
        else:
            dict[what] = value
            # raise UnknownMatcherError(
            #     "Unknown whitelist entry for matcher %s (value %s)" % (
            #         what, value))

    def get_allowed_sources(self):
        # Look the source addresses up in the local source list. The sources.list
        # pushed by the source server is what is allowed; this module ignores the
        # sources under sources.list.d.
        # Collect the allowed sources.
        try:
            old_sources_list = apt_pkg.config.find("Dir::Etc::sourcelist")
            old_sources_list_d = apt_pkg.config.find("Dir::Etc::sourceparts")
            old_cleanup = apt_pkg.config.find("APT::List-Cleanup")
            apt_pkg.config.set("Dir::Etc::sourcelist",
                               os.path.abspath(SOURCESLIST))
            apt_pkg.config.set("Dir::Etc::sourceparts", "xxx")
            apt_pkg.config.set("APT::List-Cleanup", "0")
            slist = apt_pkg.SourceList()
            slist.read_main_list()
            self.allow_sources = slist.list
        except Exception as e:
            logging.error(str(e))
        finally:
            apt_pkg.config.set("Dir::Etc::sourcelist",
                               old_sources_list)
            apt_pkg.config.set("Dir::Etc::sourceparts",
                               old_sources_list_d)
            apt_pkg.config.set("APT::List-Cleanup",
                               old_cleanup)

    def get_allowed_origin(self):
        # take the allowed sources and
        # pair each source with its properties
        self.local_origin
        self.allow_sources
        self.allow_origin
        try:
            for item in self.allow_sources:
                for lo in self.local_origin['http']:
                    if item.uri.strip('/') == lo['origin_source'].strip('/') and item.dist == lo['dist']:
                        self.allow_origin['http'].append(lo)
                for lo in self.local_origin['ftp']:
                    if item.uri.strip('/') == lo['origin_source'].strip('/') and item.dist == lo['dist']:
                        self.allow_origin['ftp'].append(lo)
        except Exception as e:
            logging.error(str(e))

def get_allowed_origins(allow_origin):
    """ return a list of allowed origins
    """
    allowed_origins = []
    origin = ''
    archive = ''
    uri = ''
    label = ''
    for ao in (allow_origin['http']+allow_origin['ftp']):
        if 'origin' in ao['release']:
            origin = 'o='+ao['release']['origin']
        else:
            origin = 'o='
        if 'archive' in ao['release']:
            archive = 'a='+ao['release']['archive']
        else:
            archive = 'a='
        if 'label' in ao['release']:
            label = 'l='+ao['release']['label']
        else:
            label = 'l='
        if 'origin_source' in ao:
            uri = 'uri='+ao['origin_source']
        else:
            uri = 'uri='
        allowed_origins.append(origin+","+archive+","+label+","+uri)
    return allowed_origins

def deleteDuplicatedElementFromList(list):
    resultList = []
    for item in list:
        if not item in resultList:
            resultList.append(item)
    return resultList

class UnattendUpgradeFilter():
    def __init__(self) -> None:
        pass

    def GetAllowOrigins(self):
        # collect the origin properties
        self.origin_property = OriginProperty()
        self.origin_property.get_allowed_sources()
        self.origin_property.get_allowed_origin()

        self.allowed_origins = get_allowed_origins(self.origin_property.allow_origin)

        self.allowed_origins = deleteDuplicatedElementFromList(self.allowed_origins)
        # logging.info(_("Allowed origins: %s"),
        #              self.allowed_origins)
        return self.allowed_origins

class AcquireStatistics:
    def __init__(self,fetcher) -> None:
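For reference, a minimal usage sketch of the filter defined above, mirroring how UnattendedUpgradesCache consumes it further down; the concrete origin string shown is illustrative only:

    origin_filter = UnattendUpgradeFilter()
    allowed_origins = origin_filter.GetAllowOrigins()
    # each entry has the form "o=<origin>,a=<archive>,l=<label>,uri=<mirror URI>", e.g.
    #   "o=Kylin,a=10.1,l=Kylin,uri=http://archive.example.org/kylin/KYLIN-ALL"
    logging.info("Allowed origins are: %s", ", ".join(allowed_origins))
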
@@ -610,6 +850,7 @@ class KylinSystemUpdater:

    def ConnectToSignals(self):
        def update_detect_finished_handler(success,updatelist,error_status,error_cause):
            logging.debug(updatelist)
            if success:
                logging.info("update detect success,quiting main loop")
                self.update_group = updatelist
@@ -833,7 +1074,9 @@ class UnattendedUpgradesCache(apt.Cache):

        self._cached_candidate_pkgnames = set()  # type: Set[str]

        self.allowed_origins = get_allowed_origins()
        self.origin_filter = UnattendUpgradeFilter()

        self.allowed_origins = self.origin_filter.GetAllowOrigins()
        logging.info(_("Allowed origins are: %s"),
                     ", ".join(self.allowed_origins))

@@ -1032,7 +1275,7 @@ class UnattendedUpgradesCache(apt.Cache):
    def adjust_candidate_with_version(self,pkg,version):
        for v in pkg.versions:
            if v.version == version and is_in_allowed_origin(v,self.allowed_origins):
                logging.debug("pkg %s adjusting candidate version: %s" %(pkg.name,v))
                #logging.debug("pkg %s adjusting candidate version: %s" %(pkg.name,v))
                pkg.candidate = v
                return True
        return False

@@ -1548,7 +1791,7 @@ def get_allowed_origins_legacy():
        raise
    return allowed_origins


'''
def get_allowed_origins():
    # type: () -> List[str]
    uuf = UnattendUpgradeFilter()
@@ -1557,7 +1800,7 @@ def get_allowed_origins():

    This will take substitutions (like distro_id) into account.
    """
    '''

    allowed_origins = get_allowed_origins_legacy()
    key = "Unattended-Upgrade::Origins-Pattern"
    try:
@@ -1566,10 +1809,10 @@ def get_allowed_origins():
    except ValueError:
        logging.error(_("Unable to parse %s." % key))
        raise
    '''
    #logging.info("allowed origins are:%s"%"\n".join(allowed_origins))
    return allowed_origins

'''

def match_whitelist_string(whitelist, origin):
    # type: (str, Union[apt.package.Origin, apt_pkg.PackageFile]) -> bool
@@ -3159,12 +3402,13 @@ def run(options, # type: Options
    pkgs_to_upgrade = calculate_upgradable_pkgs(cache, options,white_list_with_version)
    pkgs_to_upgrade.sort(key=lambda p: p.name)
    pkgs = [pkg.name for pkg in pkgs_to_upgrade]
    logging.debug("pkgs that look like they should be upgraded or installed: %s"
                  % "\n".join(pkgs))
    logging.debug("%d pkgs that look like they should be upgraded or installed: %s"
                  % (len(pkgs),"\n".join(pkgs)))


    # FIXME: make this into a ContextManager
    # stop being nice
    os.nice(old_priority - os.nice(0))
    #os.nice(old_priority - os.nice(0))
    #adjust candidate versions
    logging.info("adjusting candidate from kylin update manager...")
    adjust_candidate_with_version(cache,white_list_with_version)
@@ -3790,6 +4034,7 @@ if __name__ == "__main__":
    config_manager = ConfigFileManager(CONFIG_FILE_ROOT_PATH)
    login_manager = LoginManager()
    kylin_system_updater = KylinSystemUpdater()
    '''
    if os_release_info['PROJECT_CODENAME'] == 'V10SP1-edu' and os_release_info['SUB_PROJECT_CODENAME']=='mavis':
        pass
    else:
@@ -3799,6 +4044,7 @@ if __name__ == "__main__":
    else:
        logging.info("auto upgrade not allow, exit")
        sys.exit(0)
    '''
    #check control center lock
    '''
    if os.path.exists(CONTROL_PANEL_LOCK_FILE):

@@ -39,6 +39,7 @@ import os.path
import os
import configparser
import psutil
from pytz import timezone
# for dbus signal handling
try:
    from dbus.mainloop.glib import DBusGMainLoop
@@ -52,6 +53,7 @@ from gettext import gettext as _
from threading import Event
from enum import IntEnum, Enum
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.background import BackgroundScheduler
import random
import threading
import re
@@ -83,6 +85,7 @@ AUTO_UPGRADE_POLICY_OPTION_UPGRADE_INTERVAL = "upgradeInverval"
INTERVAL_DOWN_INSTALL = 120  # interval between download and install, in minutes
INSTALL_RANDOM = 5  # install time random offset range: 0-INSTALL_RANDOM minutes
DOWNLOAD_RANDOM = 180  # download time random offset range: 0-DOWNLOAD_RANDOM minutes
PREDOWNLOAD_RANDOM = 180

class FeatureSwitch(Enum):
    ON = 'on'
@@ -214,6 +217,68 @@ def clean_flag_files(filelist):
def init():
    if not os.path.exists(NOTIFICATION_PIPE):
        os.mkfifo(NOTIFICATION_PIPE)

def get_random_time(stime,random_range):
    now = datetime.datetime.now()
    delta = random.randint(0,random_range)
    actual_time = now + datetime.timedelta(minutes=delta)
    try:
        start_time = datetime.datetime.strptime(stime,"%H:%M")
        start=datetime.datetime(now.year,now.month,now.day,start_time.hour,start_time.minute,0,0)
        actual_time = start+datetime.timedelta(minutes=delta)
    except Exception as e:
        logging.error(e)
    return actual_time
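A brief usage sketch of get_random_time(): given a base time as "HH:MM" and a random range in minutes, it returns today's base time plus a random offset, falling back to now plus the offset if the string does not parse. The time string below is illustrative:

    # e.g. a download slot somewhere between 02:00 and 05:00 (DOWNLOAD_RANDOM = 180)
    download_at = get_random_time("02:00", DOWNLOAD_RANDOM)
    logging.debug("scheduled download at %s", download_at.strftime("%H:%M"))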

def task(task):
    env = copy.copy(os.environ)
    cmd = "date"
    if task in ["predownload","download"]:
        cmd = "kylin-unattended-upgrade --download-only"
    elif task == "install":
        cmd = "kylin-unattended-upgrade --install-only --mode=timing"
    elif task == "download_and_install":
        cmd = "kylin-unattended-upgrade --download-only&&kylin-unattended-upgrade --install-only --mode=timing"
        # do not check the upgrade period when downloading and installing
    else:
        pass
    ret = subprocess.run([cmd], shell=True,env=env)
    logging.debug("task:%s return code:%d"%(task,ret.returncode))
    return ret.returncode
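A short usage note: task() builds the kylin-unattended-upgrade command line for the given stage, runs it through a shell, and returns the exit code; for example (hypothetical invocation, assumes the binary is on PATH and sufficient privileges):

    rc = task("download")  # runs: kylin-unattended-upgrade --download-only
    if rc != 0:
        logging.warning("scheduled download failed with code %d", rc)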

def background_scheduler_init(background_scheduler):

    background_scheduler.start()

    random_time = get_random_time(autoupgradepolicy.GetOptionValue('downloadTime'),DOWNLOAD_RANDOM)
    background_scheduler.add_job(task,'cron', args=['download'],id='download', \
        hour = random_time.hour,minute = random_time.minute,replace_existing=True)

    random_time = random_time + datetime.timedelta(minutes=INTERVAL_DOWN_INSTALL)
    background_scheduler.add_job(task,'cron', args=['install'],id='install', \
        hour = random_time.hour,minute = random_time.minute,replace_existing=True)

    random_time = get_random_time(autoupgradepolicy.GetOptionValue('preDownloadTime'),PREDOWNLOAD_RANDOM)
    background_scheduler.add_job(task,'cron', args=['predownload'],id='predownload', \
        hour = random_time.hour,minute = random_time.minute,replace_existing=True)

    if autoupgradepolicy.GetOptionValue('autoUpgradeState') == 'on':
        if autoupgradepolicy.GetOptionValue('downloadMode') != 'timing':
            background_scheduler.pause_job('download')
        if autoupgradepolicy.GetOptionValue('installMode') != 'timing':
            background_scheduler.pause_job('install')
    else:
        background_scheduler.pause_job('download')
        background_scheduler.pause_job('install')

    if autoupgradepolicy.GetOptionValue('preDownload') != 'on':
        background_scheduler.pause_job('predownload')

    joblist = background_scheduler.get_jobs()

    for job in joblist:
        logging.debug("job:%s,next run time:%s"%(job.id,job.next_run_time))
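For context, a minimal, self-contained sketch of the APScheduler cron pattern used above; the demo job name, id, and firing time are placeholders:

    from apscheduler.schedulers.background import BackgroundScheduler

    def demo_task(name):
        print("running", name)

    scheduler = BackgroundScheduler(timezone="Asia/Shanghai")
    scheduler.start()
    # fire every day at 02:30; replace_existing allows re-registering the job by id
    scheduler.add_job(demo_task, 'cron', args=['download'], id='download',
                      hour=2, minute=30, replace_existing=True)
    scheduler.pause_job('download')  # a paused job keeps its schedule but does not fire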
'''
def do_usplash(msg):
    # type: (str) -> None
@@ -340,21 +405,53 @@ class KylinSystemUpdater:

    def SetConfigValue(self,section,option,value):
        return self.update_interface.SetConfigValue(section,option,value)

class AutoUpgradePolicy():
    def __init__(self) -> None:
        self.autoupgradepolicy = {}
        if os.path.exists(UNATTENDED_UPGRADE_POLICY_FILE_PATH):
            config=configparser.ConfigParser(allow_no_value=True)
            config.optionxform = str
            config.read(UNATTENDED_UPGRADE_POLICY_FILE_PATH)
            for option in config.options('autoUpgradePolicy'):
                self.autoupgradepolicy.update({option:config['autoUpgradePolicy'][option]})
            for key in self.autoupgradepolicy.keys():
                logging.debug("%s:%s"%(key,self.autoupgradepolicy[key]))

    def SetOptionValue(self,option,value):
        self.autoupgradepolicy.update({option:value})

    def GetOptionValue(self,option):
        try:
            return self.autoupgradepolicy[option]
        except Exception:
            return ''

    def reload_config(self):
        if os.path.exists(UNATTENDED_UPGRADE_POLICY_FILE_PATH):
            config=configparser.ConfigParser(allow_no_value=True)
            config.optionxform = str
            config.read(UNATTENDED_UPGRADE_POLICY_FILE_PATH)
            for option in config.options('autoUpgradePolicy'):
                self.autoupgradepolicy.update({option:config['autoUpgradePolicy'][option]})
            for key in self.autoupgradepolicy.keys():
                logging.debug("%s:%s"%(key,self.autoupgradepolicy[key]))
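For reference, a minimal example of the policy file this class parses; the section name and option keys are taken from the code above, while the path and the values are illustrative only:

    # e.g. unattended-upgrades-policy.conf (path given by UNATTENDED_UPGRADE_POLICY_FILE_PATH)
    [autoUpgradePolicy]
    autoUpgradeState=on
    downloadMode=timing
    installMode=timing
    downloadTime=02:00
    preDownload=on
    preDownloadTime=01:00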

class UnattendedUpgradesShutdown():
    # load the unattended-upgrades-policy.conf configuration file
    '''
    def loadcfg(self):
        if os.path.isfile(UNATTENDED_UPGRADE_POLICY_FILE_PATH):
        if os.path.exists(UNATTENDED_UPGRADE_POLICY_FILE_PATH):
            self.preDownload = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_PREDOWNLOAD)
            self.autoUpgrade = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_AUTOUPGRADE)
            self.download_mode = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_DOWNLOAD_MODE)
            self.install_mode = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_INSTALL_MODE)
            download_time = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_DOWNLOAD_TIME)
            self.download_random = int(kylin_system_updater.GetConfigValue('AutoUpgradeConfig','downloadRandom')[1])
            self.upgrade_interval = int(kylin_system_updater.GetConfigValue('AutoUpgradeConfig','upgradeInterval')[1])
            logging.info("download random:%s,upgrade interval:%s"%(self.download_random,self.upgrade_interval))
            # self.download_random = int(kylin_system_updater.GetConfigValue('AutoUpgradeConfig','downloadRandom')[1])
            # self.upgrade_interval = int(kylin_system_updater.GetConfigValue('AutoUpgradeConfig','upgradeInterval')[1])
            # logging.info("download random:%s,upgrade interval:%s"%(self.download_random,self.upgrade_interval))
            # upgradeInterval = int(ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, 'upgradeInverval'))
    '''

            if os_release_info['PROJECT_CODENAME'] == 'V10SP1-edu' and os_release_info['SUB_PROJECT_CODENAME']=='mavis':
                self.download_time['h'] = 10
                self.download_time['m'] = 0
@@ -363,11 +460,13 @@ class UnattendedUpgradesShutdown():
                self.download_time_r['h'], self.download_time_r['m'],self.preDownload, self.autoUpgrade, \
                self.download_mode, self.install_mode)
            return
            '''

            timelist = download_time.strip().split(':')

            if len(timelist) != TimeElement.TIME_NUM:
                logging.debug("unattended-upgrades-policy.conf time err %s",download_time)
                return

            # validate the supplied time
            try:
                tmphour = int(timelist[TimeElement.TIME_HOUR])
@@ -391,7 +490,8 @@ class UnattendedUpgradesShutdown():
            self.preDownload, self.autoUpgrade, self.download_mode, self.install_mode)
        else:
            logging.debug("unattended-upgrades-policy.conf not exist")

        return
    '''
    def __init__(self, options):
        # type: (Values) -> None
        self.options = options
@@ -406,6 +506,7 @@ class UnattendedUpgradesShutdown():
        self.lock_was_taken = False
        self.signal_sent = False
        self.stop_signal_received = Event()
        '''
        self.download_mode = DownloadMode.TIMING_DOWNLOAD.value  # download mode
        self.install_mode = InstallMode.TIMING_INSTALL.value  # install mode
        self.download_time = {'h':9, 'm':0}  # scheduled download time, 09:00
@@ -417,8 +518,8 @@ class UnattendedUpgradesShutdown():
        self.download_job = None
        self.install_job = None
        self.startup_download_job = None
        self.scheduler = BlockingScheduler()

        self.scheduler = BlockingScheduler(timezone = "Asia/Shanghai")
        '''
        try:
            hasattr(GLib, "MainLoop")
            DBusGMainLoop(set_as_default=True)
@@ -435,7 +536,7 @@ class UnattendedUpgradesShutdown():
        self.update_proxy = None
        self.wait_period = min(3, self.get_inhibit_max_delay() / 3)
        self.preparing_for_shutdown = False
        self.loadcfg()
        #self.loadcfg()

    def get_update_proxy(self):
        if not self.update_proxy:
@@ -519,7 +620,7 @@ class UnattendedUpgradesShutdown():
            pass
        '''
        return True

    '''
    def run_polling(self, signal_handler):
        logging.warning(
            _("Unable to monitor PrepareForShutdown() signal, polling "
@@ -543,7 +644,7 @@ class UnattendedUpgradesShutdown():
        while not self.iter():
            # TODO iter on sigterm and sighup, too
            time.sleep(self.wait_period)

    '''
    # scheduled-download handler
    def timing_download(self):
        env = copy.copy(os.environ)
@@ -604,6 +705,7 @@ class UnattendedUpgradesShutdown():
        # return 0

        # set signal handlers
        '''
        def signal_handler(signum, frame):

            logging.warning(
@@ -611,7 +713,7 @@ class UnattendedUpgradesShutdown():
                "only if it is running")
            self.stop_signal_received.set()
            #self.start_iterations()


        # fall back to polling without GLib
        try:
            hasattr(GLib, "MainLoop")
@@ -619,14 +721,41 @@ class UnattendedUpgradesShutdown():
            logging.error("MainLoop Not Found")
            #self.run_polling(signal_handler)
            return
        '''

        for sig in (signal.SIGTERM, signal.SIGHUP):
            GLib.unix_signal_add(GLib.PRIORITY_DEFAULT, sig,
                                 signal_handler, None, None)
        '''
        if self.options.wait_for_signal:
            def change_upgrade_policy_handler():
                if os.path.isfile(UNATTENDED_UPGRADE_POLICY_FILE_PATH):
                if os.path.exist(UNATTENDED_UPGRADE_POLICY_FILE_PATH):
                    autoupgradepolicy.reload_config()

                    if autoupgradepolicy.GetOptionValue('autoUpgradeState') == 'on':
                        random_time = get_random_time(autoupgradepolicy.GetOptionValue('downloadTime'),DOWNLOAD_RANDOM)
                        if autoupgradepolicy.GetOptionValue('downloadMode') == 'timing':
                            background_scheduler.add_job(task,'cron', args=['download'],id='download', \
                                hour = random_time.hour,minute = random_time.minute,replace_existing=True)
                        if autoupgradepolicy.GetOptionValue('installMode') == 'timing':
                            random_time = random_time + datetime.timedelta(minutes=INTERVAL_DOWN_INSTALL)
                            background_scheduler.add_job(task,'cron', args=['install'],id='install', \
                                hour = random_time.hour,minute = random_time.minute,replace_existing=True)
                    else:
                        background_scheduler.pause_job('download')
                        background_scheduler.pause_job('install')

                    if autoupgradepolicy.GetOptionValue('preDownload') == 'on':
                        random_time = get_random_time(autoupgradepolicy.GetOptionValue('preDownloadTime'),PREDOWNLOAD_RANDOM)
                        background_scheduler.add_job(task,'cron', args=['predownload'],id='download', \
                            hour = random_time.hour,minute = random_time.minute,replace_existing=True)
                    else:
                        background_scheduler.pause_job('predownload')

                    joblist = background_scheduler.get_jobs()

                    for job in joblist:
                        logging.debug("job:%s,next run time:%s"%(job.id,job.next_run_time))
            '''
            self.download_mode = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_DOWNLOAD_MODE)
            self.install_mode = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_INSTALL_MODE)
            self.preDownload = ReadValueFromFile(UNATTENDED_UPGRADE_POLICY_FILE_PATH, POLICY_CONF_SECTION_AUTO_UPGRADE_POLICY, AUTO_UPGRADE_POLICY_OPTION_PREDOWNLOAD)
@@ -656,7 +785,7 @@ class UnattendedUpgradesShutdown():
            self.install_time['h'] = self.download_time_r['h']
            self.install_time['m'] = self.download_time_r['m'] + INTERVAL_DOWN_INSTALL
            self.install_time_r = convert_time_by_random(self.install_time, INSTALL_RANDOM)
            '''

            logging.info("download random:%d,upgrade interval:%d"%(self.download_random,self.upgrade_interval))

            if self.preDownload == FeatureSwitch.ON.value:  #open download timing
@@ -694,7 +823,7 @@ class UnattendedUpgradesShutdown():
                self.download_job.pause()
            except Exception as e:
                logging.error(e)
            '''


            if self.autoUpgrade == FeatureSwitch.OFF.value:
                logging.info("auto upgrade turned off,removing download and instal jobs...")
@@ -754,7 +883,7 @@ class UnattendedUpgradesShutdown():
                pass
                # logging.error(e)

            '''

            logging.info("upgrade time: [%d:%d] [%d:%d] predown[%s] autoupgrade[%s] d-mode[%s] i-mode[%s]",
                self.download_time_r['h'], self.download_time_r['m'],self.install_time_r['h'],self.install_time_r['m'],
                self.preDownload, self.autoUpgrade, self.download_mode, self.install_mode)
@@ -763,7 +892,17 @@ class UnattendedUpgradesShutdown():
                logging.debug("unattended-upgrades-policy.conf not exist")

            def upgrade_all_now_handler():
                now=datetime.datetime.now()
                random_time = now + datetime.timedelta(minutes=DOWNLOAD_RANDOM)
                background_scheduler.add_job(task,'date', args=['download_and_install'],id='download', \
                    hour = random_time.hour,minute = random_time.minute,replace_existing=True)

                joblist = background_scheduler.get_jobs()

                for job in joblist:
                    logging.debug("job:%s,next run time:%s"%(job.id,job.next_run_time))
                #self._wait_for_unattended_upgrade_finish()
                '''
                if FindRuningUnattendedUpgrades():
                    logging.warning("find runing unattended-upgrades,please wait")
                    return False
@@ -777,7 +916,7 @@ class UnattendedUpgradesShutdown():
                    return True
                else:
                    return False

                '''

            def prepare_for_shutdown_handler(active):
                """ Handle PrepareForShutdown() """
@@ -802,15 +941,6 @@ class UnattendedUpgradesShutdown():
                        do_plymouth_splash()
                        self.start_iterations()
                        logging.info("finished iteration")
                        '''
                        self.mainloop.quit()
                    else:
                        self.mainloop.quit()
                else:
                    self.mainloop.quit()
            else:
                self.mainloop.quit()
            '''
                else:
                    pass
            self.mainloop.quit()
@@ -837,12 +967,13 @@ class UnattendedUpgradesShutdown():
            logging.debug("Skip waiting for signals, starting operation "
                          "now")
            # self.start_iterations()

            '''
            if os_release_info['PROJECT_CODENAME'] == 'V10SP1-edu' and os_release_info['SUB_PROJECT_CODENAME']=='mavis':
                logging.info("setting startup download timer")
                GLib.timeout_add(300*1000, lambda: self.timing_download() and False)
                #local_time =time.localtime(time.time()+300)
                self.startup_download_job = self.scheduler.add_job(self.timing_download,'cron',hour=self.download_time_r['h'],minute = self.download_time_r['m'])

            else:
                if self.autoUpgrade == FeatureSwitch.ON.value:
                    logging.debug("download time:[%d:%d] install time:[%d:%d]", self.download_time_r['h'], self.download_time_r['m'],self.install_time_r['h'],self.install_time_r['m'])
@@ -850,8 +981,8 @@ class UnattendedUpgradesShutdown():
                    self.install_job = self.scheduler.add_job(self.timing_install, 'cron', hour=self.install_time_r['h'], minute=self.install_time_r['m'])
                elif self.autoUpgrade == FeatureSwitch.OFF.value:
                    logging.info("auto upgrade turned off")

            TimerThread(self.scheduler).start()
            '''
            #TimerThread(self.scheduler).start()
            self.mainloop.run()
            logging.info("quit mainloop")
            os._exit(0)
@@ -1050,7 +1181,7 @@ if __name__ == "__main__":
    logging.basicConfig(filename=logfile,
                        level=level,
                        format="%(asctime)s %(levelname)s - %(message)s")

    logging.getLogger('apscheduler').setLevel(logging.DEBUG)
    os_release_info = ReadOsRelease('/etc/os-release')
    logging.info("project id:%s,sub-project id:%s"%(os_release_info['PROJECT_CODENAME'],os_release_info['SUB_PROJECT_CODENAME']))
@@ -1102,5 +1233,8 @@ if __name__ == "__main__":
    if abnormal_pkg_count != '0':
        apt_fix = subprocess.run("echo y|apt install -f",shell=True,stdout=open(logfile,'a+'),stderr=open(logfile,'a+'))
    kylin_system_updater = KylinSystemUpdater()
    autoupgradepolicy = AutoUpgradePolicy()
    background_scheduler = BackgroundScheduler(timezone = "Asia/Shanghai")
    background_scheduler_init(background_scheduler)
    UnattendedUpgradesShutdown(options).run()
    #main()