2019-04-06 20:59:27 +08:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# buildbot.py: Automatic management tool for an arch repo.
|
|
|
|
# This file is part of Buildbot by JerryXiao
|
|
|
|
|
|
|
|
import logging
|
|
|
|
from multiprocessing.connection import Listener
|
|
|
|
from time import time, sleep
|
|
|
|
import os
|
|
|
|
from pathlib import Path
|
2019-04-09 15:43:17 +08:00
|
|
|
from shutil import rmtree
|
2019-04-06 20:59:27 +08:00
|
|
|
from subprocess import CalledProcessError
|
|
|
|
|
2019-04-09 15:43:17 +08:00
|
|
|
from shared_vars import PKG_SUFFIX, PKG_SIG_SUFFIX
|
|
|
|
|
2019-04-06 20:59:27 +08:00
|
|
|
from config import ARCHS, BUILD_ARCHS, BUILD_ARCH_MAPPING, \
|
|
|
|
MASTER_BIND_ADDRESS, MASTER_BIND_PASSWD, \
|
2019-04-07 17:14:50 +08:00
|
|
|
PKGBUILD_DIR, MAKEPKG_PKGLIST_CMD, MAKEPKG_UPD_CMD, \
|
2019-04-09 15:43:17 +08:00
|
|
|
MAKEPKG_MAKE_CMD, MAKEPKG_MAKE_CMD_CLEAN, \
|
2019-04-09 20:48:37 +08:00
|
|
|
GPG_SIGN_CMD, GPG_VERIFY_CMD, UPDATE_INTERVAL, \
|
2019-04-10 20:36:43 +08:00
|
|
|
MAKEPKG_MAKE_CMD_MARCH, UPLOAD_CMD, \
|
2019-09-08 17:07:47 +08:00
|
|
|
GIT_PULL, GIT_RESET_SUBDIR, CONSOLE_LOGFILE, \
|
|
|
|
MAIN_LOGFILE, PKG_UPDATE_LOGFILE, MAKEPKG_LOGFILE
|
2019-04-07 17:14:50 +08:00
|
|
|
|
|
|
|
from utils import print_exc_plus, background, \
|
|
|
|
bash, get_pkg_details_from_name, vercmp, \
|
2019-04-09 15:43:17 +08:00
|
|
|
nspawn_shell, mon_nspawn_shell, get_arch_from_pkgbuild, \
|
2019-04-09 20:48:37 +08:00
|
|
|
configure_logger, mon_bash
|
2019-04-09 15:43:17 +08:00
|
|
|
|
|
|
|
from client import run as rrun
|
2019-04-06 20:59:27 +08:00
|
|
|
|
|
|
|
import json
|
|
|
|
|
|
|
|
from yamlparse import load_all as load_all_yaml
|
|
|
|
|
2019-09-08 17:07:47 +08:00
|
|
|
from extra import gen_pkglist as extra_gen_pkglist, \
|
|
|
|
readpkglog as extra_readpkglog, \
|
|
|
|
readmainlog as extra_readmainlog
|
|
|
|
|
2019-04-06 20:59:27 +08:00
|
|
|
# Run everything relative to the directory containing this script.
abspath = os.path.dirname(os.path.abspath(__file__))
os.chdir(abspath)
|
|
|
|
|
2019-04-09 15:43:17 +08:00
|
|
|
# Module-wide logger: rotating main logfile (10 MiB), console copy, and
# error notification enabled via the project's configure_logger helper.
logger = logging.getLogger('buildbot')
configure_logger(logger, logfile=MAIN_LOGFILE, rotate_size=1024*1024*10, enable_notify=True, consolelog=CONSOLE_LOGFILE)
|
2019-04-09 15:43:17 +08:00
|
|
|
|
2019-04-11 16:32:39 +08:00
|
|
|
# refuse to run in systemd-nspawn
# The buildbot spawns its own nspawn containers for builds; running the
# master itself inside one would nest containers, so bail out early.
if 'systemd-nspawn' in bash('systemd-detect-virt || true'):
    logger.error('Refused to run in systemd-nspawn.')
    raise AssertionError('Refused to run in systemd-nspawn.')

# Root of the PKGBUILD checkout that all package dirs live under.
REPO_ROOT = Path(PKGBUILD_DIR)
|
|
|
|
|
|
|
|
class Job:
    """One build task: a single package config built for one architecture."""

    def __init__(self, buildarch, pkgconfig, version, multiarch=False):
        # Only architectures this bot can build for are accepted.
        assert buildarch in BUILD_ARCHS
        self.arch = buildarch
        self.pkgconfig = pkgconfig
        self.version = version
        self.multiarch = multiarch
        # Timestamp of when the job was created / queued.
        self.added = time()

    def __repr__(self):
        shown = ('arch', 'pkgconfig', 'version', 'multiarch', 'added')
        body = ''.join(
            f'{myproperty}={getattr(self, myproperty, None)},'
            for myproperty in shown
        )
        return 'Job(' + body + ')'

    def __lt__(self, job2):
        # Jobs sort by the priority declared in their package config.
        return self.pkgconfig.priority < job2.pkgconfig.priority
|
2019-04-06 20:59:27 +08:00
|
|
|
class jobsManager:
    """Queue and execute build / sign / upload jobs.

    Holds the pending build jobs, the job currently being processed and
    the loaded package configs; tick() drives the whole pipeline and is
    called repeatedly from the main loop.
    """

    def __init__(self):
        self.__buildjobs = list()    # pending Job objects, sorted on demand
        self.__uploadjobs = list()   # exposed via .jobs for status queries
        self.__curr_job = None       # Job being processed right now, if any
        self.pkgconfigs = None       # set from load_all_yaml() before use
        self.last_updatecheck = 0.0  # epoch seconds of the last update scan
        self.idle = False            # True while waiting for package updates

    @property
    def jobs(self):
        """Snapshot of the queues, for status reporting."""
        return \
            {
                'build_jobs': self.__buildjobs,
                'upload_jobs': self.__uploadjobs,
                'current_job': self.__curr_job
            }

    def __repr__(self):
        ret = "jobsManager("
        for myproperty in (
            'jobs', 'pkgconfigs',
            'last_updatecheck', 'idle'
        ):
            ret += f'{myproperty}={getattr(self, myproperty, None)},'
        ret += ')'
        return ret

    def reset_dir(self, pkgdirname=None, all=False, rmpkg=True):
        """Reset one package dir (or, with all=True, the whole checkout)
        to a pristine git state.

        Also removes the pkg/ and src/ build directories and, when rmpkg
        is True, any built package / signature files.  Returns True on
        success, False if the target dir is missing or unspecified.
        """
        if all:
            logger.info('resetting %s', str(REPO_ROOT))
            bash(GIT_RESET_SUBDIR, cwd=REPO_ROOT)
        else:
            if not pkgdirname:
                return False
            cwd = REPO_ROOT / pkgdirname
            if not cwd.exists():
                return False
            logger.info('resetting %s', str(cwd))
            try:
                bash(GIT_RESET_SUBDIR, cwd=cwd)
            except Exception:
                logger.error(f'Unable to reset dir {cwd}')
                print_exc_plus()
            for fpath in cwd.iterdir():
                if fpath.is_dir() and fpath.name in ('pkg', 'src'):
                    if fpath.name == 'pkg':
                        # makepkg can leave pkg/ without write permission;
                        # restore it so rmtree can delete the tree.
                        fpath.chmod(0o0755)
                    rmtree(fpath)
                elif rmpkg and fpath.is_file() and \
                        (fpath.name.endswith(PKG_SUFFIX) or
                         fpath.name.endswith(PKG_SIG_SUFFIX)):
                    fpath.unlink()
        return True

    def force_upload_package(self, pkgdirname, overwrite=False):
        """Sign and upload the already-built package in pkgdirname without
        rebuilding it.  Returns a human-readable result string."""
        if not self.idle:
            logger.debug('force_upload requested and not idle.')
        if not (REPO_ROOT / pkgdirname).exists():
            ret = f'force_upload failed: no such dir {pkgdirname}'
            logger.warning(ret)
        else:
            self.pkgconfigs = load_all_yaml()
            updates = updmgr.check_update(rebuild_package=pkgdirname)
            if updates and len(updates) == 1:
                (pkgconfig, ver, buildarchs) = updates[0]
                # Build arch only matters for logging here; any one works.
                fakejob = Job(buildarchs[0], pkgconfig, ver)
                self.__sign(fakejob)
                if self.__upload(fakejob, overwrite=overwrite):
                    ret = f'done force_upload {pkgdirname}'
                    logger.info(ret)
                else:
                    ret = f'force_upload {pkgdirname} failed: return code.'
                    logger.warning(ret)
            else:
                ret = f'force_upload {pkgdirname} failed: cannot check update.'
                logger.warning(ret)
        return ret

    def rebuild_package(self, pkgdirname, clean=True):
        """Queue a rebuild of pkgdirname for all of its build archs.
        With clean=True the package dir is reset first.  Returns a
        human-readable result string."""
        if not self.idle:
            logger.debug('rebuild requested and not idle.')
        self.pkgconfigs = load_all_yaml()
        if (REPO_ROOT / pkgdirname).exists() and clean:
            self.reset_dir(pkgdirname)
        updates = updmgr.check_update(rebuild_package=pkgdirname)
        if not (REPO_ROOT / pkgdirname).exists():
            ret = f'rebuild failed: no such dir {pkgdirname}'
            logger.warning(ret)
        elif updates and len(updates) == 1:
            (pkgconfig, ver, buildarchs) = updates[0]
            march = len(buildarchs) >= 2
            for arch in buildarchs:
                newjob = Job(arch, pkgconfig, ver, multiarch=march)
                self._new_buildjob(newjob)
            ret = f'rebuild job added for {pkgdirname} {" ".join(buildarchs)}'
            logger.info(ret)
        else:
            ret = f'rebuild {pkgdirname} failed: cannot check update.'
            logger.warning(ret)
        return ret

    def _new_buildjob(self, job):
        """Queue a Job, dropping any older queued job for the same
        package dir + arch so only the newest version is built."""
        assert type(job) is Job
        job_to_remove = list()
        for previous_job in self.__buildjobs:
            if job.pkgconfig.dirname == previous_job.pkgconfig.dirname and \
                    job.arch == previous_job.arch:
                job_to_remove.append(previous_job)
        for oldjob in job_to_remove:
            self.__buildjobs.remove(oldjob)
            logger.info('removed an old job for %s %s, %s => %s',
                        job.pkgconfig.dirname, job.arch,
                        oldjob.version, job.version)
        logger.info('new job for %s %s %s',
                    job.pkgconfig.dirname, job.arch, job.version)
        self.__buildjobs.append(job)

    def __get_job(self):
        """Pop the highest-priority queued job and make it current.
        A leftover current job means the previous one crashed; force-
        finish it first."""
        if self.__curr_job:
            logger.error(f'Job {self.__curr_job} failed and is not cleaned.')
            self.__finish_job(self.__curr_job, force=True)
            return self.__get_job()
        jobs = self.__buildjobs
        if jobs:
            # Job.__lt__ orders by pkgconfig.priority; reverse puts the
            # highest priority first.
            jobs.sort(reverse=True)
            self.__curr_job = jobs.pop(0)
        return self.__curr_job

    def __finish_job(self, pkgdir, force=False):
        """Clear the current job; with force=False the caller must name
        the current job's package dir as a sanity check."""
        if not force:
            assert pkgdir == self.__curr_job.pkgconfig.dirname
        self.__curr_job = None
        return True

    def clean_failed_job(self):
        """Discard the current job after tick() raised, so the next tick
        can proceed.  Raises if there is no current job to discard."""
        if self.__curr_job:
            logger.error(f'Job {self.__curr_job} failed. Correct the error and rebuild')
            self.__finish_job(self.__curr_job, force=True)
        else:
            raise RuntimeError('Unexpected behavior')

    def __makepkg(self, job):
        """Run makepkg (inside an nspawn container) for the job, wrapped
        by the config's prebuild / postbuild / failure script hooks.
        Returns the shell output; re-raises on build failure."""
        cwd = REPO_ROOT / job.pkgconfig.dirname
        if job.multiarch:
            # assume a clean env, no source avail
            mkcmd = MAKEPKG_MAKE_CMD_MARCH
        else:
            mkcmd = MAKEPKG_MAKE_CMD_CLEAN if job.pkgconfig.cleanbuild \
                else MAKEPKG_MAKE_CMD
        logger.info('makepkg in %s %s', job.pkgconfig.dirname, job.arch)
        # run pre-makepkg-scripts (best-effort: failures are logged only)
        logger.debug('running pre-build scripts')
        for scr in getattr(job.pkgconfig, 'prebuild', list()):
            if type(scr) is str:
                try:
                    mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=scr, seconds=60*60)
                except Exception:
                    print_exc_plus()
        # actually makepkg
        try:
            ret = mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=mkcmd,
                                   logfile=cwd / MAKEPKG_LOGFILE,
                                   short_return=True,
                                   seconds=job.pkgconfig.timeout*60)
        except Exception:
            # Run the config's failure hooks, then propagate the error.
            logger.error(f'Job {job} failed. Running build-failure scripts')
            for scr in getattr(job.pkgconfig, 'failure', list()):
                if type(scr) is str:
                    try:
                        mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=scr, seconds=60*60)
                    except Exception:
                        print_exc_plus()
            raise
        # run post-makepkg-scripts (best-effort)
        logger.debug('running post-build scripts')
        for scr in getattr(job.pkgconfig, 'postbuild', list()):
            if type(scr) is str:
                try:
                    mon_nspawn_shell(arch=job.arch, cwd=cwd, cmdline=scr, seconds=60*60)
                except Exception:
                    print_exc_plus()
        return ret

    def __clean(self, job, remove_pkg=False, rm_src=True):
        """Remove build residue for the job: pkg/ and src/ dirs when
        rm_src, and built package/sig files when remove_pkg.  For a
        multiarch job only this job's arch-specific files are removed."""
        cwd = REPO_ROOT / job.pkgconfig.dirname
        logger.info('cleaning build dir for %s, %sremoving pkg',
                    job.pkgconfig.dirname, '' if remove_pkg else 'not ')
        for fpath in cwd.iterdir():
            if rm_src and fpath.is_dir() and fpath.name in ('pkg', 'src'):
                rmtree(fpath)
            elif remove_pkg and fpath.is_file() and \
                    ((not job.multiarch) or job.arch in fpath.name) and \
                    (fpath.name.endswith(PKG_SUFFIX) or
                     fpath.name.endswith(PKG_SIG_SUFFIX)):
                fpath.unlink()

    def __sign(self, job):
        """GPG-sign every built package file in the job's package dir."""
        logger.info('signing in %s %s', job.pkgconfig.dirname, job.arch)
        cwd = REPO_ROOT / job.pkgconfig.dirname
        for fpath in cwd.iterdir():
            if fpath.name.endswith(PKG_SUFFIX):
                bash(f'{GPG_SIGN_CMD} {fpath.name}', cwd=cwd)

    def __upload(self, job, overwrite=False):
        """Upload the job's built packages (and sigs) to the remote and
        request a repo-db update via the client RPC.

        Protocol: push_start reserves per-file timeouts, each file is
        uploaded with retries (push_add_time extends the reservation,
        push_fail aborts it), and push_done finalizes the repo update.
        Returns True on success; raises RuntimeError on failure.
        """
        cwd = REPO_ROOT / job.pkgconfig.dirname
        f_to_upload = list()
        pkg_update_list = list()
        for fpath in cwd.iterdir():
            if fpath.name.endswith(PKG_SUFFIX) and \
                    get_pkg_details_from_name(fpath.name).ver == job.version:
                sigpath = fpath.parent / f'{fpath.name}.sig'
                assert sigpath.exists()
                f_to_upload.append(sigpath)
                f_to_upload.append(fpath)
                pkg_update_list.append(fpath)
        # Sizes in MB, used by the remote to compute upload timeouts.
        sizes = [f.stat().st_size / 1000 / 1000 for f in f_to_upload]
        pkg_update_list_human = " ".join([f.name for f in pkg_update_list])
        assert pkg_update_list
        max_tries = 10
        for tries in range(max_tries):
            timeouts = rrun('push_start', args=([f.name for f in f_to_upload], sizes))
            if type(timeouts) is list:
                break
            else:
                if tries + 1 < max_tries:
                    logger.warning(f'Remote is busy ({timeouts}), wait 1 min x10 [{tries+1}/10]')
                    sleep(60)
        else:
            raise RuntimeError('Remote is busy and cannot connect')
        assert len(f_to_upload) == len(timeouts)
        pkgs_timeouts = dict(zip(f_to_upload, timeouts))
        for f in f_to_upload:
            max_tries = 5
            for tries in range(max_tries):
                timeout = pkgs_timeouts.get(f)
                try:
                    logger.info(f'Uploading {f.name}, timeout in {timeout}s')
                    mon_bash(UPLOAD_CMD.format(src=f), seconds=int(timeout))
                except Exception:
                    time_to_sleep = (tries + 1) * 60
                    logger.error(f'We are getting problem uploading {f.name}, wait {time_to_sleep} secs')
                    # Ask the remote to keep the reservation alive while
                    # we back off before retrying.
                    patret = rrun('push_add_time', args=(f.name, time_to_sleep + timeout))
                    if patret is not None:
                        logger.error(f'Unable to run push_add_time, reason: {patret}')
                    print_exc_plus()
                    if tries + 1 < max_tries:
                        sleep(time_to_sleep)
                else:
                    break
            else:
                logger.error(f'Upload {f.name} failed, running push_fail and abort.')
                pfret = rrun('push_fail', args=(f.name,))
                if pfret is not None:
                    logger.error(f'Unable to run push_fail, reason: {pfret}')
                raise RuntimeError('Unable to upload some files')
        logger.info(f'Requesting repo update for {pkg_update_list_human}')
        res = "unexpected"
        max_tries = 5
        for tries in range(max_tries):
            try:
                res = rrun('push_done', args=([f.name for f in f_to_upload],), kwargs={'overwrite': overwrite,})
            except Exception:
                time_to_sleep = (tries + 1) * 60
                logger.info(f'Error updating {pkg_update_list_human}, wait {time_to_sleep} secs')
                print_exc_plus()
                if tries + 1 < max_tries:
                    sleep(time_to_sleep)
            else:
                break
        else:
            # fix: error message said "max reties"
            ret = f'Update failed for {pkg_update_list_human}: max retries exceeded'
            logger.error(ret)
            raise RuntimeError(ret)
        # push_done returns None on success, an error string otherwise.
        if res is None:
            logger.info(f'Update success for {pkg_update_list_human}')
        else:
            ret = f'Update failed for {pkg_update_list_human}, reason: {res}'
            logger.error(ret)
            raise RuntimeError(ret)
        return res is None

    def getup(self):
        '''
        Force an update check on the next tick by zeroing the
        last-update-check timestamp.
        '''
        logger.info('Check for updates now.')
        self.last_updatecheck = 0.0
        return "buildbot wakes up"

    def tick(self):
        '''
        Check for updates, create new jobs and run them.

        Returns the number of seconds the caller should sleep before the
        next tick (0 = tick again immediately).
        '''
        if not self.__buildjobs:
            # No queued work: decide whether it is time to scan for updates.
            if time() - self.last_updatecheck <= UPDATE_INTERVAL * 60:
                if not self.idle:
                    logger.info('Buildbot is idling for package updates.')
                    self.idle = True
                return 60
            else:
                self.last_updatecheck = time()
                self.idle = False
                # git pull repo (best-effort; a failed pull is only logged)
                try:
                    bash(GIT_PULL, cwd=REPO_ROOT)
                except Exception:
                    print_exc_plus()
                self.pkgconfigs = load_all_yaml()
                updates = updmgr.check_update()
                for update in updates:
                    (pkgconfig, ver, buildarchs) = update
                    march = len(buildarchs) >= 2
                    for arch in buildarchs:
                        newjob = Job(arch, pkgconfig, ver, multiarch=march)
                        self._new_buildjob(newjob)
                return 0
        else:
            # Queued work exists: run one job to completion.
            self.idle = False
            job = self.__get_job()
            if not job:
                # fix: was logging.error (root logger) instead of logger.error
                logger.error('No job got')
                return
            if job.multiarch:
                # Multiarch builds start from a fully cleaned dir.
                self.__clean(job, remove_pkg=True)
                self.__makepkg(job)
                self.__sign(job)
                if self.__upload(job):
                    self.__clean(job, remove_pkg=True)
            else:
                self.__makepkg(job)
                self.__sign(job)
                if self.__upload(job):
                    if job.pkgconfig.cleanbuild:
                        self.__clean(job, remove_pkg=True)
                    else:
                        self.__clean(job, rm_src=False, remove_pkg=True)
            self.__finish_job(job.pkgconfig.dirname)
            return 0
|
|
|
|
|
2019-04-06 20:59:27 +08:00
|
|
|
# Module-level singleton driving the build pipeline.
jobsmgr = jobsManager()
|
|
|
|
|
|
|
|
class updateManager:
    """Track known package versions and error counts, persisted in a
    JSON file, and scan package configs for new versions to build."""

    def __init__(self, filename='pkgver.json'):
        self.__filename = filename
        self.__pkgerrs = dict()   # pkgdirname -> consecutive failure count
        self.__pkgvers = dict()   # pkgdirname -> last known version string
        self.__load()
        self.__rebuilding = False  # True while a forced rebuild scan runs

    @property
    def pkgvers(self):
        return self.__pkgvers

    @property
    def pkgerrs(self):
        return self.__pkgerrs

    def __load(self):
        """Load versions and error counts from the JSON state file; start
        empty if the file is missing.  Exits on corrupt JSON."""
        if Path(self.__filename).exists():
            with open(self.__filename, "r") as f:
                try:
                    pkgdata = json.loads(f.read())
                except json.JSONDecodeError:
                    logger.error('pkgver.json - Bad json')
                    # fix: was `print_exc_plus` without parentheses,
                    # which never printed the traceback
                    print_exc_plus()
                    exit(1)
        else:
            logger.warning(f'No {self.__filename} found')
            pkgdata = dict()
        assert type(pkgdata) is dict
        for pkgname in pkgdata:
            assert type(pkgname) is str
            # Each entry is a [version, error_count] pair.
            assert len(pkgdata[pkgname]) == 2
        self.__pkgvers = {pkgname: pkgdata[pkgname][0] for pkgname in pkgdata}
        self.__pkgerrs = {pkgname: pkgdata[pkgname][1] for pkgname in pkgdata}

    def _save(self):
        """Persist versions and error counts back to the JSON file."""
        pkgdata = {pkgname: [self.__pkgvers[pkgname], self.__pkgerrs[pkgname]] for pkgname in self.__pkgvers}
        pkgdatastr = json.dumps(pkgdata, indent=4)
        pkgdatastr += '\n'
        with open(self.__filename, "w") as f:
            # fix: was `if f.writable:` (bound method, always truthy)
            if f.writable():
                f.write(pkgdatastr)
            else:
                logger.error('pkgver.json - Not writable')

    def __get_package_list(self, dirname, arch):
        """Return makepkg's package-file list for the dir, run inside the
        arch's nspawn container, with makepkg's '+' progress lines removed."""
        pkgdir = REPO_ROOT / dirname
        assert pkgdir.exists()
        pkglist = nspawn_shell(arch, MAKEPKG_PKGLIST_CMD, cwd=pkgdir, RUN_CMD_TIMEOUT=5*60)
        pkglist = pkglist.split('\n')
        pkglist = [line for line in pkglist if not line.startswith('+')]
        return pkglist

    def __get_new_ver(self, dirname, arch):
        """Version string parsed from the first package file makepkg
        reports for this dir."""
        pkgfiles = self.__get_package_list(dirname, arch)
        ver = get_pkg_details_from_name(pkgfiles[0]).ver
        return ver

    def check_update(self, rebuild_package=None):
        """Scan all package configs (or just rebuild_package) for new
        versions.  Returns a list of (pkgconfig, ver, buildarchs) tuples
        for packages that need building; per-package errors bump the
        failure counter instead of aborting the scan."""
        updates = list()
        for pkg in jobsmgr.pkgconfigs:
            try:
                # A forced-rebuild scan must not be interleaved with a
                # normal scan; stop the normal one if a rebuild is active.
                if self.__rebuilding and not rebuild_package:
                    logger.info('Stop checking updates for rebuild.')
                    break
                else:
                    self.__rebuilding = bool(rebuild_package)
                if rebuild_package and \
                        rebuild_package != pkg.dirname:
                    continue
                pkgdir = REPO_ROOT / pkg.dirname
                logger.info(f'{"[rebuild] " if rebuild_package else ""}checking update: {pkg.dirname}')
                if self.__pkgerrs.get(pkg.dirname, 0) >= 2:
                    logger.warning(f'package: {pkg.dirname} too many failures checking update')
                    # An explicit rebuild overrides the failure lockout.
                    if rebuild_package is None:
                        continue
                pkgbuild = pkgdir / 'PKGBUILD'
                archs = get_arch_from_pkgbuild(pkgbuild)
                buildarchs = [BUILD_ARCH_MAPPING.get(arch, None) for arch in archs]
                buildarchs = [arch for arch in buildarchs if arch is not None]
                if not buildarchs:
                    logger.warning(f'No build arch for {pkg.dirname}, refuse to build.')
                    continue
                # hopefully we only need to check one arch for update
                arch = 'x86_64' if 'x86_64' in buildarchs else buildarchs[0]  # prefer x86
                # run pre_update_scripts
                logger.debug('running pre-update scripts')
                for scr in getattr(pkg, 'update', list()):
                    if type(scr) is str:
                        mon_nspawn_shell(arch, scr, cwd=pkgdir, seconds=60*60)
                mon_nspawn_shell(arch, MAKEPKG_UPD_CMD, cwd=pkgdir, seconds=5*60*60,
                                 logfile=pkgdir / PKG_UPDATE_LOGFILE,
                                 short_return=True)
                if pkg.type in ('git', 'manual'):
                    ver = self.__get_new_ver(pkg.dirname, arch)
                    oldver = self.__pkgvers.get(pkg.dirname, None)
                    has_update = False
                    if rebuild_package:
                        has_update = True
                    if oldver:
                        res = vercmp(ver, oldver)
                        if res == 1:
                            has_update = True
                        elif res == -1:
                            logger.warning(f'package: {pkg.dirname} downgrade attempted')
                        elif res == 0:
                            logger.info(f'package: {pkg.dirname} is up to date')
                    else:
                        # No recorded version yet: treat as new.
                        has_update = True
                    # reset error counter
                    self.__pkgerrs[pkg.dirname] = 0
                    if has_update:
                        self.__pkgvers[pkg.dirname] = ver
                        updates.append((pkg, ver, buildarchs))
                else:
                    logger.warning(f'unknown package type: {pkg.type}')
            except Exception:
                self.__pkgerrs[pkg.dirname] = self.__pkgerrs.get(pkg.dirname, 0) + 1
                print_exc_plus()
        self._save()
        self.__rebuilding = False
        return updates
|
|
|
|
|
|
|
|
# Module-level singleton tracking package versions and update errors.
updmgr = updateManager()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2019-09-08 18:33:02 +08:00
|
|
|
def info(human=False):
    """Build a status report string.

    Includes the raw jobsManager repr unless human=True, then always a
    human-readable dump of the job queues and the idle flag.
    """
    ret = ""
    if human is False:
        ret += str(jobsmgr)
    ret += '\nhuman-readable:\n'
    ret += "".join([f"{k} = {jobsmgr.jobs[k]}\n" for k in jobsmgr.jobs])
    ret += f"idle: {jobsmgr.idle}"
    return ret
|
2019-04-10 20:36:43 +08:00
|
|
|
|
|
|
|
def rebuild_package(pkgdirname, clean=False):
    """RPC entry point: queue a rebuild of pkgdirname (optionally
    resetting its dir first).  Returns the manager's result string."""
    # fix: log message typo "accecpted" -> "accepted"
    logger.info(f'rebuild command accepted for {pkgdirname}')
    return jobsmgr.rebuild_package(pkgdirname, clean=clean)
|
|
|
|
|
|
|
|
def clean(pkgdirname):
    """RPC entry point: reset one package dir to a pristine state."""
    # fix: log message typo "accecpted" -> "accepted"
    logger.info(f'clean command accepted for {pkgdirname}')
    return jobsmgr.reset_dir(pkgdirname=pkgdirname)
|
|
|
|
|
|
|
|
def clean_all():
    """RPC entry point: reset the whole PKGBUILD checkout."""
    # fix: log message typo "accecpted" -> "accepted"
    logger.info('clean command accepted for all')
    return jobsmgr.reset_dir(all=True)
|
2019-04-06 20:59:27 +08:00
|
|
|
|
2019-09-06 15:24:48 +08:00
|
|
|
def force_upload(pkgdirname, overwrite=False):
    """RPC entry point: sign and upload an already-built package
    without rebuilding it."""
    # fix: log message typo "accecpted" -> "accepted"
    logger.info(f'force_upload command accepted for {pkgdirname}')
    return jobsmgr.force_upload_package(pkgdirname, overwrite=overwrite)
|
2019-09-06 11:23:07 +08:00
|
|
|
|
|
|
|
def getup():
    """RPC entry point: ask the jobs manager to check for updates now."""
    return jobsmgr.getup()
|
|
|
|
|
2019-09-08 17:07:47 +08:00
|
|
|
def extras(action, pkgname=None):
    """Serve read-only auxiliary queries: package lists/details, the
    main/debug logs, and per-package build/update logs.  Returns False
    for an unknown action."""
    if action.startswith("pkg"):
        # Package list and per-package detail map, generated together.
        p = extra_gen_pkglist(jobsmgr.pkgconfigs, updmgr.pkgvers, updmgr.pkgerrs)
        if action == "pkglist":
            return p[0]
        if action == "pkgdetails":
            return p[1]
        if action == "pkgdetail":
            return p[1].get(pkgname, None)
    elif action == "mainlog":
        return extra_readmainlog(debug=False)
    elif action == "debuglog":
        return extra_readmainlog(debug=True)
    elif action in ("readpkglog", "readpkgupdlog"):
        return extra_readpkglog(str(pkgname), update=(action == "readpkgupdlog"))
    return False
|
|
|
|
|
2019-04-06 20:59:27 +08:00
|
|
|
def run(funcname, args=list(), kwargs=dict()):
    """Dispatch an RPC request to one of the whitelisted module-level
    commands and return its result, or False for an unknown command."""
    if funcname in ('info', 'rebuild_package', 'clean', 'clean_all',
                    'force_upload', 'getup', 'extras'):
        logger.debug('running: %s %s %s', funcname, args, kwargs)
        # fix: look the function up in globals() instead of eval()ing a
        # string — same behavior for whitelisted names, no eval risk.
        ret = globals()[funcname](*args, **kwargs)
        logger.debug('run: done: %s %s %s', funcname, args, kwargs)
        return ret
    else:
        logger.error('unexpected: %s %s %s', funcname, args, kwargs)
        return False
|
|
|
|
|
2019-04-09 20:48:37 +08:00
|
|
|
@background
def __main():
    """Listener thread (runs in the background via @background): accept
    one connection at a time on the master socket and answer
    [funcname, args, kwargs] requests through run()."""
    while True:
        try:
            with Listener(MASTER_BIND_ADDRESS, authkey=MASTER_BIND_PASSWD) as listener:
                with listener.accept() as conn:
                    logger.debug('connection accepted from %s', listener.last_accepted)
                    myrecv = conn.recv()
                    # Only the exact [funcname, args, kwargs] triple is
                    # serviced; anything else is silently dropped.
                    if type(myrecv) is list and len(myrecv) == 3:
                        (funcname, args, kwargs) = myrecv
                        funcname = str(funcname)
                        conn.send(run(funcname, args=args, kwargs=kwargs))
        except Exception:
            # Keep serving even if one connection errors out.
            print_exc_plus()
|
2019-04-09 20:48:37 +08:00
|
|
|
|
|
|
|
if __name__ == '__main__':
    logger.info('Buildbot started.')
    __main()  # start the Listener thread
    logger.info('Listener started.')
    # Main scheduler loop: tick() returns how long to sleep before the
    # next tick (None -> 1s, 0 -> no sleep, other numbers -> seconds).
    while True:
        try:
            try:
                # Default sleep hint in case tick() raises before returning.
                ret = 1
                ret = jobsmgr.tick()
            except Exception:
                # A failed tick leaves an in-flight job behind; discard it
                # (clean_failed_job raises if there is none) and log.
                jobsmgr.clean_failed_job()
                print_exc_plus()
            if ret is None:
                sleep(1)
            elif ret == 0:
                pass
            elif type(ret) in (int, float):
                sleep(ret)
            else:
                sleep(1)
        except Exception:
            print_exc_plus()
        except KeyboardInterrupt:
            logger.info('KeyboardInterrupt')
            print_exc_plus()
            break
|