#!/usr/bin/python3
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2021-2022 Wind River Systems, Inc.
import apt
import apt_pkg
import argparse
import copy
from debian import deb822
import debrepack
import debsentry
import discovery
import dsc_depend
import dsccache
import logging
import os
import re
import repo_manage
import requests
import shutil
import signal
import subprocess
import sys
import tempfile
import time
import utils
import yaml
BUILDER_URL = os.environ.get('BUILDER_URL')
REPOMGR_URL = os.environ.get('REPOMGR_URL')
REPOMGR_ORIGIN = os.environ.get('REPOMGR_ORIGIN')
BUILD_ROOT = os.environ.get('MY_BUILD_PKG_DIR')
STX_ROOT = os.environ.get('MY_REPO_ROOT_DIR')
PKGBUILDER_ROOT = "/localdisk/pkgbuilder"
USER = os.environ.get('MYUNAME')
PROJECT = os.environ.get('PROJECT')
DISTRIBUTION = os.environ.get('DEBIAN_DISTRIBUTION')
STX_ARCH = 'amd64'
STX_META_NAME = 'stx-meta'
STX_META_PKG = 'stx-meta_1.0.orig.tar.gz'
# Different reasons can lead to package build failure
# Set maximum retry count for failed packages
MAX_PKG_BUILD_COUNT = 3
# The maximum number of parallel instances running at the same time
MAX_PARALLEL_JOBS = 30
# The default number of parallel tasks
DEFAULT_PARALLEL_TASKS = 1
# The default interval of polling build status (seconds)
DEFAULT_POLL_INTERVAL = 10
# Maximum number of package make jobs
MAX_PKG_MAKE_JOBS = 6
# The local STX repository which contains build output
REPO_BUILD = 'deb-local-build'
# The local STX source repository
REPO_SOURCE = 'deb-local-source'
# The mirror created from the shared (reuse) URL
REUSE_MIRROR = 'deb-remote-reuse'
# The maximum number of times that repomgr tries to create a repository
REPOMGR_MAX_RETRY = 3
# The time interval between retries in seconds
REPOMGR_RETRY_INTERVAL = 20
# All STX source repositories which contain a 'debian_pkg_dirs' file
STX_SOURCE_REPOS = [
'SDO-rv-service',
'ansible-playbooks',
'audit-armada-app',
'cert-manager-armada-app',
'clients',
'compile',
'config',
'config-files',
'containers',
'distributedcloud',
'distributedcloud-client',
'fault',
'gui',
'ha',
'helm-charts',
'integ',
'kernel',
'metal',
'metrics-server-armada-app',
'monitor-armada-app',
'monitoring',
'nfv',
'nginx-ingress-controller-armada-app',
'oidc-auth-armada-app',
'openstack-armada-app',
'platform-armada-app',
'portieris-armada-app',
'ptp-notification-armada-app',
'rook-ceph',
'snmp-armada-app',
'stx-puppet',
'update',
'upstream',
'utilities',
'vault-armada-app',
]
STX_DEFAULT_DISTRO = discovery.STX_DEFAULT_DISTRO
STX_DEFAULT_BUILD_TYPE = discovery.STX_DEFAULT_BUILD_TYPE
STX_DEFAULT_BUILD_TYPE_LIST = discovery.STX_DEFAULT_BUILD_TYPE_LIST
ALL_DISTROS = discovery.get_all_distros()
ALL_LAYERS = discovery.get_all_layers(distro=STX_DEFAULT_DISTRO)
ALL_BUILD_TYPES = discovery.get_all_build_types(distro=STX_DEFAULT_DISTRO)
logger = logging.getLogger('debcontroller')
utils.set_logger(logger)
def filter_depends(deps):
pkgs_list = []
deps = deps.replace('|', ',')
deps = deps.replace(' ', '').split(',')
for pkg in deps:
        pkg = re.sub(r'\(.*?\)', '', pkg)
        pkg = re.sub(r'\[.*?\]', '', pkg)
        pkg = re.sub(r'<.*?>', '', pkg)
pkgs_list.append(pkg)
return pkgs_list
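# Illustrative example of filter_depends(): a Build-Depends string such as
# 'debhelper (>= 13), dh-python | python3-all [amd64]' is flattened to
# ['debhelper', 'dh-python', 'python3-all'] -- version constraints,
# architecture qualifiers and alternatives ('|') are all stripped.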
def get_build_depends(dsc_file, all_debs):
    '''
    Get a package's build dependencies from its dsc file
    Params:
        dsc_file: path to the package's dsc file
        all_debs: set of all STX binary package names used to filter the result
    Return: list of build dependencies that are also STX binary packages
    '''
ret_deps = []
all_depends = set()
build_depends = None
build_depends_indep = None
build_depends_arch = None
    if not dsc_file.endswith('.dsc'):
        logger.error("Invalid dsc %s", dsc_file)
        return ret_deps
try:
with open(dsc_file, 'r') as fh:
dsc = deb822.Dsc(fh)
if 'Build-Depends' in dsc.keys():
build_depends = filter_depends(dsc['Build-Depends'])
logger.debug("%s build_depends: %s", dsc_file, ','.join(build_depends))
if 'Build-Depends-Indep' in dsc.keys():
build_depends_indep = filter_depends(dsc['Build-Depends-Indep'])
logger.debug("%s build_depends_indep: %s", dsc_file, ','.join(build_depends_indep))
if 'Build-Depends-Arch' in dsc.keys():
build_depends_arch = filter_depends(dsc['Build-Depends-Arch'])
logger.debug("%s build_depends_arch: %s", dsc_file, ','.join(build_depends_arch))
    except Exception as e:
        logger.error(str(e))
        logger.error("Failed to parse dsc %s", dsc_file)
        return ret_deps
    if build_depends:
        all_depends = set(build_depends)
if build_depends_indep:
all_depends = all_depends | set(build_depends_indep)
if build_depends_arch:
all_depends = all_depends | set(build_depends_arch)
for dep in all_depends:
if dep in all_debs:
ret_deps.append(dep)
if len(ret_deps) > 0:
logger.debug("STX-Depends of %s are %s:", dsc_file, ','.join(ret_deps))
return ret_deps
def get_dsc_binary_package_names(dsc_files):
    '''
    Get all binary package names from the given dsc files
    dsc_files: list of package dsc files
    '''
all_subdebs = []
for dsc_file in dsc_files:
if not dsc_file.endswith('.dsc'):
logger.error("Invalid dsc %s", dsc_file)
continue
try:
with open(dsc_file, 'r') as fh:
dsc = deb822.Dsc(fh)
if 'Binary' in dsc.keys():
subdebs = dsc['Binary'].replace(' ', '').split(',')
all_subdebs.extend(subdebs)
except Exception as e:
logger.error(str(e))
logger.error("Failed to parse dsc %s", dsc_file)
continue
return set(all_subdebs)
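# Illustrative example: a dsc whose 'Binary' field reads 'foo, foo-dev, foo-doc'
# (hypothetical names) contributes {'foo', 'foo-dev', 'foo-doc'} to the result.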
def create_dependency_graph(dscs, pkgs_pool):
deps_graph = {}
for dsc in dscs:
deps = get_build_depends(dsc, pkgs_pool)
if deps:
logger.debug("Graph-> %s:%s", dsc, ','.join(deps))
deps_graph[dsc] = deps
logger.debug("STX-Depends: length of depends graph %d", len(deps_graph))
return deps_graph
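# The returned graph maps each dsc path to the subset of its build dependencies
# that are themselves STX-built binaries, e.g. (hypothetical entry):
# {'/path/to/foo_1.0.dsc': ['bar-dev', 'baz']}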
def query_who_depends(pkgnames, deps_graph):
logger.debug("Subdebs-> %s", ','.join(pkgnames))
ddscs = []
for dsc, deps in deps_graph.items():
logger.debug("Subdebs-> %s:%s", dsc, ','.join(deps))
for subdeb in pkgnames:
if subdeb in deps:
ddscs.append(dsc)
return ddscs
def scan_all_depends(layer_pkgdirs_dscs, build_pkgdirs_dscs):
    '''
    Find the packages whose build dependencies include any of the packages
    in build_pkgdirs_dscs; this function only scans to depth 1 rather than
    recursively.
    layer_pkgdirs_dscs: maps pkg_src_dir to dsc for all STX packages belonging to the layer
    build_pkgdirs_dscs: maps pkg_src_dir to dsc for the target packages to be built
    '''
extra_build_pkgs = set()
    all_dscs = list(layer_pkgdirs_dscs.values())
all_debs = get_dsc_binary_package_names(all_dscs)
logger.debug("STX subdebs:%s are used to filter the depends", ','.join(all_debs))
logger.debug("There are %d dscs to create dependency graph", len(all_dscs))
dependency_graph = create_dependency_graph(all_dscs, all_debs)
logger.debug("There are %d dscs in build_pkgdirs_dscs", len(build_pkgdirs_dscs))
for pkgdir, dsc in build_pkgdirs_dscs.items():
subdebs = get_dsc_binary_package_names([dsc])
pkg_name = discovery.package_dir_to_package_name(pkgdir, STX_DEFAULT_DISTRO)
depender_dscs = query_who_depends(subdebs, dependency_graph)
if len(depender_dscs) == 0:
logger.debug("There are no STX packages found which depends on %s, skip", pkg_name)
continue
logger.debug("STX-Depends:%s depends on the build package %s", ','.join(depender_dscs), pkg_name)
        for dep_dsc in depender_dscs:
            dep_dir = get_pkg_dir_from_dsc(layer_pkgdirs_dscs, dep_dsc)
            if not dep_dir:
                logger.error("Failed to find package path for %s", dep_dsc)
logger.error("Skip this failure")
continue
logger.debug("STX-Depends add %s to extra build list", dep_dir)
extra_build_pkgs.add(dep_dir)
return extra_build_pkgs
def get_debs_clue(btype):
if btype != 'rt':
btype = 'std'
return os.path.join(BUILD_ROOT, 'caches', btype + '_debsentry.pkl')
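# Example: get_debs_clue('rt') yields <BUILD_ROOT>/caches/rt_debsentry.pkl;
# any build type other than 'rt' falls back to the 'std' cache file.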
def get_pkg_dir_from_dsc(dscs, dsc_path):
for pkg_dir, dsc in dscs.items():
if dsc.strip() in dsc_path:
return pkg_dir
return None
def get_dsc_path_with_pkgdir(dscs, dst_pkg_dir):
for pkg_dir, dsc in dscs.items():
if pkg_dir.strip() == dst_pkg_dir:
return dsc
return None
def get_dsc_list_from_dict(dscs_dict):
    return list(dscs_dict.values())
def get_pkgname_ver_with_deb(deb_name):
if not deb_name.endswith('.deb'):
return None
name_list = deb_name.split('_')
if len(name_list) < 2:
return None
return name_list[0], name_list[1]
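# Illustrative example: 'dpkg_1.20.12_amd64.deb' -> ('dpkg', '1.20.12');
# a file name without the '.deb' suffix or without '_' yields None.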
def get_aptcache(rootdir, repo_url, rtype, distribution):
os.makedirs(rootdir + '/etc/apt', exist_ok=True)
    try:
        if rtype == 'source':
            repo_line = ' '.join(['deb-src [trusted=yes]', repo_url, distribution, 'main\n'])
        else:
            repo_line = ' '.join(['deb [trusted=yes]', repo_url, distribution, 'main\n'])
        with open(rootdir + '/etc/apt/sources.list', 'w') as f_sources:
            f_sources.write(repo_line)
except Exception as e:
logger.error(e)
return None
try:
apt_cache = apt.Cache(rootdir=rootdir, memonly=True)
ret = apt_cache.update()
except Exception as e:
logger.error(e)
return None
if not ret:
return None
apt_cache.open()
return apt_cache
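# The generated sources.list holds a single line, for example (hypothetical URL):
# deb-src [trusted=yes] http://<repomgr>/deb-local-source <distribution> main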
def fetch_src(apt_cache, pkg_name, download_dir):
src = apt_pkg.SourceRecords()
source_lookup = src.lookup(pkg_name)
if not source_lookup:
logger.error('Source package %s does not exist.' % pkg_name)
return False
try:
for src_file in src.files:
res = requests.get(src.index.archive_uri(src_file.path), stream=True)
logger.debug('Fetch package file %s' % src.index.archive_uri(src_file.path))
with open(os.path.join(download_dir, os.path.basename(src_file.path)), 'wb') as download_file:
for chunk in res.iter_content(chunk_size=1024 * 1024):
if chunk:
download_file.write(chunk)
logger.info('Source package %s downloaded.' % pkg_name)
except Exception as e:
logger.error(e)
return False
return True
def get_shared_source(repo_url, pkg_name, distribution, download_dir):
tmp_folder = tempfile.TemporaryDirectory()
apt_cache = get_aptcache(tmp_folder.name, repo_url, 'source', distribution)
    if apt_cache is None:
tmp_folder.cleanup()
logger.warning('get_shared_source: apt update failed')
return False
ret = fetch_src(apt_cache, pkg_name, download_dir)
apt_cache.clear()
apt_cache.close()
tmp_folder.cleanup()
return ret
def req_chroots_action(action, extra_params):
"""
Base function called by each require on chroot with Restful API
Param:
action: addchroot, loadchroot, savechroot
"""
req_params = {}
req_params['project'] = PROJECT
req_params['user'] = USER
if extra_params:
req_params.update(extra_params)
try:
resp = requests.get(BUILDER_URL + action, data=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
else:
logger.debug(resp.text)
if 'success' in resp.text:
return 'success'
if 'exists' in resp.text:
return 'success'
if 'creating' in resp.text:
return 'creating'
return 'fail'
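# A minimal usage sketch: req_chroots_action('loadchroot', None) sends
# GET <BUILDER_URL>loadchroot with {'project': PROJECT, 'user': USER} and maps
# the response text ('success'/'exists'/'creating') onto a status string.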
def show_task_log(log_file, wait_time, success_str, exit_str):
"""
Display the log file on the current console
Param:
wait_time: customer defines to wait before the log file can be read
key_str: the separate string can be taken as flag to exit
"""
status = 'fail'
time.sleep(wait_time)
logger.debug(' '.join(['Waiting for log file', log_file]))
timeout = 8
time_counter = 0
while not os.path.exists(log_file):
time.sleep(1)
time_counter += 1
if time_counter > timeout:
break
if os.path.exists(log_file):
p = subprocess.Popen("tail -f " + log_file, stdout=subprocess.PIPE,
shell=True, universal_newlines=True, bufsize=0)
while p.poll() is None:
line = p.stdout.readline()
line = line.strip()
if line:
print(line)
if success_str and success_str in line:
status = 'success'
break
if exit_str and line.startswith(exit_str):
logger.error(' '.join(['Task failed. For details please',
'consult log', log_file]))
status = 'fail'
break
return status
def pkgdirs_entry_handler(entry):
if entry:
return os.path.basename(entry)
return []
def get_package_jobs(pkg_dir, distro=STX_DEFAULT_DISTRO):
    '''
    Return the number of parallel make jobs for the package.
    If a serial build is not requested by the meta file, the default
    number of jobs equals the value of the environment variable MAX_CPUS.
    '''
jobs = os.environ.get('MAX_CPUS', 1)
package = discovery.package_dir_to_package_name(pkg_dir, distro=distro)
if pkg_dir:
pkg_meta_yaml = os.path.join(pkg_dir, 'debian/meta_data.yaml')
try:
with open(pkg_meta_yaml) as f:
yaml_doc = yaml.safe_load(f)
except Exception as e:
logger.error(str(e))
else:
# serial: true [Disable parallel build]
# No 'serial:' or 'serial: false' [Support parallel build]
if yaml_doc.get('serial'):
jobs = 1
logger.debug('Requires the number of jobs %s for %s', jobs, package)
return int(jobs)
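# Illustrative debian/meta_data.yaml snippet that forces a serial build;
# a missing or falsy 'serial' key keeps the parallel job count from MAX_CPUS:
#   serial: true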
def get_never_reuse_pkgs():
never_reuse_pkgs = set()
lst_dir = os.path.join(os.environ.get('MY_REPO_ROOT_DIR'),
'stx-tools/debian-mirror-tools/config/debian/common')
never_reuse_lst = os.path.join(lst_dir, 'never_reuse.lst')
try:
with open(never_reuse_lst, 'r') as npkgs:
lines = list(line for line in (p.strip() for p in npkgs) if line)
except Exception as e:
logger.warning(str(e))
return never_reuse_pkgs
else:
for pkg in lines:
pkg = pkg.strip()
if pkg.startswith('#'):
continue
never_reuse_pkgs.add(pkg)
return never_reuse_pkgs
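# never_reuse.lst is a plain-text list, one package per line; blank lines and
# '#' comment lines are ignored, e.g. (hypothetical entry):
#   # packages that must always be rebuilt locally
#   kernel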
def move_debs_to_build_dir(dl_bin_debs_dir):
try:
        for root, _dirs, files in os.walk(dl_bin_debs_dir):
for r in files:
if r.endswith('.deb'):
pkg_item = r.split('_')
sdeb = '_'.join([pkg_item[0], pkg_item[1]])
pname = ''
for btype in ['std', 'rt']:
debs_clue = get_debs_clue(btype)
deb_file = os.path.join(root, r)
pname = debsentry.get_pkg_by_deb(debs_clue, sdeb, logger)
if pname:
pkg_build_dir = os.path.join(BUILD_ROOT, btype, pname)
os.makedirs(pkg_build_dir, exist_ok=True)
os.system('sudo rm -f %s/*.build' % (pkg_build_dir))
shutil.move(deb_file, os.path.join(pkg_build_dir, r))
logger.debug("Reuse: %s is moved to build directory", sdeb)
break
if not pname:
logger.warning("Failed to get the package name for %s", sdeb)
except Exception as e:
logger.error("An exception occurred during moving reused debs into build directory")
logger.error(str(e))
return False
return True
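# Reused debs are keyed as 'name_version' (e.g. 'foo_1.0-1', hypothetical); the
# debsentry cache maps each sub-deb back to its source package so the .deb can
# be placed under <BUILD_ROOT>/<std|rt>/<package>/.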
class repoSnapshots():
"""
The repository snapshots pool to manage the apply/release
of snapshots
"""
def __init__(self, count):
self.snapshots = {}
for s in range(count):
self.snapshots[str(s)] = 'idle'
def apply(self, dsc):
for idx, owner in self.snapshots.items():
if owner == 'idle':
self.snapshots[idx] = dsc
logger.debug("Repository snapshot %s is applied for %s", idx, dsc)
return idx
def release(self, dsc):
for idx, owner in self.snapshots.items():
if owner == dsc:
self.snapshots[idx] = 'idle'
logger.debug("Repository snapshot %s is released for %s", idx, dsc)
break
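# A minimal usage sketch: each building dsc borrows one snapshot slot and
# returns it when the build finishes; apply() returns None when no slot is idle.
#   snapshots = repoSnapshots(3)
#   idx = snapshots.apply('/path/foo_1.0.dsc')   # hypothetical dsc path
#   snapshots.release('/path/foo_1.0.dsc')       # marks the slot 'idle' again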
class BuildController():
"""
builderClient helps to create or refresh the debian build recipes
(.dsc, *.tar) based on the stx source, then it offloads the build
task to the container 'pkgbuilder' with customer's build options
The build log will be displayed on console until getting the result
'Status: success': build ok
'Status: fail': build fail
'Status: give-back': try again later
"""
def __init__(self, distro=STX_DEFAULT_DISTRO):
self.attrs = {
'mode': 'private',
'distro': distro,
'avoid': True,
'parallel': 1,
'exit_on_fail': False,
'run_tests': False,
'build_depend': False,
'upload_source': False,
'poll_build_status': True,
'reuse': False,
'reuse_max': False,
'build_all': False,
'reuse_export': True,
'dl_reused': False,
'reuse_shared_repo': True
}
self.kits = {
'dsc_cache': {},
'dsc_rcache': {},
'repo_mgr': None,
'dsc_maker': {},
}
self.lists = {
'uploaded': []
}
self.build_types = []
self.pkgs_digests = {}
self.dscs_building = []
self.extend_deps = set()
self.dscs_chroots = {}
if not self.kits['repo_mgr']:
rlogger = logging.getLogger('repo_manager')
utils.set_logger(rlogger)
self.kits['repo_mgr'] = repo_manage.RepoMgr('aptly', REPOMGR_URL,
'/tmp', REPOMGR_ORIGIN,
rlogger)
logger.debug("Successful created repo manager")
@property
def build_avoid(self):
return self.attrs['avoid']
@build_avoid.setter
def build_avoid(self, avoid):
self.attrs['avoid'] = avoid
def reuse_shared_repo(self):
try:
ret = self.kits['repo_mgr'].remove_repo('deb-local-build')
if ret:
#recreate deb-local-build
if not self.kits['repo_mgr'].upload_pkg('deb-local-build', ''):
logger.error("Failed to recreate deb-local-build")
return False
packages = self.kits['repo_mgr'].list_pkgs(REUSE_MIRROR, quiet=True)
pkgs = list()
for pkg in packages:
pkgs.append(pkg.split('_')[0])
if self.kits['repo_mgr'].copy_pkgs(REUSE_MIRROR, 'deb-local-build', ','.join(pkgs)):
logger.info("Successfully copied %s to deb-local-build", REUSE_MIRROR)
return True
except Exception as e:
logger.error(str(e))
logger.error("Failed to copy %s to deb-local-build", REUSE_MIRROR)
return False
def get_reuse(self, build_types):
if not self.attrs['reuse']:
return True
# 'reuse' should be handled for either no '-c' or '-c -all'
if not self.attrs['avoid'] and not self.attrs['build_all']:
return True
reuse_url = os.environ.get('STX_SHARED_REPO')
if not reuse_url:
logger.error("Reuse is enabled, failed to get STX_SHARED_REPO from ENV")
return False
logger.debug("Reuse is enabled, the reused repository is %s", reuse_url)
reuse_src_url = os.environ.get('STX_SHARED_SOURCE')
if not reuse_src_url:
logger.error("Reuse is enabled, failed to get STX_SHARED_SOURCE from ENV")
return False
logger.debug("Reuse is enabled, the reused source repository is %s", reuse_src_url)
kwargs = {'url': reuse_url, 'distribution': DISTRIBUTION, 'component': 'main',
'architectures': STX_ARCH}
try:
logger.info("Creating reused mirror with the shared URL, please wait...")
ret = self.kits['repo_mgr'].mirror(REUSE_MIRROR, **kwargs)
if ret and self.attrs['reuse_shared_repo']:
ret = self.reuse_shared_repo()
except Exception as e:
logger.error(str(e))
logger.error("Failed to create reused mirror with %s", reuse_url)
return False
else:
if ret:
logger.info("Successfully created reuse mirror with %s", reuse_url)
else:
logger.error("Failed to create reused mirror with %s", reuse_url)
return False
meta_dir = os.path.join(BUILD_ROOT, 'stx-meta')
os.makedirs(meta_dir, exist_ok=True)
try:
ret = get_shared_source(reuse_src_url, STX_META_NAME, DISTRIBUTION, meta_dir)
except Exception as e:
logger.error(str(e))
logger.error("Failed to download stx-meta to reuse")
return False
meta_file = os.path.join(meta_dir, STX_META_PKG)
os.system('tar zxvf ' + meta_file + ' -C ' + meta_dir)
rcache_dir = os.path.join(meta_dir, STX_META_NAME + '-1.0')
if not os.path.exists(rcache_dir):
logger.error("Failed to get remote stx-meta in %s", BUILD_ROOT)
return False
logger.info("Successfully downloaded stx-meta in %s", BUILD_ROOT)
for btype in build_types:
logger.info("Loaded remote pkl for %s", btype)
remote_pkl = os.path.join(rcache_dir, btype + '_dsc.pkl')
self.kits['dsc_rcache'][btype] = dsccache.DscCache(logger, remote_pkl)
return True
def create_repo(self, repo, retry=REPOMGR_MAX_RETRY, interval=REPOMGR_RETRY_INTERVAL):
t = 0
while t < retry:
try:
self.kits['repo_mgr'].upload_pkg(repo, None)
return True
except Exception as e:
logger.error(str(e))
logger.warning("Repo manager failed to create repositories, retry %d ...", t)
time.sleep(interval)
t = t + 1
logger.critical("Failed to create repository %s", repo)
return False
def start(self, build_types=ALL_BUILD_TYPES):
build_types_to_init = ALL_BUILD_TYPES
if build_types is not None:
build_types_to_init = build_types
self.build_types = build_types_to_init
if not self.kits['repo_mgr']:
logger.critical("Failed to create repo manager")
return False
for repo in [REPO_BUILD, REPO_SOURCE]:
if not self.create_repo(repo):
return False
caches_dir = os.path.join(BUILD_ROOT, 'caches')
os.makedirs(caches_dir, exist_ok=True)
self.lists['pkgs_not_found'] = []
self.lists['never_reuse_pkgs'] = []
if self.attrs['reuse'] and not self.attrs['reuse_max']:
self.lists['never_reuse_pkgs'] = get_never_reuse_pkgs()
for build_type in build_types_to_init:
self.lists['success_' + build_type] = []
self.lists['fail_' + build_type] = []
self.lists['build-needed_' + build_type] = []
self.lists['real_build_' + build_type] = []
self.lists['success_depends_' + build_type] = []
self.lists['fail_depends_' + build_type] = []
self.lists['reuse_' + build_type] = []
self.lists['reuse_pkgname_' + build_type] = []
if build_type not in self.kits['dsc_cache']:
# Transparently migrate dsc cache files
dsc_pkl = os.path.join(caches_dir, build_type + '_dsc.pkl')
if not os.path.exists(dsc_pkl):
old_dsc_pkl = os.path.join(BUILD_ROOT, build_type, 'dsc.pkl')
if os.path.exists(old_dsc_pkl):
os.system('mv -f %s %s' % (old_dsc_pkl, dsc_pkl))
self.kits['dsc_cache'][build_type] = dsccache.DscCache(logger, dsc_pkl)
if not self.kits['dsc_cache'][build_type]:
logger.warning('Failed to create dsc cache %s', dsc_pkl)
# Transparently migrate debsentry cache files
debsentry_pkl = os.path.join(caches_dir, build_type + '_debsentry.pkl')
if not os.path.exists(debsentry_pkl):
if build_type == 'rt':
old_debsentry_pkl = os.path.join(BUILD_ROOT, 'debs_entry_rt.pkl')
else:
old_debsentry_pkl = os.path.join(BUILD_ROOT, 'debs_entry.pkl')
if os.path.exists(old_debsentry_pkl):
os.system('mv -f %s %s' % (old_debsentry_pkl, debsentry_pkl))
recipes_dir = os.path.join(BUILD_ROOT, 'recipes')
os.makedirs(recipes_dir, exist_ok=True)
for build_type in build_types_to_init:
build_dir = os.path.join(BUILD_ROOT, build_type)
os.makedirs(build_dir, exist_ok=True)
if build_type not in self.kits['dsc_maker']:
try:
if build_type == 'rt':
self.kits['dsc_maker'][build_type] = debrepack.Parser(build_dir, recipes_dir,
'debug', None, 'rt')
else:
self.kits['dsc_maker'][build_type] = debrepack.Parser(build_dir, recipes_dir, 'debug')
except Exception as e:
logger.error(str(e))
logger.error("Failed to create dsc maker")
return False
else:
logger.info("Successfully created dsc maker for %s", build_type)
# Prepare for build output reuse
if not self.get_reuse(build_types_to_init):
return False
# load the persistent chroot on shared volume
logger.info("Loading chroot")
req_chroots_action('loadchroot', None)
logger.info("Successfully loaded chroot")
return True
def stop(self):
self.attrs['poll_build_status'] = False
self.req_stop_task()
return self.show_build_stats()
def get_reused_debs(self):
reused_debs = set()
ret = True
for btype in ['std', 'rt']:
if self.lists['reuse_pkgname_' + btype]:
debs_clue = get_debs_clue(btype)
for reused_pkg in self.lists['reuse_pkgname_' + btype]:
subdebs = debsentry.get_subdebs(debs_clue, reused_pkg, logger)
if not subdebs:
logger.error("Failed to get subdebs for %s from local debsentry", reused_pkg)
ret = False
continue
reused_debs.update(set(subdebs))
return ret, reused_debs
def download_reused_debs(self, distribution):
if not self.attrs['dl_reused']:
return True
try:
reuse_dl_dir = os.path.join(BUILD_ROOT, 'reused_debs')
if os.path.exists(reuse_dl_dir):
shutil.rmtree(reuse_dl_dir)
if os.path.exists(reuse_dl_dir):
logger.error("Failed to clean the old download directory")
logger.error("Please check and make sure it is removed")
return False
os.makedirs(reuse_dl_dir, exist_ok=True)
apt_src_file = os.path.join(BUILD_ROOT, 'aptsrc')
with open(apt_src_file, 'w') as f:
reuse_url = os.environ.get('STX_SHARED_REPO')
apt_item = ' '.join(['deb [trusted=yes]', reuse_url, distribution, 'main\n'])
f.write(apt_item)
logger.debug("Created apt source file %s to download reused debs", apt_src_file)
except Exception as e:
logger.error(str(e))
logger.error("Failed to create the apt source file")
return False
rlogger = logging.getLogger('repo_manager')
if not rlogger.handlers:
utils.set_logger(rlogger)
if os.path.exists(apt_src_file):
debs_fetcher = repo_manage.AptFetch(rlogger, apt_src_file, reuse_dl_dir)
ret, reused_deb_list = self.get_reused_debs()
reused_debs = []
if reused_deb_list:
for deb in reused_deb_list:
reused_debs.append(deb.replace('_', ' '))
else:
logger.error("Reused deb package list is NULL")
return False
try:
fetch_ret = debs_fetcher.fetch_pkg_list(reused_debs)
except Exception as e:
logger.error(str(e))
logger.error("Exception has when fetching the reused debs with repo_manage")
return False
        if len(fetch_ret['deb-failed']) == 0:
            dl_bin_debs_dir = os.path.join(reuse_dl_dir, 'downloads/binary')
            logger.info("Successfully downloaded all reused debs to %s", dl_bin_debs_dir)
            move_debs_to_build_dir(dl_bin_debs_dir)
            return True
        else:
            logger.error("Failed to download reused debs: %s", ','.join(fetch_ret['deb-failed']))
            return False
def set_reuse(self, cache_dir):
meta_files = []
if not self.attrs['reuse_export']:
return
logger.debug("Build_all done, upload cache for build output reuse")
for btype in ALL_BUILD_TYPES:
dsc_path = os.path.join(cache_dir, btype + '_dsc.pkl')
if os.path.exists(dsc_path):
meta_files.append(dsc_path)
for btype in ALL_BUILD_TYPES:
debsentry_path = os.path.join(cache_dir, btype + '_debsentry.pkl')
if os.path.exists(debsentry_path):
meta_files.append(debsentry_path)
logger.debug("All the cache files which need to be uploaded:%s", ','.join(meta_files))
if not meta_files:
return
try:
outputs = self.kits['dsc_maker']['std'].dummy_package(meta_files, STX_META_NAME)
except Exception as e:
logger.error(str(e))
logger.error("Failed to create the package %s to reuse", STX_META_NAME)
else:
logger.debug("Successfully created the package %s to reuse", STX_META_NAME)
for recipe in outputs:
if recipe.endswith(".dsc"):
logger.info("Uploading %s with dsc %s for reuse", STX_META_NAME, recipe)
if not self.upload_with_dsc(STX_META_NAME, recipe, REPO_SOURCE):
logger.warning("Failed to upload %s to %s for reuse", STX_META_NAME, REPO_SOURCE)
else:
logger.debug("Successfully uploaded %s to %s for reuse", STX_META_NAME, REPO_SOURCE)
break
def clean(self, build_types=ALL_BUILD_TYPES):
"""
Clean the build env includes cleaning all these build artifacts under
<path to>/std or <path to>/rt and empty the local build repo
"""
if build_types is None:
build_types = ALL_BUILD_TYPES
# clean build artifacts
for build_type in build_types:
build_dir = os.path.join(BUILD_ROOT, build_type)
if os.path.exists(build_dir):
                logger.debug(' '.join(['Cleaning the build directory', build_dir]))
try:
shutil.rmtree(build_dir)
except Exception as e:
logger.error(str(e))
logger.error("Failed to clean of the build directory")
else:
logger.info("Finished cleaning of the build directory")
# clean build repo
if self.kits['repo_mgr']:
if not self.kits['repo_mgr'].remove_repo(REPO_BUILD):
logger.debug(' '.join(['Failed to clean', REPO_BUILD]))
else:
logger.debug(' '.join(['Successfully cleaned', REPO_BUILD]))
cache_dir = os.path.join(BUILD_ROOT, 'caches')
os.system("rm -f %s" % os.path.join(cache_dir, '*.pkl'))
def add_chroot(self, mirror):
extra_req = {}
if mirror:
# Extra required data can be extended here, for example:
# req_param['mirror'] = "http://ftp.de.debian.org/debian"
# when 'addchroot'
extra_req['mirror'] = mirror
ret = req_chroots_action('addchroot', extra_req)
if 'creating' in ret:
key_string = "Successfully set up bullseye chroot"
state = show_task_log(os.path.join(PKGBUILDER_ROOT, USER, PROJECT, 'chroot.log'),
10, key_string, "E: ")
if 'success' in state:
req_chroots_action('savechroot', None)
ret = 'success'
else:
logger.error('Failed to add chroot, please consult the log')
ret = 'fail'
self.req_kill_task('chroot')
if 'success' in ret:
logger.debug('Parent chroot is ready to create children chroots')
ret_status = req_chroots_action('clonechroot', {'instances': self.attrs['parallel']})
            if ret_status != 'success':
logger.error("Failed to clone children chroots")
else:
logger.info("Successfully cloned children chroots")
return ret
def clone_chroots(self):
ret = req_chroots_action('clonechroot', None)
if 'success' in ret:
logger.debug('Successfully cloned chroots')
if 'fail' in ret:
logger.debug('Failed to clone chroots')
def refresh_chroots(self):
ret = req_chroots_action('refreshchroots', None)
return ret
def publish_repo(self, repo_name, suffix=None):
if suffix:
dst_repo = '-'.join([repo_name, suffix])
else:
dst_repo = repo_name
try:
logger.debug("Try to deploy the repository %s", dst_repo)
if suffix:
self.kits['repo_mgr'].deploy_repo(repo_name, suffix)
else:
self.kits['repo_mgr'].deploy_repo(repo_name)
except Exception as e:
logger.error(str(e))
logger.error("Failed to deploy the repository %s", dst_repo)
return False
else:
logger.info("Successfully deployed the repository %s", dst_repo)
return True
def remove_pkg_debs(self, package, build_type):
"""
remove package's all subdebs from the binary repo
Params:
package: target package name
build_type:
"""
logger.debug(' '.join(['Remove all old version of debs for', package]))
debs_clue = get_debs_clue(build_type)
subdebs = debsentry.get_subdebs(debs_clue, package, logger)
if not subdebs:
logger.warning('Failed to get subdebs of %s from local debsentry cache', package)
return False
for deb in subdebs:
pkg_item = deb.split('_')
# if deb = name_version
if len(pkg_item) > 1:
msg = ''.join(['package ', pkg_item[0], '(', pkg_item[1], ')'])
# if deb = name
else:
msg = ''.join(['package ', pkg_item[0]])
logger.info(' '.join(['Searching for binary', msg, 'in repository', REPO_BUILD]))
if self.kits['repo_mgr'].search_pkg(REPO_BUILD, pkg_item[0]):
logger.info('Found binary %s in repository %s', msg, REPO_BUILD)
if self.kits['repo_mgr'].delete_pkg(REPO_BUILD, pkg_item[0], 'binary', None, deploy=False):
logger.info('Successfully deleted binary %s from repository %s',
msg, REPO_BUILD)
else:
logger.info('Failed to delete binary %s from repository %s', msg,
REPO_BUILD)
''' Fixme: not sure whether it's ok to skip self.publish_repo(REPO_BUILD) here
'''
return True
def upload_with_deb(self, package, debs_dir, build_type):
"""
upload the local build debian binaries to repo manager
Params:
package: target package name
debs_dir: the directory to debian binaries
"""
logger.debug(' '.join(['Remove all old version of debs for', package]))
debs_clue = get_debs_clue(build_type)
subdebs = debsentry.get_subdebs(debs_clue, package, logger)
if subdebs:
for deb in subdebs:
pkg_item = deb.split('_')
if len(pkg_item) > 1:
msg = ''.join(['package ', pkg_item[0], '(', pkg_item[1], ')'])
else:
msg = ''.join(['package ', pkg_item[0]])
logger.info(' '.join(['Searching for binary', msg, 'in repository', REPO_BUILD]))
if self.kits['repo_mgr'].search_pkg(REPO_BUILD, pkg_item[0]):
logger.info('Found binary %s in repository %s', msg, REPO_BUILD)
if self.kits['repo_mgr'].delete_pkg(REPO_BUILD, pkg_item[0], 'binary', None, deploy=False):
logger.info('Successfully deleted binary %s from repository %s',
msg, REPO_BUILD)
else:
logger.info('Failed to delete binary %s from repository %s', msg,
REPO_BUILD)
''' Fixme: not sure whether it's ok to skip self.publish_repo(REPO_BUILD) here
'''
sdebs = []
if not os.path.exists(debs_dir):
            logger.error(' '.join(['Nonexistent directory', debs_dir]))
return False
try:
            for root, _dirs, files in os.walk(debs_dir):
for r in files:
if r.endswith('.deb'):
debname = r.split('_')[0]
self.kits['repo_mgr'].delete_pkg(REPO_BUILD, debname, 'binary', None)
logger.debug("Tried to delete the old %s from %s before uploading", debname, REPO_BUILD)
deb_file = os.path.join(root, r)
if self.kits['repo_mgr'].upload_pkg(REPO_BUILD, deb_file, deploy=False):
logger.info("Successfully uploaded %s to %s", deb_file, REPO_BUILD)
pkg_item = r.split('_')
if pkg_item and len(pkg_item) > 1:
sdebs.append('_'.join([pkg_item[0], pkg_item[1]]))
else:
logger.error("Failed to upload %s to %s", deb_file, REPO_BUILD)
return False
except Exception as e:
logger.error("An exception occurred during uploading %s(%s)", package, str(e))
return False
if sdebs:
debsentry.set_subdebs(debs_clue, package, sdebs, logger)
logger.debug("%s_%s is saved into debsentry", pkg_item[0], pkg_item[1])
return True
def upload_with_dsc(self, pkg_name, dsc, repo_name):
if not os.path.exists(dsc):
logger.error(' '.join(['Dsc file', dsc, 'does not exist']))
return False
dsc_pkg = os.path.basename(dsc).split('_')[0]
if pkg_name != dsc_pkg:
            logger.warning(''.join(['Package name passed in is ', pkg_name,
                                    ', from dsc is ', dsc_pkg, ', they do not match.']))
logger.info(' '.join(['Existing source for', dsc_pkg,
'will be deleted from repository', repo_name, 'before new source is uploaded']))
logger.info("Searching for %s in repository %s", dsc_pkg, repo_name)
if self.kits['repo_mgr'].search_pkg(repo_name, dsc_pkg, binary=False):
logger.info("Found %s in repository %s, attempting to delete", dsc_pkg, repo_name)
if not self.kits['repo_mgr'].delete_pkg(repo_name, dsc_pkg, 'source'):
logger.error("Failed to delete source %s from repository %s", dsc_pkg, repo_name)
return False
logger.info("Successfully deleted source %s from repository %s", dsc_pkg, repo_name)
else:
logger.info("can't find %s in repository %s", dsc_pkg, repo_name)
logger.info(' '.join(['Start to upload source', dsc, 'to repository', repo_name]))
if not self.kits['repo_mgr'].upload_pkg(repo_name, dsc):
logger.error("Failed to upload source %s to repository %s", dsc, repo_name)
return False
logger.info("Successfully uploaded source %s to repository %s", dsc, repo_name)
return True
def req_add_task(self, pkg_dir, dsc, build_type, snapshot_index, layer):
status = 'fail'
chroot = None
        # For serial and parallel builds, pkg_jobs takes different values
pkg_jobs = get_package_jobs(pkg_dir, self.attrs['distro'])
if pkg_jobs > self.attrs['max_make_jobs']:
pkg_jobs = self.attrs['max_make_jobs']
req_params = {}
req_params['mode'] = self.attrs['mode']
req_params['type'] = build_type
req_params['project'] = PROJECT
req_params['user'] = USER
req_params['dsc'] = dsc
req_params['run_tests'] = self.attrs['run_tests']
req_params['jobs'] = str(pkg_jobs)
req_params['snapshot_idx'] = snapshot_index
req_params['layer'] = layer
try:
resp = requests.post(BUILDER_URL + 'addtask', json=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
chroot = 'ServerError'
else:
resp_json = resp.json()
if 'success' in resp_json['status']:
status = 'success'
chroot = resp_json['msg']
else:
status = 'fail'
chroot = 'PkgbuilderFail'
return status, chroot
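    # Sketch of the JSON body POSTed to <BUILDER_URL>addtask (values illustrative):
    #   {'mode': 'private', 'type': 'std', 'project': ..., 'user': ...,
    #    'dsc': '/path/foo_1.0.dsc', 'run_tests': False, 'jobs': '4',
    #    'snapshot_idx': '0', 'layer': 'distro'}
    # The server reply is JSON carrying 'status' and 'msg' (the assigned chroot).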
def req_kill_task(self, owner, dsc=None):
req_params = {}
req_params['owner'] = owner
req_params['user'] = USER
req_params['mode'] = self.attrs['mode']
if dsc:
req_params['dsc'] = dsc
try:
resp = requests.get(BUILDER_URL + 'killtask', data=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
logger.error("Failed to request killtask to server")
else:
logger.debug(resp.text)
def req_stop_task(self):
ret = False
req_params = {}
req_params['user'] = USER
req_params['mode'] = self.attrs['mode']
try:
resp = requests.get(BUILDER_URL + 'stoptask', data=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
else:
logger.debug(resp.text)
ret = True
return ret
def create_dsc(self, pkg_name, pkg_dir, reclaim, build_type=STX_DEFAULT_BUILD_TYPE):
"""
Call dsc maker(debrepack) to generate the new dsc for package
Params:
pkg_name: package name
pkg_dir: path to the directory containing the package's debian folder
is_reclaim: If True, this is reclaim the reused packages
build_type: build type ... probably 'std' or 'rt'
Return:
status: DSC_BUILD, DSC_REUSE
dsc_file: path to dsc file
"""
status = 'DSC_BUILD'
dsc_file = None
skip_create_dsc = False
pkg_build_dir = os.path.join(BUILD_ROOT, build_type, pkg_name)
        # Only '-c' (clean, i.e. 'avoid' disabled) cleans the package build directory
if not self.attrs['avoid']:
if os.path.exists(pkg_build_dir):
try:
shutil.rmtree(pkg_build_dir)
except Exception as e:
logger.error(str(e))
else:
logger.debug("Successfully cleaned the old %s", pkg_build_dir)
os.makedirs(pkg_build_dir)
        # Create the dsc file unless build avoidance is on and the dsc already exists
new_checksum = self.kits['dsc_maker'][build_type].checksum(pkg_dir)
self.pkgs_digests[pkg_dir] = new_checksum
if self.attrs['avoid'] and self.kits['dsc_cache'][build_type]:
dsc_file, old_checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
            if dsc_file and old_checksum:
                if old_checksum == new_checksum:
logger.info("No update on package meta of %s", pkg_name)
logger.info("The dsc file is %s", dsc_file)
if os.path.exists(dsc_file):
logger.info("Skip creating dsc for %s again for it exists", pkg_name)
skip_create_dsc = True
status = 'DSC_NO_UPDATE'
else:
logger.info("The dsc file %s does not exist", dsc_file)
if not skip_create_dsc:
try:
src_mirror_dir = os.path.join(os.environ.get('STX_MIRROR'), 'sources')
dsc_recipes = self.kits['dsc_maker'][build_type].package(pkg_dir, src_mirror_dir)
except Exception as e:
logger.error(str(e))
# Exception when calling debrepack.package, should exit
return 'DSC_EXCEPTION', None
else:
if not dsc_recipes:
logger.error("Failed to create dsc for %s", pkg_name)
# Fatal error when calling debrepack.package, should exit
return 'DSC_ERROR', None
logger.debug("Successfully created dsc for %s", pkg_name)
pkg_checksum = self.pkgs_digests[pkg_dir]
dsc_path = os.path.join(pkg_build_dir, dsc_recipes[0])
self.kits['dsc_cache'][build_type].set_package(pkg_dir, dsc_path + ':' + pkg_checksum)
                dsc_file = dsc_path
# If the sharing mode is enabled
if not reclaim and self.attrs['reuse']:
if pkg_name in self.lists['never_reuse_pkgs']:
if status == 'DSC_NO_UPDATE':
logger.info("%s is forbidden to reuse, but no need to build locally again", pkg_name)
else:
logger.info("%s is forbidden to reuse and will be build locally later", pkg_name)
status = 'DSC_BUILD'
else:
# 'reuse' should be handled for either no '-c' or '-c -all'
                if self.attrs['avoid'] or self.attrs['build_all']:
logger.debug("Comparing with the remote shared dsc cache for %s", build_type)
# Only match the subdir under STX REPO
pkg_stx_path = pkg_dir.replace(os.environ.get('MY_REPO'), '')
remote_dsc, shared_checksum = self.kits['dsc_rcache'][build_type].get_package_re(pkg_stx_path)
logger.debug("Checking package=%s, shared_checksum=%s, local_checksum=%s", pkg_stx_path, shared_checksum, new_checksum)
if shared_checksum and shared_checksum == new_checksum:
logger.debug("Same checksum, %s will be reused from remote", pkg_name)
                        # DSC_REUSE tells the caller to skip building this package locally
status = 'DSC_REUSE'
                        '''
                        Here the local dsc_cache also needs to be set, which prevents a
                        subsequent build without 'reuse' from rebuilding the package
                        with the same checksum again
                        '''
if dsc_file:
self.kits['dsc_cache'][build_type].set_package(pkg_dir, dsc_file + ':' + shared_checksum)
else:
logger.warning("dsc file is invalid and can not set dsc cache for %s", pkg_name)
else:
logger.debug("Different source checksums, can not reuse the remote, continue to local build")
status = 'DSC_BUILD'
return status, dsc_file
def get_stamp(self, pkg_dir, dsc_path, build_type, state):
dsc_file, checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
if not dsc_file or not checksum:
return False
if dsc_file != dsc_path:
logger.error("Mismatched dsc path for %s", pkg_dir)
return False
stamp_dir = os.path.join(os.environ.get('MY_WORKSPACE'), build_type, 'stamp')
dsc_stamp = '.'.join([os.path.basename(dsc_file), checksum, state])
dsc_stamp_file = os.path.join(stamp_dir, dsc_stamp)
if os.path.exists(dsc_stamp_file):
return True
return False
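    # Stamp files are named '<dsc basename>.<source checksum>.<state>' under
    # $MY_WORKSPACE/<build_type>/stamp, e.g. (hypothetical):
    #   foo_1.0.dsc.d41d8cd98f00b204e9800998ecf8427e.build_done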
def set_stamp(self, pkg_dir, dsc_path, build_type, state):
dsc_file, checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
if not dsc_file or not checksum:
return False
if dsc_file != dsc_path:
logger.error("Mismatched dsc path for %s", pkg_dir)
return False
try:
stamp_dir = os.path.join(os.environ.get('MY_WORKSPACE'), build_type, 'stamp')
os.makedirs(stamp_dir, exist_ok=True)
dsc_stamp = '.'.join([os.path.basename(dsc_file), checksum, state])
os.mknod(os.path.join(stamp_dir, dsc_stamp))
except Exception as e:
logger.error(str(e))
logger.error("Failed to create stamp(%s) for %s", state, pkg_dir)
return False
        else:
            logger.info("Successfully created stamp(%s) for %s", state, pkg_dir)
            return True
def del_stamp(self, pkg_dir, dsc_path, build_type, state):
dsc_file, checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
if not dsc_file or not checksum:
return False
if dsc_file != dsc_path:
logger.warning("Mismatched dsc path for %s", pkg_dir)
return False
try:
stamp_dir = os.path.join(os.environ.get('MY_WORKSPACE'), build_type, 'stamp')
dsc_stamp = '.'.join([os.path.basename(dsc_file), checksum, state])
dsc_stamp_file = os.path.join(stamp_dir, dsc_stamp)
if not os.path.exists(dsc_stamp_file):
return True
logger.info("Stamp for %s found, now remove it", pkg_dir)
os.remove(dsc_stamp_file)
except Exception as e:
logger.error(str(e))
logger.error("Failed to remove stamp(%s) for %s", state, pkg_dir)
return False
else:
logger.info("Successfully removed stamp(%s) for %s", state, pkg_dir)
return True
def clean_build_output(self, dsc_path):
        '''
        Clean the previously generated binary packages (.deb) and the log link
        to be ready for the new build task.
        Since the log link will be polled and read to learn the build result,
        the old one is removed here, but this may have the side effect of
        losing the build history logs.
        '''
try:
            # The generated debs live in the same directory as the dsc file
            build_dir = os.path.dirname(os.path.abspath(dsc_path))
log_file = dsc_path.replace('.dsc', '_' + STX_ARCH + '.build')
if build_dir:
os.system("rm -f %s" % os.path.join(build_dir, '*.deb'))
os.system("unlink %s > /dev/null 2>&1" % log_file)
except Exception as e:
logger.error(str(e))
logger.error("Failed to remove the old deb packages or log link")
else:
logger.debug("Successfully removed the old deb packages and log link")
def poll_building_status(self):
        '''
        Poll the log links of everything in self.dscs_building; the first
        package seen to finish ('successful' or 'failed') is returned, which
        means a new build instance can be scheduled.
        '''
if not len(self.dscs_building):
logger.info("There are no build tasks running, polling status quit")
return None, 'fail'
while self.attrs['poll_build_status']:
for dsc in self.dscs_building:
log = dsc.replace('.dsc', '_' + STX_ARCH + '.build')
if not os.path.exists(log):
continue
cmd_status = 'sed -n \'/| Summary /,$P\' %s | grep \'^Status: \'' % (log)
try:
status_line = subprocess.check_output(cmd_status, shell=True).decode()
if not status_line:
continue
except Exception:
# logger.error(str(e))
continue
else:
logger.debug("Captured result of cmd_status is %s from log %s", status_line, log)
if 'successful' in status_line:
logger.info("Got success status for %s", dsc)
return dsc, 'success'
else:
logger.info("Got failed status for %s", dsc)
cmd_stage = 'sed -n \'/| Summary /,$P\' %s | grep \'^Fail-Stage: \'' % (log)
try:
stage_line = subprocess.check_output(cmd_stage, shell=True).decode()
except Exception as e:
logger.error(str(e))
else:
logger.info("Fail-State is %s for %s", stage_line, dsc)
return dsc, 'fail'
time.sleep(self.attrs['poll_interval'])
logger.debug("Polling build status done")
return None, 'fail'
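    # The poller greps the summary block of each sbuild *.build log; success is
    # recognized by 'Status: successful', anything else (e.g. 'failed',
    # 'given-back') is treated as a failure and 'Fail-Stage:' is extracted.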
def run_build_loop(self, layer_pkgdir_dscs, target_pkgdir_dscs, layer, build_type=STX_DEFAULT_BUILD_TYPE):
        '''
        Prerequisite to run this function: the phase I build (dsc creation) is done
        layer_pkgdir_dscs: dict of the full layer packages
        target_pkgdir_dscs: dict of the target packages
        layer: the layer currently built
        build_type: type of build
        '''
build_dir = os.path.join(BUILD_ROOT, build_type)
dsc_list_file = os.path.join(build_dir, layer + '_dscs.lst')
dscs_list = get_dsc_list_from_dict(target_pkgdir_dscs)
logger.debug('There are %d packages to be built in this round', len(dscs_list))
ds_logger = logging.getLogger('dsc_depend')
if not ds_logger.handlers:
utils.set_logger(ds_logger)
logger.debug("All dscs of layer %s passed to dsc_depends in file %s", layer, dsc_list_file)
logger.debug("Target dscs(%d) passed to dsc_depends: %s", len(dscs_list), str(dscs_list))
deps_resolver = dsc_depend.Dsc_build_order(dsc_list_file, dscs_list, ds_logger)
repo_snapshots = repoSnapshots(self.attrs['parallel'] + 2)
        # Track packages that are built repeatedly
build_counter = {}
# To set the right count of parallel jobs
target_pkgs_count = len(target_pkgdir_dscs)
parallel_jobs = self.attrs['parallel']
if parallel_jobs > target_pkgs_count:
parallel_jobs = target_pkgs_count
continue_build = True
# build all the target packages
while (dscs_list or self.lists['fail_' + build_type]) and continue_build:
if len(dscs_list) == 0:
                # reliable build runs in serial mode
self.attrs['parallel'] = 1
for pdir in self.lists['fail_' + build_type]:
fail_dsc = get_dsc_path_with_pkgdir(layer_pkgdir_dscs, pdir)
if fail_dsc:
dscs_list.append(fail_dsc)
if len(dscs_list) == 0:
break
logger.info("Reliable build: dsc_list_file is %s", dsc_list_file)
logger.info("Reliable build: all target dscs are: %s(%d)", ','.join(dscs_list), len(dscs_list))
deps_resolver = dsc_depend.Dsc_build_order(dsc_list_file, dscs_list, ds_logger)
build_counter = {}
                # Set continue_build False so the build ends after this round even if packages still fail
continue_build = False
logger.info("\nReliable build starts for the failed packages: %s(%d)", ','.join(dscs_list), len(dscs_list))
wait_task_done = False
# The serial build is just special case with self.attrs['parallel'] = 1
if len(self.dscs_building) < self.attrs['parallel']:
pkgs_can_build = deps_resolver.get_build_able_pkg(1)
else:
pkgs_can_build = None
if pkgs_can_build:
dsc_path = pkgs_can_build[0]
pkg_dir = get_pkg_dir_from_dsc(layer_pkgdir_dscs, dsc_path)
pkg_name = discovery.package_dir_to_package_name(pkg_dir, distro=self.attrs['distro'])
logger.info("Depends resolver told to build %s", pkg_name)
                # For layer builds, the package may have been built earlier by a higher-priority layer
in_reuse_list = False
if self.attrs['reuse'] and self.lists['reuse_' + build_type]:
if pkg_dir in self.lists['reuse_' + build_type]:
in_reuse_list = True
if pkg_dir in self.lists['success_' + build_type] or in_reuse_list:
logger.warning("Package %s has been built/reused in this round, skip", pkg_name)
deps_resolver.pkg_accomplish(dsc_path)
logger.debug("dsc_path will be removed %s, current dscs list:%s", dsc_path, ','.join(dscs_list))
if dsc_path in dscs_list:
dscs_list.remove(dsc_path)
continue
# For the depended packages, skip checking the 'avoid' option
if pkg_dir not in target_pkgdir_dscs.keys():
if self.get_stamp(pkg_dir, dsc_path, build_type, 'build_done'):
logger.info("Stamp[build_done] found for the depended package %s, skipped", pkg_name)
deps_resolver.pkg_accomplish(dsc_path)
continue
                # If the option 'build_depend' is disabled, just exit
if not self.attrs['build_depend']:
logger.error("The depended package %s is not in %s and has not been built", pkg_name, layer)
return
# For the target packages
else:
if self.attrs['avoid']:
# These packages in self.extend_deps must be rebuilt
if pkg_dir not in self.extend_deps:
if self.get_stamp(pkg_dir, dsc_path, build_type, 'build_done'):
logger.info("Stamp build_done found, package %s has been built, skipped", pkg_name)
self.lists['success_' + build_type].append(pkg_dir)
deps_resolver.pkg_accomplish(dsc_path)
logger.debug("Avoid is enabled, dsc_path will be removed %s, current dscs list:%s", dsc_path, ','.join(dscs_list))
if dsc_path in dscs_list:
dscs_list.remove(dsc_path)
continue
else:
logger.info("Since the depended package changes, %s will be rebuilt", pkg_name)
logger.info("Clean data(stamp and build output) to prepare to build %s", pkg_name)
# This package is decided to be built now
self.del_stamp(pkg_dir, dsc_path, build_type, 'build_done')
self.clean_build_output(dsc_path)
snapshot_idx = repo_snapshots.apply(dsc_path)
self.publish_repo(REPO_BUILD, snapshot_idx)
                # Request the remote pkgbuilder to add the build task
                logger.info("Requesting to add a build task for %s with snapshot %s", pkg_name, snapshot_idx)
(status, chroot) = self.req_add_task(pkg_dir, dsc_path, build_type, snapshot_idx, layer)
if 'fail' in status:
if chroot and 'ServerError' in chroot:
self.req_stop_task()
logger.error("Fatal error from pkgbuilder, exit from %s build with %s", layer, build_type)
return
# The most likely cause here is that there are no idle chroots to take this task
# Enable wait_task_done to wait for chroots releasing
logger.error("Failed to add build task for %s, wait for running task done", pkg_name)
deps_resolver.pkg_fail(dsc_path)
logger.debug("Notified dsc_depends to retrieve %s, exit exit", pkg_name)
repo_snapshots.release(dsc_path)
wait_task_done = True
else:
logger.info("Successfully sent request to add build task for %s", pkg_name)
# The build task is accepted and the package will be built
if pkg_dir not in build_counter.keys():
build_counter[pkg_dir] = 1
else:
build_counter[pkg_dir] += 1
logger.debug("Attempting to build package %s for the %d time", pkg_dir, build_counter[pkg_dir])
# Refresh the two important tables: dscs_chroots and dscs_building
self.dscs_chroots[dsc_path] = chroot
self.dscs_building.append(dsc_path)
logger.info("Appended %s to current building list", dsc_path)
# The original design is insert a console thread to display the build progress
# self.refresh_log_console()
            # dsc_depend returned no package
else:
logger.warning("dsc_depend returns no package, wait for packages building done")
if len(self.dscs_building) == 0:
return
wait_task_done = True
            # The main thread will block, polling for a finished job, when
            # the task queue reaches self.attrs['parallel']
dscs_count = len(dscs_list)
dscs_building_count = len(self.dscs_building)
if wait_task_done or dscs_building_count == self.attrs['parallel'] or (dscs_count < self.attrs['parallel'] and dscs_count == dscs_building_count):
if wait_task_done:
logger.debug("wait_task_done is enabled")
logger.info("############################################################")
logger.info("Remain packages %d, building packages %d", dscs_count, dscs_building_count)
logger.info("------------------------------------------------------------")
if self.dscs_building:
for bdsc in self.dscs_building:
pkglog = bdsc.replace('.dsc', '_' + STX_ARCH + '.build')
logger.info("Running: %s --> %s (Log:%s)", os.path.basename(bdsc), self.dscs_chroots[bdsc], pkglog)
logger.info("------------------------------------------------------------")
logger.debug("Waiting for the build task to complete......")
logger.info("############################################################")
# Only return one finished dsc
(done_dsc, status) = self.poll_building_status()
# The build task is done, possible results: success/fail/given-back
if done_dsc:
repo_snapshots.release(done_dsc)
done_pkg_dir = get_pkg_dir_from_dsc(layer_pkgdir_dscs, done_dsc)
done_pkg_name = discovery.package_dir_to_package_name(done_pkg_dir, distro=self.attrs['distro'])
# Removed from current building list
self.dscs_building.remove(done_dsc)
logger.info("Removed %s from the current building list after build done", done_pkg_name)
if 'success' in status:
logger.info("Successfully built %s, uploading to repository", done_pkg_name)
if self.upload_with_deb(done_pkg_name, os.path.join(BUILD_ROOT, build_type, done_pkg_name), build_type):
self.set_stamp(done_pkg_dir, done_dsc, build_type, state='build_done')
logger.info("Successfully uploaded all the debs of %s to repository and created stamp", done_pkg_name)
deps_resolver.pkg_accomplish(done_dsc)
logger.debug('Notified dsc_depend that %s accomplished', done_pkg_name)
if done_pkg_dir in target_pkgdir_dscs.keys() or done_pkg_dir in self.extend_deps:
dscs_list.remove(done_dsc)
logger.info('Removed %s from remain packages after successfully build', done_pkg_name)
self.lists['success_' + build_type].append(done_pkg_dir)
if done_pkg_dir in self.lists['fail_' + build_type]:
self.lists['fail_' + build_type].remove(done_pkg_dir)
logger.info('Added %s to success list success_%s', done_pkg_name, build_type)
else:
self.lists['success_depends_' + build_type].append(done_pkg_dir)
if done_pkg_dir in self.lists['fail_depends_' + build_type]:
self.lists['fail_depends_' + build_type].remove(done_pkg_dir)
logger.info('Added %s to list success_depends_%s', done_pkg_name, build_type)
else:
                        # Check whether the maximum attempt count has been reached
if build_counter[done_pkg_dir] >= MAX_PKG_BUILD_COUNT:
deps_resolver.pkg_accomplish(done_dsc)
logger.warning('Notified dsc_depend to accomplish %s after %d attempts', done_pkg_name, MAX_PKG_BUILD_COUNT)
if done_pkg_dir in target_pkgdir_dscs.keys():
self.lists['fail_' + build_type].append(done_pkg_dir)
logger.error('Added %s to fail list fail_%s', done_pkg_name, build_type)
dscs_list.remove(done_dsc)
logger.info('Removed %s from remain packages after failed build', done_pkg_name)
else:
self.lists['fail_depends_' + build_type].append(done_pkg_dir)
logger.info('Added %s to list fail_depends_%s', done_pkg_name, build_type)
else:
deps_resolver.pkg_fail(done_dsc)
logger.warning('Notified dsc_depend to retrieve %s', done_pkg_name)
self.req_kill_task('sbuild', done_dsc)
logger.debug('Require pkgbuilder to clean the task for %s', done_pkg_name)
if self.attrs['exit_on_fail']:
logger.error("Exiting due to failed package build")
return
continue
self.req_kill_task('sbuild', done_dsc)
logger.debug('Require pkgbuilder to clean the task for %s', done_pkg_name)
logger.info("Build done, publish repository %s if there are not deployed deb binaries in it", REPO_BUILD)
self.publish_repo(REPO_BUILD)
logger.info("Build done, please check the statistics")
def build_all(self, layers=ALL_LAYERS, build_types=None, packages=None):
if layers:
for layer in layers:
if layer not in ALL_LAYERS:
logger.error(' '.join([layer, 'is not a valid layer']))
return
else:
layers = ALL_LAYERS
if packages:
            # We save all pkgs specified by the user and remove each one as we find it
self.lists['pkgs_not_found'] = copy.deepcopy(packages)
if build_types:
for build_type in build_types:
if build_type not in ALL_BUILD_TYPES:
logger.error(' '.join([build_type, 'is not a valid build_type']))
return
if layers:
total_layers = len(layers)
logger.debug(' '.join(['Building ', str(total_layers), ' layers:',
','.join(layers)]))
self.build_layers(layers=layers, build_types=build_types, packages=packages)
else:
            logger.error('No layers specified for the build.')
def save_failed_pkgs(self, pkgs_exist, pkgs_target, build_type):
if not pkgs_exist:
return
pkgs_name_fail = list(set(pkgs_target) - set(pkgs_exist))
if not pkgs_name_fail:
return
for pkg in pkgs_name_fail:
for pkgdir, pkgname in pkgs_exist.items():
if pkgname == pkg:
if build_type:
self.lists['fail_' + build_type].append(pkgdir)
else:
self.lists['fail_std'].append(pkgdir)
def build_layer_and_build_type(self, layer=None, build_type=None, packages=None):
pkgs_exist = {}
if not layer:
logger.error('Failed to specify layer')
return
if not build_type:
logger.error('Failed to specify build_type')
return
pkg_dirs = discovery.package_dir_list(distro=self.attrs['distro'], layer=layer, build_type=build_type)
layer_pkg_dirs = pkg_dirs
word = "all"
if packages:
word = "selected"
pkg_dirs, pkgs_exist = discovery.filter_package_dirs_by_package_names(pkg_dirs, packages, distro=self.attrs['distro'])
self.save_failed_pkgs(pkgs_exist, packages, build_type)
layer_pkg_dirs = pkg_dirs
for pkg in self.lists['pkgs_not_found'].copy():
if pkg in pkgs_exist.values():
self.lists['pkgs_not_found'].remove(pkg)
if not pkg_dirs:
            logger.debug(' '.join(['Found no buildable packages matching selection criteria in build_type',
                                   build_type, 'of layer', layer]))
return
logger.info(' '.join(['Start to build', word, 'packages in',
'build_type', build_type,
'of layer', layer]))
packages = discovery.package_dirs_to_package_names(pkg_dirs)
logger.debug(' '.join(['Building packages:',
','.join(packages)]))
self.build_packages(layer_pkg_dirs, pkg_dirs, layer, word, build_type=build_type)
logger.info(' '.join(['Finished building packages in',
'build_type', build_type,
'of layer', layer]))
def build_layer_and_build_types(self, layer=None, build_types=STX_DEFAULT_BUILD_TYPE_LIST, packages=None):
if not layer:
logger.error('Failed to specify layer')
return
if not build_types:
logger.error('Failed to specify build_types')
return
# remove duplication
build_types = list(set(build_types))
        '''
        Signed packages like kernel-std-signed and kernel-rt-signed need
        some interactive operations before they are built, so the build
        type 'sign' is excluded from the default build types here.
        '''
if not packages and 'sign' in build_types:
build_types.remove('sign')
valid_build_type = discovery.get_layer_build_types(layer, distro=self.attrs['distro'])
# sort the build_type list so we build in the proper order
build_types = discovery.sort_build_type_list(build_types, layer, distro=self.attrs['distro'])
for build_type in build_types:
if build_type not in valid_build_type:
                logger.info(' '.join(['Skipping build_type', build_type, 'which is not valid for layer', layer]))
continue
self.build_layer_and_build_type(layer=layer, build_type=build_type, packages=packages)
return
def build_layer(self, layer=None, build_types=STX_DEFAULT_BUILD_TYPE_LIST, packages=None):
if not layer:
logger.error('Failed to specify layer')
return
if layer not in ALL_LAYERS:
logger.error(' '.join([layer, 'is not a valid layer']))
return
        logger.info(' '.join(['Starting to build all packages in layer',
                              layer]))
self.build_layer_and_build_types(layer=layer, build_types=build_types, packages=packages)
logger.info(' '.join(['Finished building packages in layer',
layer]))
return
def build_layers(self, layers=None, build_types=None, packages=None):
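        """
        Validate and sort the requested layers, then build them one by one.
        """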
if not layers:
logger.error('Failed to specify layers')
return
# remove duplication
layers = list(set(layers))
for layer in layers:
if layer not in ALL_LAYERS:
logger.error(' '.join([layer, 'is not a valid layer']))
return
# sort the layer list so we build in the proper order
layers = discovery.sort_layer_list(layers, distro=self.attrs['distro'])
        for layer in layers:
            layer_build_types = build_types
            if layer_build_types is None:
                # default to the build types valid for this layer, without
                # clobbering the caller's build_types for the remaining layers
                layer_build_types = discovery.get_layer_build_types(layer=layer, distro=self.attrs['distro'])
            self.build_layer(layer=layer, build_types=layer_build_types, packages=packages)
return
def reclaim_reused_package(self, pkgname, pkgdir, layer_pkgdir_dscs, build_dscs, no_build_dscs, fdsc_file, build_type):
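        """
        Switch a package from 'reuse' back to a local build: remove it from
        the reuse lists, recreate its dsc file and queue it for building.
        Returns True on success, False if the dsc file could not be recreated.
        """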
        self.lists['reuse_' + build_type].remove(pkgdir)
        self.lists['reuse_pkgname_' + build_type].remove(pkgname)
        status, dsc_file = self.create_dsc(pkgname, pkgdir, reclaim=True, build_type=build_type)
        if dsc_file and dsc_file.endswith('.dsc'):
            if status in ('DSC_BUILD', 'DSC_NO_UPDATE'):
                if status == 'DSC_BUILD':
                    build_dscs[pkgdir.strip()] = dsc_file
                else:
                    no_build_dscs[pkgdir.strip()] = dsc_file
                fdsc_file.write(dsc_file + '\n')
                if self.attrs['upload_source'] and self.kits['repo_mgr']:
                    self.upload_with_dsc(pkgname, dsc_file, REPO_SOURCE)
                return True
        # The dsc file could not be recreated; report failure so callers can abort
        return False
def build_packages(self, layer_pkg_dirs, pkg_dirs, layer, word, build_type=STX_DEFAULT_BUILD_TYPE):
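        """
        Create the dsc files for the packages of one layer/build_type,
        determine which of them actually need to be (re)built, import or
        reclaim reused packages, and feed the final dsc list to the build loop.
        """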
# remove duplication
pkg_dirs = list(set(pkg_dirs))
logger.debug(' '.join(['build_packages: Building: ', str(pkg_dirs)]))
pkgs_dirs_map = {}
fdsc_file = None
layer_pkgdir_dscs = {}
logger.debug('Length of build-needed_%s:%d before extending', build_type, len(self.lists['build-needed_' + build_type]))
self.lists['build-needed_' + build_type].extend(pkg_dirs)
logger.debug('Length of build-needed_%s:%d after extending', build_type, len(self.lists['build-needed_' + build_type]))
build_dir = os.path.join(BUILD_ROOT, build_type)
os.makedirs(build_dir, exist_ok=True)
dscs_list_file = os.path.join(build_dir, layer + '_dscs.lst')
logger.debug(' '.join(['Prepare', dscs_list_file, 'to deps_resolver']))
fdsc_file = open(dscs_list_file, 'w+')
fdsc_file.seek(0)
fdsc_file.truncate()
# Now check and create the debian meta one by one
need_build = {}
no_need_build = {}
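        # Both dicts map package directory -> dsc file path: need_build holds
        # packages that must be rebuilt, no_need_build those whose dsc is unchanged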
# layer_pkg_dirs contains all STX packages of this layer
for pkg_dir in layer_pkg_dirs:
dsc_file = ""
pkg_name = discovery.package_dir_to_package_name(pkg_dir, distro=self.attrs['distro'])
pkgs_dirs_map[pkg_name] = pkg_dir
status, dsc_file = self.create_dsc(pkg_name, pkg_dir, reclaim=False, build_type=build_type)
            if status == 'DSC_BUILD' and dsc_file:
                logger.debug("dsc_file = %s", dsc_file)
                # need_build will be passed to scan_all_depends() to find the dependent
                # packages. Without checking the 'build_done' stamp here, the target
                # package would not be rebuilt while all packages depending on it would
                # still be forced to rebuild; checking the stamp here fixes that issue.
                pkg_dir = pkg_dir.strip()
                if not self.get_stamp(pkg_dir, dsc_file, build_type, 'build_done'):
                    need_build[pkg_dir] = dsc_file
                else:
                    no_need_build[pkg_dir] = dsc_file
                layer_pkgdir_dscs[pkg_dir] = dsc_file
                fdsc_file.write(dsc_file + '\n')
                if self.attrs['upload_source'] and self.kits['repo_mgr']:
                    self.upload_with_dsc(pkg_name, dsc_file, REPO_SOURCE)
            elif status == 'DSC_REUSE':
                logger.info("%s will reuse the remote debs, skipping the build", pkg_name)
                self.lists['reuse_' + build_type].append(pkg_dir)
                self.lists['reuse_pkgname_' + build_type].append(pkg_name)
                layer_pkgdir_dscs[pkg_dir.strip()] = dsc_file
                fdsc_file.write(dsc_file + '\n')
                if self.attrs['upload_source'] and self.kits['repo_mgr']:
                    self.upload_with_dsc(pkg_name, dsc_file, REPO_SOURCE)
                continue
            elif status in ('DSC_EXCEPTION', 'DSC_ERROR'):
                # Exit if the dsc file cannot be created
                if fdsc_file:
                    fdsc_file.close()
                logger.error("Failed to create needed dsc file, exit")
                self.stop()
                sys.exit(1)
            elif status == 'DSC_NO_UPDATE':
                logger.debug("create_dsc returned DSC_NO_UPDATE for %s", dsc_file)
                layer_pkgdir_dscs[pkg_dir] = dsc_file
                if not self.get_stamp(pkg_dir, dsc_file, build_type, 'build_done'):
                    need_build[pkg_dir] = dsc_file
                else:
                    no_need_build[pkg_dir] = dsc_file
                fdsc_file.write(dsc_file + '\n')
# Find the dependency chain
        if word != 'selected':
            if self.attrs['build_all'] or layer:
                if self.attrs['avoid'] and self.kits['dsc_cache'][build_type]:
                    logger.info("Searching for packages that depend on the packages to be built")
                    self.extend_deps = scan_all_depends(layer_pkgdir_dscs, need_build)
                    if len(self.extend_deps) > 0:
                        logger.info("Found %d packages which should be rebuilt: %s", len(self.extend_deps), ','.join(self.extend_deps))
                    else:
                        logger.info("There are no other packages to be rebuilt")
if self.attrs['reuse'] and len(self.lists['reuse_pkgname_' + build_type]) > 0:
                    # Filter out the packages that are in the dependency chain; these packages must not be reused
for rpkg_dir in self.extend_deps:
rpkg_name = discovery.package_dir_to_package_name(rpkg_dir, distro=self.attrs['distro'])
logger.debug("%s:%s needs to be removed from reused list", rpkg_name, rpkg_dir)
if rpkg_dir in self.lists['reuse_' + build_type]:
self.lists['reuse_' + build_type].remove(rpkg_dir)
self.lists['reuse_pkgname_' + build_type].remove(rpkg_name)
logger.debug("%s is removed from reused list for dependency chain", rpkg_name)
else:
logger.debug("%s is not in reuse list", rpkg_dir)
                # If 'reuse_shared_repo' is enabled, only the local debsentry needs to be
                # replaced with the remote debsentry from stx-meta
stx_meta_dir = os.path.join(STX_META_NAME, STX_META_NAME + '-1.0')
remote_debsentry = os.path.join(BUILD_ROOT, stx_meta_dir, build_type + '_debsentry.pkl')
local_debsentry = get_debs_clue(build_type)
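                # <build_type>_debsentry.pkl maps each source package to the list of its
                # binary subdebs; the remote copy is taken from the stx-meta package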
if not self.attrs['reuse_shared_repo']:
logger.info("The reused pkgs:%s", ','.join(self.lists['reuse_pkgname_' + build_type]))
for pkgname in self.lists['reuse_pkgname_' + build_type]:
logger.debug("First try to remove all subdebs from %s for %s", REPO_BUILD, pkgname)
self.remove_pkg_debs(pkgname, build_type)
logger.debug("Then try to copy all subdebs of %s from mirror to %s", pkgname, REPO_BUILD)
logger.debug("Get the subdebs of %s with remote %s", pkgname, remote_debsentry)
debs_list = debsentry.get_subdebs(remote_debsentry, pkgname, logger)
if not debs_list:
                            '''
                            The dsc cache says to reuse this package, but its subdebs can
                            not be found in the shared debsentry; in this special case the
                            package falls back to a local build.
                            '''
logger.warning("Failed to get subdebs from the remote cache, reclaim %s", pkgname)
if self.reclaim_reused_package(pkgname, pkgs_dirs_map[pkgname], layer_pkgdir_dscs, need_build, no_need_build, fdsc_file, build_type):
logger.info("Successfully reclaimed %s when failed to get subdebs from remote cache", pkgname)
continue
else:
logger.error("Failed to reclaime %s when failed to get subdebs from remote cache", pkgname)
if fdsc_file:
fdsc_file.close()
return
                        # Keep only the package names (the part before the first '_')
                        debs_reused = ','.join([deb.split('_')[0] for deb in debs_list])
if debs_reused:
logger.info("All subdebs of %s will be imported:%s", pkgname, debs_reused)
try:
logger.info("Calls copy_pkgs: mirror=%s local_repo=%s type=binary deploy=True overwrite=True",
REUSE_MIRROR, REPO_BUILD)
ret = self.kits['repo_mgr'].copy_pkgs(REUSE_MIRROR, REPO_BUILD, debs_reused,
pkg_type='binary',
deploy=True, overwrite=True)
except Exception as e:
logger.error(str(e))
logger.error("Exception occurrs when call repomgr.copy_pkgs");
# Reclaim reused packages after a broken copy_pkgs
if self.reclaim_reused_package(pkgname, pkgs_dirs_map[pkgname], layer_pkgdir_dscs, need_build, no_need_build, fdsc_file, build_type):
logger.info("Successfully reclaimed %s after copy_pkgs broken", pkgname)
else:
logger.error("Failed to reclaime %s after copy_pkgs broken", pkgname)
if fdsc_file:
fdsc_file.close()
return
else:
if ret:
logger.debug("Successfully call repomgr.copy_pkgs to import reused debs")
# Now set the debentry cache
debs_clue = get_debs_clue(build_type)
debsentry.set_subdebs(debs_clue, pkgname, debs_list, logger)
logger.debug("Successfully updated local %s_debsentry after copying reused debs done", build_type)
else:
# Reclaim reused packages after a failed copy_pkgs
logger.warning("Failed to copy all reused debs with repomgr.copy_pkgs")
if self.reclaim_reused_package(pkgname, pkgs_dirs_map[pkgname], layer_pkgdir_dscs, need_build, no_need_build, fdsc_file, build_type):
logger.info("Successfully reclaimed %s after copy_pkgs failure", pkgname)
else:
logger.error("Failed to reclaime %s after copy_pkgs failure", pkgname)
if fdsc_file:
fdsc_file.close()
return
                # reuse_shared_repo is enabled; the time-consuming work above is not
                # needed, just copy the remote debsentry over the local one
                else:
                    shutil.copyfile(remote_debsentry, local_debsentry)
# Start to build
target_pkgdir_dscs = need_build
for pdir, dsc in need_build.items():
self.lists['real_build_' + build_type].append(pdir)
for pkg in self.extend_deps:
if pkg in layer_pkgdir_dscs.keys():
target_pkgdir_dscs[pkg] = layer_pkgdir_dscs[pkg]
self.lists['real_build_' + build_type].append(pkg)
        # no_need_build is returned by create_dsc; it only means that there
        # are no changes to the dsc file. The build stamp of the second
        # phase may still be missing, and if it is missing the package
        # still needs to be built.
target_pkgdir_dscs.update(no_need_build)
if fdsc_file:
fdsc_file.close()
if target_pkgdir_dscs:
self.run_build_loop(layer_pkgdir_dscs, target_pkgdir_dscs, layer, build_type=build_type)
else:
logger.debug("There are no debian dsc files feeded to build_packages")
def show_build_stats(self):
"""
Since all packages are put into self.lists['build-needed']
at the begining of building, we know how many
packages want to build
"""
ret_val = 0
for build_type in self.build_types:
logger.info("Total %s packages needing to be built: %d", build_type, len(self.lists['build-needed_' + build_type]))
logger.info("-------------------------------------------")
logger.info("Total %s packages reused from remote: %d", build_type, len(self.lists['reuse_' + build_type]))
reuse_list = list(set(self.lists['reuse_' + build_type]))
reuse_number = len(reuse_list)
if reuse_number > 0:
logger.info("Successfully reused: %d", reuse_number)
for pkg_dir in sorted(reuse_list):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.info(pkg_name)
self.lists['build-needed_' + build_type] = list(set(self.lists['build-needed_' + build_type]) - set(self.lists['reuse_' + build_type]))
logger.info("Total %s packages needing to be built locally: %d", build_type, len(self.lists['build-needed_' + build_type]))
success_list = list(set(self.lists['success_' + build_type]))
success_number = len(success_list)
if success_number > 0:
logger.info("Successfully built: %d", success_number)
for pkg_dir in sorted(success_list):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.info(pkg_name)
real_build_list = list(set(self.lists['real_build_' + build_type]))
real_build_number = len(real_build_list)
if real_build_number > 0:
logger.info("Successfully built in pkgbuilder: %d", real_build_number)
for pkg_dir in sorted(real_build_list):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.info(pkg_name)
success_depends_list = list(set(self.lists['success_depends_' + build_type]))
success_depends_number = len(success_depends_list)
if success_depends_number > 0:
logger.info("Successfully built depended packages: %d", success_depends_number)
for pkg_dir in sorted(success_depends_list):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.info(pkg_name)
failed_pkg_dirs = list(set(self.lists['build-needed_' + build_type]) - set(self.lists['success_' + build_type]))
failed_number = len(failed_pkg_dirs)
if failed_number > 0:
ret_val = 1
logger.error("Failed to build: %d", failed_number)
for pkg_dir in sorted(failed_pkg_dirs):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.error(pkg_name)
            # self.lists['fail_<build_type>'] is a subset of failed_pkg_dirs;
            # it specifically refers to the failed packages reported by pkgbuilder
if len(self.lists['fail_' + build_type]) > 0:
logger.info("List of failed packages:")
for pkg_dir in sorted(list(set(self.lists['fail_' + build_type]))):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.error(pkg_name)
logger.info("For the failure reason, you can check with:")
logger.info("\'cat /localdisk/builder.log | grep ERROR\' or")
logger.info("\'cat ${MY_WORKSPACE}/<std or rt>/<Failed package>/*.build\'")
if len(self.lists['pkgs_not_found']) > 0:
            # self.lists['pkgs_not_found'] is the set of user-specified packages that were never found
ret_val = 1
logger.info("-------------------------------------------")
logger.error('The following packages were not found in the building process:')
for pkg in self.lists['pkgs_not_found']:
logger.error(pkg)
return ret_val
def bc_signal_handler(signum, frame):
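    """
    Stop the build controller and exit when a registered signal arrives.
    """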
ret_val = 0
if not build_controller:
sys.exit(1)
if frame:
        logger.debug(' '.join(['Signal', str(signum), 'received']))
ret_val = build_controller.stop()
logger.debug('Exit for user interrupt')
sys.exit(ret_val)
def bc_reg_signal_handler():
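    """
    Register bc_signal_handler for SIGINT, SIGHUP and SIGTERM.
    """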
signal.signal(signal.SIGINT, bc_signal_handler)
signal.signal(signal.SIGHUP, bc_signal_handler)
signal.signal(signal.SIGTERM, bc_signal_handler)
if __name__ == "__main__":
distro = STX_DEFAULT_DISTRO
layers = None
build_types = None
packages = None
parser = argparse.ArgumentParser(description="build-pkgs helper",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-c', '--clean', help="Start a fresh build",
action='store_true')
parser.add_argument('-e', '--exit_on_fail', help="Exit for any fail",
action='store_true')
parser.add_argument('-t', '--test', help="Run package tests during build",
action='store_true')
reuse_types = parser.add_mutually_exclusive_group()
reuse_types.add_argument('--reuse', help="Reuse the debs from STX_SHARED_REPO(no signed debs)", action='store_true')
reuse_types.add_argument('--reuse_maximum', help="Reuse all debs from STX_SHARED_REPO", action='store_true')
parser.add_argument('--dl_reused', help="Download reused debs to build directory", action='store_true', default=False)
    parser.add_argument('--refresh_chroots', help="Force a refresh of the chroots before the build", action='store_true')
parser.add_argument('--parallel', help="The number of parallel build tasks", type=int, default=DEFAULT_PARALLEL_TASKS)
parser.add_argument('--poll_interval', help="The interval to poll the build status", type=int, default=DEFAULT_POLL_INTERVAL)
parser.add_argument('--max_make_jobs', help="The maximum number of jobs for package make", type=int, default=MAX_PKG_MAKE_JOBS)
    parser.add_argument('-d', '--distro', type=str,
                        help="name of the distro to build\n %s" % ALL_DISTROS,
                        default=STX_DEFAULT_DISTRO, required=False)
parser.add_argument('-b', '--build-types', type=str,
help="comma separated list of all build-types to build\n %s" % ALL_BUILD_TYPES,
default='std,rt', required=False)
parser.add_argument('-l', '--layers', type=str,
help="comma separated list of all layers to build\n %s" % ALL_LAYERS,
default=None, required=False)
    parser.add_argument('-p', '--packages', help="comma separated list of packages to build",
                        type=str)
parser.add_argument('-a', '--all', help="(DEPRECATED) Builds all packages",
action='store_true')
args = parser.parse_args()
if args.reuse or args.reuse_maximum:
if args.clean and args.packages:
logger.error("Reuse mode can not be used for the clean build of specific packages.");
sys.exit(1)
if args.distro:
if args.distro not in ALL_DISTROS:
logger.error(' '.join(['Distro', args.distro, 'not in', ','.join(ALL_DISTROS)]))
logger.error("Please consult: build-pkgs --help")
sys.exit(1)
distro = args.distro
ALL_LAYERS = discovery.get_all_layers(distro=distro)
ALL_BUILD_TYPES = discovery.get_all_build_types(distro=distro)
if args.build_types:
build_types = args.build_types.strip().split(',')
for build_type in build_types:
if build_type not in ALL_BUILD_TYPES:
logger.error(' '.join(['Build_type', build_type, 'not in', ','.join(ALL_BUILD_TYPES)]))
logger.error("Please consult: build-pkgs --help")
sys.exit(1)
if args.layers:
layers = args.layers.strip().split(',')
for layer in layers:
if layer not in ALL_LAYERS:
logger.error(' '.join(['Layer', layer, 'not in', ','.join(ALL_LAYERS)]))
logger.error("Please consult: build-pkgs --help")
sys.exit(1)
build_controller = BuildController(distro=distro)
if args.clean:
build_controller.build_avoid = False
if not args.packages and not args.layers:
build_controller.clean(build_types=build_types)
if args.exit_on_fail:
build_controller.attrs['exit_on_fail'] = True
if args.test:
build_controller.attrs['run_tests'] = True
if args.parallel:
if args.parallel < 1 or args.parallel > MAX_PARALLEL_JOBS:
logger.critical("Invalid parallel build tasks[1-%s]", MAX_PARALLEL_JOBS)
sys.exit(1)
build_controller.attrs['parallel'] = args.parallel
if args.poll_interval:
build_controller.attrs['poll_interval'] = args.poll_interval
if args.max_make_jobs:
build_controller.attrs['max_make_jobs'] = args.max_make_jobs
if args.reuse or args.reuse_maximum:
build_controller.attrs['reuse'] = True
if args.reuse_maximum:
build_controller.attrs['reuse_max'] = True
if args.dl_reused:
build_controller.attrs['dl_reused'] = True
else:
if args.dl_reused:
logger.error("option 'dl_reused' only valid if '--reuse|--reuse_maximum' is enabled, quit")
sys.exit(1)
if args.packages:
packages = args.packages.strip().split(',')
else:
if not args.layers:
build_controller.attrs['build_all'] = True
packages = None
if not build_controller.start(build_types=build_types):
logger.critical("Fail to initialize build controller, exit ......")
sys.exit(1)
bc_reg_signal_handler()
    # A mirror can be passed to add_chroot as the main package repo,
    # e.g. http://ftp.de.debian.org/debian
if build_controller.add_chroot(os.environ.get('DEBIAN_SNAPSHOT')) != 'success':
pkgbuilder_log = '/localdisk/pkgbuilder.log'
logger.error(' '.join(['Chroot is not ready, please check',
pkgbuilder_log]))
sys.exit(1)
if args.refresh_chroots:
if build_controller.refresh_chroots() != 'success':
logger.error("Failed to fresh all the idle chroots")
sys.exit(1)
else:
logger.info("Successfully refreshed all the idle chroots")
build_controller.build_all(layers=layers, build_types=build_types, packages=packages)
reuse_dl_ret = 0
build_controller.set_reuse(os.path.join(BUILD_ROOT, 'caches'))
if not build_controller.download_reused_debs('bullseye'):
reuse_dl_ret = 1
ret_value = build_controller.stop()
logger.info("build-pkgs done")
sys.exit(ret_value or reuse_dl_ret)