#!/usr/bin/python3
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (C) 2021-2022 Wind River Systems, Inc.
import apt
import apt_pkg
import argparse
import debrepack
import debsentry
import discovery
import dsc_depend
import dsccache
import logging
import os
import repo_manage
import requests
import shutil
import signal
import subprocess
import sys
import tempfile
import time
import utils
import yaml
BUILDER_URL = os.environ.get('BUILDER_URL')
REPOMGR_URL = os.environ.get('REPOMGR_URL')
REPOMGR_ORIGIN = os.environ.get('REPOMGR_ORIGIN')
BUILD_ROOT = os.environ.get('MY_BUILD_PKG_DIR')
STX_ROOT = os.environ.get('MY_REPO_ROOT_DIR')
PKGBUILDER_ROOT = "/localdisk/pkgbuilder"
USER = os.environ.get('MYUNAME')
PROJECT = os.environ.get('PROJECT')
DISTRIBUTION = os.environ.get('DEBIAN_DISTRIBUTION')
STX_ARCH = 'amd64'
STX_META_NAME = 'stx-meta'
STX_META_PKG = 'stx-meta_1.0.orig.tar.gz'
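# The dummy source package 'stx-meta' carries the shared dsc/debsentry cache
# files (*.pkl) between builders when build output reuse is enabled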
# Different reasons can lead to package build failure
# Set maximum retry count for failed packages
MAX_PKG_BUILD_COUNT = 3
# The maximum parallel instances run at the same time
MAX_PARALLEL_JOBS = 30
# The default number of parallel tasks
DEFAULT_PARALLEL_TASKS = 1
# The default interval of polling build status (seconds)
DEFAULT_POLL_INTERVAL = 10
# Maximum number of package make jobs
MAX_PKG_MAKE_JOBS = 6
# The local STX repository which contains build output
REPO_BUILD = 'deb-local-build'
# The local STX source repository
REPO_SOURCE = 'deb-local-source'
# The mirror created with reused URL
REUSE_MIRROR = 'deb-remote-reuse'
# All stx source repos which contain a 'debian_pkg_dirs' file
STX_SOURCE_REPOS = [
'SDO-rv-service',
'ansible-playbooks',
'audit-armada-app',
'cert-manager-armada-app',
'clients',
'compile',
'config',
'config-files',
'containers',
'distributedcloud',
'distributedcloud-client',
'fault',
'gui',
'ha',
'helm-charts',
'integ',
'kernel',
'metal',
'metrics-server-armada-app',
'monitor-armada-app',
'monitoring',
'nfv',
'nginx-ingress-controller-armada-app',
'oidc-auth-armada-app',
'openstack-armada-app',
'platform-armada-app',
'portieris-armada-app',
'ptp-notification-armada-app',
'rook-ceph',
'snmp-armada-app',
'stx-puppet',
'update',
'upstream',
'utilities',
'vault-armada-app',
]
STX_DEFAULT_DISTRO = discovery.STX_DEFAULT_DISTRO
STX_DEFAULT_BUILD_TYPE = discovery.STX_DEFAULT_BUILD_TYPE
STX_DEFAULT_BUILD_TYPE_LIST = discovery.STX_DEFAULT_BUILD_TYPE_LIST
ALL_DISTROS = discovery.get_all_distros()
ALL_LAYERS = discovery.get_all_layers(distro=STX_DEFAULT_DISTRO)
ALL_BUILD_TYPES = discovery.get_all_build_types(distro=STX_DEFAULT_DISTRO)
logger = logging.getLogger('debcontroller')
utils.set_logger(logger)
def get_debs_clue(btype):
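    """Return the path of the debsentry cache for the build type ('rt'; everything else maps to 'std')."""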
if btype != 'rt':
btype = 'std'
return os.path.join(BUILD_ROOT, 'caches', btype + '_debsentry.pkl')
def get_pkg_dir_from_dsc(dscs, dsc_path):
for pkg_dir, dsc in dscs.items():
if dsc.strip() in dsc_path:
return pkg_dir
return None
def get_dsc_path_with_pkgdir(dscs, dst_pkg_dir):
for pkg_dir, dsc in dscs.items():
if pkg_dir.strip() == dst_pkg_dir:
return dsc
return None
def get_dsc_list_from_dict(dscs_dict):
dsc_list = []
for pkg_dir, dsc in dscs_dict.items():
dsc_list.append(dsc)
return dsc_list
def get_pkgname_ver_with_deb(deb_name):
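    """Split a deb file name like 'name_version_arch.deb' and return (name, version), or None if it cannot be parsed."""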
if not deb_name.endswith('.deb'):
return None
name_list = deb_name.split('_')
if len(name_list) < 2:
return None
return name_list[0], name_list[1]
def get_aptcache(rootdir, repo_url, distribution):
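    """
    Create a throw-away apt cache rooted at 'rootdir' with a single deb-src
    entry for 'repo_url distribution main'.
    Return the opened apt.Cache on success, None otherwise.
    """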
try:
os.makedirs(rootdir + '/etc/apt')
f_sources = open(rootdir + '/etc/apt/sources.list', 'w')
repo_line = ' '.join(['deb-src [trusted=yes]', repo_url, distribution, 'main\n'])
f_sources.write(repo_line)
f_sources.close()
except Exception as e:
logger.error(e)
return None
try:
apt_cache = apt.Cache(rootdir=rootdir)
ret = apt_cache.update()
except Exception as e:
logger.error(e)
return None
if not ret:
return None
apt_cache.open()
return apt_cache
def fetch_src(apt_cache, pkg_name, download_dir):
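    """Download all files of the source package 'pkg_name' listed in the apt source records into 'download_dir'."""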
src = apt_pkg.SourceRecords()
source_lookup = src.lookup(pkg_name)
if not source_lookup:
logger.error('Source package %s does not exist.' % pkg_name)
return False
try:
for src_file in src.files:
res = requests.get(src.index.archive_uri(src_file.path), stream=True)
logger.debug('Fetch package file %s' % src.index.archive_uri(src_file.path))
with open(os.path.join(download_dir, os.path.basename(src_file.path)), 'wb') as download_file:
for chunk in res.iter_content(chunk_size=1024 * 1024):
if chunk:
download_file.write(chunk)
logger.info('Source package %s downloaded.' % pkg_name)
except Exception as e:
logger.error(e)
return False
return True
def get_shared_source(repo_url, pkg_name, distribution, download_dir):
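    """Fetch the source package 'pkg_name' from the shared deb-src repository 'repo_url' into 'download_dir'."""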
tmp_folder = tempfile.TemporaryDirectory()
apt_cache = get_aptcache(tmp_folder.name, repo_url, distribution)
    if apt_cache is None:
tmp_folder.cleanup()
logger.warning('get_shared_source: apt update failed')
return False
ret = fetch_src(apt_cache, pkg_name, download_dir)
apt_cache.clear()
apt_cache.close()
tmp_folder.cleanup()
return ret
def req_chroots_action(action, extra_params):
"""
    Base function used by each chroot request via the RESTful API
Param:
action: addchroot, loadchroot, savechroot
"""
req_params = {}
req_params['project'] = PROJECT
req_params['user'] = USER
if extra_params:
req_params.update(extra_params)
try:
resp = requests.get(BUILDER_URL + action, data=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
else:
logger.debug(resp.text)
if 'success' in resp.text:
return 'success'
if 'exists' in resp.text:
return 'success'
if 'creating' in resp.text:
return 'creating'
return 'fail'
def show_task_log(log_file, wait_time, success_str, exit_str):
"""
Display the log file on the current console
Param:
    wait_time: seconds to wait before the log file is expected to be readable
    success_str: substring in the log that marks the task as successful
    exit_str: line prefix in the log that marks the task as failed
"""
status = 'fail'
time.sleep(wait_time)
logger.debug(' '.join(['Waiting for log file', log_file]))
timeout = 8
time_counter = 0
while not os.path.exists(log_file):
time.sleep(1)
time_counter += 1
if time_counter > timeout:
break
if os.path.exists(log_file):
p = subprocess.Popen("tail -f " + log_file, stdout=subprocess.PIPE,
shell=True, universal_newlines=True, bufsize=0)
while p.poll() is None:
line = p.stdout.readline()
line = line.strip()
if line:
print(line)
if success_str and success_str in line:
status = 'success'
break
if exit_str and line.startswith(exit_str):
logger.error(' '.join(['Task failed. For details please',
'consult log', log_file]))
status = 'fail'
break
return status
def pkgdirs_entry_handler(entry):
if entry:
return os.path.basename(entry)
return []
def get_package_jobs(pkg_dir, distro=STX_DEFAULT_DISTRO):
'''
Returns the number of parallel jobs of the package
If the serial build is not enabled by the meta file,
the default number of jobs is equal to the value of
environment variable MAX_CPUS.
'''
jobs = os.environ.get('MAX_CPUS', 1)
package = discovery.package_dir_to_package_name(pkg_dir, distro=distro)
if pkg_dir:
pkg_meta_yaml = os.path.join(pkg_dir, 'debian/meta_data.yaml')
try:
with open(pkg_meta_yaml) as f:
yaml_doc = yaml.safe_load(f)
except Exception as e:
logger.error(str(e))
else:
# serial: true [Disable parallel build]
# No 'serial:' or 'serial: false' [Support parallel build]
if yaml_doc.get('serial'):
jobs = 1
logger.debug('Requires the number of jobs %s for %s', jobs, package)
return int(jobs)
class repoSnapshots():
"""
The repository snapshots pool to manage the apply/release
of snapshots
"""
def __init__(self, count):
self.snapshots = {}
for s in range(count):
self.snapshots[str(s)] = 'idle'
def apply(self, dsc):
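        """Reserve an idle snapshot slot for 'dsc' and return its index, or None if all slots are busy."""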
for idx, owner in self.snapshots.items():
if owner == 'idle':
self.snapshots[idx] = dsc
logger.debug("Repository snapshot %s is applied for %s", idx, dsc)
return idx
def release(self, dsc):
for idx, owner in self.snapshots.items():
if owner == dsc:
self.snapshots[idx] = 'idle'
logger.debug("Repository snapshot %s is released for %s", idx, dsc)
break
class BuildController():
"""
    BuildController creates or refreshes the debian build recipes
    (.dsc, *.tar) from the stx sources, then offloads the build task
    to the 'pkgbuilder' container with the user's build options.
    The build log is displayed on the console until one of these results appears:
    'Status: success': build ok
    'Status: fail': build failed
    'Status: give-back': try again later
"""
def __init__(self, distro=STX_DEFAULT_DISTRO):
self.attrs = {
'mode': 'private',
'distro': distro,
'avoid': True,
'parallel': 1,
'exit_on_fail': False,
'run_tests': False,
'build_depend': False,
'upload_source': False,
'poll_build_status': True,
'reuse': False,
'build_all': False,
'reuse_export': True
}
self.kits = {
'dsc_cache': {},
'dsc_rcache': {},
'repo_mgr': None,
'dsc_maker': {},
}
self.lists = {
'uploaded': []
}
self.build_types = []
self.pkgs_digests = {}
self.dscs_building = []
self.dscs_chroots = {}
if not self.kits['repo_mgr']:
rlogger = logging.getLogger('repo_manager')
utils.set_logger(rlogger)
self.kits['repo_mgr'] = repo_manage.RepoMgr('aptly', REPOMGR_URL,
'/tmp', REPOMGR_ORIGIN,
rlogger)
logger.debug("Successful created repo manager")
@property
def build_avoid(self):
return self.attrs['avoid']
@build_avoid.setter
def build_avoid(self, avoid):
self.attrs['avoid'] = avoid
def get_reuse(self, build_types):
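        """
        Prepare build output reuse: mirror the shared binary repository
        (STX_SHARED_REPO), download the shared 'stx-meta' package from
        STX_SHARED_SOURCE and load its remote dsc caches per build type.
        Return True if reuse is ready or not applicable, False on error.
        """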
if not self.attrs['reuse']:
return True
# 'reuse' should be handled for either no '-c' or '-c -all'
if not self.attrs['avoid'] and not self.attrs['build_all']:
return True
reuse_url = os.environ.get('STX_SHARED_REPO')
if not reuse_url:
logger.error("Reuse is enabled, failed to get STX_SHARED_REPO from ENV")
return False
logger.debug("Reuse is enabled, the reused repository is %s", reuse_url)
reuse_src_url = os.environ.get('STX_SHARED_SOURCE')
if not reuse_src_url:
logger.error("Reuse is enabled, failed to get STX_SHARED_SOURCE from ENV")
return False
logger.debug("Reuse is enabled, the reused source repository is %s", reuse_src_url)
kwargs = {'url': reuse_url, 'distribution': DISTRIBUTION, 'component': 'main',
'architectures': STX_ARCH}
try:
ret = self.kits['repo_mgr'].mirror(REUSE_MIRROR, **kwargs)
except Exception as e:
logger.error(str(e))
logger.error("Failed to create reused mirror with %s", reuse_url)
return False
else:
if ret:
logger.info("Successfully created reuse mirror with %s", reuse_url)
else:
logger.error("Failed to create reused mirror with %s", reuse_url)
return False
meta_dir = os.path.join(BUILD_ROOT, 'stx-meta')
os.makedirs(meta_dir, exist_ok=True)
try:
ret = get_shared_source(reuse_src_url, STX_META_NAME, DISTRIBUTION, meta_dir)
except Exception as e:
logger.error(str(e))
logger.error("Failed to download stx-meta to reuse")
return False
meta_file = os.path.join(meta_dir, STX_META_PKG)
os.system('tar zxvf ' + meta_file + ' -C ' + meta_dir)
rcache_dir = os.path.join(meta_dir, STX_META_NAME + '-1.0')
if not os.path.exists(rcache_dir):
logger.error("Failed to get remote stx-meta in %s", BUILD_ROOT)
return False
logger.info("Successfully downloaded stx-meta in %s", BUILD_ROOT)
for btype in build_types:
logger.info("Loaded remote pkl for %s", btype)
remote_pkl = os.path.join(rcache_dir, btype + '_dsc.pkl')
self.kits['dsc_rcache'][btype] = dsccache.DscCache(logger, remote_pkl)
return True
def start(self, build_types=ALL_BUILD_TYPES):
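        """
        Initialize the build environment: the local build/source repositories,
        the per build type dsc/debsentry caches and dsc makers, build output
        reuse and the persistent chroots. Return True on success.
        """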
build_types_to_init = ALL_BUILD_TYPES
if build_types is not None:
build_types_to_init = build_types
self.build_types = build_types_to_init
if not self.kits['repo_mgr']:
logger.critical("Failed to create repo manager")
return False
self.kits['repo_mgr'].upload_pkg(REPO_BUILD, None)
self.kits['repo_mgr'].upload_pkg(REPO_SOURCE, None)
caches_dir = os.path.join(BUILD_ROOT, 'caches')
os.makedirs(caches_dir, exist_ok=True)
for build_type in build_types_to_init:
self.lists['success_' + build_type] = []
self.lists['fail_' + build_type] = []
self.lists['build-needed_' + build_type] = []
self.lists['success_depends_' + build_type] = []
self.lists['fail_depends_' + build_type] = []
self.lists['reuse_' + build_type] = []
self.lists['reuse_pkgname_' + build_type] = []
if build_type not in self.kits['dsc_cache']:
# Transparently migrate dsc cache files
dsc_pkl = os.path.join(caches_dir, build_type + '_dsc.pkl')
if not os.path.exists(dsc_pkl):
old_dsc_pkl = os.path.join(BUILD_ROOT, build_type, 'dsc.pkl')
if os.path.exists(old_dsc_pkl):
os.system('mv -f %s %s' % (old_dsc_pkl, dsc_pkl))
self.kits['dsc_cache'][build_type] = dsccache.DscCache(logger, dsc_pkl)
if not self.kits['dsc_cache'][build_type]:
logger.warning('Failed to create dsc cache %s', dsc_pkl)
# Transparently migrate debsentry cache files
debsentry_pkl = os.path.join(caches_dir, build_type + '_debsentry.pkl')
if not os.path.exists(debsentry_pkl):
if build_type == 'rt':
old_debsentry_pkl = os.path.join(BUILD_ROOT, 'debs_entry_rt.pkl')
else:
old_debsentry_pkl = os.path.join(BUILD_ROOT, 'debs_entry.pkl')
if os.path.exists(old_debsentry_pkl):
os.system('mv -f %s %s' % (old_debsentry_pkl, debsentry_pkl))
recipes_dir = os.path.join(BUILD_ROOT, 'recipes')
os.makedirs(recipes_dir, exist_ok=True)
for build_type in build_types_to_init:
build_dir = os.path.join(BUILD_ROOT, build_type)
os.makedirs(build_dir, exist_ok=True)
if build_type not in self.kits['dsc_maker']:
try:
if build_type == 'rt':
self.kits['dsc_maker'][build_type] = debrepack.Parser(build_dir, recipes_dir,
'debug', None, 'rt')
else:
self.kits['dsc_maker'][build_type] = debrepack.Parser(build_dir, recipes_dir, 'debug')
except Exception as e:
logger.error(str(e))
logger.error("Failed to create dsc maker")
return False
else:
logger.info("Successfully created dsc maker for %s", build_type)
# Prepare for build output reuse
if not self.get_reuse(build_types_to_init):
return False
# load the persistent chroot on shared volume
logger.info("Loading chroot")
req_chroots_action('loadchroot', None)
logger.info("Successfully loaded chroot")
return True
def stop(self):
self.attrs['poll_build_status'] = False
self.req_stop_task()
return self.show_build_stats()
def set_reuse(self, cache_dir):
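        """
        Pack the local dsc/debsentry cache files into the dummy source package
        'stx-meta' and upload it to the local source repository so that later
        builds can reuse this build's output.
        """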
meta_files = []
if not self.attrs['reuse_export']:
return
logger.debug("Build_all done, upload cache for build output reuse")
for btype in ALL_BUILD_TYPES:
dsc_path = os.path.join(cache_dir, btype + '_dsc.pkl')
if os.path.exists(dsc_path):
meta_files.append(dsc_path)
for btype in ALL_BUILD_TYPES:
debsentry_path = os.path.join(cache_dir, btype + '_debsentry.pkl')
if os.path.exists(debsentry_path):
meta_files.append(debsentry_path)
logger.debug("All the cache files which need to be uploaded:%s", ','.join(meta_files))
if not meta_files:
return
try:
outputs = self.kits['dsc_maker']['std'].dummy_package(meta_files, STX_META_NAME)
except Exception as e:
logger.error(str(e))
logger.error("Failed to create the package %s to reuse", STX_META_NAME)
else:
logger.debug("Successfully created the package %s to reuse", STX_META_NAME)
for recipe in outputs:
if recipe.endswith(".dsc"):
logger.info("Uploading %s with dsc %s for reuse", STX_META_NAME, recipe)
if not self.upload_with_dsc(STX_META_NAME, recipe, REPO_SOURCE):
logger.warning("Failed to upload %s to %s for reuse", STX_META_NAME, REPO_SOURCE)
else:
logger.debug("Successfully uploaded %s to %s for reuse", STX_META_NAME, REPO_SOURCE)
break
def clean(self, build_types=ALL_BUILD_TYPES):
"""
        Clean the build environment: remove all build artifacts under
        <path to>/std or <path to>/rt and empty the local build repository
"""
if build_types is None:
build_types = ALL_BUILD_TYPES
# clean build artifacts
for build_type in build_types:
build_dir = os.path.join(BUILD_ROOT, build_type)
if os.path.exists(build_dir):
                logger.debug(' '.join(['Cleaning the build directory', build_dir]))
try:
shutil.rmtree(build_dir)
except Exception as e:
logger.error(str(e))
logger.error("Failed to clean of the build directory")
else:
logger.info("Finished cleaning of the build directory")
# clean build repo
if self.kits['repo_mgr']:
if not self.kits['repo_mgr'].remove_repo(REPO_BUILD):
logger.debug(' '.join(['Failed to clean', REPO_BUILD]))
else:
logger.debug(' '.join(['Successfully cleaned', REPO_BUILD]))
cache_dir = os.path.join(BUILD_ROOT, 'caches')
os.system("rm -f %s" % os.path.join(cache_dir, '*.pkl'))
def add_chroot(self, mirror):
extra_req = {}
if mirror:
# Extra required data can be extended here, for example:
# req_param['mirror'] = "http://ftp.de.debian.org/debian"
# when 'addchroot'
extra_req['mirror'] = mirror
ret = req_chroots_action('addchroot', extra_req)
if 'creating' in ret:
key_string = "Successfully set up bullseye chroot"
state = show_task_log(os.path.join(PKGBUILDER_ROOT, USER, PROJECT, 'chroot.log'),
10, key_string, "E: ")
if 'success' in state:
req_chroots_action('savechroot', None)
ret = 'success'
else:
logger.error('Failed to add chroot, please consult the log')
ret = 'fail'
self.req_kill_task('chroot')
if 'success' in ret:
logger.debug('Parent chroot is ready to create children chroots')
ret_status = req_chroots_action('clonechroot', {'instances': self.attrs['parallel']})
if not 'success' == ret_status:
logger.error("Failed to clone children chroots")
else:
logger.info("Successfully cloned children chroots")
return ret
def clone_chroots(self):
ret = req_chroots_action('clonechroot', None)
if 'success' in ret:
logger.debug('Successfully cloned chroots')
if 'fail' in ret:
logger.debug('Failed to clone chroots')
def refresh_chroots(self):
ret = req_chroots_action('refreshchroots', None)
return ret
def publish_repo(self, repo_name, suffix=None):
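        """Deploy (publish) the repository 'repo_name', optionally with the snapshot suffix 'suffix'."""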
if suffix:
dst_repo = '-'.join([repo_name, suffix])
else:
dst_repo = repo_name
try:
logger.debug("Try to deploy the repository %s", dst_repo)
if suffix:
self.kits['repo_mgr'].deploy_repo(repo_name, suffix)
else:
self.kits['repo_mgr'].deploy_repo(repo_name)
except Exception as e:
logger.error(str(e))
logger.error("Failed to deploy the repository %s", dst_repo)
return False
else:
logger.info("Successfully deployed the repository %s", dst_repo)
return True
def remove_pkg_debs(self, package, build_type):
"""
        Remove all of the package's sub debs from the binary repo
        Params:
            package: target package name
            build_type: build type, e.g. 'std' or 'rt'
"""
        logger.debug(' '.join(['Remove all old versions of debs for', package]))
debs_clue = get_debs_clue(build_type)
subdebs = debsentry.get_subdebs(debs_clue, package, logger)
if not subdebs:
logger.warning('Failed to get sub deb packages for %s', package)
return False
for deb in subdebs:
pkg_item = deb.split('_')
msg = ''.join(['package ', pkg_item[0], '(', pkg_item[1], ')'])
logger.info(' '.join(['Searching for binary', msg, 'in repository', REPO_BUILD]))
if self.kits['repo_mgr'].search_pkg(REPO_BUILD, pkg_item[0]):
logger.info('Found binary %s in repository %s', msg, REPO_BUILD)
if self.kits['repo_mgr'].delete_pkg(REPO_BUILD, pkg_item[0], 'binary', None, deploy=False):
logger.info('Successfully deleted binary %s from repository %s',
msg, REPO_BUILD)
else:
logger.info('Failed to delete binary %s from repository %s', msg,
REPO_BUILD)
''' Fixme: not sure whether it's ok to skip self.publish_repo(REPO_BUILD) here
'''
return True
def upload_with_deb(self, package, debs_dir, build_type):
"""
        Upload the locally built debian binaries to the repo manager
        Params:
            package: target package name
            debs_dir: the directory containing the debian binaries
            build_type: build type, e.g. 'std' or 'rt'
"""
        logger.debug(' '.join(['Remove all old versions of debs for', package]))
debs_clue = get_debs_clue(build_type)
subdebs = debsentry.get_subdebs(debs_clue, package, logger)
if subdebs:
for deb in subdebs:
pkg_item = deb.split('_')
msg = ''.join(['package ', pkg_item[0], '(', pkg_item[1], ')'])
logger.info(' '.join(['Searching for binary', msg, 'in repository', REPO_BUILD]))
if self.kits['repo_mgr'].search_pkg(REPO_BUILD, pkg_item[0]):
logger.info('Found binary %s in repository %s', msg, REPO_BUILD)
if self.kits['repo_mgr'].delete_pkg(REPO_BUILD, pkg_item[0], 'binary', None, deploy=False):
logger.info('Successfully deleted binary %s from repository %s',
msg, REPO_BUILD)
else:
logger.info('Failed to delete binary %s from repository %s', msg,
REPO_BUILD)
''' Fixme: not sure whether it's ok to skip self.publish_repo(REPO_BUILD) here
'''
sdebs = []
if not os.path.exists(debs_dir):
            logger.error(' '.join(['Nonexistent directory', debs_dir]))
return False
        for root, _dirs, files in os.walk(debs_dir):
for r in files:
if r.endswith('.deb'):
deb_file = os.path.join(root, r)
if self.kits['repo_mgr'].upload_pkg(REPO_BUILD, deb_file, deploy=False):
logger.info("Successfully uploaded %s to %s", deb_file, REPO_BUILD)
pkg_item = r.split('_')
if pkg_item and len(pkg_item) > 1:
sdebs.append('_'.join([pkg_item[0], pkg_item[1]]))
else:
logger.error("Failed to upload %s to %s", deb_file, REPO_BUILD)
return False
if sdebs:
debsentry.set_subdebs(debs_clue, package, sdebs, logger)
logger.debug("%s_%s is saved into debsentry", pkg_item[0], pkg_item[1])
return True
def upload_with_dsc(self, pkg_name, dsc, repo_name):
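        """
        Upload the source package described by 'dsc' to the repository
        'repo_name', deleting any existing source of the same name first.
        """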
if not os.path.exists(dsc):
logger.error(' '.join(['Dsc file', dsc, 'does not exist']))
return False
dsc_pkg = os.path.basename(dsc).split('_')[0]
if pkg_name != dsc_pkg:
            logger.warning(''.join(['Package name passed in is ', pkg_name,
                                    ', from dsc is ', dsc_pkg, ', they do not match.']))
logger.info(' '.join(['Existing source for', dsc_pkg,
'will be deleted from repository', repo_name, 'before new source is uploaded']))
logger.info("Searching for %s in repository %s", dsc_pkg, repo_name)
if self.kits['repo_mgr'].search_pkg(repo_name, dsc_pkg, binary=False):
logger.info("Found %s in repository %s, attempting to delete", dsc_pkg, repo_name)
if not self.kits['repo_mgr'].delete_pkg(repo_name, dsc_pkg, 'source'):
logger.error("Failed to delete source %s from repository %s", dsc_pkg, repo_name)
return False
logger.info("Successfully deleted source %s from repository %s", dsc_pkg, repo_name)
else:
logger.info("can't find %s in repository %s", dsc_pkg, repo_name)
logger.info(' '.join(['Start to upload source', dsc, 'to repository', repo_name]))
if not self.kits['repo_mgr'].upload_pkg(repo_name, dsc):
logger.error("Failed to upload source %s to repository %s", dsc, repo_name)
return False
logger.info("Successfully uploaded source %s to repository %s", dsc, repo_name)
return True
def req_add_task(self, pkg_dir, dsc, build_type, snapshot_index):
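        """
        Ask the remote pkgbuilder to add a build task for 'dsc'.
        Return (status, chroot): 'success' plus the assigned chroot, or
        'fail' plus 'ServerError'/'PkgbuilderFail'.
        """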
status = 'fail'
chroot = None
        # For serial build and parallel build, pkg_jobs should have different values
pkg_jobs = get_package_jobs(pkg_dir, self.attrs['distro'])
if pkg_jobs > self.attrs['max_make_jobs']:
pkg_jobs = self.attrs['max_make_jobs']
req_params = {}
req_params['mode'] = self.attrs['mode']
req_params['type'] = build_type
req_params['project'] = PROJECT
req_params['user'] = USER
req_params['dsc'] = dsc
req_params['run_tests'] = self.attrs['run_tests']
req_params['jobs'] = str(pkg_jobs)
req_params['snapshot_idx'] = snapshot_index
try:
resp = requests.post(BUILDER_URL + 'addtask', json=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
chroot = 'ServerError'
else:
resp_json = resp.json()
if 'success' in resp_json['status']:
status = 'success'
chroot = resp_json['msg']
else:
status = 'fail'
chroot = 'PkgbuilderFail'
return status, chroot
def req_kill_task(self, owner, dsc=None):
req_params = {}
req_params['owner'] = owner
req_params['user'] = USER
req_params['mode'] = self.attrs['mode']
if dsc:
req_params['dsc'] = dsc
try:
resp = requests.get(BUILDER_URL + 'killtask', data=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
logger.error("Failed to request killtask to server")
else:
logger.debug(resp.text)
def req_stop_task(self):
ret = False
req_params = {}
req_params['user'] = USER
req_params['mode'] = self.attrs['mode']
try:
resp = requests.get(BUILDER_URL + 'stoptask', data=req_params)
resp.raise_for_status()
except requests.RequestException as e:
print(e)
else:
logger.debug(resp.text)
ret = True
return ret
def create_dsc(self, pkg_name, pkg_dir, build_type=STX_DEFAULT_BUILD_TYPE):
"""
Call dsc maker(debrepack) to generate the new dsc for package
Params:
pkg_name: package name
pkg_dir: path to the directory containing the package's debian folder
build_type: build type ... probably 'std' or 'rt'
        Return: (skip_create_dsc, dsc_file)
            (True, None): package unchanged, its shared debs will be reused
            (True, dsc_file): dsc already exists and the meta data is unchanged
            (False, None): fatal error while creating the dsc
            (False, dsc_file): dsc newly created
"""
dsc_file = None
skip_create_dsc = False
# Check whether there are changes on package's debian folder
new_checksum = self.kits['dsc_maker'][build_type].checksum(pkg_dir)
# If the sharing mode is enabled
if self.attrs['reuse']:
# 'reuse' should be handled for either no '-c' or '-c -all'
if self.attrs['avoid'] or (self.attrs['build_all'] and not self.attrs['avoid']):
logger.debug("Compare with the remote shared dsc cache for %s", build_type)
# Only match the subdir under STX REPO
pkg_stx_path = pkg_dir.replace(os.environ.get('MY_REPO'), '')
remote_dsc, shared_checksum = self.kits['dsc_rcache'][build_type].get_package_re(pkg_stx_path)
logger.debug("Checking package=%s, shared_checksum=%s, local_checksum=%s", pkg_stx_path, shared_checksum, new_checksum)
if shared_checksum and shared_checksum == new_checksum:
logger.debug("No updates on debian meta source compared with the remote shared")
# True None: just continue in the external for loop
skip_create_dsc = True
'''
                    The local dsc_cache also needs to be set here; this prevents a
                    subsequent build without 'reuse' from rebuilding a package whose
                    checksum has not changed
'''
self.kits['dsc_cache'][build_type].set_package(pkg_dir, 'reuse:' + shared_checksum)
return skip_create_dsc, None
else:
logger.debug("The remote checksum is different to the local checksum, now follow the local way")
self.pkgs_digests[pkg_dir] = new_checksum
if self.attrs['avoid'] and self.kits['dsc_cache'][build_type]:
dsc_file, old_checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
if dsc_file and old_checksum:
if old_checksum and old_checksum == new_checksum:
logger.info("No update on package meta of %s", pkg_name)
'''
                    The special value 'reuse' in the dsc field indicates that this
                    package previously came from the shared repo and has no changes,
                    so it continues to be reused
'''
if dsc_file == 'reuse':
logger.info("%s is a reused package which has no meta changes", pkg_name)
skip_create_dsc = True
return skip_create_dsc, None
if os.path.exists(dsc_file):
logger.info("Skip creating dsc for %s again", pkg_name)
skip_create_dsc = True
# True not None: just continue in the external for loop
return skip_create_dsc, dsc_file
else:
logger.info("Found %s in dsc_cache, but does not exist, need to create", pkg_name)
logger.debug("Be ready to create dsc for %s", pkg_dir)
pkg_build_dir = os.path.join(BUILD_ROOT, build_type, pkg_name)
if os.path.exists(pkg_build_dir):
try:
shutil.rmtree(pkg_build_dir)
except Exception as e:
logger.error(str(e))
else:
logger.debug("Successfully clean the old %s", pkg_build_dir)
os.makedirs(pkg_build_dir)
try:
src_mirror_dir = os.path.join(os.environ.get('STX_MIRROR'), 'sources')
dsc_recipes = self.kits['dsc_maker'][build_type].package(pkg_dir, src_mirror_dir)
except Exception as e:
logger.error(str(e))
# False None: Fatal error, should exit
return skip_create_dsc, None
else:
if not dsc_recipes:
logger.error("Failed to create dsc for %s", pkg_name)
# False None: Fatal error, should exit
return skip_create_dsc, None
logger.debug("Successfully created dsc for %s", pkg_name)
pkg_checksum = self.pkgs_digests[pkg_dir]
dsc_path = os.path.join(pkg_build_dir, dsc_recipes[0])
self.kits['dsc_cache'][build_type].set_package(pkg_dir, dsc_path + ':' + pkg_checksum)
# False not None: normal case
return skip_create_dsc, os.path.join(pkg_build_dir, dsc_recipes[0])
def get_stamp(self, pkg_dir, dsc_path, build_type, state):
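        """Return True if the stamp '<dsc>.<checksum>.<state>' exists for this package and build type."""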
dsc_file, checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
if not dsc_file or not checksum:
return False
if dsc_file != dsc_path:
logger.error("Mismatched dsc path for %s", pkg_dir)
return False
stamp_dir = os.path.join(os.environ.get('MY_WORKSPACE'), build_type, 'stamp')
dsc_stamp = '.'.join([os.path.basename(dsc_file), checksum, state])
dsc_stamp_file = os.path.join(stamp_dir, dsc_stamp)
        return os.path.exists(dsc_stamp_file)
def set_stamp(self, pkg_dir, dsc_path, build_type, state):
dsc_file, checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
if not dsc_file or not checksum:
return False
if dsc_file != dsc_path:
logger.error("Mismatched dsc path for %s", pkg_dir)
return False
try:
stamp_dir = os.path.join(os.environ.get('MY_WORKSPACE'), build_type, 'stamp')
os.makedirs(stamp_dir, exist_ok=True)
dsc_stamp = '.'.join([os.path.basename(dsc_file), checksum, state])
os.mknod(os.path.join(stamp_dir, dsc_stamp))
except Exception as e:
logger.error(str(e))
logger.error("Failed to create stamp(%s) for %s", state, pkg_dir)
return False
else:
logger.info("Successfully create stamp(%s) for %s", state, pkg_dir)
return False
def del_stamp(self, pkg_dir, dsc_path, build_type, state):
dsc_file, checksum = self.kits['dsc_cache'][build_type].get_package(pkg_dir)
if not dsc_file or not checksum:
return False
if dsc_file != dsc_path:
logger.warning("Mismatched dsc path for %s", pkg_dir)
return False
try:
stamp_dir = os.path.join(os.environ.get('MY_WORKSPACE'), build_type, 'stamp')
dsc_stamp = '.'.join([os.path.basename(dsc_file), checksum, state])
dsc_stamp_file = os.path.join(stamp_dir, dsc_stamp)
if not os.path.exists(dsc_stamp_file):
return True
logger.info("Stamp for %s found, now remove it", pkg_dir)
os.remove(dsc_stamp_file)
except Exception as e:
logger.error(str(e))
logger.error("Failed to remove stamp(%s) for %s", state, pkg_dir)
return False
else:
logger.info("Successfully removed stamp(%s) for %s", state, pkg_dir)
return True
def clean_build_output(self, dsc_path):
'''
        Clean the previously generated .deb binaries and the build log link so
        the directory is ready for a new build task.
        Since the log link is polled and read to determine the build result, the
        old one is removed here; this may have the side effect of losing the
        build history logs
'''
try:
            build_dir = os.path.dirname(dsc_path)
log_file = dsc_path.replace('.dsc', '_' + STX_ARCH + '.build')
if build_dir:
os.system("rm -f %s" % os.path.join(build_dir, '*.deb'))
os.system("unlink %s > /dev/null 2>&1" % log_file)
except Exception as e:
logger.error(str(e))
logger.error("Failed to remove the old deb packages or log link")
else:
logger.debug("Successfully removed the old deb packages and log link")
def poll_building_status(self):
'''
        Poll the log links of all dscs in self.dscs_building; any package that
        is done ('successful' or 'failed') is returned, which means a new build
        instance can be started
'''
        if not self.dscs_building:
logger.info("There are no build tasks running, polling status quit")
return None, 'fail'
while self.attrs['poll_build_status']:
for dsc in self.dscs_building:
log = dsc.replace('.dsc', '_' + STX_ARCH + '.build')
if not os.path.exists(log):
continue
cmd_status = 'sed -n \'/| Summary /,$P\' %s | grep \'^Status: \'' % (log)
try:
status_line = subprocess.check_output(cmd_status, shell=True).decode()
if not status_line:
continue
except Exception:
# logger.error(str(e))
continue
else:
logger.debug("Captured result of cmd_status is %s from log %s", status_line, log)
if 'successful' in status_line:
logger.info("Got success status for %s", dsc)
return dsc, 'success'
else:
logger.info("Got failed status for %s", dsc)
cmd_stage = 'sed -n \'/| Summary /,$P\' %s | grep \'^Fail-Stage: \'' % (log)
try:
stage_line = subprocess.check_output(cmd_stage, shell=True).decode()
except Exception as e:
logger.error(str(e))
else:
logger.info("Fail-State is %s for %s", stage_line, dsc)
return dsc, 'fail'
time.sleep(self.attrs['poll_interval'])
logger.debug("Polling build status done")
return None, 'fail'
def run_build_loop(self, layer_pkgdir_dscs, target_pkgdir_dscs, layer, build_type=STX_DEFAULT_BUILD_TYPE):
'''
        Prerequisite: the phase I build (dsc creation) must be done before this function runs
layer_pkgdir_dscs: Dict of the full layer packages
target_pkgdir_dscs: Dict of the target packages
        layer: The layer currently being built
build_type: type of build
'''
build_dir = os.path.join(BUILD_ROOT, build_type)
dsc_list_file = os.path.join(build_dir, layer + '_dscs.lst')
dscs_list = get_dsc_list_from_dict(target_pkgdir_dscs)
logger.debug('There are %d packages to be built in this round', len(dscs_list))
ds_logger = logging.getLogger('dsc_depend')
if not ds_logger.handlers:
utils.set_logger(ds_logger)
logger.debug("All dscs of layer %s passed to dsc_depends in file %s", layer, dsc_list_file)
logger.debug("Target dscs(%d) passed to dsc_depends: %s", len(dscs_list), str(dscs_list))
deps_resolver = dsc_depend.Dsc_build_order(dsc_list_file, dscs_list, ds_logger)
repo_snapshots = repoSnapshots(self.attrs['parallel'] + 2)
        # Track packages that have been built repeatedly
build_counter = {}
# To set the right count of parallel jobs
target_pkgs_count = len(target_pkgdir_dscs)
parallel_jobs = self.attrs['parallel']
if parallel_jobs > target_pkgs_count:
parallel_jobs = target_pkgs_count
continue_build = True
# build all the target packages
while (dscs_list or self.lists['fail_' + build_type]) and continue_build:
if len(dscs_list) == 0:
                # The reliable build is a serial build mode
self.attrs['parallel'] = 1
for pdir in self.lists['fail_' + build_type]:
fail_dsc = get_dsc_path_with_pkgdir(layer_pkgdir_dscs, pdir)
if fail_dsc:
dscs_list.append(fail_dsc)
if len(dscs_list) == 0:
break
logger.info("Reliable build: dsc_list_file is %s", dsc_list_file)
logger.info("Reliable build: all target dscs are: %s(%d)", ','.join(dscs_list), len(dscs_list))
deps_resolver = dsc_depend.Dsc_build_order(dsc_list_file, dscs_list, ds_logger)
build_counter = {}
                # Set continue_build False so the build ends after this round even if packages still fail
continue_build = False
logger.info("\nReliable build starts for the failed packages: %s(%d)", ','.join(dscs_list), len(dscs_list))
wait_task_done = False
            # A serial build is just a special case with self.attrs['parallel'] = 1
if len(self.dscs_building) < self.attrs['parallel']:
pkgs_can_build = deps_resolver.get_build_able_pkg(1)
else:
pkgs_can_build = None
if pkgs_can_build:
dsc_path = pkgs_can_build[0]
pkg_dir = get_pkg_dir_from_dsc(layer_pkgdir_dscs, dsc_path)
pkg_name = discovery.package_dir_to_package_name(pkg_dir, distro=self.attrs['distro'])
logger.info("Depends resolver told to build %s", pkg_name)
                # For layer builds, the package may have been built before in a layer with higher priority
if pkg_dir in self.lists['success_' + build_type]:
logger.warning("Package %s has been built in this round, skip", pkg_name)
deps_resolver.pkg_accomplish(dsc_path)
logger.debug("dsc_path will be removed %s, current dscs list:%s", dsc_path, ','.join(dscs_list))
if dsc_path in dscs_list:
dscs_list.remove(dsc_path)
continue
# For the depended packages, skip checking the 'avoid' option
if pkg_dir not in target_pkgdir_dscs.keys():
if self.get_stamp(pkg_dir, dsc_path, build_type, 'build_done'):
logger.info("Stamp[build_done] found for the depended package %s, skipped", pkg_name)
deps_resolver.pkg_accomplish(dsc_path)
continue
                    # If the option 'build_depend' is disabled, just exit
                    if not self.attrs['build_depend']:
                        logger.error("The depended package %s is not in layer %s and has not been built", pkg_name, layer)
return
# For the target packages
else:
if self.attrs['avoid']:
if self.get_stamp(pkg_dir, dsc_path, build_type, 'build_done'):
logger.info("Stamp build_done found, package %s has been built, skipped", pkg_name)
self.lists['success_' + build_type].append(pkg_dir)
deps_resolver.pkg_accomplish(dsc_path)
logger.debug("Avoid is enabled, dsc_path will be removed %s, current dscs list:%s", dsc_path, ','.join(dscs_list))
if dsc_path in dscs_list:
dscs_list.remove(dsc_path)
continue
logger.info("Clean data(stamp and build output) to prepare to build %s", pkg_name)
# This package is decided to be built now
self.del_stamp(pkg_dir, dsc_path, build_type, 'build_done')
self.clean_build_output(dsc_path)
snapshot_idx = repo_snapshots.apply(dsc_path)
self.publish_repo(REPO_BUILD, snapshot_idx)
                # Request the remote pkgbuilder to add a build task
                logger.info("Requesting to add a build task for %s with snapshot %s", pkg_name, snapshot_idx)
(status, chroot) = self.req_add_task(pkg_dir, dsc_path, build_type, snapshot_idx)
if 'fail' in status:
if chroot and 'ServerError' in chroot:
self.req_stop_task()
logger.error("Fatal error from pkgbuilder, exit from %s build with %s", layer, build_type)
return
# The most likely cause here is that there are no idle chroots to take this task
                    # Enable wait_task_done to wait for a chroot to be released
                    logger.error("Failed to add build task for %s, waiting for a running task to finish", pkg_name)
deps_resolver.pkg_fail(dsc_path)
logger.debug("Notified dsc_depends to retrieve %s, exit exit", pkg_name)
repo_snapshots.release(dsc_path)
wait_task_done = True
else:
logger.info("Successfully sent request to add build task for %s", pkg_name)
# The build task is accepted and the package will be built
if pkg_dir not in build_counter.keys():
build_counter[pkg_dir] = 1
else:
build_counter[pkg_dir] += 1
logger.debug("Attempting to build package %s for the %d time", pkg_dir, build_counter[pkg_dir])
# Refresh the two important tables: dscs_chroots and dscs_building
self.dscs_chroots[dsc_path] = chroot
self.dscs_building.append(dsc_path)
logger.info("Appended %s to current building list", dsc_path)
                    # The original design was to insert a console thread to display the build progress
# self.refresh_log_console()
            # dsc_depend returned no buildable package
            else:
                logger.warning("dsc_depend returned no package, wait for the building packages to finish")
if len(self.dscs_building) == 0:
return
wait_task_done = True
            # The main thread stops to query whether a job is done
            # when the task queue reaches self.attrs['parallel']
dscs_count = len(dscs_list)
dscs_building_count = len(self.dscs_building)
if wait_task_done or dscs_building_count == self.attrs['parallel'] or (dscs_count < self.attrs['parallel'] and dscs_count == dscs_building_count):
if wait_task_done:
logger.debug("wait_task_done is enabled")
logger.info("############################################################")
logger.info("Remain packages %d, building packages %d", dscs_count, dscs_building_count)
logger.info("------------------------------------------------------------")
if self.dscs_building:
for bdsc in self.dscs_building:
pkglog = bdsc.replace('.dsc', '_' + STX_ARCH + '.build')
logger.info("Running: %s --> %s (Log:%s)", os.path.basename(bdsc), self.dscs_chroots[bdsc], pkglog)
logger.info("------------------------------------------------------------")
logger.debug("Waiting for the build task to complete......")
logger.info("############################################################")
# Only return one finished dsc
(done_dsc, status) = self.poll_building_status()
# The build task is done, possible results: success/fail/given-back
if done_dsc:
repo_snapshots.release(done_dsc)
done_pkg_dir = get_pkg_dir_from_dsc(layer_pkgdir_dscs, done_dsc)
done_pkg_name = discovery.package_dir_to_package_name(done_pkg_dir, distro=self.attrs['distro'])
# Removed from current building list
self.dscs_building.remove(done_dsc)
logger.info("Removed %s from the current building list after build done", done_pkg_name)
if 'success' in status:
logger.info("Successfully built %s, uploading to repository", done_pkg_name)
if self.upload_with_deb(done_pkg_name, os.path.join(BUILD_ROOT, build_type, done_pkg_name), build_type):
self.set_stamp(done_pkg_dir, done_dsc, build_type, state='build_done')
logger.info("Successfully uploaded all the debs of %s to repository and created stamp", done_pkg_name)
deps_resolver.pkg_accomplish(done_dsc)
logger.debug('Notified dsc_depend that %s accomplished', done_pkg_name)
if done_pkg_dir in target_pkgdir_dscs.keys():
dscs_list.remove(done_dsc)
logger.info('Removed %s from remain packages after successfully build', done_pkg_name)
self.lists['success_' + build_type].append(done_pkg_dir)
if done_pkg_dir in self.lists['fail_' + build_type]:
self.lists['fail_' + build_type].remove(done_pkg_dir)
logger.info('Added %s to success list success_%s', done_pkg_name, build_type)
else:
self.lists['success_depends_' + build_type].append(done_pkg_dir)
if done_pkg_dir in self.lists['fail_depends_' + build_type]:
self.lists['fail_depends_' + build_type].remove(done_pkg_dir)
logger.info('Added %s to list success_depends_%s', done_pkg_name, build_type)
else:
                        # Check whether the maximum attempt count has been reached
if build_counter[done_pkg_dir] >= MAX_PKG_BUILD_COUNT:
deps_resolver.pkg_accomplish(done_dsc)
logger.warning('Notified dsc_depend to accomplish %s after %d attempts', done_pkg_name, MAX_PKG_BUILD_COUNT)
if done_pkg_dir in target_pkgdir_dscs.keys():
self.lists['fail_' + build_type].append(done_pkg_dir)
logger.error('Added %s to fail list fail_%s', done_pkg_name, build_type)
dscs_list.remove(done_dsc)
logger.info('Removed %s from remain packages after failed build', done_pkg_name)
else:
self.lists['fail_depends_' + build_type].append(done_pkg_dir)
logger.info('Added %s to list fail_depends_%s', done_pkg_name, build_type)
else:
deps_resolver.pkg_fail(done_dsc)
logger.warning('Notified dsc_depend to retrieve %s', done_pkg_name)
self.req_kill_task('sbuild', done_dsc)
logger.debug('Require pkgbuilder to clean the task for %s', done_pkg_name)
continue
self.req_kill_task('sbuild', done_dsc)
logger.debug('Require pkgbuilder to clean the task for %s', done_pkg_name)
logger.info("Build done, publish repository %s if there are not deployed deb binaries in it", REPO_BUILD)
self.publish_repo(REPO_BUILD)
logger.info("Build done, please check the statistics")
def build_all(self, layers=ALL_LAYERS, build_types=None, packages=None):
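        """Validate the requested layers and build types, then delegate to build_layers."""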
if layers:
for layer in layers:
if layer not in ALL_LAYERS:
logger.error(' '.join([layer, 'is not a valid layer']))
return
else:
layers = ALL_LAYERS
if build_types:
for build_type in build_types:
if build_type not in ALL_BUILD_TYPES:
logger.error(' '.join([build_type, 'is not a valid build_type']))
return
if layers:
total_layers = len(layers)
            logger.debug(' '.join(['Building', str(total_layers), 'layers:',
                                   ','.join(layers)]))
self.build_layers(layers=layers, build_types=build_types, packages=packages)
else:
            logger.error('No layers specified for the build.')
def save_failed_pkgs(self, pkgs_exist, pkgs_target, build_type):
if not pkgs_exist:
return
pkgs_name_fail = list(set(pkgs_target) - set(pkgs_exist))
if not pkgs_name_fail:
return
for pkg in pkgs_name_fail:
for pkgdir, pkgname in pkgs_exist.items():
if pkgname == pkg:
if build_type:
self.lists['fail_' + build_type].append(pkgdir)
else:
self.lists['fail_std'].append(pkgdir)
def build_layer_and_build_type(self, layer=None, build_type=None, packages=None):
pkgs_exist = {}
if not layer:
logger.error('Failed to specify layer')
return
if not build_type:
logger.error('Failed to specify build_type')
return
pkg_dirs = discovery.package_dir_list(distro=self.attrs['distro'], layer=layer, build_type=build_type)
layer_pkg_dirs = pkg_dirs
word = "all"
if packages:
word = "selected"
pkg_dirs, pkgs_exist = discovery.filter_package_dirs_by_package_names(pkg_dirs, packages, distro=self.attrs['distro'])
self.save_failed_pkgs(pkgs_exist, packages, build_type)
layer_pkg_dirs = pkg_dirs
if not pkg_dirs:
            logger.debug(' '.join(['Found no buildable packages matching selection criteria in build_type',
                                   build_type, 'of layer', layer]))
return
logger.info(' '.join(['Start to build', word, 'packages in',
'build_type', build_type,
'of layer', layer]))
packages = discovery.package_dirs_to_package_names(pkg_dirs)
logger.debug(' '.join(['Building packages:',
','.join(packages)]))
self.build_packages(layer_pkg_dirs, pkg_dirs, layer, build_type=build_type)
logger.info(' '.join(['Finished building packages in',
'build_type', build_type,
'of layer', layer]))
def build_layer_and_build_types(self, layer=None, build_types=STX_DEFAULT_BUILD_TYPE_LIST, packages=None):
if not layer:
logger.error('Failed to specify layer')
return
if not build_types:
logger.error('Failed to specify build_types')
return
# remove duplication
build_types = list(set(build_types))
'''
The signed packages like kernel-std-signed and kernel-rt-signed need
some interactive operations before building them, so here excluded the
build type 'sign' from the default build types
'''
if not packages and 'sign' in build_types:
build_types.remove('sign')
valid_build_type = discovery.get_layer_build_types(layer, distro=self.attrs['distro'])
# sort the build_type list so we build in the proper order
build_types = discovery.sort_build_type_list(build_types, layer, distro=self.attrs['distro'])
for build_type in build_types:
if build_type not in valid_build_type:
                logger.info(' '.join(['Skipping build_type', build_type, 'which is not valid for layer', layer]))
continue
self.build_layer_and_build_type(layer=layer, build_type=build_type, packages=packages)
return
def build_layer(self, layer=None, build_types=STX_DEFAULT_BUILD_TYPE_LIST, packages=None):
if not layer:
logger.error('Failed to specify layer')
return
if layer not in ALL_LAYERS:
logger.error(' '.join([layer, 'is not a valid layer']))
return
logger.info(' '.join(['Start to build all packages in layer',
layer]))
self.build_layer_and_build_types(layer=layer, build_types=build_types, packages=packages)
logger.info(' '.join(['Finished building packages in layer',
layer]))
return
def build_layers(self, layers=None, build_types=None, packages=None):
if not layers:
logger.error('Failed to specify layers')
return
# remove duplication
layers = list(set(layers))
for layer in layers:
if layer not in ALL_LAYERS:
logger.error(' '.join([layer, 'is not a valid layer']))
return
# sort the layer list so we build in the proper order
layers = discovery.sort_layer_list(layers, distro=self.attrs['distro'])
for layer in layers:
if build_types is None:
build_types = discovery.get_layer_build_types(layer=layer, distro=self.attrs['distro'])
self.build_layer(layer=layer, build_types=build_types, packages=packages)
return
def build_packages(self, layer_pkg_dirs, pkg_dirs, layer, build_type=STX_DEFAULT_BUILD_TYPE):
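        """
        Phase I: create (or reuse) the dsc files for the layer's packages and
        record them in '<layer>_dscs.lst'; phase II: hand the target dscs over
        to run_build_loop for the actual builds.
        """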
# remove duplication
pkg_dirs = list(set(pkg_dirs))
logger.debug(' '.join(['build_packages: Building: ', str(pkg_dirs)]))
fdsc_file = None
layer_pkgdir_dscs = {}
logger.debug('Length of build-needed_%s:%d before extending', build_type, len(self.lists['build-needed_' + build_type]))
self.lists['build-needed_' + build_type].extend(pkg_dirs)
logger.debug('Length of build-needed_%s:%d after extending', build_type, len(self.lists['build-needed_' + build_type]))
build_dir = os.path.join(BUILD_ROOT, build_type)
os.makedirs(build_dir, exist_ok=True)
dscs_list_file = os.path.join(build_dir, layer + '_dscs.lst')
logger.debug(' '.join(['Prepare', dscs_list_file, 'to deps_resolver']))
fdsc_file = open(dscs_list_file, 'w+')
fdsc_file.seek(0)
fdsc_file.truncate()
# Now check and create the debian meta one by one
for pkg_dir in layer_pkg_dirs:
dsc_file = ""
pkg_name = discovery.package_dir_to_package_name(pkg_dir, distro=self.attrs['distro'])
skip_dsc, dsc_file = self.create_dsc(pkg_name, pkg_dir, build_type=build_type)
if dsc_file:
logger.debug("dsc_file = %s" % dsc_file)
layer_pkgdir_dscs[pkg_dir.strip()] = dsc_file
fdsc_file.write(dsc_file + '\n')
if self.attrs['upload_source'] and not skip_dsc and self.kits['repo_mgr']:
self.upload_with_dsc(pkg_name, dsc_file, REPO_SOURCE)
else:
if skip_dsc:
                    if self.attrs['reuse']:
                        logger.info("%s will reuse the remote debs, skipping the build", pkg_name)
                    else:
                        logger.info("%s has reused the shared debs, skipping the build", pkg_name)
self.lists['reuse_' + build_type].append(pkg_dir)
self.lists['reuse_pkgname_' + build_type].append(pkg_name)
continue
else:
                    # Exit if the dsc file cannot be created
                    if fdsc_file:
                        fdsc_file.close()
                    logger.error("Failed to create the needed dsc file, exiting")
return
if fdsc_file:
fdsc_file.close()
# Start to build
target_pkgdir_dscs = {}
for pkg in pkg_dirs:
if pkg in layer_pkgdir_dscs.keys():
target_pkgdir_dscs[pkg] = layer_pkgdir_dscs[pkg]
if self.attrs['reuse'] and len(self.lists['reuse_pkgname_' + build_type]) > 0:
logger.info("The reused pkgs:%s", ','.join(self.lists['reuse_pkgname_' + build_type]))
stx_meta_dir = os.path.join(STX_META_NAME, STX_META_NAME + '-1.0')
remote_debsentry = os.path.join(BUILD_ROOT, stx_meta_dir, build_type + '_debsentry.pkl')
for pkgname in self.lists['reuse_pkgname_' + build_type]:
logger.debug("First try to remove all sub deb packages from %s for %s", REPO_BUILD, pkgname)
self.remove_pkg_debs(pkgname, build_type)
logger.debug("Then try to copy all sub deb packages of %s from mirror to %s", pkgname, REPO_BUILD)
logger.debug("Get the sub debs of %s with remote %s", pkgname, remote_debsentry)
debs_list = debsentry.get_subdebs(remote_debsentry, pkgname, logger)
if not debs_list:
logger.warning("Failed to get sub debs from the remote cache")
continue
debs_reused = None
for deb in debs_list:
if not debs_reused:
debs_reused = deb.split('_')[0]
else:
debs_reused = debs_reused + ',' + (deb.split('_')[0])
if debs_reused:
logger.info("All sub debs of %s will be imported:%s", pkgname, debs_reused)
try:
logger.info("Calls copy_pkgs: mirror=%s local_repo=%s type=binary deploy=True overwrite=True",
REUSE_MIRROR, REPO_BUILD)
ret = self.kits['repo_mgr'].copy_pkgs(REUSE_MIRROR, REPO_BUILD, debs_reused,
pkg_type='binary',
deploy=True, overwrite=True)
except Exception as e:
logger.error(str(e))
logger.error("Exception occurrs when call repomgr.copy_pkgs");
else:
                        if ret:
                            logger.debug("Successfully called repomgr.copy_pkgs to import the reused debs")
                        else:
                            logger.warning("Failed to import all reused debs with repomgr.copy_pkgs")
if target_pkgdir_dscs:
self.run_build_loop(layer_pkgdir_dscs, target_pkgdir_dscs, layer, build_type=build_type)
else:
logger.debug("There are no debian dsc files feeded to build_packages")
def show_build_stats(self):
"""
        Since all packages are put into self.lists['build-needed']
        at the beginning of the build, we know how many
        packages were requested to be built
"""
ret_val = 0
for build_type in self.build_types:
logger.info("Total %s packages needing to be built: %d", build_type, len(self.lists['build-needed_' + build_type]))
logger.info("-------------------------------------------")
logger.info("Total %s packages reused from remote: %d", build_type, len(self.lists['reuse_' + build_type]))
reuse_list = list(set(self.lists['reuse_' + build_type]))
reuse_number = len(reuse_list)
if reuse_number > 0:
logger.info("Successfully reused: %d", reuse_number)
for pkg_dir in sorted(reuse_list):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.info(pkg_name)
self.lists['build-needed_' + build_type] = list(set(self.lists['build-needed_' + build_type]) - set(self.lists['reuse_' + build_type]))
logger.info("Total %s packages needing to be built locally: %d", build_type, len(self.lists['build-needed_' + build_type]))
success_list = list(set(self.lists['success_' + build_type]))
success_number = len(success_list)
if success_number > 0:
logger.info("Successfully built: %d", success_number)
for pkg_dir in sorted(success_list):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.info(pkg_name)
success_depends_list = list(set(self.lists['success_depends_' + build_type]))
success_depends_number = len(success_depends_list)
if success_depends_number > 0:
logger.info("Successfully built depended packages: %d", success_depends_number)
for pkg_dir in sorted(success_depends_list):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.info(pkg_name)
failed_pkg_dirs = list(set(self.lists['build-needed_' + build_type]) - set(self.lists['success_' + build_type]))
failed_number = len(failed_pkg_dirs)
if failed_number > 0:
ret_val = 1
logger.error("Failed to build: %d", failed_number)
for pkg_dir in sorted(failed_pkg_dirs):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.error(pkg_name)
# self.lists['fail'] is the subset of failed_pkg_dirs
# particularly refer to those failed packages reported by pkgbuilder
if len(self.lists['fail_' + build_type]) > 0:
logger.info("List of failed packages:")
for pkg_dir in sorted(list(set(self.lists['fail_' + build_type]))):
pkg_name = discovery.package_dir_to_package_name(pkg_dir, self.attrs['distro'])
logger.error(pkg_name)
logger.info("For the failure reason, you can check with:")
logger.info("\'cat /localdisk/builder.log | grep ERROR\' or")
logger.info("\'cat ${MY_WORKSPACE}/<std or rt>/<Failed package>/*.build\'")
return ret_val
def bc_signal_handler(signum, frame):
ret_val = 0
if not build_controller:
sys.exit(1)
if frame:
logger.debug(' '.join(['Signal', str(signum), 'got']))
ret_val = build_controller.stop()
logger.debug('Exit for user interrupt')
sys.exit(ret_val)
def bc_reg_signal_handler():
signal.signal(signal.SIGINT, bc_signal_handler)
signal.signal(signal.SIGHUP, bc_signal_handler)
signal.signal(signal.SIGTERM, bc_signal_handler)
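# Typical invocations (examples only; the package and layer names below are illustrative):
#   build-pkgs -a                         # build all packages of all layers
#   build-pkgs -c -a                      # clean build of everything
#   build-pkgs -p dhcp,openssh -b std     # build selected packages for the 'std' build type
#   build-pkgs -l flock --parallel 4      # build one layer with 4 parallel build tasks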
if __name__ == "__main__":
distro = STX_DEFAULT_DISTRO
layers = None
build_types = None
packages = None
parser = argparse.ArgumentParser(description="build-pkgs helper",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-c', '--clean', help="Start a fresh build",
action='store_true')
    parser.add_argument('-e', '--exit_on_fail', help="Exit on any failure",
                        action='store_true')
parser.add_argument('-t', '--test', help="Run package tests during build",
action='store_true')
parser.add_argument('--reuse', help="Reuse the debs from STX_SHARED_REPO", action='store_true')
    parser.add_argument('--refresh_chroots', help="Force a refresh of the chroots before the build", action='store_true')
parser.add_argument('--parallel', help="The number of parallel build tasks", type=int, default=DEFAULT_PARALLEL_TASKS)
parser.add_argument('--poll_interval', help="The interval to poll the build status", type=int, default=DEFAULT_POLL_INTERVAL)
parser.add_argument('--max_make_jobs', help="The maximum number of jobs for package make", type=int, default=MAX_PKG_MAKE_JOBS)
parser.add_argument('-d', '--distro', type=str, nargs=1,
help="name of the distro to build\n %s" % ALL_DISTROS,
default=STX_DEFAULT_DISTRO, required=False)
parser.add_argument('-b', '--build-types', type=str,
help="comma separated list of all build-types to build\n %s" % ALL_BUILD_TYPES,
default='std,rt', required=False)
parser.add_argument('-l', '--layers', type=str,
help="comma separated list of all layers to build\n %s" % ALL_LAYERS,
default=None, required=False)
    # set mutually exclusive options for package build and layer build
build_group = parser.add_mutually_exclusive_group()
build_group.add_argument('-a', '--all', help="Builds all packages",
action='store_true')
    build_group.add_argument('-p', '--packages', help="Comma-separated list of packages to build",
                             type=str)
args = parser.parse_args()
if args.reuse:
if args.clean and not args.all:
logger.error("Reuse mode can not be used for the clean build of specific packages.");
sys.exit(1)
if args.distro:
if args.distro not in ALL_DISTROS:
logger.error(' '.join(['Distro', args.distro, 'not in', ','.join(ALL_DISTROS)]))
logger.error("Please consult: build-pkgs --help")
sys.exit(1)
distro = args.distro
ALL_LAYERS = discovery.get_all_layers(distro=distro)
ALL_BUILD_TYPES = discovery.get_all_build_types(distro=distro)
if args.build_types:
build_types = args.build_types.strip().split(',')
for build_type in build_types:
if build_type not in ALL_BUILD_TYPES:
logger.error(' '.join(['Build_type', build_type, 'not in', ','.join(ALL_BUILD_TYPES)]))
logger.error("Please consult: build-pkgs --help")
sys.exit(1)
if args.layers:
layers = args.layers.strip().split(',')
for layer in layers:
if layer not in ALL_LAYERS:
logger.error(' '.join(['Layer', layer, 'not in', ','.join(ALL_LAYERS)]))
logger.error("Please consult: build-pkgs --help")
sys.exit(1)
build_controller = BuildController(distro=distro)
if args.clean:
build_controller.build_avoid = False
if args.all:
build_controller.clean(build_types=build_types)
if args.exit_on_fail:
build_controller.attrs['exit_on_fail'] = True
if args.test:
build_controller.attrs['run_tests'] = True
if args.parallel:
if args.parallel < 1 or args.parallel > MAX_PARALLEL_JOBS:
logger.critical("Invalid parallel build tasks[1-%s]", MAX_PARALLEL_JOBS)
sys.exit(1)
build_controller.attrs['parallel'] = args.parallel
if args.poll_interval:
build_controller.attrs['poll_interval'] = args.poll_interval
if args.max_make_jobs:
build_controller.attrs['max_make_jobs'] = args.max_make_jobs
if args.reuse:
build_controller.attrs['reuse'] = True
if args.packages:
packages = args.packages.strip().split(',')
else:
if args.all:
build_controller.attrs['build_all'] = True
packages = None
if not build_controller.start(build_types=build_types):
logger.critical("Fail to initialize build controller, exit ......")
sys.exit(1)
bc_reg_signal_handler()
# mirror can be set to add_chroot as the main package repo
# e.g http://ftp.de.debian.org/debian
if build_controller.add_chroot(os.environ.get('DEBIAN_SNAPSHOT')) != 'success':
pkgbuilder_log = '/localdisk/pkgbuilder.log'
logger.error(' '.join(['Chroot is not ready, please check',
pkgbuilder_log]))
sys.exit(1)
if args.refresh_chroots:
if build_controller.refresh_chroots() != 'success':
logger.error("Failed to fresh all the idle chroots")
sys.exit(1)
else:
logger.info("Successfully refreshed all the idle chroots")
build_controller.build_all(layers=layers, build_types=build_types, packages=packages)
build_controller.set_reuse(os.path.join(BUILD_ROOT, 'caches'))
ret_value = build_controller.stop()
logger.info("build-pkgs done")
sys.exit(ret_value)