Commit 7001872b authored by Eric Müller

Feat(ASIC): Add spack (and tcl lib for TT)

Depends-On: 15207
Change-Id: I5eb3742a7347af94e09f7731e8a27f18654b4397
parent dfb7cb08
@@ -6,6 +6,11 @@ Closure cleanupSteps = {
sh "sudo /bin/rm -rf \"${WORKSPACE}/sandboxes/\" || exit 0"
// remove tmp (not only for spack)
sh "sudo /bin/rm -rf \"/tmp/${NODE_NAME}/\""
// the spack repository gets bind mounted into the sandbox and owned by the
// spack user during the build -> revert ownership prior to cleaning the workspace
sh "[ -d \"$WORKSPACE/spack\" ] && sudo chown -R vis_jenkins \"$WORKSPACE/spack\" || true"
cleanWs(patterns: [[pattern: 'download_cache/', type: 'EXCLUDE']],
deleteDirs: true)
}
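The one-liner above relies on the "[ -d ... ] && ... || true" idiom so the cleanup step never fails the pipeline when no spack checkout exists; a long-form equivalent (same path and user as above):

# long-form equivalent of the guarded chown above
if [ -d "$WORKSPACE/spack" ]; then
    sudo chown -R vis_jenkins "$WORKSPACE/spack"
fi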
pipeline {
@@ -16,6 +21,12 @@ pipeline {
skipDefaultCheckout()
}
parameters {
string(name: 'BUILD_CACHE_NAME',
defaultValue: 'asic_init_from_2021-06-18_1',
description: 'Which buildcache to use? They reside under $HOME/build_caches/$BUILD_CACHE_NAME and will be created if they do not exist.')
}
environment {
CONTAINER_STYLE = "asic"
}
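The BUILD_CACHE_NAME parameter feeds the cache paths used by the recipe script further down; a sketch of the assumed resolution (the actual assignment lives in .ci/commons.sh, which is not part of this diff):

# assumed derivation of the *_OUTSIDE paths referenced in %setup below
BASE_BUILD_CACHE_OUTSIDE="${HOME}/build_caches"
BUILD_CACHE_OUTSIDE="${BASE_BUILD_CACHE_OUTSIDE}/${BUILD_CACHE_NAME}"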
@@ -25,6 +36,11 @@ pipeline {
agent { label 'conviz1||conviz2' }
environment {
DOCKER_BASE_IMAGE = "centos:7"
// versions from system packages
DEPENDENCY_PYTHON = "python@2.7.5"
DEPENDENCY_PYTHON3 = "python@3.6.8"
VISIONARY_GCC_VERSION = "4.8.5"
VISIONARY_GCC = "gcc@${VISIONARY_GCC_VERSION}"
TMPDIR = "/tmp/${env.NODE_NAME}"
JOB_TMP_SPACK = sh(script: "mkdir -p ${env.TMPDIR} &>/dev/null; mktemp -d ${env.TMPDIR}/spack-XXXXXXXXXX",
returnStdout: true).trim()
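The mkdir/mktemp pair gives every build a private scratch directory under the node-local tmp; the same steps as plain shell (node name and random suffix are examples):

TMPDIR="/tmp/conviz1"                                     # stands in for ${env.NODE_NAME}
mkdir -p "${TMPDIR}" &>/dev/null
JOB_TMP_SPACK="$(mktemp -d "${TMPDIR}/spack-XXXXXXXXXX")"
echo "${JOB_TMP_SPACK}"    # e.g. /tmp/conviz1/spack-x3Fz9QlB2a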
@@ -44,6 +60,42 @@ pipeline {
checkout scm
}
}
stage('Validate environment') {
steps {
sh ".ci/validate_environment.sh"
}
}
stage('Spack Clone') {
steps {
sh ".ci/clone.sh"
}
}
stage('Dump Meta Info') {
steps {
sh ".ci/dump_meta_info.sh"
sh ".ci/notify_gerrit.sh -m 'Build containing this change started..'"
}
}
stage('Spack Fetch') {
steps {
script {
try {
sh ".ci/fetch.sh"
}
catch (Throwable t) {
archiveArtifacts "errors_concretization.log"
throw t
}
spec_folder_in_container = sh(script: ".ci/get_jenkins_env.sh SPEC_FOLDER_IN_CONTAINER", returnStdout: true).trim()
archiveArtifacts(artifacts: "sandboxes/*/$spec_folder_in_container/*.yaml", allowEmptyArchive: true)
}
}
}
stage('Deploy utilities') {
steps {
sh ".ci/deploy_utilities.sh"
}
}
stage('Create asic recipe') {
steps {
sh ".ci/asic_create_recipe.sh"
@@ -65,6 +117,25 @@ pipeline {
// we only want the container name, tail everything else
CONTAINER_IMAGE = sh(script: ".ci/deploy_container.sh | tail -n 1", returnStdout: true).trim()
}
sh ".ci/update_build_cache.sh -c \"$CONTAINER_IMAGE\""
sh ".ci/notify_gerrit.sh -t Build -c \"$CONTAINER_IMAGE\""
}
}
}
post {
failure {
script {
cache_failed = sh(script: ".ci/create_temporary_build_cache_after_failure.sh", returnStdout: true).trim()
}
sh ".ci/notify_gerrit.sh -v -1 -t Build -m \"Successfully built packages stored in cache. Resume by issuing:\nWITH_CACHE_NAME=${cache_failed}\n\nIn your next gerrit comment, NOT commit message!\""
}
cleanup {
archiveArtifacts "jenkins.env"
archiveArtifacts "out_singularity_build_asic_recipe.txt"
// Clean build artifacts, otherwise the latest build of each Jenkins job can take up to 50 GB.
// With 2 executors and 5 Jenkins jobs (testing, testing-spack, testing-asic, stable, stable-asic)
// this would slowly but surely eat away disk space.
script {
cleanupSteps()
}
}
}
@@ -77,7 +148,7 @@ pipeline {
steps {
build(job: 'bld_gerrit_tools-xilinx_dependencies',
parameters: [string(name: 'OVERWRITE_DEFAULT_ASIC_CONTAINER_IMAGE',
-                       value: CONTAINER_NAME)])
+                       value: CONTAINER_IMAGE)])
}
}
}
#!/bin/bash
SOURCE_DIR="$(dirname "$(readlink -m "${BASH_SOURCE[0]}")")"
source "${SOURCE_DIR}/asic_dummy_variables.sh"
source "${SOURCE_DIR}/commons.sh"
GITLOG="git_log_yashchiki.txt"
( cd ${SOURCE_DIR} && git log > "${WORKSPACE}/${GITLOG}" )
RECIPE_FILENAME="${WORKSPACE}/asic_recipe.def"
# create container description file
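The recipe below is written through an unquoted heredoc, so escaping decides when a variable expands: unescaped ${...} expands now, on the host, while \${...} survives into the recipe and expands only when singularity runs the %setup section. A minimal sketch of the rule:

cat <<EOF
expands-now:   ${HOME}
expands-later: \${SINGULARITY_ROOTFS}
EOF
# prints the host's home directory, but the literal text ${SINGULARITY_ROOTFS}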
@@ -16,12 +12,48 @@ cat <<EOF >"${RECIPE_FILENAME}"
Bootstrap: docker
From: ${DOCKER_BASE_IMAGE}
%setup
# bind-mount spack-folder as moving involves copying the complete download cache
mkdir \${SINGULARITY_ROOTFS}/opt/spack
mount --no-mtab --bind "${WORKSPACE}/spack" "\${SINGULARITY_ROOTFS}/opt/spack"
# bind-mount ccache
mkdir \${SINGULARITY_ROOTFS}/opt/ccache
mount --no-mtab --bind "${HOME}/spack_ccache" "\${SINGULARITY_ROOTFS}/opt/ccache"
# bind-mount build_cache
mkdir -p "\${SINGULARITY_ROOTFS}${BUILD_CACHE_INSIDE}"
# create buildcache directory if it does not exist
[ ! -d "${BUILD_CACHE_OUTSIDE}" ] && mkdir -p "${BUILD_CACHE_OUTSIDE}"
# mount the full build cache folder into container because some files might be symlinked to other buildcaches
mount --no-mtab --bind "${BASE_BUILD_CACHE_OUTSIDE}" "\${SINGULARITY_ROOTFS}${BASE_BUILD_CACHE_INSIDE}"
# bind-mount preserved packages in case the build fails
mkdir -p "\${SINGULARITY_ROOTFS}${PRESERVED_PACKAGES_INSIDE}"
mount --no-mtab --bind "${PRESERVED_PACKAGES_OUTSIDE}" "\${SINGULARITY_ROOTFS}${PRESERVED_PACKAGES_INSIDE}"
# bind-mount tmp-folder
mkdir -p "\${SINGULARITY_ROOTFS}/tmp/spack"
mount --no-mtab --bind "${JOB_TMP_SPACK}" "\${SINGULARITY_ROOTFS}/tmp/spack"
# copy install scripts
mkdir "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
rsync -av "${SOURCE_DIR}"/*.sh "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
rsync -av "${SOURCE_DIR}"/*.awk "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
rsync -av "${SOURCE_DIR}"/pinned "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
rsync -av "${SOURCE_DIR}"/patches "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
mkdir -p "\${SINGULARITY_ROOTFS}/${META_DIR_INSIDE}"
rsync -av "${META_DIR_OUTSIDE}"/* "\${SINGULARITY_ROOTFS}/${META_DIR_INSIDE}"
# init scripts for user convenience
mkdir -p "\${SINGULARITY_ROOTFS}/opt/init"
rsync -av "${WORKSPACE}"/misc-files/init/*.sh "\${SINGULARITY_ROOTFS}/opt/init"
%files
# NOTE: Due to a bug in singularity 2.6, paths in this section _cannot_ be
# surrounded by quotes, ergo there should be no spaces in filenames! If
# there are, I pray for your poor soul that escaping them works..
# --obreitwi, 17-02-19 23:45:51
${WORKSPACE}/${GITLOG} ${GITLOG}
# provide spack command to login shells
${WORKSPACE}/misc-files/setup-spack.sh /etc/profile.d/setup-spack.sh
${WORKSPACE}/misc-files/locale.gen /etc/locale.gen
${WORKSPACE}/misc-files/locale.alias /etc/locale.alias
${WORKSPACE}/misc-files/sudoers /etc/sudoers
${JENKINS_ENV_FILE} ${JENKINS_ENV_FILE_INSIDE}
%post
# Apparently, upon building the CentOS docker images it has been decided that
@@ -46,6 +78,33 @@ From: ${DOCKER_BASE_IMAGE}
ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh
/opt/conda/bin/conda install -y pylint pycodestyle nose pyyaml
# ECM: and now some abspacking
yum -y install ccache sudo parallel
# create a fingerprint by which we can identify the container from within
cat /proc/sys/kernel/random/uuid > /opt/fingerprint
## prerequisites
#"${SPACK_INSTALL_SCRIPTS}/install_prerequisites.sh" || exit 1
## cannot specify permissions in files-section
#chmod 440 /etc/sudoers
#chown root:root /etc/sudoers
## install locales
#locale-gen
# propagate environment variables to container recipe
export DEPENDENCY_PYTHON="${DEPENDENCY_PYTHON}"
export DEPENDENCY_PYTHON3="${DEPENDENCY_PYTHON3}"
export VISIONARY_GCC="${VISIONARY_GCC}"
export VISIONARY_GCC_VERSION="${VISIONARY_GCC_VERSION}"
export CONTAINER_STYLE="${CONTAINER_STYLE}"
"${SPACK_INSTALL_SCRIPTS}/complete_spack_install_routine_called_in_post_as_root.sh"
wait
"${SPACK_INSTALL_SCRIPTS}/install_singularity_as_root.sh" || \
(
sudo -Eu spack "${SPACK_INSTALL_SCRIPTS}/preserve_built_spack_packages.sh" &&
exit 1 # propagate the error
)
%environment
# NOTE: We provide a MODULESHOME in all cases (otherwise a login shell is
# required to load the module environment)
@@ -64,3 +123,46 @@ From: ${DOCKER_BASE_IMAGE}
# ensure conda sees a clean env
unset PYTHONHOME
EOF
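Note the preserve-then-fail construct in %post above: if install_singularity_as_root.sh fails, already-built spack packages are rescued first, and the exit 1 inside the subshell makes the whole || group report failure. Its skeleton, with placeholder command names:

# build_step / preserve_artifacts are hypothetical stand-ins
build_step || (
    preserve_artifacts &&
    exit 1    # fail the subshell so the error still propagates
)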
# append an %appenv section to the recipe for each spack view
generate_appenv() {
local name_app="$1"
local name_view="$2"
cat <<EOF
%appenv ${name_app}
# there can only be one app loaded at any time
export VISIONARY_ENV=${name_view}
SVF=/opt/spack_views/\${VISIONARY_ENV}
export PATH=\${SVF}/bin\${PATH:+:}\${PATH}
# there is no python in asic app for now
#export PYTHONHOME=\${SVF}
#export SPACK_PYTHON_BINARY=\${SVF}/bin/python
export MANPATH=\${SVF}/man:\${SVF}/share/man\${MANPATH:+:}\${MANPATH}
export LIBRARY_PATH=\${SVF}/lib:\${SVF}/lib64\${LIBRARY_PATH:+:}\${LIBRARY_PATH}
export LD_LIBRARY_PATH=\${SVF}/lib:\${SVF}/lib64\${LD_LIBRARY_PATH:+:}\${LD_LIBRARY_PATH}
export TCLLIBPATH=\${SVF}/lib\${TCLLIBPATH:+:}\${TCLLIBPATH}
export CPATH=\${SVF}/include\${CPATH:+:}\${CPATH}
export C_INCLUDE_PATH=\${SVF}/include\${C_INCLUDE_PATH:+:}\${C_INCLUDE_PATH}
export CPLUS_INCLUDE_PATH=\${SVF}/include\${CPLUS_INCLUDE_PATH:+:}\${CPLUS_INCLUDE_PATH}
export QUIET_CPATH=\${CPATH}
export QUIET_C_INCLUDE_PATH=\${C_INCLUDE_PATH}
export QUIET_CPLUS_INCLUDE_PATH=\${CPLUS_INCLUDE_PATH}
export PKG_CONFIG_PATH=\${SVF}/lib/pkgconfig:\${SVF}/lib64/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig\${PKG_CONFIG_PATH:+:}\${PKG_CONFIG_PATH}
export CMAKE_PREFIX_PATH=\${SVF}\${CMAKE_PREFIX_PATH:+:}\${CMAKE_PREFIX_PATH}
EOF
}
for view in "${spack_views[@]}"; do
# generate two apps: one with the visionary- prefix for compatibility with
# old scripts and one with the visionary- prefix stripped
(
generate_appenv "${view}" "${view}"
[[ "${view}" =~ ^visionary- ]] && generate_appenv "${view#visionary-}" "${view}"
) >> "${RECIPE_FILENAME}"
if [ "${view}" = "visionary-simulation" ];then
cat <<EOF >>"${RECIPE_FILENAME}"
export NEST_MODULES=visionarymodule
EOF
fi
done
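For the single visionary-asic view this loop emits two %appenv sections, visionary-asic and (prefix stripped) asic; the escaped variables in the template reach the recipe verbatim. The generated asic section therefore starts like this:

%appenv asic
# there can only be one app loaded at any time
export VISIONARY_ENV=visionary-asic
SVF=/opt/spack_views/${VISIONARY_ENV}
export PATH=${SVF}/bin${PATH:+:}${PATH}
# ... remaining exports as in the template above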
#!/usr/bin/env bash
echo "Setting undefined required environment variables to 'undefined'." >&2
export BUILD_CACHE_NAME="${BUILD_CACHE_NAME:-undefined}"
export DEPENDENCY_PYTHON3="${DEPENDENCY_PYTHON3:-undefined}"
export DEPENDENCY_PYTHON="${DEPENDENCY_PYTHON:-undefined}"
export VISIONARY_GCC="${VISIONARY_GCC:-undefined}"
# All spack packages that should be fetched/installed in the container
spack_packages=(
# "${SPEC_VIEW_VISIONARY_DEV_TOOLS}" # FIXME
"visionary-asic ^${DEPENDENCY_PYTHON} %${VISIONARY_GCC}"
)
spack_views=(\
visionary-asic
)
spack_views_no_default_gcc=(\
visionary-asic # ECM: system compiler for now
)
spack_gid="nobody"
spack_create_user_cmd() {
adduser spack --uid 888 --gid nobody --no-create-home --no-user-group --home /opt/spack --system --shell /bin/bash
}
cat <<EOF
# just visionary-asic
${MY_SPACK_BIN} ${SPACK_ARGS_VIEW[@]+"${SPACK_ARGS_VIEW[@]}"} view -d yes symlink -i ${MY_SPACK_VIEW_PREFIX}/visionary-asic $(get_latest_hash "visionary-asic")
EOF
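The ${SPACK_ARGS_VIEW[@]+"${SPACK_ARGS_VIEW[@]}"} expansion is the usual guard for a possibly-unset array under set -u: if the array is unset, it expands to zero words instead of aborting the script. Isolated:

set -u
args=(-d yes)    # comment this line out and the next line still succeeds
echo ${args[@]+"${args[@]}"}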
@@ -27,7 +27,10 @@ rm_tmp_modules() {
add_cleanup_step rm_tmp_modules
{
echo "source /opt/init/modules.sh"
spack module tcl loads -r "$(get_latest_hash "${VISIONARY_GCC}")"
if [ "${CONTAINER_STYLE}" != "asic" ]; then
# TODO: the ASIC container does not feature a spack compiler yet
spack module tcl loads -r "$(get_latest_hash "${VISIONARY_GCC}")"
fi
} | tee "${TMP_MODULES}"
source "${TMP_MODULES}"
# --------- 8< ---------- 8< -------- 8< ---------
tk@8.5.19
tcl@8.5.19