#!/bin/bash
# =========================================================================================================================================
# title : install_spack_env.sh
# usage : ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT $EBRAINS_REPO $EBRAINS_SPACK_ENV $UPSTREAM_INSTANCE \
# $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
# description : installs or updates the spack environment defined in the EBRAINS spack repo
# (if the specified spack instance doesn't exist, it also creates it)
# =========================================================================================================================================
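# example invocation (illustrative only; all values below are placeholders to be adapted to the target site):
#   ./install_spack_env.sh 4 /opt/installation-root /opt/ebrains-spack-builds ebrains-env "" false ""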
set -eo pipefail
SPACK_JOBS=$1 # number of jobs
INSTALLATION_ROOT=$2 # where to set up the installation
EBRAINS_REPO=$3 # location of ebrains-spack-builds repository
EBRAINS_SPACK_ENV=$4 # name of EBRAINS Spack environment to be created/updated
UPSTREAM_INSTANCE=$5 # path to Spack instance to use as upstream (optional)
UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results
export OCI_CACHE_PREFIX=$7
# make sure spack uses the symlinked folder as its root path
export CI_SPACK_ROOT=${INSTALLATION_ROOT}/spack
# disable local configuration and cache directories
export SPACK_DISABLE_LOCAL_CONFIG=true
export SPACK_USER_CACHE_PATH=/tmp/spack
# define SYSTEMNAME variable in sites where it's not already defined
export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}}
# cache related variables
export CACHE_SPECFILE=${CACHE_SPECFILE:-"env_specfile.yaml"}
export YASHCHIKI_HOME=${EBRAINS_REPO}/vendor/yashchiki
export SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${CI_SPACK_ROOT}/var/spack/cache}
export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${CI_SPACK_ROOT}/var/spack/cache}
if [ ! -d ${INSTALLATION_ROOT} ]; then
mkdir -p ${INSTALLATION_ROOT}
fi
# initial setup: use spack submodule if spack dir doesn't already exist
SPACK_ROOT_EXISTED=1
if [ ! -d ${CI_SPACK_ROOT} ]; then
ln -s ${EBRAINS_REPO}/vendor/spack ${CI_SPACK_ROOT}
SPACK_ROOT_EXISTED=0
fi
# activate Spack
source ${CI_SPACK_ROOT}/share/spack/setup-env.sh
if [[ $UPSTREAM_INSTANCE ]]; then
UPSTREAM_PREFIX=$(find $UPSTREAM_INSTANCE/spack/opt/spack/ -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {})
spack config add upstreams:upstream-spack-instance:install_tree:$UPSTREAM_PREFIX
fi
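# for reference, the `spack config add` call above produces configuration equivalent to
# the following YAML (assuming a single install tree is found under the upstream instance):
#   upstreams:
#     upstream-spack-instance:
#       install_tree: $UPSTREAM_PREFIX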
if [ "${SPACK_ROOT_EXISTED}" -eq 0 ]; then
# for caching purposes it's nice if we can relocate into long paths, but we
# can't change this setting for already existing installations, hence the guard above
# ECM (2025-01-23): padded_length:true (maximum padding) yields too long paths for some packages (e.g., gcc), so use a fixed length of 128
spack config add config:install_tree:padded_length:128
fi
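# the setting above corresponds to the following YAML in config.yaml:
#   config:
#     install_tree:
#       padded_length: 128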
# add repo if it does not exist
if [[ ! $(spack repo list | grep ebrains-spack-builds$) ]]
then
spack repo add $EBRAINS_REPO
fi
# make sure all fetching/bootstrapping (clingo etc.) happens before anything else, by concretizing a simple spec
spack spec aida
# rebuild spack's database (this could be a debugging session with a stale database)
spack reindex
# add local mirror if it does not exist
if [[ ! $(spack mirror list | grep local_cache) ]]; then
# TODO: for newer Spack versions, add --autopush --unsigned and drop the `spack buildcache create` call below
# (note: spack expects a `build_cache/` subdirectory below the folder specified here)
spack mirror add local_cache ${SPACK_CACHE_BUILD}
fi
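# untested sketch of the TODO above for newer Spack versions (this would make the explicit
# `spack buildcache create` loop further below unnecessary):
#   spack mirror add --autopush --unsigned local_cache ${SPACK_CACHE_BUILD}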
spack compiler find
# create environment if it does not exist
if [ ! -d "${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV" ]
then
spack env create $EBRAINS_SPACK_ENV
fi
# update environment site-configs
rm -rf ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/site-config && cp -r $EBRAINS_REPO/site-config ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV
# update spack.yaml: merge top-level and site-specific spack.yaml files
spack-python $EBRAINS_REPO/site-config/ymerge.py $EBRAINS_REPO/spack.yaml $EBRAINS_REPO/site-config/$SYSTEMNAME/spack.yaml > /tmp/spack.yaml
cp /tmp/spack.yaml ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/
# activate environment
spack env activate --without-view $EBRAINS_SPACK_ENV
spack concretize --force --fresh --test root
# dump dag to file
spack spec -y > "${CACHE_SPECFILE}"
if [ -n "${OCI_CACHE_PREFIX}" ]; then
# fetch missing sources (if packages not yet installed)
python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \
--local-cache=${SPACK_CACHE_SOURCE} \
--remote-cache-type=oci \
--remote-cache=${OCI_CACHE_PREFIX}/source_cache \
--yashchiki-home=${YASHCHIKI_HOME} \
/tmp/missing_paths_sources.dat ${CACHE_SPECFILE}
# fetch missing build results (if packages not yet installed)
python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \
--local-cache=${SPACK_CACHE_BUILD}/build_cache \
--remote-cache-type=oci \
--remote-cache=${OCI_CACHE_PREFIX}/build_cache \
--yashchiki-home=${YASHCHIKI_HOME} \
/tmp/missing_paths_buildresults.dat ${CACHE_SPECFILE}
fi
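# only enter the source-fetch stage if the environment still has uninstalled specs
# (the spack-python one-liner exits with 0 when uninstalled_specs() is non-empty)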
spack-python -c "exit(not len(spack.environment.active_environment().uninstalled_specs()))" && (
# fetch all sources but delay exit code handling
spack fetch --dependencies --missing && ret=$? || ret=$?;
if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then
# push freshly fetched sources to remote cache
echo "Performing update of the source cache"
python3 ${YASHCHIKI_HOME}/update_cached_sources.py \
--local-cache=${SPACK_CACHE_SOURCE} \
--remote-cache-type=oci \
--remote-cache=${OCI_CACHE_PREFIX}/source_cache \
/tmp/missing_paths_sources.dat;
else
echo "Updating of the source cache disabled."
fi
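# note: a failing `spack fetch` is deliberately not propagated here, to avoid stopping the
# entire pipeline due to transient network issues or mirror unavailability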
# if [ "$ret" -ne 0 ]; then
# (exit $ret)
# fi
)
if [ -n "${OCI_CACHE_PREFIX}" ]; then
# record the state of installed/uninstalled packages before actually installing them
dag_hashes_pre_install=$(spack-python ${YASHCHIKI_HOME}/specfile_dag_hash.py ${CACHE_SPECFILE})
fi
# install the environment, limiting the number of parallel jobs to $SPACK_JOBS to reduce the amount of required RAM
# delay exit code until we have updated the cache below
spack install --no-check-signature -y -j$SPACK_JOBS --fresh --test root && spack_install_ret=$? || spack_install_ret=$?
# only update the local and remote build caches if cache updates are enabled
if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then
# push previously missing (but now installed) packages to the local cache
for dag_hash in $dag_hashes_pre_install; do
spack buildcache create --unsigned --only package ${SPACK_CACHE_BUILD} /${dag_hash} && ret=$? || ret=$?
if [ $ret -ne 0 ]; then
echo "Failed to push ${dag_hash}, trying to call spack find on it:"
spack find -Lvp /${dag_hash} || true
fi
done
# upload packages from local to remote cache
echo "Performing update of the build cache"
python3 ${YASHCHIKI_HOME}/update_cached_buildresults.py \
--local-cache=${SPACK_CACHE_BUILD}/build_cache \
--remote-cache-type=oci \
--remote-cache=${OCI_CACHE_PREFIX}/build_cache \
/tmp/missing_paths_buildresults.dat
else
echo "Updating of the build cache disabled."
fi
# propagate the spack install exit code (the failing subshell aborts the script via set -e)
if [ $spack_install_ret -ne 0 ]; then
(exit "$spack_install_ret")
fi
# remove local cache content
if [ -d ${SPACK_CACHE_BUILD} ]; then
spack mirror destroy --mirror-name local_cache
fi
# TODO: when used with spack, remote OCI build caches require an index file
#spack mirror add ebrains oci://docker-registry.ebrains.eu/esd/build_cache
#spack buildcache list -a ebrains && ret=$? || ret=$?
# rebuild spack's database
spack reindex
# create a load script that, when sourced, activates and loads the installed spack environment using views
# this requires deactivating the environment first:
spack env deactivate
unset SPACK_LD_LIBRARY_PATH
spack env activate --sh $EBRAINS_SPACK_ENV > ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/load_env.sh
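# the installed environment can then be activated by sourcing the generated file, e.g.:
#   source ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/load_env.sh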
# create module files with spack
# spack module tcl refresh -y
# create a loads script that, when sourced, activates and loads the installed spack environment using modules
# spack env loads -r