Compare revisions
Commits on Source (153)
Showing 244 additions and 335 deletions
@@ -3,11 +3,13 @@ stages:
   - test
 
 variables:
-  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:24.12
-  SPACK_PATH_GITLAB: /mnt/spack_v0.21.1
+  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:devel
+  RUN_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/clb-jupyter-image/ebrains:dev-a7ab31be
+  SPACK_PATH_GITLAB: /mnt/spack_v0.23.1
   SYSTEMNAME: ebrainslab
   GIT_SUBMODULE_STRATEGY: recursive
   GIT_CLEAN_FLAGS: -ffdxq
+  RUNNER_AFTER_SCRIPT_TIMEOUT: 20m
 
 # ===================================================================
 # LAB DEPLOYMENTS
 
@@ -47,6 +49,15 @@ variables:
     - if [ $(kubectl get pods -l job-name=simplejob${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi;
     # delete the job, as we have the logs here
     - kubectl delete job simplejob${CI_PIPELINE_ID} || true
+  after_script:
+    - kubectl config use-context $KUBE_CONTEXT
+    - sh create_job_widget_script.sh $CI_PIPELINE_ID $RUN_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $RELEASE_NAME $LAB_KERNEL_ROOT
+    - cat widget-script.yml
+    - kubectl create -f widget-script.yml
+    - while true; do sleep 300; x=$(kubectl get pods -l job-name=widget-script${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.phase}'); if [ $x != "Running" ]; then break; fi; done
+    - kubectl logs jobs/widget-script${CI_PIPELINE_ID} | tee log.txt
+    - if [ $(kubectl get pods -l job-name=widget-script${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi;
+    - kubectl delete job widget-script${CI_PIPELINE_ID} || true
 #  artifacts:
 #    paths:
 #      - spack_logs
 
@@ -62,14 +73,14 @@ variables:
   extends: .deploy-build-environment
   variables:
     LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/int
-    INSTALLATION_ROOT: /srv/test-build-2402
+    INSTALLATION_ROOT: /srv/test-build-2502
 
 # deploy to a prod lab environment
 .deploy-prod-server:
   extends: .deploy-build-environment
   variables:
     LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/prod
-    INSTALLATION_ROOT: /srv/main-spack-instance-2402
+    INSTALLATION_ROOT: /srv/main-spack-instance-2502
 
 # deploy to the dev lab environment at CINECA
 .deploy-dev-server-cineca:
 
@@ -97,12 +108,24 @@ variables:
 # -------------------------------------------------------------------
 
 # deploy int release (latest changes) to dev env to be tested before release to production
+# (the master branch and any branch starting with "lab-" are deployed to a dedicated kernel)
 .deploy-int-release:
   variables:
-    SPACK_ENV: test
-    RELEASE_NAME: EBRAINS-test
+    SPACK_ENV: $CI_COMMIT_BRANCH
+    RELEASE_NAME: $CI_COMMIT_BRANCH
+  rules:
+    - if: '($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH =~ /^lab-/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"'
+
+# deploy a pre-production environment first, to avoid directly modifying the experimental or official release environments
+.deploy-ppd-release:
+  variables:
+    SPACK_ENV: ppd
+    RELEASE_NAME: EBRAINS-ppd
+  allow_failure: false
   rules:
-    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE != "schedule"
+    - if: $CI_PIPELINE_SOURCE == "schedule" && $DEPLOYMENT == "prod"
+    - if: $CI_COMMIT_BRANCH =~ /^ebrains/
+      when: manual
 
 # deploy the experimental release of tools once a week from latest working version of int release
 .deploy-exp-release:
 
@@ -125,8 +148,8 @@ variables:
 # deploy the production release of tools
 .deploy-prod-release:
   variables:
-    SPACK_ENV: ebrains-24-04
-    RELEASE_NAME: EBRAINS-24.04
+    SPACK_ENV: ebrains-25-02
+    RELEASE_NAME: EBRAINS-25.02
   rules:
     - if: $CI_COMMIT_BRANCH =~ /^ebrains/
       when: manual
 
@@ -147,26 +170,42 @@ deploy-exp-release-dev-cineca:
     - .deploy-exp-dev-release
     - .deploy-dev-server-cineca
 
+# deploy ppd release to prod environment at JSC
+deploy-ppd-release-prod-jsc:
+  extends:
+    - .deploy-ppd-release
+    - .deploy-prod-server-jsc
+
+# deploy ppd release to prod environment at CINECA
+deploy-ppd-release-prod-cineca:
+  extends:
+    - .deploy-ppd-release
+    - .deploy-prod-server-cineca
+
 # deploy exp release to prod environment at JSC
 deploy-exp-release-prod-jsc:
+  needs: [deploy-ppd-release-prod-jsc]
   extends:
     - .deploy-exp-prod-release
    - .deploy-prod-server-jsc
 
 # deploy exp release to prod environment at CINECA
 deploy-exp-release-prod-cineca:
+  needs: [deploy-ppd-release-prod-cineca]
   extends:
     - .deploy-exp-prod-release
     - .deploy-prod-server-cineca
 
 # deploy prod release to prod environment at JSC
 deploy-prod-release-prod-jsc:
+  needs: [deploy-ppd-release-prod-jsc]
   extends:
    - .deploy-prod-release
    - .deploy-prod-server-jsc
 
 # deploy prod release to prod environment at CINECA
 deploy-prod-release-prod-cineca:
+  needs: [deploy-ppd-release-prod-cineca]
   extends:
     - .deploy-prod-release
     - .deploy-prod-server-cineca
 
@@ -197,9 +236,9 @@ build-spack-env-on-runner:
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
     # for successfully installed packages: keep the spack logs for any package modified during this CI job
-    - shopt -s globstar
-    - PKG_DIR=$CI_PROJECT_DIR/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0
-    - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi
+    - . $CI_PROJECT_DIR/spack/share/spack/setup-env.sh
+    - cd $(spack-python -c "print(spack.store.parse_install_tree(spack.config.get('config'))[0])")
+    - find . -mindepth 4 -maxdepth 4 \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;
     # for not successfully installed packages: also keep the spack logs for any packages that failed
     - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi
     # - if [ -d /tmp/spack_tests ]; then mv /tmp/spack_tests $CI_PROJECT_DIR; fi
 
@@ -210,7 +249,7 @@ build-spack-env-on-runner:
     when: always
   timeout: 2 days
   rules:
-    - if: $CI_PIPELINE_SOURCE != "schedule" && $CI_PIPELINE_SOURCE != "merge_request_event"
+    - if: $CI_PIPELINE_SOURCE == "push"
 
 # this one fills the spack caches and updates the ESD (ebrainslab-variant) images on harbor
 sync-esd-image:
 
@@ -250,7 +289,7 @@ sync-esd-image:
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
     # for successfully installed packages: keep the spack logs for any package modified during this CI job
-    - PKG_DIR=${SANDBOX_ROOT}/${INSTALLATION_ROOT}/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0
+    - PKG_DIR=${SANDBOX_ROOT}/${INSTALLATION_ROOT}/spack/opt/spack/**/linux-*/gcc-13.3.0
     - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi
     # for not successfully installed packages: also keep the spack logs for any packages that failed
     - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi
 
@@ -262,7 +301,7 @@ sync-esd-image:
   resource_group: registry-esd-master-image
   rules:
     # branches that update the gitlab-runner upstream (read-only) installation and the spack OCI caches
-    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE != "schedule"
+    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE == "push"
       when: manual
 
 # update gitlab-runner upstream (read-only) installation
 
@@ -274,7 +313,7 @@ sync-gitlab-spack-instance:
   image: $BUILD_ENV_DOCKER_IMAGE
   variables:
     SPACK_REPO_PATH: $SPACK_PATH_GITLAB/ebrains-spack-builds
-    SPACK_JOBS: 4
+    SPACK_JOBS: 16
    OCI_CACHE_PREFIX: ""
    UPDATE_SPACK_OCI_CACHES: false
   script:
 
@@ -296,9 +335,9 @@ sync-gitlab-spack-instance:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
     # for successfully installed packages: keep the spack logs for any package modified during this CI job
     # (we use repo.yaml, that is modified at each start of the pipeline, as a reference file)
-    - shopt -s globstar
-    - PKG_DIR=$SPACK_PATH_GITLAB/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0
-    - if cd $PKG_DIR; then find . -newer $SPACK_REPO_PATH/repo.yaml \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi
+    - . $SPACK_PATH_GITLAB/spack/share/spack/setup-env.sh
+    - cd $(spack-python -c "print(spack.store.parse_install_tree(spack.config.get('config'))[0])")
+    - find . -mindepth 4 -maxdepth 4 -newer $SPACK_REPO_PATH/repo.yaml \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;
     # for not successfully installed packages: also keep the spack logs for any packages that failed
     - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi
   artifacts:
 
@@ -307,7 +346,7 @@ sync-gitlab-spack-instance:
     when: always
   rules:
     # branches that update the gitlab-runner upstream (read-only) installation
-    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE != "schedule"
+    - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push"
      when: manual
 
 # run (scheduled) standalone tests for environment
......
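A recurring change across these hunks is how the jobs locate installed packages for log collection: instead of hard-coding `spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0`, they now ask Spack itself for the install-tree root. A minimal sketch of the pattern; `SPACK_ROOT` here is an assumed stand-in for the per-job checkout (the jobs above use `$CI_PROJECT_DIR/spack` or `$SPACK_PATH_GITLAB/spack`):
```
. "$SPACK_ROOT/share/spack/setup-env.sh"   # provides the spack and spack-python commands
cd "$(spack-python -c "print(spack.store.parse_install_tree(spack.config.get('config'))[0])")"
# <arch>/<compiler>/<package-hash>/.spack sits exactly four levels below the tree root
find . -mindepth 4 -maxdepth 4 \( -name ".spack" -o -name ".build" \) -print
```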
@@ -4,5 +4,5 @@
 	shallow = true
 [submodule "vendor/yashchiki"]
 	path = vendor/yashchiki
-	url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/yashchiki
+	url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal
 	shallow = true
@@ -38,9 +38,9 @@ Clone this repository. You can use the `ebrains-yy-mm` branches to install the E
 git clone --branch {branch-name} https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git
 ```
 
-Clone Spack. We currently use version v0.21.1:
+Clone Spack. We currently use version v0.23.1:
 ```
-git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.21.1 https://github.com/spack/spack
+git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.23.1 https://github.com/spack/spack
 ```
 
 Activate Spack:
......
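The "Activate Spack" step is truncated here; for a checkout like the one above, the standard activation is:
```
. spack/share/spack/setup-env.sh
spack --version   # expected to report 0.23.1
```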
@@ -37,7 +37,7 @@ cp $INSTALLATION_ROOT/spack/var/spack/environments/$EBRAINS_SPACK_ENV/load_env.s
 # and the location of python modules installed in the base docker Collab image
 cat <<EOF >> $KERNEL_PATH/bin/env.sh
 export PATH=\$PATH:/opt/app-root/src/.local/bin
-export PYTHONPATH=\$PYTHONPATH:/opt/app-root/src/.local/lib/python3.8/site-packages:/usr/local/lib/python3.8/dist-packages
+export PYTHONPATH=\$PYTHONPATH:/opt/conda/lib/python3.11/site-packages
 export R_LIBS_USER=/opt/app-root/src/.local/lib/R/site-library
 mkdir -p \$R_LIBS_USER
 export R_LIBS=\$R_LIBS_USER:\$R_LIBS
......
@@ -36,11 +36,13 @@ spec:
         imagePullPolicy: Always
         resources:
           limits:
-            cpu: '6'
-            memory: '18Gi'
+            cpu: '8'
+            memory: '32Gi'
+            ephemeral-storage: '10Gi'
           requests:
             cpu: '4'
-            memory: '12Gi'
+            memory: '20Gi'
+            ephemeral-storage: '256Mi'
         volumeMounts:
           - name: sharedbin
             mountPath: /srv
 
@@ -99,7 +101,7 @@ spec:
           - name: EBRAINS_REPO_PATH
             value: $INSTALLATION_ROOT/ebrains-spack-builds
           - name: SPACK_JOBS
-            value: '4'
+            value: '6'
       volumes:
         - name: sharedbin
           persistentVolumeClaim:
......
#!/bin/bash
# ===========================================================================================================
# title : create_job_widget_script.sh
# usage : ./create_job_widget_script.sh $OC_JOB_ID $RUN_ENV_DOCKER_IMAGE $INSTALLATION_ROOT
# $SPACK_ENV $RELEASE_NAME $LAB_KERNEL_ROOT
# description : creates a job file that generates a script to load the jupyter extensions for a given env
# ===========================================================================================================
OC_JOB_ID=$1
RUN_ENV_DOCKER_IMAGE=$2
INSTALLATION_ROOT=$3
EBRAINS_SPACK_ENV=$4
RELEASE_NAME=$5
LAB_KERNEL_ROOT=$6
cat <<EOT >> widget-script.yml
apiVersion: batch/v1
kind: Job
metadata:
  name: widget-script${OC_JOB_ID}
spec:
  parallelism: 1
  completions: 1
  backoffLimit: 0
  template:
    spec:
      containers:
        - name: widget-script
          image: ${RUN_ENV_DOCKER_IMAGE}
          imagePullPolicy: Always
          resources:
            limits:
              cpu: '1'
              memory: '1Gi'
            requests:
              cpu: '0.5'
              memory: '500Mi'
          volumeMounts:
            - name: sharedbin
              mountPath: /srv
          command:
            - /bin/bash
            - -c
            - |
              . \$INSTALLATION_ROOT/spack/share/spack/setup-env.sh
              spack env activate --without-view \$EBRAINS_SPACK_ENV
              KERNEL_PATH=\$LAB_KERNEL_ROOT/\$(echo "\$RELEASE_NAME" | tr '[:upper:]' '[:lower:]')
              spack load --sh --first clb-nb-utils py-pip py-tvb-ext-bucket py-tvb-ext-unicore py-tvb-ext-xircuits > \$KERNEL_PATH/bin/widget_activation.sh
          env:
            - name: SYSTEMNAME
              value: ebrainslab
            - name: SPACK_DISABLE_LOCAL_CONFIG
              value: "true"
            - name: INSTALLATION_ROOT
              value: "$INSTALLATION_ROOT"
            - name: EBRAINS_SPACK_ENV
              value: "$EBRAINS_SPACK_ENV"
            - name: RELEASE_NAME
              value: "$RELEASE_NAME"
            - name: LAB_KERNEL_ROOT
              value: "$LAB_KERNEL_ROOT"
      volumes:
        - name: sharedbin
          persistentVolumeClaim:
            claimName: shared-binaries
      restartPolicy: Never
EOT
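For reference, an invocation mirroring the CI `after_script` above; all six argument values are illustrative:
```
sh create_job_widget_script.sh 12345 \
  docker-registry.ebrains.eu/clb-jupyter-image/ebrains:dev-a7ab31be \
  /srv/main-spack-instance-2502 ebrains-25-02 EBRAINS-25.02 \
  /srv/jupyterlab_kernels/prod
kubectl create -f widget-script.yml     # submit the generated Job
kubectl logs jobs/widget-script12345    # follow the widget-activation output
```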
@@ -21,10 +21,9 @@ export OCI_CACHE_PREFIX=$7
 # make sure spack uses the symlinked folder as path
 export CI_SPACK_ROOT=${INSTALLATION_ROOT}/spack
 
-# specify location of .spack dir (by default in ~)
-# this is where cache and configuration settings are stored
-export SPACK_USER_CACHE_PATH=${CI_SPACK_ROOT}/.spack
-export SPACK_USER_CONFIG_PATH=${CI_SPACK_ROOT}/.spack
+# disable local configuration and cache directories
+export SPACK_DISABLE_LOCAL_CONFIG=true
+export SPACK_USER_CACHE_PATH=/tmp/spack
 
 # define SYSTEMNAME variable in sites where it's not already defined
 export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}}
 
@@ -44,24 +43,16 @@ SPACK_ROOT_EXISTED=1
 if [ ! -d ${CI_SPACK_ROOT} ]; then
   ln -s ${EBRAINS_REPO}/vendor/spack ${CI_SPACK_ROOT}
   SPACK_ROOT_EXISTED=0
-  # SPACK PATCH: the post-build logs on install-time-test-logs.txt gets ovewritten by the post-install logs.
-  # quick fix for that: (TODO: investigate more and open PR)
-  sed -i "s/self.file_like, \"w\"/self.file_like, \"a\"/g" ${CI_SPACK_ROOT}/lib/spack/llnl/util/tty/log.py
-fi
-
-if [[ $UPSTREAM_INSTANCE ]]
-then
-  cat <<EOF > ${CI_SPACK_ROOT}/etc/spack/defaults/upstreams.yaml
-upstreams:
-  upstream-spack-instance:
-    install_tree: $UPSTREAM_INSTANCE/spack/opt/spack
-EOF
 fi
 
 # activate Spack
 source ${CI_SPACK_ROOT}/share/spack/setup-env.sh
 
+if [[ $UPSTREAM_INSTANCE ]]; then
+  UPSTREAM_PREFIX=$(find $UPSTREAM_INSTANCE/spack/opt/spack/ -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {})
+  spack config add upstreams:upstream-spack-instance:install_tree:$UPSTREAM_PREFIX
+fi
+
 if [ "${SPACK_ROOT_EXISTED}" -eq 0 ]; then
   # for caching purposes it's nice if we can relocate into long paths, but we
   # can't do that for existing installations -> else path
 
@@ -88,52 +79,6 @@ if [[ ! $(spack mirror list | grep local_cache) ]]; then
   spack mirror add local_cache ${SPACK_CACHE_BUILD}
 fi
 
-# install platform compiler (extract version from packages.yaml)
-if [ $SYSTEMNAME == ebrainslab ]
-then
-  EBRAINS_SPACK_COMPILER=$(grep 'compiler' $EBRAINS_REPO/site-config/$SYSTEMNAME/packages.yaml | awk -F'[][]' '{ print $2 }')
-  spack compiler find
-  spack load $EBRAINS_SPACK_COMPILER || {
-    # dump dag to file
-    spack spec -y $EBRAINS_SPACK_COMPILER arch=x86_64 > "/tmp/req_compiler.yaml"
-    if [ -n "${OCI_CACHE_PREFIX}" ]; then
-      # fetch missing sources (if packages not yet installed)
-      python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \
-        --local-cache=${SPACK_CACHE_SOURCE} \
-        --remote-cache-type=oci \
-        --remote-cache=${OCI_CACHE_PREFIX}/source_cache \
-        --yashchiki-home=${YASHCHIKI_HOME} \
-        /tmp/compiler_missing_paths_sources.dat /tmp/req_compiler.yaml
-      # fetch missing build results (if packages not yet installed)
-      python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \
-        --local-cache=${SPACK_CACHE_BUILD}/build_cache \
-        --remote-cache-type=oci \
-        --remote-cache=${OCI_CACHE_PREFIX}/build_cache \
-        --yashchiki-home=${YASHCHIKI_HOME} \
-        /tmp/compiler_missing_paths_buildresults.dat /tmp/req_compiler.yaml
-    fi
-    spack install --no-check-signature -y -j$SPACK_JOBS $EBRAINS_SPACK_COMPILER arch=x86_64
-    if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then
-      echo "Performing update of the source cache (for base compiler)"
-      python3 ${YASHCHIKI_HOME}/update_cached_sources.py \
-        --local-cache=${SPACK_CACHE_SOURCE} \
-        --remote-cache-type=oci \
-        --remote-cache=${OCI_CACHE_PREFIX}/source_cache \
-        /tmp/compiler_missing_paths_sources.dat
-      # push previously missing (but now installed) packages to the local cache
-      spack buildcache create --unsigned ${SPACK_CACHE_BUILD} ${EBRAINS_SPACK_COMPILER} && ret=$? || ret=$?
-      # upload packages from local to remote cache
-      echo "Performing update of the build cache (for base compiler)"
-      python3 ${YASHCHIKI_HOME}/update_cached_buildresults.py \
-        --local-cache=${SPACK_CACHE_BUILD}/build_cache \
-        --remote-cache-type=oci \
-        --remote-cache=${OCI_CACHE_PREFIX}/build_cache \
-        /tmp/compiler_missing_paths_buildresults.dat
-    fi
-    spack load $EBRAINS_SPACK_COMPILER
-  }
-fi
-
 spack compiler find
 
 # create environment if it does not exist
 
@@ -185,9 +130,9 @@ spack-python -c "exit(not len(spack.environment.active_environment().uninstalled
   else
     echo "Updating of the source cache disabled."
   fi
-  if [ $ret -ne 0 ]; then
-    (exit $ret)
-  fi
+  # if [ "$ret" -ne 0 ]; then
+  #   (exit $ret)
+  # fi
 )
 
 if [ -n "${OCI_CACHE_PREFIX}" ]; then
......
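The upstream hookup above changes from writing `upstreams.yaml` before Spack is sourced to discovering the upstream database and registering it afterwards. A sketch of the same sequence in isolation, with the instance path as an assumption:
```
UPSTREAM_INSTANCE=/srv/main-spack-instance-2502   # assumed example path
UPSTREAM_PREFIX=$(find "$UPSTREAM_INSTANCE/spack/opt/spack/" -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {})
spack config add "upstreams:upstream-spack-instance:install_tree:$UPSTREAM_PREFIX"
spack config get upstreams   # verify the registration
```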
@@ -3,7 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from spack import *
+from spack.package import *
 
 class Acpype(PythonPackage):
     """A tool based in Python to use Antechamber to generate topologies for chemical
......
@@ -3,52 +3,58 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from spack import *
+from spack.package import *
 
-class Ambertools (CMakePackage):
-    """AmberTools is a free, useful standalone package and a prerequisite for installing Amber itself.
-    The AmberTools suite is free of charge, and its components are mostly released under the GNU General Public License (GPL).
-    A few components are included that are in the public domain or which have other, open-source, licenses.
-    The libsander and libpbsa libraries use the LGPL license."""
+
+class Ambertools(CMakePackage):
+    """AmberTools is a free, useful standalone package and a prerequisite
+    for installing Amber itself. The AmberTools suite is free of charge,
+    and its components are mostly released under the GNU General Public
+    License (GPL). A few components are included that are in the public
+    domain or which have other, open-source, licenses. The libsander and
+    libpbsa libraries use the LGPL license."""
 
-    # Set the homepage and download url
-    homepage = "http://ambermd.org/AmberTools.php"
-    url = "http://ambermd.org/downloads/AmberTools22jlmrcc.tar.bz2"
+    homepage = "https://ambermd.org/AmberTools.php"
+    url = "https://ambermd.org/downloads/AmberTools22jlmrcc.tar.bz2"
 
-    # Set the gitlab accounts of this package maintainers
-    maintainers = ['dbeltran', 'elmath']
+    maintainers("d-beltran")
 
-    version('22jlmrcc', sha256='1571d4e0f7d45b2a71dce5999fa875aea8c90ee219eb218d7916bf30ea229121')
+    # begin EBRAINS (added): add version
+    version("23_rc6", sha256="debb52e6ef2e1b4eaa917a8b4d4934bd2388659c660501a81ea044903bf9ee9d")
+    # end EBRAINS
+    version("22jlmrcc", sha256="1571d4e0f7d45b2a71dce5999fa875aea8c90ee219eb218d7916bf30ea229121")
 
-    # Dependencies
-    depends_on("flex", type="build") # This is necessary for sure (experimentally tested)
-    depends_on("bison", type="build") # This is necessary for sure (experimentally tested)
-    depends_on("tcsh", type="build")
-    depends_on("zlib", type=("build", "link", "run"))
-    depends_on("bzip2", type=("build", "run"))
-    depends_on("blas", type=("build", "run"))
-    depends_on("lapack", type=("build", "run"))
-    depends_on("arpack-ng", type=("build", "run"))
-    depends_on("netcdf-c", type=("build", "run"))
-    depends_on("netcdf-fortran", type=("build", "run"))
-    depends_on("fftw", type=("build", "run"))
-    depends_on("readline", type=("build", "run"))
-    depends_on("netlib-xblas~plain_blas", type=("build", "run"))
-    # specific variants needed for boost - from the build log "Could NOT find Boost (missing: thread system program_options iostreams regex timer chrono filesystem graph)"
-    depends_on("boost+thread+system+program_options+iostreams+regex+timer+chrono+filesystem+graph", type=("build", "run"))
+    depends_on("flex", type="build")
+    depends_on("bison", type="build")
+    depends_on("tcsh", type="build")
+    depends_on("zlib", type=("build", "run"))
+    depends_on("bzip2", type=("build", "run"))
+    depends_on("blas", type=("build", "run"))
+    depends_on("lapack", type=("build", "run"))
+    depends_on("arpack-ng", type=("build", "run"))
+    depends_on("netcdf-c", type=("build", "run"))
+    depends_on("netcdf-fortran", type=("build", "run"))
+    depends_on("fftw", type=("build", "run"))
+    depends_on("readline", type=("build", "run"))
+    depends_on("netlib-xblas~plain_blas", type=("build", "run"))
+    # Specific variants needed for boost according to build logs
+    depends_on(
+        "boost+thread+system+program_options+iostreams+regex+timer+chrono+filesystem+graph",
+        type=("build", "run"),
+    )
 
     # Python dependencies
-    # WARNING: If a python 3.8 version is already installed in spack then the '+tkinter' variant makes spack ignore the version
-    # WARNING: Spack may try to install the preferred python version (i.e. python 3.10.8)
-    # WARNING: The soultion is uninstall python and reinstall with this variant
-    depends_on('python@3.8: +tkinter', type=('build', 'run'))
+    # begin EBRAINS (modified): add version
+    depends_on("python@3.8:3.10 +tkinter", type=("build", "run"), when="@22jlmrcc")
+    depends_on("python@3.8: +tkinter", type=("build", "run"), when="@23_rc6")
+    # end EBRAINS
+    depends_on("py-setuptools", type="build")
     depends_on("py-numpy", type=("build", "run"))
     depends_on("py-matplotlib", type=("build", "run"))
     depends_on("py-scipy", type=("build", "run"))
 
     def cmake_args(self):
         # Translated from ambertools build/run_cmake script
-        # We also add the TRUST_SYSTEM_LIBS argument that is mentioned in the ambertools CMake guide
+        # We also add the TRUST_SYSTEM_LIBS argument mentioned in the ambertools guide
         # https://ambermd.org/pmwiki/pmwiki.php/Main/CMake-Guide-to-Options
         args = [
             self.define("COMPILER", "GNU"),
 
@@ -58,9 +64,9 @@ class Ambertools (CMakePackage):
             self.define("DOWNLOAD_MINICONDA", False),
             self.define("TRUST_SYSTEM_LIBS", True),
             # This is to avoid the x11 (X11_Xext_LIB) error
-            # It is equivalent to the '-noX11' flag accoridng to the docs:
+            # It is equivalent to the "-noX11" flag according to the docs:
             # https://ambermd.org/pmwiki/pmwiki.php/Main/CMake-Common-Options
-            self.define("BUILD_GUI", False)
+            self.define("BUILD_GUI", False),
         ]
         return args
 
@@ -72,7 +78,17 @@ class Ambertools (CMakePackage):
         env.set("AMBER_PREFIX", self.prefix)
         env.set("AMBERHOME", self.prefix)
 
-    @run_after('install')
+    @run_after("install")
     @on_package_attributes(run_tests=True)
     def check_install(self):
         make("test.serial")
+
+    # Temporarily copy netcdf.h header file to netcdf-fortran/include to pass the Ambertools
+    # cmake check (quickest fix, will probably cause problems, needs to change)
+    @run_before("cmake")
+    def fix_check(self):
+        cp = Executable("cp")
+        cp(
+            self.spec["netcdf-c"].headers.directories[0] + "/netcdf.h",
+            self.spec["netcdf-fortran"].headers.directories[0],
+        )
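A hedged usage sketch for the version this recipe change adds; `--test=root` is the standard Spack flag that triggers the `check_install` hook above:
```
spack install --test=root ambertools@23_rc6
```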
@@ -3,7 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
-from spack import *
+from spack.package import *
 
 class Apbs(CMakePackage):
 
@@ -40,7 +40,7 @@ class Apbs(CMakePackage):
     depends_on('arpack-ng', type=('build', 'run'))
     depends_on('suite-sparse', type=('build', 'run'))
     depends_on('maloc', type=('build', 'run'))
-    depends_on('python@3.8:3.10', type=('build', 'run'))
+    depends_on('python@3.8:3.11', type=('build', 'run'))
 
     def cmake_args(self):
         # Min and max Python versions need to be set as variables to pass tests.
......
@@ -13,10 +13,15 @@ class Arbor(CMakePackage, CudaPackage):
     homepage = "https://arbor-sim.org"
     git = "https://github.com/arbor-sim/arbor.git"
     url = "https://github.com/arbor-sim/arbor/releases/download/v0.9.0/arbor-v0.9.0-full.tar.gz"
-    maintainers = ["thorstenhater", "brenthuisman", "haampie"]
+    maintainers = ("thorstenhater", "ErbB4", "haampie")
 
-    version("master", branch="master")
-    version("develop")
+    version("master", branch="master", submodules=True)
+    version("develop", branch="master", submodules=True)
+    version(
+        "0.10.0",
+        sha256="72966b7a2f45ce259b8ba167ca3e4f5ab9f212136a300267aaac0c04ed3fe3fc",
+        url="https://github.com/arbor-sim/arbor/releases/download/v0.10.1/arbor-v0.10.0-full.tar.gz",
+    )
     version(
         "0.9.0",
         sha256="5f9740955c821aca81e23298c17ad64f33f635756ad9b4a0c1444710f564306a",
 
@@ -75,36 +80,50 @@ class Arbor(CMakePackage, CudaPackage):
     conflicts("%cce@:9.1")
     conflicts("%intel")
 
+    # begin EBRAINS (modified: added run dep)
     depends_on("cmake@3.19:", type=("build", "run"))
+    # end EBRAINS
 
     # misc dependencies
     depends_on("fmt@7.1:", when="@0.5.3:")  # required by the modcc compiler
     depends_on("fmt@9.1:", when="@0.7.1:")
-    depends_on("googletest@1.12.1", when="@0.7.1:")
+    # begin EBRAINS (modified: relaxed (upstream gave no info about update))
+    # upstream adds: depends_on("fmt@10.1:", when="@0.9.1:")
+    depends_on("googletest@1.12.1:", when="@0.7.1:")
     depends_on("pugixml@1.11:", when="@0.7.1:")
-    depends_on("nlohmann-json@3.11.2")
+    # upstream adds: depends_on("pugixml@1.13:", when="@0.9.1:")
+    depends_on("nlohmann-json@3.11.2:")
     depends_on("random123")
+    # upstream adds: depends_on("random123@1.14.0:", when="@0.10:")
+    # end EBRAINS (modified)
     with when("+cuda"):
         depends_on("cuda@10:")
         depends_on("cuda@11:", when="@0.7.1:")
+        depends_on("cuda@12:", when="@0.9.1:")
 
     # mpi
+    # begin EBRAINS (modified: added run dep)
     depends_on("mpi", when="+mpi", type=("build", "run"))
+    # end EBRAINS (modified)
     depends_on("py-mpi4py", when="+mpi+python", type=("build", "run"))
 
     # python (bindings)
-    extends("python", when="+python")
-    depends_on("python@3.7:", when="+python", type=("build", "run"))
-    depends_on("py-numpy", when="+python", type=("build", "run"))
     with when("+python"):
+        extends("python")
+        depends_on("python@3.7:", type=("build", "run"))
+        depends_on("python@3.9:", when="@0.9.1:", type=("build", "run"))
+        depends_on("py-numpy", type=("build", "run"))
         depends_on("py-pybind11@2.6:", type="build")
         depends_on("py-pybind11@2.8.1:", when="@0.5.3:", type="build")
        depends_on("py-pybind11@2.10.1:", when="@0.7.1:", type="build")
+        depends_on("py-pandas", type="test")
+        depends_on("py-seaborn", type="test")
 
     # sphinx based documentation
-    depends_on("python@3.7:", when="+doc", type="build")
-    depends_on("py-sphinx", when="+doc", type="build")
-    depends_on("py-svgwrite", when="+doc", type="build")
+    with when("+doc"):
+        depends_on("python@3.10:", type="build")
+        depends_on("py-sphinx", type="build")
+        depends_on("py-svgwrite", type="build")
 
     @property
     def build_targets(self):
 
@@ -124,10 +143,14 @@ class Arbor(CMakePackage, CudaPackage):
         # query spack for the architecture-specific compiler flags set by its wrapper
         args.append("-DARB_ARCH=none")
 
-        opt_flags = self.spec.target.optimization_flags(
-            self.spec.compiler.name, self.spec.compiler.version
+        opt_flags = spack.build_environment.optimization_flags(
+            self.compiler, self.spec.target
         )
-        args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags)
+        # Might return nothing
+        if opt_flags:
+            args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags)
+        # Needed, spack has no units package
+        args.append("-DARB_USE_BUNDLED_UNITS=ON")
 
         return args
 
@@ -135,3 +158,4 @@ class Arbor(CMakePackage, CudaPackage):
     @on_package_attributes(run_tests=True)
     def install_test(self):
         python("-c", "import arbor")
+        python("python/example/single_cell_model.py")
--- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java
@@ -150,6 +150,13 @@
builder.put("PATH", null);
builder.put("LD_LIBRARY_PATH", null);
}
+
+ Map<String, String> spackEnv = System.getenv();
+ for (String envName : spackEnv.keySet()) {
+ if (envName.startsWith("SPACK_")) {
+ builder.put(envName, spackEnv.get(envName));
+ }
+ }
}
private static PathFragment determineShellExecutable(OS os, PathFragment fromOption) {
--- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java
@@ -168,6 +168,13 @@ public class BazelRuleClassProvider {
env.put("PATH", null);
}
+ Map<String, String> spackEnv = System.getenv();
+ for (String envName : spackEnv.keySet()) {
+ if (envName.startsWith("SPACK_")) {
+ env.put(envName, spackEnv.get(envName));
+ }
+ }
+
// Shell environment variables specified via options take precedence over the
// ones inherited from the fragments. In the long run, these fragments will
// be replaced by appropriate default rc files anyway.
From 9c9d27561780bc56d9f0867e325c7421a94ee1cb Mon Sep 17 00:00:00 2001
From: Harsh Bhatia <bhatia4@llnl.gov>
Date: Tue, 15 Dec 2020 15:56:10 -0800
Subject: [PATCH] https://github.com/bazelbuild/bazel/commit/ab62a6e097590dac5ec946ad7a796ea0e8593ae0
---
src/conditions/BUILD | 6 ++++++
third_party/BUILD | 8 ++++++--
2 files changed, 12 insertions(+), 2 deletions(-)
diff --git a/src/conditions/BUILD b/src/conditions/BUILD
index 2b28e28057..faa41a439d 100644
--- a/src/conditions/BUILD
+++ b/src/conditions/BUILD
@@ -10,6 +10,12 @@ filegroup(
visibility = ["//src:__pkg__"],
)
+config_setting(
+ name = "linux_ppc",
+ values = {"cpu": "ppc"},
+ visibility = ["//visibility:public"],
+)
+
config_setting(
name = "linux_x86_64",
values = {"cpu": "k8"},
diff --git a/third_party/BUILD b/third_party/BUILD
index 159006d741..4fcae54c00 100644
--- a/third_party/BUILD
+++ b/third_party/BUILD
@@ -523,12 +523,13 @@ UNNECESSARY_DYNAMIC_LIBRARIES = select({
"//src/conditions:darwin": "*.so *.dll",
"//src/conditions:darwin_x86_64": "*.so *.dll",
"//src/conditions:linux_x86_64": "*.jnilib *.dll",
+ "//src/conditions:linux_ppc": "*.so *.jnilib *.dll",
# The .so file is an x86 one, so we can just remove it if the CPU is not x86
"//src/conditions:arm": "*.so *.jnilib *.dll",
"//src/conditions:linux_aarch64": "*.so *.jnilib *.dll",
# Play it safe -- better have a big binary than a slow binary
# zip -d does require an argument. Supply something bogus.
- "//conditions:default": "*.bogusextension",
+ "//conditions:default": "",
})
# Remove native libraries that are for a platform different from the one we are
@@ -537,7 +538,10 @@ genrule(
name = "filter_netty_dynamic_libs",
srcs = ["netty_tcnative/netty-tcnative-boringssl-static-2.0.24.Final.jar"],
outs = ["netty_tcnative/netty-tcnative-filtered.jar"],
- cmd = "cp $< $@ && zip -qd $@ " + UNNECESSARY_DYNAMIC_LIBRARIES,
+ cmd = "cp $< $@ && " +
+ # End successfully if there is nothing to be deleted from the archive
+ "if [ -n '" + UNNECESSARY_DYNAMIC_LIBRARIES + "' ]; then " +
+ "zip -qd $@ " + UNNECESSARY_DYNAMIC_LIBRARIES + "; fi",
)
java_import(
--
2.21.0 (Apple Git-122.2)
--- a/tools/cpp/cc_configure.bzl
+++ b/tools/cpp/cc_configure.bzl
@@ -173,8 +173,19 @@
else:
inc_dirs = result.stderr[index1 + 1:index2].strip()
- return [repository_ctx.path(_cxx_inc_convert(p))
- for p in inc_dirs.split("\n")]
+ default_inc_directories = [
+ repository_ctx.path(_cxx_inc_convert(p))
+ for p in inc_dirs.split("\n")
+ ]
+
+ env = repository_ctx.os.environ
+ if "SPACK_INCLUDE_DIRS" in env:
+ for path in env["SPACK_INCLUDE_DIRS"].split(":"):
+ default_inc_directories.append(
+ repository_ctx.path(_cxx_inc_convert(path))
+ )
+
+ return default_inc_directories
def _add_option_if_supported(repository_ctx, cc, option):
"""Checks that `option` is supported by the C compiler."""
--- a/tools/cpp/cc_configure.bzl
+++ b/tools/cpp/cc_configure.bzl
@@ -200,8 +200,19 @@
else:
inc_dirs = result.stderr[index1 + 1:index2].strip()
- return [_escape_string(repository_ctx.path(_cxx_inc_convert(p)))
- for p in inc_dirs.split("\n")]
+ default_inc_directories = [
+ _escape_string(repository_ctx.path(_cxx_inc_convert(p)))
+ for p in inc_dirs.split("\n")
+ ]
+
+ env = repository_ctx.os.environ
+ if "SPACK_INCLUDE_DIRS" in env:
+ for path in env["SPACK_INCLUDE_DIRS"].split(":"):
+ default_inc_directories.append(
+ repository_ctx.path(_cxx_inc_convert(path))
+ )
+
+ return default_inc_directories
def _add_option_if_supported(repository_ctx, cc, option):
--- a/compile.sh
+++ b/compile.sh
@@ -92,7 +92,7 @@
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" \
--host_platform=@bazel_tools//platforms:host_platform \
--platforms=@bazel_tools//platforms:target_platform \
|| fail "Could not build Bazel"
--- a/compile.sh
+++ b/compile.sh
@@ -92,7 +92,7 @@ display "."
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel_nojdk${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel_nojdk${EXE_EXT}" \
--host_platform=@bazel_tools//platforms:host_platform \
--platforms=@bazel_tools//platforms:target_platform \
|| fail "Could not build Bazel"
--- a/compile.sh
+++ b/compile.sh
@@ -92,7 +92,7 @@ display "."
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel_nojdk${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel_nojdk${EXE_EXT}" \
--action_env=PATH \
--host_platform=@bazel_tools//platforms:host_platform \
--platforms=@bazel_tools//platforms:target_platform \
--- a/compile.sh
+++ b/compile.sh
@@ -99,7 +99,7 @@
new_step 'Building Bazel with Bazel'
display "."
log "Building output/bazel"
- bazel_build "src:bazel${EXE_EXT}"
+ CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}"
cp -f "bazel-bin/src/bazel${EXE_EXT}" "output/bazel${EXE_EXT}"
chmod 0755 "output/bazel${EXE_EXT}"
BAZEL="$(pwd)/output/bazel${EXE_EXT}"
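All of these `compile.sh` patches assume they run inside a Spack build environment, where the compiler wrapper exports `SPACK_CC`/`SPACK_CXX` along with the `SPACK_*` variables that the Java-side patches forward into Bazel's action environment. A hedged sanity check before invoking a patched bootstrap:
```
# abort early if the Spack wrapper environment is absent (illustrative check, not part of the patches)
: "${SPACK_CC:?SPACK_CC unset - not inside a Spack build environment}"
: "${SPACK_CXX:?SPACK_CXX unset - not inside a Spack build environment}"
env | grep '^SPACK_'                      # the variables the BazelConfiguration patch forwards
CC=$SPACK_CC CXX=$SPACK_CXX ./compile.sh  # as in the patched bazel_build invocation
```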