From 5d1a9b5e5f2d42ef1ae5796725f7a9c1bc3c4525 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Mon, 23 Dec 2024 18:08:00 +0100 Subject: [PATCH 001/111] fix: correct handling of failed K8s build jobs --- .gitlab-ci.yml | 14 +++++--------- create_JupyterLab_kernel.sh | 2 ++ create_job.sh | 10 ++++------ install_spack_env.sh | 2 ++ 4 files changed, 13 insertions(+), 15 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 5162ebdd..8155d8f4 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -30,7 +30,7 @@ variables: # start the deploy job - kubectl create -f simplejob.yml # wait for job to finish to get the logs - - while true; do sleep 300; x=$(kubectl get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $3}'); if [ $x != "Running" ]; then break; fi; done + - while true; do sleep 300; x=$(kubectl get pods -l job-name=simplejob${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.phase}'); if [ $x != "Running" ]; then break; fi; done # # copy logs of failed packages locally, to keep as job artifacts # - oc rsync $(oc get pods -l job-name=simplejob${CI_PIPELINE_ID} -o name):/tmp ./ --include="*/" --include="spack/spack-stage/*/*.txt" --exclude="*" # - mv tmp/spack/spack-stage spack_logs @@ -40,7 +40,7 @@ variables: # - mv .$LAB_KERNEL_PATH kernel_specs # if spack install has failed, fail the pipeline - kubectl logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt - - if [ $(cat log.txt | grep "No module available for package" | wc -l) -gt 0 ]; then exit 1; fi; + - if [ $(kubectl get pods -l job-name=simplejob${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi; # delete the job, as we have the logs here - kubectl delete job simplejob${CI_PIPELINE_ID} || true # artifacts: @@ -207,11 +207,7 @@ build-spack-env-on-runner: - > echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml # run installation script - - . 
install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $SPACK_VERSION $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB - # re-activate envionment and run tests - - spack env activate $SPACK_DEV_ENV - # TODO: run all tests when test dependency issue is fixed - # - spack test run -x wf-brainscales2-demos wf-multi-area-model + - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $SPACK_VERSION $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB after_script: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job @@ -246,10 +242,10 @@ sync-gitlab-spack-instance: # get latest state of EBRAINS repo - rm -rf $SPACK_REPO_PATH && cp -r $CI_PROJECT_DIR $SPACK_REPO_PATH # run installation script - - . install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_VERSION $SPACK_REPO_PATH $SPACK_NFS_ENV + - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_VERSION $SPACK_REPO_PATH $SPACK_NFS_ENV # create kernel spec, so that the environment can be used in gitlab CI jobs - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac); - - . create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env + - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env after_script: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job diff --git a/create_JupyterLab_kernel.sh b/create_JupyterLab_kernel.sh index 49313cce..0f02e121 100644 --- a/create_JupyterLab_kernel.sh +++ b/create_JupyterLab_kernel.sh @@ -7,6 +7,8 @@ # loaded by all users. 
# =========================================================================================================== +set -euo pipefail + INSTALLATION_ROOT=$1 EBRAINS_SPACK_ENV=$2 RELEASE_NAME=$3 diff --git a/create_job.sh b/create_job.sh index 435bb60f..1e348826 100644 --- a/create_job.sh +++ b/create_job.sh @@ -61,18 +61,16 @@ spec: git clone ${CI_PROJECT_URL} --branch \$BRANCH \$EBRAINS_REPO_PATH # run installation script - . \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$SPACK_VERSION \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV + bash \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$SPACK_VERSION \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV if [ \$? -eq 0 ] then # build process succeeded - create or update kernel on the NFS based on the current spack environment - chmod +x \$EBRAINS_REPO_PATH/create_JupyterLab_kernel.sh - \$EBRAINS_REPO_PATH/create_JupyterLab_kernel.sh \$INSTALLATION_ROOT \$EBRAINS_SPACK_ENV \$RELEASE_NAME \$LAB_KERNEL_ROOT - exit 0 + bash \$EBRAINS_REPO_PATH/create_JupyterLab_kernel.sh \$INSTALLATION_ROOT \$EBRAINS_SPACK_ENV \$RELEASE_NAME \$LAB_KERNEL_ROOT && exit 0 else # build process failed - keep spack build logs and fail the pipeline - cp -r /tmp/spack/spack-stage/* \$SPACK_BUILD_LOGS - exit + cp -r /tmp/spack/spack-stage/* \$BUILD_LOGS_DIR + exit 1 fi env: - name: SYSTEMNAME diff --git a/install_spack_env.sh b/install_spack_env.sh index 2a9dbf9b..d09f48eb 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -7,6 +7,8 @@ # (if the specified spack instance doesn't exist, it also creates it) # ========================================================================================================================================= +set -eo pipefail + SPACK_JOBS=$1 # number of jobs INSTALLATION_ROOT=$2 # where to set up the installation SPACK_VERSION=$3 # which spack version to use -- GitLab From e6ddb63392f5b9c6d654cacc11afd4698ec98a0d Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki 
<emathioulaki@athenarc.gr> Date: Thu, 9 Jan 2025 15:52:05 +0100 Subject: [PATCH 002/111] feat: update ebrains-spack-build-env image --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 8155d8f4..164bfe98 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -3,7 +3,7 @@ stages: - test variables: - BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/tc/ebrains-spack-build-env/base:devel + BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:24.12 SPACK_VERSION: v0.21.1 SPACK_PATH_GITLAB: /mnt/spack_v0.21.1 SYSTEMNAME: ebrainslab -- GitLab From 3e31a298f5ebfc077edb6eabac57b8aa08e263e6 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 14 Jan 2025 15:54:16 +0100 Subject: [PATCH 003/111] feat: drop deployment jobs to CSCS --- .gitlab-ci.yml | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 164bfe98..075e9692 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -74,14 +74,6 @@ variables: KUBE_CONTEXT: cineca-int resource_group: shared-NFS-mount-dev-cineca -# deploy to the prod lab environment at CSCS -.deploy-prod-server-cscs: - extends: .deploy-prod-server - variables: - KUBE_CONTEXT: cscs-prod - BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/tc/ebrains-spack-build-env/okd:okd_23.06 - resource_group: shared-NFS-mount-prod-cscs - # deploy to the prod lab environment at JSC .deploy-prod-server-jsc: extends: .deploy-prod-server @@ -151,12 +143,6 @@ deploy-exp-release-dev-cineca: - .deploy-exp-dev-release - .deploy-dev-server-cineca -# deploy exp release to prod environment at CSCS -deploy-exp-release-prod-cscs: - extends: - - .deploy-exp-prod-release - - .deploy-prod-server-cscs - # deploy exp release to prod environment at JSC deploy-exp-release-prod-jsc: extends: @@ -169,12 +155,6 @@ deploy-exp-release-prod-cineca: - .deploy-exp-prod-release - .deploy-prod-server-cineca -# deploy 
prod release to prod environment at CSCS -deploy-prod-release-prod-cscs: - extends: - - .deploy-prod-release - - .deploy-prod-server-cscs - # deploy prod release to prod environment at JSC deploy-prod-release-prod-jsc: extends: -- GitLab From e380a666f9bdb760667ca68d75c3d0212ad48033 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Wed, 8 Jan 2025 17:39:10 +0100 Subject: [PATCH 004/111] fix: py-elephant needs sklearn --- packages/py-elephant/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/py-elephant/package.py b/packages/py-elephant/package.py index bf3b3432..1bccfca6 100644 --- a/packages/py-elephant/package.py +++ b/packages/py-elephant/package.py @@ -70,6 +70,7 @@ class PyElephant(PythonPackage, CudaPackage): depends_on("py-quantities@0.12.1:0.13.0", type=("build", "run"), when="@0.6.4:0.11.2") depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@develop") depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.12.0:") + depends_on("py-scikit-learn", type=("build", "run"), when="@0.3:") depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:1.0.0") depends_on("py-scipy@1.10.0:", type=("build", "run"), when="@1.1.0:") depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:") -- GitLab From 1e0fe4227b6e23889e43caf47b6a4c81e1ac4163 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Mon, 9 Dec 2024 13:44:39 +0100 Subject: [PATCH 005/111] feat: introduce submodule for spack We need to check if the flexibility of the script is actually needed. (And how this ever could have worked with a moving target spack ;).) 
--- .gitlab-ci.yml | 8 ++++---- .gitmodules | 4 ++++ create_job.sh | 15 ++++++--------- install_spack_env.sh | 10 ++++------ vendor/spack | 1 + 5 files changed, 19 insertions(+), 19 deletions(-) create mode 100644 .gitmodules create mode 160000 vendor/spack diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 075e9692..b9c20ad2 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,9 +4,9 @@ stages: variables: BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:24.12 - SPACK_VERSION: v0.21.1 SPACK_PATH_GITLAB: /mnt/spack_v0.21.1 SYSTEMNAME: ebrainslab + GIT_SUBMODULE_STRATEGY: recursive # =================================================================== # LAB DEPLOYMENTS @@ -25,7 +25,7 @@ variables: # use the site-specific kubectl context - kubectl config use-context $KUBE_CONTEXT # create job description file - - sh create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_VERSION $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT + - sh create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT - cat simplejob.yml # start the deploy job - kubectl create -f simplejob.yml @@ -187,7 +187,7 @@ build-spack-env-on-runner: - > echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml # run installation script - - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $SPACK_VERSION $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB + - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB after_script: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job @@ -222,7 +222,7 @@ sync-gitlab-spack-instance: # get latest state of EBRAINS repo - rm -rf $SPACK_REPO_PATH && cp -r $CI_PROJECT_DIR $SPACK_REPO_PATH # run installation script - - bash 
install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_VERSION $SPACK_REPO_PATH $SPACK_NFS_ENV + - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV # create kernel spec, so that the environment can be used in gitlab CI jobs - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac); - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..b41078fa --- /dev/null +++ b/.gitmodules @@ -0,0 +1,4 @@ +[submodule "vendor/spack"] + path = vendor/spack + url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/spack.git + shallow = true diff --git a/create_job.sh b/create_job.sh index 1e348826..bffac67d 100644 --- a/create_job.sh +++ b/create_job.sh @@ -2,7 +2,7 @@ # =========================================================================================================== # title : create_job.sh -# usage : ./create_job.sh $OC_JOB_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_VERSION +# usage : ./create_job.sh $OC_JOB_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT # $SPACK_ENV $BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT # description : creates OKD job yaml file that builds/updates spack environment and creates Lab kernel # =========================================================================================================== @@ -10,11 +10,10 @@ OC_JOB_ID=$1 BUILD_ENV_DOCKER_IMAGE=$2 INSTALLATION_ROOT=$3 -SPACK_VERSION=$4 -EBRAINS_SPACK_ENV=$5 -BRANCH=$6 -RELEASE_NAME=$7 -LAB_KERNEL_ROOT=$8 +EBRAINS_SPACK_ENV=$4 +BRANCH=$5 +RELEASE_NAME=$6 +LAB_KERNEL_ROOT=$7 cat <<EOT >> simplejob.yml apiVersion: batch/v1 @@ -61,7 +60,7 @@ spec: git clone ${CI_PROJECT_URL} --branch \$BRANCH \$EBRAINS_REPO_PATH # run installation script - bash \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS 
\$INSTALLATION_ROOT \$SPACK_VERSION \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV + bash \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV if [ \$? -eq 0 ] then @@ -77,8 +76,6 @@ spec: value: ebrainslab - name: INSTALLATION_ROOT value: $INSTALLATION_ROOT - - name: SPACK_VERSION - value: $SPACK_VERSION - name: EBRAINS_SPACK_ENV value: $EBRAINS_SPACK_ENV - name: BRANCH diff --git a/install_spack_env.sh b/install_spack_env.sh index d09f48eb..c9463e38 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -11,13 +11,11 @@ set -eo pipefail SPACK_JOBS=$1 # number of jobs INSTALLATION_ROOT=$2 # where to set up the installation -SPACK_VERSION=$3 # which spack version to use -EBRAINS_REPO=$4 # location of ebrains-spack-builds repository -EBRAINS_SPACK_ENV=$5 # name of EBRAINS Spack environment to be created/updated -UPSTREAM_INSTANCE=$6 # path to Spack instance to use as upstream (optional) +EBRAINS_REPO=$3 # location of ebrains-spack-builds repository +EBRAINS_SPACK_ENV=$4 # name of EBRAINS Spack environment to be created/updated +UPSTREAM_INSTANCE=$5 # path to Spack instance to use as upstream (optional) SPACK_REPO=https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/spack.git -SPACK_VERSION_EBRAINS=${SPACK_VERSION}_ebrains24.11 # specify location of .spack dir (by default in ~) # this is where cache and configuration settings are stored @@ -30,7 +28,7 @@ export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}} # initial setup: clone spack if spack dir doesn't already exist if [ ! -d $INSTALLATION_ROOT/spack ] then - git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION_EBRAINS $SPACK_REPO $INSTALLATION_ROOT/spack + ln -s vendor/spack $INSTALLATION_ROOT/spack # SPACK PATCH: the post-build logs on install-time-test-logs.txt gets ovewritten by the post-install logs. 
# quick fix for that: (TODO: investigate more and open PR) sed -i "s/self.file_like, \"w\"/self.file_like, \"a\"/g" $INSTALLATION_ROOT/spack/lib/spack/llnl/util/tty/log.py diff --git a/vendor/spack b/vendor/spack new file mode 160000 index 00000000..904e1a73 --- /dev/null +++ b/vendor/spack @@ -0,0 +1 @@ +Subproject commit 904e1a73567bc17d43fe0e4615ca0d7f1d50e2ed -- GitLab From 3436edf9ab4b17ec8d7ff779d2c4945551de8d22 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Thu, 16 Jan 2025 16:13:37 +0100 Subject: [PATCH 006/111] chore: silence git clean --- .gitlab-ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index b9c20ad2..3bec87eb 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -7,6 +7,7 @@ variables: SPACK_PATH_GITLAB: /mnt/spack_v0.21.1 SYSTEMNAME: ebrainslab GIT_SUBMODULE_STRATEGY: recursive + GIT_CLEAN_FLAGS: -ffdxq # =================================================================== # LAB DEPLOYMENTS -- GitLab From c6e1626fb0a35779bc4e9f2a9da0a74e08936663 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Mon, 9 Dec 2024 13:47:57 +0100 Subject: [PATCH 007/111] feat: implement OCI-based caching This introduces a submodule for yashchiki. 
--- .gitlab-ci.yml | 16 ++++++-- .gitmodules | 4 ++ create_job.sh | 11 +++++- install_spack_env.sh | 91 +++++++++++++++++++++++++++++++++++++++++--- vendor/yashchiki | 1 + 5 files changed, 111 insertions(+), 12 deletions(-) create mode 160000 vendor/yashchiki diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 3bec87eb..59cc11f9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -15,6 +15,9 @@ variables: # start a k8s Job that will build the Spack environment .deploy-build-environment: + variables: + OCI_CACHE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/${CI_COMMIT_BRANCH} + UPDATE_SPACK_OCI_CACHES: false stage: build tags: - docker-runner @@ -26,7 +29,7 @@ variables: # use the site-specific kubectl context - kubectl config use-context $KUBE_CONTEXT # create job description file - - sh create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT + - sh create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX - cat simplejob.yml # start the deploy job - kubectl create -f simplejob.yml @@ -183,12 +186,14 @@ build-spack-env-on-runner: variables: SPACK_DEV_ENV: ebrains-dev SPACK_JOBS: 2 + OCI_CACHE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/master + UPDATE_SPACK_OCI_CACHES: false script: # deactivate environment views (we don't need them for the test build-job) - > echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml # run installation script - - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB + - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX after_script: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any 
package modified during this CI job @@ -216,14 +221,16 @@ sync-gitlab-spack-instance: variables: SPACK_REPO_PATH: $SPACK_PATH_GITLAB/ebrains-spack-builds SPACK_JOBS: 4 + OCI_CACHE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/${CI_COMMIT_BRANCH} + UPDATE_SPACK_OCI_CACHES: true script: - SPACK_NFS_ENV=${CI_COMMIT_BRANCH//./-} # create spack dir if it doesn't exist - mkdir -p $SPACK_PATH_GITLAB # get latest state of EBRAINS repo - rm -rf $SPACK_REPO_PATH && cp -r $CI_PROJECT_DIR $SPACK_REPO_PATH - # run installation script - - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV + # run installation script and set UPDATE to true + - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX # create kernel spec, so that the environment can be used in gitlab CI jobs - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac); - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env @@ -240,6 +247,7 @@ sync-gitlab-spack-instance: - spack_logs when: always rules: + # branches that update the gitlab-runner upstream (read-only) installation and the spack OCI caches - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE != "schedule" when: manual diff --git a/.gitmodules b/.gitmodules index b41078fa..25bfff6a 100644 --- a/.gitmodules +++ b/.gitmodules @@ -2,3 +2,7 @@ path = vendor/spack url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/spack.git shallow = true +[submodule "vendor/yashchiki"] + path = vendor/yashchiki + url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/yashchiki + shallow = true diff --git a/create_job.sh b/create_job.sh index 
bffac67d..82a6fe8e 100644 --- a/create_job.sh +++ b/create_job.sh @@ -14,6 +14,8 @@ EBRAINS_SPACK_ENV=$4 BRANCH=$5 RELEASE_NAME=$6 LAB_KERNEL_ROOT=$7 +UPDATE_SPACK_OCI_CACHES=$8 +OCI_CACHE_PREFIX=$9 cat <<EOT >> simplejob.yml apiVersion: batch/v1 @@ -54,13 +56,14 @@ spec: # reset build error log dir (delete previous logs to save space) rm -rf \$BUILD_LOGS_DIR mkdir -p \$BUILD_LOGS_DIR + cd \$BUILD_LOGS_DIR # reset spack repository dir by cloning the selected version rm -rf \$EBRAINS_REPO_PATH - git clone ${CI_PROJECT_URL} --branch \$BRANCH \$EBRAINS_REPO_PATH + git clone ${CI_PROJECT_URL} --recurse-submodules --branch \$BRANCH \$EBRAINS_REPO_PATH # run installation script - bash \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV + bash \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV "" \$UPDATE_SPACK_OCI_CACHES \$OCI_CACHE_PREFIX if [ \$? -eq 0 ] then @@ -78,6 +81,10 @@ spec: value: $INSTALLATION_ROOT - name: EBRAINS_SPACK_ENV value: $EBRAINS_SPACK_ENV + - name: UPDATE_SPACK_OCI_CACHES + value: '$UPDATE_SPACK_OCI_CACHES' + - name: OCI_CACHE_PREFIX + value: $OCI_CACHE_PREFIX - name: BRANCH value: $BRANCH - name: RELEASE_NAME diff --git a/install_spack_env.sh b/install_spack_env.sh index c9463e38..c21db599 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -2,7 +2,8 @@ # ========================================================================================================================================= # title : install_spack_env.sh -# usage : ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT $SPACK_VERSION $EBRAINS_REPO $EBRAINS_SPACK_ENV $UPSTREAM_INSTANCE +# usage : ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT $EBRAINS_REPO $EBRAINS_SPACK_ENV $UPSTREAM_INSTANCE \ +# $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX # description : installs or updates the spack environment defined in the EBRAINS spack repo # (if the 
specified spack instance doesn't exist, it also creates it) # ========================================================================================================================================= @@ -14,8 +15,8 @@ INSTALLATION_ROOT=$2 # where to set up the installation EBRAINS_REPO=$3 # location of ebrains-spack-builds repository EBRAINS_SPACK_ENV=$4 # name of EBRAINS Spack environment to be created/updated UPSTREAM_INSTANCE=$5 # path to Spack instance to use as upstream (optional) - -SPACK_REPO=https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/spack.git +UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results +OCI_CACHE_PREFIX=$7 # specify location of .spack dir (by default in ~) # this is where cache and configuration settings are stored @@ -77,12 +78,90 @@ cp /tmp/spack.yaml $SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV/ # activate environment spack env activate --without-view $EBRAINS_SPACK_ENV -# fetch all sources spack concretize --force --fresh --test root -spack-python -c "exit(not len(spack.environment.active_environment().uninstalled_specs()))" && spack fetch --dependencies --missing + +export CACHE_SPECFILE=${CACHE_SPECFILE:-"env_specfile.yaml"} +export YASHCHIKI_HOME=${EBRAINS_REPO}/vendor/yashchiki +export OCI_CACHE_PREFIX=${OCI_CACHE_PREFIX:-${HARBOR_HOST:-"docker-registry.ebrains.eu"}/${HARBOR_PROJECT:-"esd"}/master} +export SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${INSTALLATION_ROOT}/spack/var/spack/cache} +export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${INSTALLATION_ROOT}/spack/var/spack/cache} + +# dump dag to file +spack spec -y > "${CACHE_SPECFILE}" +# fetch missing sources (if packages not yet installed) +python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \ + --local-cache=${SPACK_CACHE_SOURCE} \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ + --yashchiki-home=${YASHCHIKI_HOME} \ + missing_paths_sources.dat ${CACHE_SPECFILE} +# fetch missing build 
results (if packages not yet installed) +python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \ + --local-cache=${SPACK_CACHE_BUILD}/build_cache \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ + --yashchiki-home=${YASHCHIKI_HOME} \ + missing_paths_buildresults.dat ${CACHE_SPECFILE} +spack-python -c "exit(not len(spack.environment.active_environment().uninstalled_specs()))" && ( + # fetch all sources but delay exit code handling + spack fetch --dependencies --missing && ret=$? || ret=$?; + # push freshly fetched sources to remote cache + if [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then + echo "Performing update of the source cache" + python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ + --local-cache=${SPACK_CACHE_SOURCE} \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ + missing_paths_sources.dat; + else + echo "Updating of the source cache disabled." + fi + if [ $ret -ne 0 ]; then + (exit $ret) + fi +) + +# TODO for newer spack versions, add: --autopush --unsigned, drop create cache command below +# (Note: spack expects `build_cache/` below the folder we specify here +spack mirror add local_cache ${SPACK_CACHE_BUILD} + +# record the state of installed/uninstalled packages before actually installing them +dag_hashes_pre_install=$(spack-python ${YASHCHIKI_HOME}/specfile_dag_hash.py ${CACHE_SPECFILE}) # install the environment, use 2 jobs to reduce the amount of required RAM -spack install -y -j$SPACK_JOBS --fresh --test root +# delay exit code until we have updated the cache below +spack install --no-check-signature -y -j$SPACK_JOBS --fresh --test root && spack_install_ret=$? || spack_install_ret=$? 
+ +# no need to update the local cache nor the remote cache if we don't want to update +if [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then + # push previously missing (but now installed) packages to the local cache + for dag_hash in $dag_hashes_pre_install; do + spack buildcache create --unsigned --only package ${SPACK_CACHE_BUILD} /${dag_hash} && ret=$? || ret=$? + if [ $ret -ne 0 ]; then + echo "Failed to push ${dag_hash}, trying to call spack find on it:" + spack find -Lvp /${dag_hash} || true + fi + done + + # upload packages from local to remote cache + echo "Performing update of the build cache" + python3 ${YASHCHIKI_HOME}/update_cached_buildresults.py \ + --local-cache=${SPACK_CACHE_BUILD}/build_cache \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ + missing_paths_buildresults.dat +else + echo "Updating of the build cache disabled." +fi + +# propagate spack install exit code +if [ $spack_install_ret -ne 0 ]; then + (exit "$spack_install_ret") +fi + +# TODO: when using spack remote OCI build caches require an index file +#spack mirror add ebrains oci://docker-registry.ebrains.eu/esd/build_cache +#spack buildcache list -a ebrains && ret=$? || ret=$? 
# rebuild spack's database spack reindex diff --git a/vendor/yashchiki b/vendor/yashchiki new file mode 160000 index 00000000..3f732e79 --- /dev/null +++ b/vendor/yashchiki @@ -0,0 +1 @@ +Subproject commit 3f732e79ce77b12fa872f6308dae6523a23a9fce -- GitLab From a000bba067472e17531630b114bf2844e4bb10e8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Thu, 23 Jan 2025 10:24:07 +0100 Subject: [PATCH 008/111] fix: use fixed oci buildcache handling --- vendor/yashchiki | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vendor/yashchiki b/vendor/yashchiki index 3f732e79..5690db7d 160000 --- a/vendor/yashchiki +++ b/vendor/yashchiki @@ -1 +1 @@ -Subproject commit 3f732e79ce77b12fa872f6308dae6523a23a9fce +Subproject commit 5690db7dbccd78f3ceef2123a605e662bb8b2c0f -- GitLab From 48541cc233a4248f038df856448700cfa60f8709 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Wed, 22 Jan 2025 16:51:10 +0100 Subject: [PATCH 009/111] fix: omit OCI cache handling code if cache isn't specified Changes in CI file allow to use existing spack deployments w/o cache. 
--- install_spack_env.sh | 63 ++++++++++++++++++++++++-------------------- 1 file changed, 34 insertions(+), 29 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index c21db599..0784e35b 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -16,7 +16,7 @@ EBRAINS_REPO=$3 # location of ebrains-spack-builds repository EBRAINS_SPACK_ENV=$4 # name of EBRAINS Spack environment to be created/updated UPSTREAM_INSTANCE=$5 # path to Spack instance to use as upstream (optional) UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results -OCI_CACHE_PREFIX=$7 +export OCI_CACHE_PREFIX=$7 # specify location of .spack dir (by default in ~) # this is where cache and configuration settings are stored @@ -82,39 +82,42 @@ spack concretize --force --fresh --test root export CACHE_SPECFILE=${CACHE_SPECFILE:-"env_specfile.yaml"} export YASHCHIKI_HOME=${EBRAINS_REPO}/vendor/yashchiki -export OCI_CACHE_PREFIX=${OCI_CACHE_PREFIX:-${HARBOR_HOST:-"docker-registry.ebrains.eu"}/${HARBOR_PROJECT:-"esd"}/master} export SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${INSTALLATION_ROOT}/spack/var/spack/cache} export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${INSTALLATION_ROOT}/spack/var/spack/cache} # dump dag to file spack spec -y > "${CACHE_SPECFILE}" -# fetch missing sources (if packages not yet installed) -python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \ - --local-cache=${SPACK_CACHE_SOURCE} \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ - --yashchiki-home=${YASHCHIKI_HOME} \ - missing_paths_sources.dat ${CACHE_SPECFILE} -# fetch missing build results (if packages not yet installed) -python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \ - --local-cache=${SPACK_CACHE_BUILD}/build_cache \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ - --yashchiki-home=${YASHCHIKI_HOME} \ - missing_paths_buildresults.dat ${CACHE_SPECFILE} +if [ -n "${OCI_CACHE_PREFIX}" ]; 
then + # fetch missing sources (if packages not yet installed) + python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \ + --local-cache=${SPACK_CACHE_SOURCE} \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ + --yashchiki-home=${YASHCHIKI_HOME} \ + missing_paths_sources.dat ${CACHE_SPECFILE} + # fetch missing build results (if packages not yet installed) + python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \ + --local-cache=${SPACK_CACHE_BUILD}/build_cache \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ + --yashchiki-home=${YASHCHIKI_HOME} \ + missing_paths_buildresults.dat ${CACHE_SPECFILE} +fi spack-python -c "exit(not len(spack.environment.active_environment().uninstalled_specs()))" && ( # fetch all sources but delay exit code handling spack fetch --dependencies --missing && ret=$? || ret=$?; - # push freshly fetched sources to remote cache - if [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then - echo "Performing update of the source cache" - python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ - --local-cache=${SPACK_CACHE_SOURCE} \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ - missing_paths_sources.dat; - else - echo "Updating of the source cache disabled." + if [ -n "${OCI_CACHE_PREFIX}" ]; then + # push freshly fetched sources to remote cache + if [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then + echo "Performing update of the source cache" + python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ + --local-cache=${SPACK_CACHE_SOURCE} \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ + missing_paths_sources.dat; + else + echo "Updating of the source cache disabled." 
+ fi fi if [ $ret -ne 0 ]; then (exit $ret) @@ -125,15 +128,17 @@ spack-python -c "exit(not len(spack.environment.active_environment().uninstalled # (Note: spack expects `build_cache/` below the folder we specify here spack mirror add local_cache ${SPACK_CACHE_BUILD} -# record the state of installed/uninstalled packages before actually installing them -dag_hashes_pre_install=$(spack-python ${YASHCHIKI_HOME}/specfile_dag_hash.py ${CACHE_SPECFILE}) +if [ -n "${OCI_CACHE_PREFIX}" ]; then + # record the state of installed/uninstalled packages before actually installing them + dag_hashes_pre_install=$(spack-python ${YASHCHIKI_HOME}/specfile_dag_hash.py ${CACHE_SPECFILE}) +fi # install the environment, use 2 jobs to reduce the amount of required RAM # delay exit code until we have updated the cache below spack install --no-check-signature -y -j$SPACK_JOBS --fresh --test root && spack_install_ret=$? || spack_install_ret=$? # no need to update the local cache nor the remote cache if we don't want to update -if [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then +if [ -n "${OCI_CACHE_PREFIX}" ] && [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then # push previously missing (but now installed) packages to the local cache for dag_hash in $dag_hashes_pre_install; do spack buildcache create --unsigned --only package ${SPACK_CACHE_BUILD} /${dag_hash} && ret=$? || ret=$? 
-- GitLab From e662ac77fc181109c6a146086721ed3243011b0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Wed, 29 Jan 2025 15:52:01 +0100 Subject: [PATCH 010/111] chore: disable oci cache usage for all CI jobs --- .gitlab-ci.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 59cc11f9..e7602247 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -16,7 +16,7 @@ variables: # start a k8s Job that will build the Spack environment .deploy-build-environment: variables: - OCI_CACHE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/${CI_COMMIT_BRANCH} + OCI_CACHE_PREFIX: "" UPDATE_SPACK_OCI_CACHES: false stage: build tags: @@ -186,7 +186,7 @@ build-spack-env-on-runner: variables: SPACK_DEV_ENV: ebrains-dev SPACK_JOBS: 2 - OCI_CACHE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/master + OCI_CACHE_PREFIX: "" UPDATE_SPACK_OCI_CACHES: false script: # deactivate environment views (we don't need them for the test build-job) @@ -221,15 +221,15 @@ sync-gitlab-spack-instance: variables: SPACK_REPO_PATH: $SPACK_PATH_GITLAB/ebrains-spack-builds SPACK_JOBS: 4 - OCI_CACHE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/${CI_COMMIT_BRANCH} - UPDATE_SPACK_OCI_CACHES: true + OCI_CACHE_PREFIX: "" + UPDATE_SPACK_OCI_CACHES: false script: - SPACK_NFS_ENV=${CI_COMMIT_BRANCH//./-} # create spack dir if it doesn't exist - mkdir -p $SPACK_PATH_GITLAB # get latest state of EBRAINS repo - rm -rf $SPACK_REPO_PATH && cp -r $CI_PROJECT_DIR $SPACK_REPO_PATH - # run installation script and set UPDATE to true + # run installation script - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX # create kernel spec, so that the environment can be used in gitlab CI jobs - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo 
$CI_COMMIT_BRANCH;; esac); @@ -247,7 +247,7 @@ sync-gitlab-spack-instance: - spack_logs when: always rules: - # branches that update the gitlab-runner upstream (read-only) installation and the spack OCI caches + # branches that update the gitlab-runner upstream (read-only) installation - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE != "schedule" when: manual -- GitLab From 08204d5b67b0470f73b824edfafae1b97f57248b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 21 Jan 2025 16:24:01 +0100 Subject: [PATCH 011/111] fix: complete concretization-related early --- install_spack_env.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/install_spack_env.sh b/install_spack_env.sh index 0784e35b..65798df6 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -53,6 +53,9 @@ then spack repo add $EBRAINS_REPO fi +# make sure all fetching/clingo stuff happens before anything else +spack spec aida + # install platform compiler (extract version from packages.yaml) if [ $SYSTEMNAME == ebrainslab ] then -- GitLab From 96b6e5bbeb4a05b5d3f183ff2bfc3a5e8f459143 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 21 Jan 2025 12:53:45 +0100 Subject: [PATCH 012/111] feat(CI): handle (OCI) caching of base compiler --- install_spack_env.sh | 80 ++++++++++++++++++++++++++++++++------------ 1 file changed, 59 insertions(+), 21 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index 65798df6..b6b991be 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -26,6 +26,12 @@ export SPACK_USER_CONFIG_PATH=$INSTALLATION_ROOT/spack/.spack # define SYSTEMNAME variable in sites where it's not already defined export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}} +# cache related variables +export 
CACHE_SPECFILE=${CACHE_SPECFILE:-"env_specfile.yaml"} +export YASHCHIKI_HOME=${EBRAINS_REPO}/vendor/yashchiki +export SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${INSTALLATION_ROOT}/spack/var/spack/cache} +export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${INSTALLATION_ROOT}/spack/var/spack/cache} + # initial setup: clone spack if spack dir doesn't already exist if [ ! -d $INSTALLATION_ROOT/spack ] then @@ -59,9 +65,47 @@ spack spec aida # install platform compiler (extract version from packages.yaml) if [ $SYSTEMNAME == ebrainslab ] then - EBRAINS_SPACK_COMPILER=$(grep 'compiler' $EBRAINS_REPO/site-config/$SYSTEMNAME/packages.yaml | awk -F'[][]' '{ print $2 }') - spack compiler find - spack load $EBRAINS_SPACK_COMPILER || { spack install $EBRAINS_SPACK_COMPILER; spack load $EBRAINS_SPACK_COMPILER; } + EBRAINS_SPACK_COMPILER=$(grep 'compiler' $EBRAINS_REPO/site-config/$SYSTEMNAME/packages.yaml | awk -F'[][]' '{ print $2 }') + spack compiler find + spack load $EBRAINS_SPACK_COMPILER || { + # dump dag to file + spack spec -y $EBRAINS_SPACK_COMPILER arch=x86_64 > "req_compiler.yaml" + if [ -n "${OCI_CACHE_PREFIX}" ]; then + # fetch missing sources (if packages not yet installed) + python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \ + --local-cache=${SPACK_CACHE_SOURCE} \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ + --yashchiki-home=${YASHCHIKI_HOME} \ + compiler_missing_paths_sources.dat req_compiler.yaml + # fetch missing build results (if packages not yet installed) + python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \ + --local-cache=${SPACK_CACHE_BUILD}/build_cache \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ + --yashchiki-home=${YASHCHIKI_HOME} \ + compiler_missing_paths_buildresults.dat req_compiler.yaml + fi + spack install $EBRAINS_SPACK_COMPILER arch=x86_64 + if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then + echo "Performing update of the source 
cache (for base compiler)" + python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ + --local-cache=${SPACK_CACHE_SOURCE} \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ + compiler_missing_paths_sources.dat + # push previously missing (but now installed) packages to the local cache + spack buildcache create --unsigned --only package ${EBRAINS_SPACK_COMPILER} && ret=$? || ret=$? + # upload packages from local to remote cache + echo "Performing update of the build cache (for base compiler)" + python3 ${YASHCHIKI_HOME}/update_cached_buildresults.py \ + --local-cache=${SPACK_CACHE_BUILD}/build_cache \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ + compiler_missing_paths_buildresults.dat + fi + spack load $EBRAINS_SPACK_COMPILER + } fi spack compiler find @@ -83,11 +127,6 @@ spack env activate --without-view $EBRAINS_SPACK_ENV spack concretize --force --fresh --test root -export CACHE_SPECFILE=${CACHE_SPECFILE:-"env_specfile.yaml"} -export YASHCHIKI_HOME=${EBRAINS_REPO}/vendor/yashchiki -export SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${INSTALLATION_ROOT}/spack/var/spack/cache} -export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${INSTALLATION_ROOT}/spack/var/spack/cache} - # dump dag to file spack spec -y > "${CACHE_SPECFILE}" if [ -n "${OCI_CACHE_PREFIX}" ]; then @@ -109,18 +148,17 @@ fi spack-python -c "exit(not len(spack.environment.active_environment().uninstalled_specs()))" && ( # fetch all sources but delay exit code handling spack fetch --dependencies --missing && ret=$? 
|| ret=$?; - if [ -n "${OCI_CACHE_PREFIX}" ]; then - # push freshly fetched sources to remote cache - if [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then - echo "Performing update of the source cache" - python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ - --local-cache=${SPACK_CACHE_SOURCE} \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ - missing_paths_sources.dat; - else - echo "Updating of the source cache disabled." - fi + if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then + # push freshly fetched sources to remote cache + if [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then + echo "Performing update of the source cache" + python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ + --local-cache=${SPACK_CACHE_SOURCE} \ + --remote-cache-type=oci \ + --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ + missing_paths_sources.dat; + else + echo "Updating of the source cache disabled." fi if [ $ret -ne 0 ]; then (exit $ret) @@ -141,7 +179,7 @@ fi spack install --no-check-signature -y -j$SPACK_JOBS --fresh --test root && spack_install_ret=$? || spack_install_ret=$? # no need to update the local cache nor the remote cache if we don't want to update -if [ -n "${OCI_CACHE_PREFIX}" ] && [ ${UPDATE_SPACK_OCI_CACHES:-false} = "true" ]; then +if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then # push previously missing (but now installed) packages to the local cache for dag_hash in $dag_hashes_pre_install; do spack buildcache create --unsigned --only package ${SPACK_CACHE_BUILD} /${dag_hash} && ret=$? || ret=$? 
-- GitLab From a6253493c60e3aa5b0944c30cfd693e943e85a0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Thu, 23 Jan 2025 09:59:20 +0100 Subject: [PATCH 013/111] fix: support install out of cwd --- install_spack_env.sh | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index b6b991be..6c98382f 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -18,10 +18,13 @@ UPSTREAM_INSTANCE=$5 # path to Spack instance to use as upstream (optional) UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results export OCI_CACHE_PREFIX=$7 +# make sure spack uses the symlinked folder as path +export CI_SPACK_ROOT=${INSTALLATION_ROOT}/spack + # specify location of .spack dir (by default in ~) # this is where cache and configuration settings are stored -export SPACK_USER_CACHE_PATH=$INSTALLATION_ROOT/spack/.spack -export SPACK_USER_CONFIG_PATH=$INSTALLATION_ROOT/spack/.spack +export SPACK_USER_CACHE_PATH=${CI_SPACK_ROOT}/.spack +export SPACK_USER_CONFIG_PATH=${CI_SPACK_ROOT}/.spack # define SYSTEMNAME variable in sites where it's not already defined export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}} @@ -29,21 +32,21 @@ export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}} # cache related variables export CACHE_SPECFILE=${CACHE_SPECFILE:-"env_specfile.yaml"} export YASHCHIKI_HOME=${EBRAINS_REPO}/vendor/yashchiki -export SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${INSTALLATION_ROOT}/spack/var/spack/cache} -export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${INSTALLATION_ROOT}/spack/var/spack/cache} +export SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${CI_SPACK_ROOT}/var/spack/cache} +export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${CI_SPACK_ROOT}/var/spack/cache} -# initial setup: clone spack if spack dir doesn't already exist -if [ ! 
-d $INSTALLATION_ROOT/spack ] +# initial setup: use spack submodule if spack dir doesn't already exist +if [ ! -d ${SPACK_ROOT} ] then - ln -s vendor/spack $INSTALLATION_ROOT/spack + ln -s ${EBRAINS_REPO}/vendor/spack ${CI_SPACK_ROOT} # SPACK PATCH: the post-build logs on install-time-test-logs.txt gets ovewritten by the post-install logs. # quick fix for that: (TODO: investigate more and open PR) - sed -i "s/self.file_like, \"w\"/self.file_like, \"a\"/g" $INSTALLATION_ROOT/spack/lib/spack/llnl/util/tty/log.py + sed -i "s/self.file_like, \"w\"/self.file_like, \"a\"/g" ${CI_SPACK_ROOT}/lib/spack/llnl/util/tty/log.py fi if [[ $UPSTREAM_INSTANCE ]] then - cat <<EOF > $INSTALLATION_ROOT/spack/etc/spack/defaults/upstreams.yaml + cat <<EOF > ${CI_SPACK_ROOT}/etc/spack/defaults/upstreams.yaml upstreams: upstream-spack-instance: install_tree: $UPSTREAM_INSTANCE/spack/opt/spack @@ -51,7 +54,7 @@ EOF fi # activate Spack -source $INSTALLATION_ROOT/spack/share/spack/setup-env.sh +source ${CI_SPACK_ROOT}/share/spack/setup-env.sh # add repo if it does not exist if [[ ! $(spack repo list | grep ebrains-spack-builds$) ]] @@ -111,16 +114,16 @@ fi spack compiler find # create environment if it does not exist -if [ ! -d "$SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV" ] +if [ ! 
-d "${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV" ] then spack env create $EBRAINS_SPACK_ENV fi # update environment site-configs -rm -rf $SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV/site-config && cp -r $EBRAINS_REPO/site-config $SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV +rm -rf ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/site-config && cp -r $EBRAINS_REPO/site-config ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV # update spack.yaml: merge top-level and site-specific spack.yaml files spack-python $EBRAINS_REPO/site-config/ymerge.py $EBRAINS_REPO/spack.yaml $EBRAINS_REPO/site-config/$SYSTEMNAME/spack.yaml > /tmp/spack.yaml -cp /tmp/spack.yaml $SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV/ +cp /tmp/spack.yaml ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/ # activate environment spack env activate --without-view $EBRAINS_SPACK_ENV @@ -216,7 +219,7 @@ spack reindex # this needs deactivating the environment first: spack env deactivate unset SPACK_LD_LIBRARY_PATH -spack env activate --sh $EBRAINS_SPACK_ENV > $SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV/load_env.sh +spack env activate --sh $EBRAINS_SPACK_ENV > ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/load_env.sh # create modules files with spack # spack module tcl refresh -y -- GitLab From 4ea148784831bd77c1d9689f4b95462db12a6db6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 21 Jan 2025 16:22:38 +0100 Subject: [PATCH 014/111] feat(CI): prepare spack installer for image builds --- install_spack_env.sh | 38 ++++++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index 6c98382f..cc248f49 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -35,10 +35,16 @@ export YASHCHIKI_HOME=${EBRAINS_REPO}/vendor/yashchiki export 
SPACK_CACHE_SOURCE=${SPACK_CACHE_SOURCE:-${CI_SPACK_ROOT}/var/spack/cache} export SPACK_CACHE_BUILD=${SPACK_CACHE_BUILD:-${CI_SPACK_ROOT}/var/spack/cache} +if [ ! -d ${INSTALLATION_ROOT} ]; then + mkdir -p ${INSTALLATION_ROOT} +fi + # initial setup: use spack submodule if spack dir doesn't already exist -if [ ! -d ${SPACK_ROOT} ] -then +SPACK_ROOT_EXISTED=1 +if [ ! -d ${CI_SPACK_ROOT} ]; then ln -s ${EBRAINS_REPO}/vendor/spack ${CI_SPACK_ROOT} + SPACK_ROOT_EXISTED=0 + # SPACK PATCH: the post-build logs on install-time-test-logs.txt gets ovewritten by the post-install logs. # quick fix for that: (TODO: investigate more and open PR) sed -i "s/self.file_like, \"w\"/self.file_like, \"a\"/g" ${CI_SPACK_ROOT}/lib/spack/llnl/util/tty/log.py @@ -56,6 +62,13 @@ fi # activate Spack source ${CI_SPACK_ROOT}/share/spack/setup-env.sh +if [ "${SPACK_ROOT_EXISTED}" -eq 0 ]; then + # for caching purposes it's nice if we can relocate into long paths, but we + # can't do that for existing installations -> else path + # ECM (2025-01-23) true seems to yield too large paths for some packages (e.g., gcc) + spack config add config:install_tree:padded_length:128 +fi + # add repo if it does not exist if [[ ! 
$(spack repo list | grep ebrains-spack-builds$) ]] then @@ -65,6 +78,13 @@ fi # make sure all fetching/clingo stuff happens before anything else spack spec aida +# rebuild spack's database (could be an debugging session) +spack reindex + +# TODO for newer spack versions, add: --autopush --unsigned, drop create cache command below +# (Note: spack expects `build_cache/` below the folder we specify here +spack mirror add local_cache ${SPACK_CACHE_BUILD} + # install platform compiler (extract version from packages.yaml) if [ $SYSTEMNAME == ebrainslab ] then @@ -89,7 +109,7 @@ then --yashchiki-home=${YASHCHIKI_HOME} \ compiler_missing_paths_buildresults.dat req_compiler.yaml fi - spack install $EBRAINS_SPACK_COMPILER arch=x86_64 + spack install --no-check-signature -y -j$SPACK_JOBS $EBRAINS_SPACK_COMPILER arch=x86_64 if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then echo "Performing update of the source cache (for base compiler)" python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ @@ -98,7 +118,7 @@ then --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ compiler_missing_paths_sources.dat # push previously missing (but now installed) packages to the local cache - spack buildcache create --unsigned --only package ${EBRAINS_SPACK_COMPILER} && ret=$? || ret=$? + spack buildcache create --unsigned ${SPACK_CACHE_BUILD} ${EBRAINS_SPACK_COMPILER} && ret=$? || ret=$? # upload packages from local to remote cache echo "Performing update of the build cache (for base compiler)" python3 ${YASHCHIKI_HOME}/update_cached_buildresults.py \ @@ -152,8 +172,7 @@ spack-python -c "exit(not len(spack.environment.active_environment().uninstalled # fetch all sources but delay exit code handling spack fetch --dependencies --missing && ret=$? 
|| ret=$?; if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then - # push freshly fetched sources to remote cache - if [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then + # push freshly fetched sources to remote cache echo "Performing update of the source cache" python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ --local-cache=${SPACK_CACHE_SOURCE} \ @@ -168,10 +187,6 @@ spack-python -c "exit(not len(spack.environment.active_environment().uninstalled fi ) -# TODO for newer spack versions, add: --autopush --unsigned, drop create cache command below -# (Note: spack expects `build_cache/` below the folder we specify here -spack mirror add local_cache ${SPACK_CACHE_BUILD} - if [ -n "${OCI_CACHE_PREFIX}" ]; then # record the state of installed/uninstalled packages before actually installing them dag_hashes_pre_install=$(spack-python ${YASHCHIKI_HOME}/specfile_dag_hash.py ${CACHE_SPECFILE}) @@ -208,6 +223,9 @@ if [ $spack_install_ret -ne 0 ]; then (exit "$spack_install_ret") fi +# remove local cache content +spack mirror destroy --mirror-name local_cache + # TODO: when using spack remote OCI build caches require an index file #spack mirror add ebrains oci://docker-registry.ebrains.eu/esd/build_cache #spack buildcache list -a ebrains && ret=$? || ret=$? 
-- GitLab From 4443e48458dc08921e220ff47268036a449e6601 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 28 Jan 2025 13:27:35 +0100 Subject: [PATCH 015/111] fix(py-ray): update download url for boost --- packages/py-ray/fix-url-boost.patch | 12 ++++++++++++ packages/py-ray/package.py | 4 ++++ 2 files changed, 16 insertions(+) create mode 100644 packages/py-ray/fix-url-boost.patch diff --git a/packages/py-ray/fix-url-boost.patch b/packages/py-ray/fix-url-boost.patch new file mode 100644 index 00000000..ed16ee1b --- /dev/null +++ b/packages/py-ray/fix-url-boost.patch @@ -0,0 +1,12 @@ +diff -pur ray-2.4.0/bazel/ray_deps_setup.bzl ray-2.4.0/bazel/ray_deps_setup.bzl +--- ray-2.4.0/bazel/ray_deps_setup.bzl 2023-04-25 19:22:06.000000000 +0200 ++++ ray-2.4.0/bazel/ray_deps_setup.bzl 2025-01-28 13:25:03.362441377 +0100 +@@ -157,7 +157,7 @@ def ray_deps_setup(): + name = "boost", + build_file = "@com_github_nelhage_rules_boost//:BUILD.boost", + sha256 = "71feeed900fbccca04a3b4f2f84a7c217186f28a940ed8b7ed4725986baf99fa", +- url = "https://boostorg.jfrog.io/artifactory/main/release/1.81.0/source/boost_1_81_0.tar.bz2", ++ url = "https://archives.boost.io/release/1.81.0/source/boost_1_81_0.tar.bz2", + ) + + auto_http_archive( diff --git a/packages/py-ray/package.py b/packages/py-ray/package.py index 453f371f..6feab9f5 100644 --- a/packages/py-ray/package.py +++ b/packages/py-ray/package.py @@ -106,6 +106,10 @@ class PyRay(PythonPackage): build_directory = "python" + # begin EBRAINS (added): fix boost download url + patch("fix-url-boost.patch", when="@2.4.0:") + # end EBRAINS + def patch(self): filter_file( 'bazel_flags = ["--verbose_failures"]', -- GitLab From 9026e671e77f00f68350aab79134b94f70a2988d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 28 Jan 2025 13:29:38 +0100 Subject: [PATCH 016/111] fix(py-elephant): disable test_parallel.py --- 
packages/py-elephant/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/py-elephant/package.py b/packages/py-elephant/package.py index 1bccfca6..c2701889 100644 --- a/packages/py-elephant/package.py +++ b/packages/py-elephant/package.py @@ -87,4 +87,6 @@ class PyElephant(PythonPackage, CudaPackage): # see https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds/-/issues/38 # test_WPLI_ground_truth_consistency_real_LFP_dataset, test_multitaper_cohere_perfect_cohere # skip the following due to issue with neo > 0.13.0 https://github.com/NeuralEnsemble/elephant/pull/634 - pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_neo_tools and not test_statistics and not test_trials') + # ECM (2025-02-05): also disable "test_parallel" test due to some test hang, cf. ESD issue 86 + # https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds/-/issues/86 + pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_neo_tools and not test_statistics and not test_trials and not test_mean_firing_rate') -- GitLab From d7eaa89400d122aa0915f4397a317ec540e0e6c6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Wed, 29 Jan 2025 16:47:12 +0100 Subject: [PATCH 017/111] feat(CI): add sync-esd-image (seeding caches and uploading images) --- .gitlab-ci.yml | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e7602247..6b7b273d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -211,6 +211,59 @@ build-spack-env-on-runner: rules: - if: $CI_PIPELINE_SOURCE != "schedule" && $CI_PIPELINE_SOURCE != "merge_request_event" +# this one fills the spack caches and updates the ESD (ebrainslab-variant) images on harbor +sync-esd-image: + stage: build + tags: + 
- esd_image + image: docker-registry.ebrains.eu/esd/tmp:latest + variables: + CI_SPACK_ENV: esd + SPACK_JOBS: 4 + # we access the branch-specific cache here (and also update it!) + OCI_CACHE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/${CI_COMMIT_BRANCH} + UPDATE_SPACK_OCI_CACHES: true + OCI_IMAGE_PREFIX: ${HARBOR_HOST}/${HARBOR_PROJECT}/${CI_COMMIT_BRANCH}/image + INSTALLATION_ROOT: /esd + SANDBOX_ROOT_RELATIVE: esd_image + SANDBOX_ROOT: ${CI_PROJECT_DIR}/${SANDBOX_ROOT_RELATIVE} + script: + # create a sandbox/image for performing an spack install inside based on some base image/install + - apptainer build --fix-perms --sandbox ${SANDBOX_ROOT}/ docker://${BUILD_ENV_DOCKER_IMAGE} + # run installation script inside future container environment + # => DAG concretization, subsequent cache access + fetching and actual build should be separate steps + - mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT} + - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX + - echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh + - echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh + # preparing to assemble the image: move in the CI project contents... + - shopt -s dotglob + - find . -maxdepth 1 -and -not -name "." -and -not -name "${SANDBOX_ROOT_RELATIVE}" -exec mv -t ${SANDBOX_ROOT}/${INSTALLATION_ROOT} {} \; + # convert to SIF image file + - apptainer build latest.sif "${SANDBOX_ROOT}" + # upload SIF image as an artifact to OCI repo + - oras push --username $HARBOR_USERNAME --password $HARBOR_PASSWORD ${OCI_IMAGE_PREFIX}:lab_latest.sif latest.sif && oras_ret=$? || oras_ret=$? 
+ # convert SIF image to OCI format and upload to OCI repo + - skopeo --insecure-policy copy --dest-creds=${HARBOR_USERNAME}:${HARBOR_PASSWORD} sif:latest.sif docker://${OCI_IMAGE_PREFIX}:lab_latest.oci && skopeo_ret=$? || skopeo_ret=$? + - exit $(expr $oras_ret + $skopeo_ret) + after_script: + - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed + # for succesfully installed packages: keep the spack logs for any package modified during this CI job + - PKG_DIR=${SANDBOX_ROOT}/${INSTALLATION_ROOT}/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0 + - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi + # for not succesfully installed packages: also keep the spack logs for any packages that failed + - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi + artifacts: + paths: + - spack_logs + when: always + timeout: 2 days + resource_group: registry-esd-master-image + rules: + # branches that update the gitlab-runner upstream (read-only) installation and the spack OCI caches + - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE != "schedule" + when: manual + # update gitlab-runner upstream (read-only) installation sync-gitlab-spack-instance: stage: build -- GitLab From d4aeb24d171bb280380485e99b81b3b04e678bf3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Fri, 7 Feb 2025 18:06:45 +0100 Subject: [PATCH 018/111] fix(50%): move generated files to /tmp MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is not how this should be done in the end… we should use mktemp 
to generate unique and fresh temp files. For now this restores functionality to build from r/o folders. --- install_spack_env.sh | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index cc248f49..7510f389 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -92,7 +92,7 @@ then spack compiler find spack load $EBRAINS_SPACK_COMPILER || { # dump dag to file - spack spec -y $EBRAINS_SPACK_COMPILER arch=x86_64 > "req_compiler.yaml" + spack spec -y $EBRAINS_SPACK_COMPILER arch=x86_64 > "/tmp/req_compiler.yaml" if [ -n "${OCI_CACHE_PREFIX}" ]; then # fetch missing sources (if packages not yet installed) python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \ @@ -100,14 +100,14 @@ then --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ --yashchiki-home=${YASHCHIKI_HOME} \ - compiler_missing_paths_sources.dat req_compiler.yaml + /tmp/compiler_missing_paths_sources.dat /tmp/req_compiler.yaml # fetch missing build results (if packages not yet installed) python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \ --local-cache=${SPACK_CACHE_BUILD}/build_cache \ --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ --yashchiki-home=${YASHCHIKI_HOME} \ - compiler_missing_paths_buildresults.dat req_compiler.yaml + /tmp/compiler_missing_paths_buildresults.dat /tmp/req_compiler.yaml fi spack install --no-check-signature -y -j$SPACK_JOBS $EBRAINS_SPACK_COMPILER arch=x86_64 if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then @@ -116,7 +116,7 @@ then --local-cache=${SPACK_CACHE_SOURCE} \ --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ - compiler_missing_paths_sources.dat + /tmp/compiler_missing_paths_sources.dat # push previously missing (but now installed) packages to the local cache spack buildcache create --unsigned ${SPACK_CACHE_BUILD} ${EBRAINS_SPACK_COMPILER} && ret=$? || ret=$? 
# upload packages from local to remote cache @@ -125,7 +125,7 @@ then --local-cache=${SPACK_CACHE_BUILD}/build_cache \ --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ - compiler_missing_paths_buildresults.dat + /tmp/compiler_missing_paths_buildresults.dat fi spack load $EBRAINS_SPACK_COMPILER } @@ -159,14 +159,14 @@ if [ -n "${OCI_CACHE_PREFIX}" ]; then --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ --yashchiki-home=${YASHCHIKI_HOME} \ - missing_paths_sources.dat ${CACHE_SPECFILE} + /tmp/missing_paths_sources.dat ${CACHE_SPECFILE} # fetch missing build results (if packages not yet installed) python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \ --local-cache=${SPACK_CACHE_BUILD}/build_cache \ --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ --yashchiki-home=${YASHCHIKI_HOME} \ - missing_paths_buildresults.dat ${CACHE_SPECFILE} + /tmp/missing_paths_buildresults.dat ${CACHE_SPECFILE} fi spack-python -c "exit(not len(spack.environment.active_environment().uninstalled_specs()))" && ( # fetch all sources but delay exit code handling @@ -178,7 +178,7 @@ spack-python -c "exit(not len(spack.environment.active_environment().uninstalled --local-cache=${SPACK_CACHE_SOURCE} \ --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ - missing_paths_sources.dat; + /tmp/missing_paths_sources.dat; else echo "Updating of the source cache disabled." fi @@ -213,7 +213,7 @@ if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true --local-cache=${SPACK_CACHE_BUILD}/build_cache \ --remote-cache-type=oci \ --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ - missing_paths_buildresults.dat + /tmp/missing_paths_buildresults.dat else echo "Updating of the build cache disabled." 
fi -- GitLab From 613b1914a9a80305479325aa1ee17b6fee8a2375 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Mon, 10 Feb 2025 23:26:58 +0100 Subject: [PATCH 019/111] fix(CI): check for existing mirror before adding or deleting local cache --- install_spack_env.sh | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index 7510f389..3bb01936 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -81,9 +81,12 @@ spack spec aida # rebuild spack's database (could be an debugging session) spack reindex -# TODO for newer spack versions, add: --autopush --unsigned, drop create cache command below -# (Note: spack expects `build_cache/` below the folder we specify here -spack mirror add local_cache ${SPACK_CACHE_BUILD} +# add local mirror if it does not exist +if [[ ! $(spack mirror list | grep local_cache) ]]; then + # TODO for newer spack versions, add: --autopush --unsigned, drop create cache command below + # (Note: spack expects `build_cache/` below the folder we specify here + spack mirror add local_cache ${SPACK_CACHE_BUILD} +fi # install platform compiler (extract version from packages.yaml) if [ $SYSTEMNAME == ebrainslab ] @@ -224,7 +227,9 @@ if [ $spack_install_ret -ne 0 ]; then fi # remove local cache content -spack mirror destroy --mirror-name local_cache +if [ -d ${SPACK_CACHE_BUILD} ]; then + spack mirror destroy --mirror-name local_cache +fi # TODO: when using spack remote OCI build caches require an index file #spack mirror add ebrains oci://docker-registry.ebrains.eu/esd/build_cache -- GitLab From 26b47356c3432e2be7f6a30abf011967280d8023 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Fri, 17 Jan 2025 12:35:52 +0100 Subject: [PATCH 020/111] fix: build w/ modern intel-tbb MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit suite-sparse dropped tbb support a 
long time ago… since 5.11.0: https://github.com/DrTimothyAldenDavis/SuiteSparse/issues/110 --- packages/apbs/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/apbs/package.py b/packages/apbs/package.py index 36773b9a..63025099 100644 --- a/packages/apbs/package.py +++ b/packages/apbs/package.py @@ -38,7 +38,7 @@ class Apbs(CMakePackage): depends_on('boost', type=('build', 'run')) depends_on('blas', type=('build', 'run')) depends_on('arpack-ng', type=('build', 'run')) - depends_on('suite-sparse+tbb', type=('build', 'run')) + depends_on('suite-sparse', type=('build', 'run')) depends_on('maloc', type=('build', 'run')) depends_on('python@3.8:3.10', type=('build', 'run')) -- GitLab From 87c5c9bd178c125938091ffce2051d7263fd8f18 Mon Sep 17 00:00:00 2001 From: Moritz Kern <212-moritzkern@users.noreply.gitlab.ebrains.eu> Date: Wed, 12 Feb 2025 12:36:25 +0100 Subject: [PATCH 021/111] fix(py-elephant): disable only test_parallel.py tests instead of all test_mean_firing_rate* --- packages/py-elephant/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/py-elephant/package.py b/packages/py-elephant/package.py index c2701889..1ceebcfb 100644 --- a/packages/py-elephant/package.py +++ b/packages/py-elephant/package.py @@ -89,4 +89,4 @@ class PyElephant(PythonPackage, CudaPackage): # skip the following due to issue with neo > 0.13.0 https://github.com/NeuralEnsemble/elephant/pull/634 # ECM (2025-02-05): also disable "test_parallel" test due to some test hang, cf. 
ESD issue 86 # https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds/-/issues/86 - pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_neo_tools and not test_statistics and not test_trials and not test_mean_firing_rate') + pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_neo_tools and not test_statistics and not test_trials and not test_parallel') -- GitLab From 3615222c91539ff91034c5155f0961a39196ecae Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 12 Feb 2025 13:06:40 +0100 Subject: [PATCH 022/111] fix(CI): fix log collection of successfully built packages --- .gitlab-ci.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 6b7b273d..ee42cc81 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -197,7 +197,8 @@ build-spack-env-on-runner: after_script: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job - - PKG_DIR=$CI_PROJECT_DIR/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-10.3.0 + - shopt -s globstar + - PKG_DIR=$CI_PROJECT_DIR/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0 - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi # for not succesfully installed packages: also keep the spack logs for any packages that failed - if cd /tmp/$(whoami)/spack-stage/; then find . 
-maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi @@ -291,7 +292,8 @@ sync-gitlab-spack-instance: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job # (we use repo.yaml, that is modified at each start of the pipeline, as a reference file) - - PKG_DIR=$SPACK_PATH_GITLAB/spack/opt/spack/linux-ubuntu20.04-x86_64/gcc-10.3.0 + - shopt -s globstar + - PKG_DIR=$SPACK_PATH_GITLAB/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0 - if cd $PKG_DIR; then find . -newer $SPACK_REPO_PATH/repo.yaml \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi # for not succesfully installed packages: also keep the spack logs for any packages that failed - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi -- GitLab From 234f854d5c615e2a0cc66158b6361c3f4f7b435d Mon Sep 17 00:00:00 2001 From: Maria-Teodora Misan <teodora.misan@codemart.ro> Date: Mon, 17 Feb 2025 16:47:58 +0100 Subject: [PATCH 023/111] feat(py-tvb-data): add version 2.8.1 and fix python dependency --- packages/py-tvb-data/package.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/py-tvb-data/package.py b/packages/py-tvb-data/package.py index c79762f6..1e5e9353 100644 --- a/packages/py-tvb-data/package.py +++ b/packages/py-tvb-data/package.py @@ -11,17 +11,19 @@ class PyTvbData(PythonPackage): Various demonstration datasets for use with The Virtual Brain are provided here. 
""" - homepage = "https://zenodo.org/record/8331301" - url = 'https://zenodo.org/record/8331301/files/tvb_data.zip' + homepage = "https://zenodo.org/records/10128131" maintainers = ['paulapopa', "ldomide"] - version('2.8', 'd2f9b5933327c1d106838d301a4bf7b5') - version('2.7', 'f74ec53edadb4540da3de7268257dd20') - version('2.0.3', '1e02cdc21147f46644c57b14429f564f') + version('2.8.1', + '08ae19833ba8ac158c91fbcb988b9bf0', + url='https://zenodo.org/records/10128131/files/tvb_data.zip') + version('2.8', + 'd2f9b5933327c1d106838d301a4bf7b5', + url='https://zenodo.org/records/8331301/files/tvb_data.zip') # python_requires - depends_on('python@3.8:3.10', type=('build', 'run')) + depends_on('python', type=('build', 'run')) # setup_requires depends_on('py-pip', type='build') -- GitLab From a406f98468bcd1e940e5397de421730e066ae630 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 18 Feb 2025 00:32:40 +0100 Subject: [PATCH 024/111] fix(CI): reset repo to target commit instead of copying to avoid deleting spack submodule when updating persistent installations --- .gitlab-ci.yml | 8 ++++++-- create_job.sh | 19 +++++++++++-------- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ee42cc81..766dfb9d 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -29,7 +29,7 @@ variables: # use the site-specific kubectl context - kubectl config use-context $KUBE_CONTEXT # create job description file - - sh create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX + - sh create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $CI_COMMIT_SHA $RELEASE_NAME $LAB_KERNEL_ROOT $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX - cat simplejob.yml # start the deploy job - kubectl create -f simplejob.yml @@ -282,7 +282,11 @@ sync-gitlab-spack-instance: # create spack dir if it 
doesn't exist - mkdir -p $SPACK_PATH_GITLAB # get latest state of EBRAINS repo - - rm -rf $SPACK_REPO_PATH && cp -r $CI_PROJECT_DIR $SPACK_REPO_PATH + - if [ ! -d $SPACK_REPO_PATH ]; then git clone $CI_REPOSITORY_URL --recurse-submodules $SPACK_REPO_PATH; fi + - cd $SPACK_REPO_PATH + - git fetch origin + - git reset --hard $CI_COMMIT_SHA + - git submodule update --force # run installation script - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX # create kernel spec, so that the environment can be used in gitlab CI jobs diff --git a/create_job.sh b/create_job.sh index 82a6fe8e..60958e3b 100644 --- a/create_job.sh +++ b/create_job.sh @@ -3,7 +3,7 @@ # =========================================================================================================== # title : create_job.sh # usage : ./create_job.sh $OC_JOB_ID $BUILD_ENV_DOCKER_IMAGE $INSTALLATION_ROOT -# $SPACK_ENV $BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT +# $SPACK_ENV $COMMIT_SHA $RELEASE_NAME $LAB_KERNEL_ROOT # description : creates OKD job yaml file that builds/updates spack environment and creates Lab kernel # =========================================================================================================== @@ -11,7 +11,7 @@ OC_JOB_ID=$1 BUILD_ENV_DOCKER_IMAGE=$2 INSTALLATION_ROOT=$3 EBRAINS_SPACK_ENV=$4 -BRANCH=$5 +COMMIT_SHA=$5 RELEASE_NAME=$6 LAB_KERNEL_ROOT=$7 UPDATE_SPACK_OCI_CACHES=$8 @@ -53,15 +53,18 @@ spec: # create root dir if it doesn't exist mkdir -p \$INSTALLATION_ROOT + # reset spack repository dir by cloning the selected version + if [ ! 
-d \$EBRAINS_REPO_PATH ]; then git clone ${CI_PROJECT_URL} --recurse-submodules \$EBRAINS_REPO_PATH; fi + cd \$EBRAINS_REPO_PATH + git fetch origin + git reset --hard \$COMMIT_SHA + git submodule update --force + # reset build error log dir (delete previous logs to save space) rm -rf \$BUILD_LOGS_DIR mkdir -p \$BUILD_LOGS_DIR cd \$BUILD_LOGS_DIR - # reset spack repository dir by cloning the selected version - rm -rf \$EBRAINS_REPO_PATH - git clone ${CI_PROJECT_URL} --recurse-submodules --branch \$BRANCH \$EBRAINS_REPO_PATH - # run installation script bash \$EBRAINS_REPO_PATH/install_spack_env.sh \$SPACK_JOBS \$INSTALLATION_ROOT \$EBRAINS_REPO_PATH \$EBRAINS_SPACK_ENV "" \$UPDATE_SPACK_OCI_CACHES \$OCI_CACHE_PREFIX @@ -85,8 +88,8 @@ spec: value: '$UPDATE_SPACK_OCI_CACHES' - name: OCI_CACHE_PREFIX value: $OCI_CACHE_PREFIX - - name: BRANCH - value: $BRANCH + - name: COMMIT_SHA + value: $COMMIT_SHA - name: RELEASE_NAME value: $RELEASE_NAME - name: LAB_KERNEL_ROOT -- GitLab From 7fed80b236972a9e134a11a865762e00afc21d38 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 19 Feb 2025 11:16:20 +0100 Subject: [PATCH 025/111] fix(py-pytest-tornasync): add missing setuptools dependency --- packages/py-pytest-tornasync/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/py-pytest-tornasync/package.py b/packages/py-pytest-tornasync/package.py index 784f7db7..3beaa73c 100644 --- a/packages/py-pytest-tornasync/package.py +++ b/packages/py-pytest-tornasync/package.py @@ -15,6 +15,7 @@ class PyPytestTornasync(PythonPackage): version("0.6.0.post2", sha256="d781b6d951a2e7c08843141d3ff583610b4ea86bfa847714c76edefb576bbe5d") depends_on('python@3.5:', type=('build', 'run')) + depends_on('py-setuptools', type=('build')) depends_on('py-pytest', type=('build', 'run')) depends_on('py-tornado@5.0:', type=('build', 'run')) -- GitLab From cccf2330622f3898c6c7257dea6a00ffd1b6b2a7 Mon Sep 17 00:00:00 2001 From: liadomide <lia_domide@yahoo.com> 
Date: Fri, 21 Feb 2025 13:54:56 +0200 Subject: [PATCH 026/111] feat(py-tvb-library): add version 2.9.2 --- packages/py-tvb-library/package.py | 10 +++++----- spack.yaml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/py-tvb-library/package.py b/packages/py-tvb-library/package.py index 5506bc36..c624a102 100644 --- a/packages/py-tvb-library/package.py +++ b/packages/py-tvb-library/package.py @@ -14,10 +14,11 @@ class PyTvbLibrary(PythonPackage): """ homepage = "https://www.thevirtualbrain.org/" - pypi = 'tvb-library/tvb_library-2.9.1.tar.gz' + pypi = 'tvb-library/tvb_library-2.9.2.tar.gz' maintainers = ['ldomide', 'paulapopa', 'teodoramisan'] + version('2.9.2', 'be3562b28464e51d09e9378d71996fc11522121124c5438dd3e9cdd71060d09e') version('2.9.1', 'a251f602bffe768e6184ffc14d3d6ad9f03be879a78062a7397c30a5f8878b41') version('2.9', '0a65fa3d803ef8da3d69a0f27a13e40b79876ee6d5b627c34d0c9fc5c12697e6') version('2.8.2', '4222dadbecce53b29e2141801b1ca8eb99c4595ace6c7d0469f7e0a41d3f384a') @@ -35,8 +36,8 @@ class PyTvbLibrary(PythonPackage): # python_requires depends_on('python@3.8:', type=('build', 'run')) - # setup_requires - depends_on('py-setuptools', type='build') + # build_requires + depends_on('py-hatchling', type='build') # install_requires depends_on('py-autopep8', type=('build', 'run')) @@ -53,6 +54,7 @@ class PyTvbLibrary(PythonPackage): depends_on('py-numba', type=('build', 'run')) depends_on('py-numexpr', type=('build', 'run')) depends_on('py-scipy', type=('build', 'run')) + depends_on('py-scipy@:1.13', type=('build', 'run'), when='@:2.9.1') depends_on('py-six', type=('build', 'run')) depends_on('py-tvb-data', type=('run', 'test')) depends_on('py-tvb-gdist', type=('run', 'test')) @@ -62,8 +64,6 @@ class PyTvbLibrary(PythonPackage): depends_on('py-pytest-benchmark', type='test') depends_on('py-pytest-xdist', type='test') - skip_modules = ['tvb.tests.library.simulator.rateml_test', "tvb/tests/library/simulator/rateml_test"] - 
@run_after('install') @on_package_attributes(run_tests=True) def install_test(self): diff --git a/spack.yaml b/spack.yaml index 5f8217b5..79fd41e2 100644 --- a/spack.yaml +++ b/spack.yaml @@ -49,7 +49,7 @@ spack: - py-spynnaker@7.3.0 - py-tvb-framework@2.9 - py-tvb-contrib@2.9.1 - - py-tvb-library@2.9.1 + - py-tvb-library@2.9.2 - py-tvb-multiscale@2.1.0.ebrains - py-tvb-widgets@2.1.0 - py-tvb-ext-bucket -- GitLab From 3d4e0591a149c304d4c7d7ea57a989a2098bd87e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Mon, 3 Feb 2025 18:56:08 +0100 Subject: [PATCH 027/111] fix(llvm+visionary): port patches to llvm@13 --- ...alify-template-parameters-of-nested-.patch | 32 ++ ...port-for-obtaining-fully-qualified-n.patch | 136 +++++ ...ion-to-keep-whitespace-when-tokenizi.patch | 238 +++++++++ ...ow-visiting-of-implicit-declarations.patch | 504 ++++++++++++++++++ packages/llvm/package.py | 25 +- 5 files changed, 926 insertions(+), 9 deletions(-) create mode 100644 packages/llvm/llvm13-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch create mode 100644 packages/llvm/llvm13-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch create mode 100644 packages/llvm/llvm13-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch create mode 100644 packages/llvm/llvm13-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch diff --git a/packages/llvm/llvm13-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch b/packages/llvm/llvm13-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch new file mode 100644 index 00000000..56ff9ccb --- /dev/null +++ b/packages/llvm/llvm13-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch @@ -0,0 +1,32 @@ +diff -pur spack-src/clang/unittests/Tooling/QualTypeNamesTest.cpp spack-src-new/clang/unittests/Tooling/QualTypeNamesTest.cpp +--- spack-src/clang/unittests/Tooling/QualTypeNamesTest.cpp 2022-01-20 22:31:59.000000000 +0100 ++++ 
spack-src-new/clang/unittests/Tooling/QualTypeNamesTest.cpp 2025-02-03 18:25:49.156686779 +0100 +@@ -66,6 +66,10 @@ TEST(QualTypeNameTest, getFullyQualified + // Template parameter expansion. + Visitor.ExpectedQualTypeNames["CheckC"] = + "A::B::Template0<A::B::C::MyInt, A::B::AnotherClass>"; ++ // Template parameters of nested name specifier should also be fully expanded. ++ Visitor.ExpectedQualTypeNames["CheckNested"] = ++ // "typename A::B::Template0<A::B::C::MyInt, A::B::AnotherClass>::nested"; ++ "typename A::B::Template0<int, A::B::Class0>::nested"; + // Recursive template parameter expansion. + Visitor.ExpectedQualTypeNames["CheckD"] = + "A::B::Template0<A::B::Template1<A::B::C::MyInt, A::B::AnotherClass>, " +@@ -111,7 +115,7 @@ TEST(QualTypeNameTest, getFullyQualified + " InnerAlias<int> AliasTypeVal;\n" + " InnerAlias<Class0>::Inner AliasInnerTypeVal;\n" + " }\n" +- " template<class X, class Y> class Template0;" ++ " template<class X, class Y> struct Template0 { typedef int nested; };" + " template<class X, class Y> class Template1;" + " typedef B::Class0 AnotherClass;\n" + " void Function1(Template0<C::MyInt,\n" +@@ -119,6 +123,8 @@ TEST(QualTypeNameTest, getFullyQualified + " void Function2(Template0<Template1<C::MyInt, AnotherClass>,\n" + " Template0<int, long> > CheckD);\n" + " void Function3(const B::Class0* CheckM);\n" ++ " void Function4(typename Template0<C::MyInt,\n" ++ " AnotherClass>::nested CheckNested);\n" + " }\n" + "template<typename... 
Values> class Variadic {};\n" + "Variadic<int, B::Template0<int, char>, " diff --git a/packages/llvm/llvm13-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch b/packages/llvm/llvm13-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch new file mode 100644 index 00000000..b27ac4c0 --- /dev/null +++ b/packages/llvm/llvm13-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch @@ -0,0 +1,136 @@ +diff -pur spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:35:08.767069975 +0100 +@@ -2428,6 +2428,14 @@ class Type(Structure): + """Retrieve the spelling of this Type.""" + return conf.lib.clang_getTypeSpelling(self) + ++ @property ++ def fully_qualified_name(self): ++ """Retrieve the fully qualified name of this Type.""" ++ if not hasattr(self, '_fully_qualified_name'): ++ self._fully_qualified_name = conf.lib.clang_getFullyQualifiedTypeName(self) ++ ++ return self._fully_qualified_name ++ + def __eq__(self, other): + if type(other) != type(self): + return False +@@ -3869,6 +3877,11 @@ functionList = [ + [Type], + _CXString, + _CXString.from_result), ++ ++ ("clang_getFullyQualifiedTypeName", ++ [Type], ++ _CXString, ++ _CXString.from_result), + + ("clang_hashCursor", + [Cursor], +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:35:08.767069975 +0100 +@@ -316,6 +316,14 @@ class TestCursor(unittest.TestCase): + underlying = typedef.underlying_typedef_type + self.assertEqual(underlying.kind, TypeKind.INT) + ++ def test_fully_qualified_type_name(): ++ source = 'namespace uiae 
{ struct X { typedef int sometype; }; }' ++ tu = get_tu(source, lang='cpp') ++ ++ cls = get_cursor(tu, 'sometype') ++ fqn = cls.type.fully_qualified_name ++ self.assertTrue(fqn.endswith("uiae::X::sometype"), fqn) ++ + def test_semantic_parent(self): + tu = get_tu(kParentTest, 'cpp') + curs = get_cursors(tu, 'f') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:35:52.971255790 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. + */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 62 ++#define CINDEX_VERSION_MINOR 63 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -3447,6 +3447,14 @@ CINDEX_LINKAGE CXType clang_getCursorTyp + CINDEX_LINKAGE CXString clang_getTypeSpelling(CXType CT); + + /** ++ * Retrieve the fully qualified name of the underlying type. ++ * This includes full qualification of all template parameters etc. ++ * ++ * If the type is invalid, an empty string is returned. ++ */ ++CINDEX_LINKAGE CXString clang_getFullyQualifiedTypeName(CXType CT); ++ ++/** + * Retrieve the underlying type of a typedef declaration. 
+ * + * If the cursor does not reference a typedef declaration, an invalid type is +diff -pur spack-src/clang/tools/libclang/CMakeLists.txt spack-src-new/clang/tools/libclang/CMakeLists.txt +--- spack-src/clang/tools/libclang/CMakeLists.txt 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CMakeLists.txt 2025-02-03 18:35:11.263080474 +0100 +@@ -52,6 +52,7 @@ set(LIBS + clangSema + clangSerialization + clangTooling ++ clangToolingCore + ) + + if (CLANG_ENABLE_ARCMT) +diff -pur spack-src/clang/tools/libclang/CXType.cpp spack-src-new/clang/tools/libclang/CXType.cpp +--- spack-src/clang/tools/libclang/CXType.cpp 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CXType.cpp 2025-02-03 18:35:11.263080474 +0100 +@@ -19,6 +19,7 @@ + #include "clang/AST/DeclObjC.h" + #include "clang/AST/DeclTemplate.h" + #include "clang/AST/Expr.h" ++#include "clang/AST/QualTypeNames.h" + #include "clang/AST/Type.h" + #include "clang/Basic/AddressSpaces.h" + #include "clang/Frontend/ASTUnit.h" +@@ -303,6 +304,27 @@ CXString clang_getTypeSpelling(CXType CT + return cxstring::createDup(OS.str()); + } + ++CXString clang_getFullyQualifiedTypeName(CXType CT) { ++ QualType T = GetQualType(CT); ++ if (T.isNull()) ++ return cxstring::createEmpty(); ++ ++ // For builtin types (but not typedefs pointing to builtin types) return their ++ // spelling. Otherwise "bool" will be turned into "_Bool". 
++ const Type *TP = T.getTypePtrOrNull(); ++ if (TP && TP->isBuiltinType() && T->getAs<TypedefType>() == nullptr) ++ return clang_getTypeSpelling(CT); ++ ++ CXTranslationUnit TU = GetTU(CT); ++ ASTContext &Ctx = cxtu::getASTUnit(TU)->getASTContext(); ++ PrintingPolicy Policy(Ctx.getPrintingPolicy()); ++ Policy.SuppressScope = false; ++ Policy.AnonymousTagLocations = false; ++ Policy.PolishForDeclaration = true; ++ std::string name = TypeName::getFullyQualifiedName(T, Ctx, Policy, /*WithGlobalNsPrefix=*/true); ++ return cxstring::createDup(name.c_str()); ++} ++ + CXType clang_getTypedefDeclUnderlyingType(CXCursor C) { + using namespace cxcursor; + CXTranslationUnit TU = cxcursor::getCursorTU(C); +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:36:14.531346336 +0100 +@@ -303,6 +303,7 @@ LLVM_13 { + clang_getFileName; + clang_getFileTime; + clang_getFileUniqueID; ++ clang_getFullyQualifiedTypeName; + clang_getFunctionTypeCallingConv; + clang_getIBOutletCollectionType; + clang_getIncludedFile; diff --git a/packages/llvm/llvm13-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch b/packages/llvm/llvm13-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch new file mode 100644 index 00000000..3771cfc6 --- /dev/null +++ b/packages/llvm/llvm13-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch @@ -0,0 +1,238 @@ +diff -pur spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:37:54.447765317 +0100 +@@ -529,6 +529,13 @@ class TokenGroup(object): + + You should not instantiate this class outside of this module. 
+ """ ++ ++ # Default tokenization mode. ++ TOKENIZE_NONE = 0 ++ ++ # Used to indicate that tokens for whitespace should be returned. ++ TOKENIZE_KEEP_WHITESPACE = 1 ++ + def __init__(self, tu, memory, count): + self._tu = tu + self._memory = memory +@@ -538,7 +545,7 @@ class TokenGroup(object): + conf.lib.clang_disposeTokens(self._tu, self._memory, self._count) + + @staticmethod +- def get_tokens(tu, extent): ++ def get_tokens(tu, extent, options=0): + """Helper method to return all tokens in an extent. + + This functionality is needed multiple places in this module. We define +@@ -547,8 +554,8 @@ class TokenGroup(object): + tokens_memory = POINTER(Token)() + tokens_count = c_uint() + +- conf.lib.clang_tokenize(tu, extent, byref(tokens_memory), +- byref(tokens_count)) ++ conf.lib.clang_tokenizeRange( ++ tu, extent, byref(tokens_memory), byref(tokens_count), options) + + count = int(tokens_count.value) + +@@ -1852,13 +1859,16 @@ class Cursor(Structure): + for descendant in child.walk_preorder(): + yield descendant + +- def get_tokens(self): ++ def get_tokens(self, options=0): + """Obtain Token instances formulating that compose this Cursor. + + This is a generator for Token instances. It returns all tokens which + occupy the extent this cursor occupies. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ +- return TokenGroup.get_tokens(self._tu, self.extent) ++ return TokenGroup.get_tokens(self._tu, self.extent, options) + + def get_field_offsetof(self): + """Returns the offsetof the FIELD_DECL pointed by this Cursor.""" +@@ -3073,18 +3091,21 @@ class TranslationUnit(ClangObject): + return CodeCompletionResults(ptr) + return None + +- def get_tokens(self, locations=None, extent=None): ++ def get_tokens(self, locations=None, extent=None, options=0): + """Obtain tokens in this translation unit. + + This is a generator for Token instances. The caller specifies a range + of source code to obtain tokens for. 
The range can be specified as a + 2-tuple of SourceLocation or as a SourceRange. If both are defined, + behavior is undefined. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ + if locations is not None: + extent = SourceRange(start=locations[0], end=locations[1]) + +- return TokenGroup.get_tokens(self, extent) ++ return TokenGroup.get_tokens(self, extent, options) + + class File(ClangObject): + """ +@@ -3957,6 +3983,10 @@ functionList = [ + ("clang_tokenize", + [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint)]), + ++ ("clang_tokenizeRange", ++ [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint), ++ c_uint]), ++ + ("clang_visitChildren", + [Cursor, callbacks['cursor_visit'], py_object], + c_uint), +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:37:54.447765317 +0100 +@@ -10,6 +10,7 @@ import unittest + from clang.cindex import AvailabilityKind + from clang.cindex import CursorKind + from clang.cindex import TemplateArgumentKind ++from clang.cindex import TokenGroup + from clang.cindex import TranslationUnit + from clang.cindex import TypeKind + from .util import get_cursor +@@ -480,6 +489,14 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tokens[0].spelling, 'int') + self.assertEqual(tokens[1].spelling, 'foo') + ++ def test_get_tokens_with_whitespace(): ++ source = 'class C { void f(); }\nvoid C::f() { }' ++ tu = get_tu(source) ++ ++ tokens = list(tu.cursor.get_tokens(TokenGroup.TOKENIZE_KEEP_WHITESPACE)) ++ self.assertEqual(''.join(t.spelling for t in tokens), source) ++ self.assertEqual(len(tokens), 27, [t.spelling for t in tokens]) ++ + def test_get_token_cursor(self): + 
"""Ensure we can map tokens to cursors.""" + tu = get_tu('class A {}; int foo(A var = A());', lang='cpp') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:38:17.919863604 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. + */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 63 ++#define CINDEX_VERSION_MINOR 64 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -5036,6 +5044,28 @@ CINDEX_LINKAGE CXSourceLocation clang_ge + */ + CINDEX_LINKAGE CXSourceRange clang_getTokenExtent(CXTranslationUnit, CXToken); + ++typedef enum { ++ /** ++ * \brief Used to indicate that no special tokenization options are needed. ++ */ ++ CXTokenize_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that tokens for whitespace should be returned. ++ */ ++ CXTokenize_KeepWhitespace = 0x1 ++} CXTokenize_Flags; ++ ++/** ++ * \brief Tokenize the source code described by the given range into raw ++ * lexical tokens. ++ * ++ * \see clang_tokenizeRange ++ * ++ */ ++CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens); ++ + /** + * Tokenize the source code described by the given range into raw + * lexical tokens. +@@ -5052,9 +5082,13 @@ CINDEX_LINKAGE CXSourceRange clang_getTo + * \param NumTokens will be set to the number of tokens in the \c *Tokens + * array. + * ++ * \param options A bitmask of options that affects tokenization. This should be ++ * a bitwise OR of the CXTokenize_XXX flags. 
++ * + */ +-CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, +- CXToken **Tokens, unsigned *NumTokens); ++CINDEX_LINKAGE void clang_tokenizeRange(CXTranslationUnit TU, ++ CXSourceRange Range, CXToken **Tokens, ++ unsigned *NumTokens, unsigned options); + + /** + * Annotate the given set of tokens by providing cursors for each token +diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/libclang/CIndex.cpp +--- spack-src/clang/tools/libclang/CIndex.cpp 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CIndex.cpp 2025-02-03 18:37:55.855771214 +0100 +@@ -6882,7 +6882,7 @@ CXSourceRange clang_getTokenExtent(CXTra + } + + static void getTokens(ASTUnit *CXXUnit, SourceRange Range, +- SmallVectorImpl<CXToken> &CXTokens) { ++ SmallVectorImpl<CXToken> &CXTokens, unsigned options) { + SourceManager &SourceMgr = CXXUnit->getSourceManager(); + std::pair<FileID, unsigned> BeginLocInfo = + SourceMgr.getDecomposedSpellingLoc(Range.getBegin()); +@@ -6903,6 +6903,9 @@ static void getTokens(ASTUnit *CXXUnit, + CXXUnit->getASTContext().getLangOpts(), Buffer.begin(), + Buffer.data() + BeginLocInfo.second, Buffer.end()); + Lex.SetCommentRetentionState(true); ++ if (options & CXTokenize_KeepWhitespace) { ++ Lex.SetKeepWhitespaceMode(true); ++ } + + // Lex tokens until we hit the end of the range. 
+ const char *EffectiveBufferEnd = Buffer.data() + EndLocInfo.second; +@@ -6973,7 +6976,7 @@ CXToken *clang_getToken(CXTranslationUni + SM.getComposedLoc(DecomposedEnd.first, DecomposedEnd.second); + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, SourceRange(Begin, End), CXTokens); ++ getTokens(CXXUnit, SourceRange(Begin, End), CXTokens, CXTokenize_None); + + if (CXTokens.empty()) + return NULL; +@@ -6987,6 +6990,12 @@ CXToken *clang_getToken(CXTranslationUni + + void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, CXToken **Tokens, + unsigned *NumTokens) { ++ return clang_tokenizeRange(TU, Range, Tokens, NumTokens, CXTokenize_None); ++} ++ ++void clang_tokenizeRange(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens, ++ unsigned options) { + LOG_FUNC_SECTION { *Log << TU << ' ' << Range; } + + if (Tokens) +@@ -7010,7 +7019,7 @@ void clang_tokenize(CXTranslationUnit TU + return; + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, R, CXTokens); ++ getTokens(CXXUnit, R, CXTokens, options); + + if (CXTokens.empty()) + return; +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:38:37.647946177 +0100 +@@ -398,6 +399,7 @@ LLVM_13 { + clang_suspendTranslationUnit; + clang_toggleCrashRecovery; + clang_tokenize; ++ clang_tokenizeRange; + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; diff --git a/packages/llvm/llvm13-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch b/packages/llvm/llvm13-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch new file mode 100644 index 00000000..c9f997dc --- /dev/null +++ b/packages/llvm/llvm13-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch @@ -0,0 +1,504 @@ +diff -pur 
spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:39:41.616213701 +0100 +@@ -1426,6 +1426,15 @@ class Cursor(Structure): + """ + _fields_ = [("_kind_id", c_int), ("xdata", c_int), ("data", c_void_p * 3)] + ++ # Default behavior. ++ GET_CHILDREN_NONE = 0 ++ ++ # Used to indicate that implicit cursors should be visited. ++ GET_CHILDREN_WITH_IMPLICIT = 1 ++ ++ # Used to indicate that template instantiations should be visited. ++ GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS = 2 ++ + @staticmethod + def from_location(tu, location): + # We store a reference to the TU in the instance so the TU won't get +@@ -1515,6 +1524,10 @@ class Cursor(Structure): + """ + return conf.lib.clang_EnumDecl_isScoped(self) + ++ def is_implicit(self): ++ """Test whether the cursor refers to an implicit declaration.""" ++ return conf.lib.clang_isImplicit(self) ++ + def get_definition(self): + """ + If the cursor is a reference to a declaration or a declaration of +@@ -1831,8 +1844,12 @@ class Cursor(Structure): + """Returns the value of the indicated arg as an unsigned 64b integer.""" + return conf.lib.clang_Cursor_getTemplateArgumentUnsignedValue(self, num) + +- def get_children(self): +- """Return an iterator for accessing the children of this cursor.""" ++ def get_children(self, with_implicit=False, with_template_instantiations=False): ++ """Return an iterator for accessing the children of this cursor. ++ ++ By default, cursors representing implicit declarations or template instantiations ++ will be skipped. ++ """ + + # FIXME: Expose iteration from CIndex, PR6125. 
+ def visitor(child, parent, children): +@@ -1845,18 +1862,24 @@ class Cursor(Structure): + children.append(child) + return 1 # continue + children = [] +- conf.lib.clang_visitChildren(self, callbacks['cursor_visit'](visitor), +- children) ++ dispatch = conf.lib.clang_visitChildren ++ options = Cursor.GET_CHILDREN_NONE ++ if with_implicit: ++ options |= Cursor.GET_CHILDREN_WITH_IMPLICIT ++ if with_template_instantiations: ++ options |= Cursor.GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS ++ conf.lib.clang_visitChildrenWithOptions( ++ self, callbacks['cursor_visit'](visitor), children, options) + return iter(children) + +- def walk_preorder(self): ++ def walk_preorder(self, **kwargs): + """Depth-first preorder walk over the cursor and its descendants. + + Yields cursors. + """ + yield self +- for child in self.get_children(): +- for descendant in child.walk_preorder(): ++ for child in self.get_children(**kwargs): ++ for descendant in child.walk_preorder(**kwargs): + yield descendant + + def get_tokens(self, options=0): +@@ -3928,6 +3951,10 @@ functionList = [ + [Type], + bool), + ++ ("clang_isImplicit", ++ [Cursor], ++ bool), ++ + ("clang_isInvalid", + [CursorKind], + bool), +@@ -3991,6 +4018,10 @@ functionList = [ + [Cursor, callbacks['cursor_visit'], py_object], + c_uint), + ++ ("clang_visitChildrenWithOptions", ++ [Cursor, callbacks['cursor_visit'], py_object, c_uint], ++ c_uint), ++ + ("clang_Cursor_getNumArguments", + [Cursor], + c_int), +diff -pur spack-src/clang/bindings/python/clang/cindex.py.orig spack-src-new/clang/bindings/python/clang/cindex.py.orig +--- spack-src/clang/bindings/python/clang/cindex.py.orig 2025-02-03 18:51:19.639119257 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py.orig 2025-02-03 18:37:54.447765317 +0100 +@@ -529,6 +529,13 @@ class TokenGroup(object): + + You should not instantiate this class outside of this module. + """ ++ ++ # Default tokenization mode. 
++ TOKENIZE_NONE = 0 ++ ++ # Used to indicate that tokens for whitespace should be returned. ++ TOKENIZE_KEEP_WHITESPACE = 1 ++ + def __init__(self, tu, memory, count): + self._tu = tu + self._memory = memory +@@ -538,7 +545,7 @@ class TokenGroup(object): + conf.lib.clang_disposeTokens(self._tu, self._memory, self._count) + + @staticmethod +- def get_tokens(tu, extent): ++ def get_tokens(tu, extent, options=0): + """Helper method to return all tokens in an extent. + + This functionality is needed multiple places in this module. We define +@@ -547,8 +554,8 @@ class TokenGroup(object): + tokens_memory = POINTER(Token)() + tokens_count = c_uint() + +- conf.lib.clang_tokenize(tu, extent, byref(tokens_memory), +- byref(tokens_count)) ++ conf.lib.clang_tokenizeRange( ++ tu, extent, byref(tokens_memory), byref(tokens_count), options) + + count = int(tokens_count.value) + +@@ -1852,13 +1859,16 @@ class Cursor(Structure): + for descendant in child.walk_preorder(): + yield descendant + +- def get_tokens(self): ++ def get_tokens(self, options=0): + """Obtain Token instances formulating that compose this Cursor. + + This is a generator for Token instances. It returns all tokens which + occupy the extent this cursor occupies. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ +- return TokenGroup.get_tokens(self._tu, self.extent) ++ return TokenGroup.get_tokens(self._tu, self.extent, options) + + def get_field_offsetof(self): + """Returns the offsetof the FIELD_DECL pointed by this Cursor.""" +@@ -3081,18 +3091,21 @@ class TranslationUnit(ClangObject): + return CodeCompletionResults(ptr) + return None + +- def get_tokens(self, locations=None, extent=None): ++ def get_tokens(self, locations=None, extent=None, options=0): + """Obtain tokens in this translation unit. + + This is a generator for Token instances. The caller specifies a range + of source code to obtain tokens for. 
The range can be specified as a + 2-tuple of SourceLocation or as a SourceRange. If both are defined, + behavior is undefined. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ + if locations is not None: + extent = SourceRange(start=locations[0], end=locations[1]) + +- return TokenGroup.get_tokens(self, extent) ++ return TokenGroup.get_tokens(self, extent, options) + + class File(ClangObject): + """ +@@ -3970,6 +3983,10 @@ functionList = [ + ("clang_tokenize", + [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint)]), + ++ ("clang_tokenizeRange", ++ [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint), ++ c_uint]), ++ + ("clang_visitChildren", + [Cursor, callbacks['cursor_visit'], py_object], + c_uint), +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:39:41.616213701 +0100 +@@ -94,6 +94,39 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tu_nodes[2].displayname, 'f0(int, int)') + self.assertEqual(tu_nodes[2].is_definition(), True) + ++ def test_get_children_with_implicit(): ++ tu = get_tu('struct X {}; X x;', lang='cpp') ++ cursor = get_cursor(tu, 'X') ++ ++ children = list(cursor.get_children()) ++ self.assertEqual(len(children), 0, [(c.kind, c.spelling) for c in children]) ++ ++ children = list(cursor.get_children(with_implicit=True)) ++ self.assertNotEqual(len(children), 0) ++ for child in children: ++ self.assertTrue(child.is_implicit()) ++ self.assertEqual(child.spelling, "X") ++ self.assertIn(child.kind, [CursorKind.CONSTRUCTOR, CursorKind.STRUCT_DECL]) ++ ++ def test_get_children_with_template_instantiations(): ++ tu = get_tu( ++ 'template <typename T> T frobnicate(T val);' ++ 
'extern template int frobnicate<int>(int);', ++ lang='cpp') ++ cursor = get_cursor(tu, 'frobnicate') ++ self.assertEqual(cursor.kind, CursorKind.FUNCTION_TEMPLATE) ++ ++ for child in cursor.get_children(): ++ # should not return an instantiation: ++ self.assertNotEqual(child.kind, CursorKind.FUNCTION_DECL) ++ ++ for child in cursor.get_children(with_template_instantiations=True): ++ if child.kind == CursorKind.FUNCTION_DECL: ++ self.assertEqual(child.spelling, 'frobnicate') ++ break ++ else: ++ self.fail("Couldn't find template instantiation") ++ + def test_references(self): + """Ensure that references to TranslationUnit are kept.""" + tu = get_tu('int x;') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:40:48.276492163 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. + */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 64 ++#define CINDEX_VERSION_MINOR 65 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -2817,6 +2817,11 @@ CINDEX_LINKAGE unsigned clang_isPreproce + */ + CINDEX_LINKAGE unsigned clang_isUnexposed(enum CXCursorKind); + ++/*** ++ * \brief Determine whether the given cursor represents an implicit declaration. ++ */ ++CINDEX_LINKAGE unsigned clang_isImplicit(CXCursor); ++ + /** + * Describe the linkage of the entity referred to by a cursor. + */ +@@ -4274,6 +4279,32 @@ clang_visitChildrenWithBlock(CXCursor pa + #endif + #endif + ++typedef enum { ++ /** ++ * \brief Default behavior. ++ */ ++ CXVisitChildren_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that implicit cursors should be visited. ++ */ ++ CXVisitChildren_WithImplicit = 0x1, ++ ++ /** ++ * \brief Used to indicate that template instantiations should be visited. 
++ */ ++ CXVisitChildren_WithTemplateInstantiations = 0x2 ++} CXVisitChildren_Flags; ++ ++/** ++ * \brief Visits the children of a cursor, allowing to pass extra options. ++ * Behaves identically to clang_visitChildren() in all other respects. ++ */ ++CINDEX_LINKAGE unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options); ++ + /** + * @} + */ +diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/libclang/CIndex.cpp +--- spack-src/clang/tools/libclang/CIndex.cpp 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/tools/libclang/CIndex.cpp 2025-02-03 18:39:43.000219486 +0100 +@@ -203,9 +203,10 @@ bool CursorVisitor::Visit(CXCursor Curso + return true; // abort. + } + +- // Ignore implicit declarations, unless it's an objc method because +- // currently we should report implicit methods for properties when indexing. +- if (D->isImplicit() && !isa<ObjCMethodDecl>(D)) ++ // Unless instructed otherwise we ignore implicit declarations. ++ // ObjC methods are currently visited in any case, because implicit methods ++ // for properties should be reported when indexing. 
++ if (!VisitImplicitDeclarations && D->isImplicit() && !isa<ObjCMethodDecl>(D)) + return false; + } + +@@ -713,10 +714,13 @@ bool CursorVisitor::VisitTagDecl(TagDecl + + bool CursorVisitor::VisitClassTemplateSpecializationDecl( + ClassTemplateSpecializationDecl *D) { +- bool ShouldVisitBody = false; ++ bool ShouldVisitBody = VisitTemplateInstantiations; + switch (D->getSpecializationKind()) { +- case TSK_Undeclared: + case TSK_ImplicitInstantiation: ++ if (VisitTemplateInstantiations && VisitImplicitDeclarations) { ++ break; ++ } ++ case TSK_Undeclared: + // Nothing to visit + return false; + +@@ -725,6 +729,7 @@ bool CursorVisitor::VisitClassTemplateSp + break; + + case TSK_ExplicitSpecialization: ++ // Always visit body of explicit specializations + ShouldVisitBody = true; + break; + } +@@ -945,7 +950,31 @@ bool CursorVisitor::VisitFunctionTemplat + return true; + + auto *FD = D->getTemplatedDecl(); +- return VisitAttributes(FD) || VisitFunctionDecl(FD); ++ if (VisitAttributes(FD) || VisitFunctionDecl(FD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *FD : D->specializations()) { ++ for (auto *RD : FD->redecls()) { ++ switch (RD->getTemplateSpecializationKind()) { ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: ++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool CursorVisitor::VisitClassTemplateDecl(ClassTemplateDecl *D) { +@@ -956,6 +985,40 @@ bool CursorVisitor::VisitClassTemplateDe + + auto *CD = D->getTemplatedDecl(); + return VisitAttributes(CD) || VisitCXXRecordDecl(CD); ++ if (VisitAttributes(CD) || VisitCXXRecordDecl(CD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *SD : 
D->specializations()) { ++ for (auto *RD : SD->redecls()) { ++ // We don't want to visit injected-class-names in this traversal. ++ if (cast<CXXRecordDecl>(RD)->isInjectedClassName()) ++ continue; ++ ++ switch ( ++ cast<ClassTemplateSpecializationDecl>(RD)->getSpecializationKind()) { ++ // Visit the implicit instantiations with the requested pattern. ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ // We don't need to do anything on an explicit instantiation ++ // or explicit specialization because there will be an explicit ++ // node for it elsewhere. ++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool CursorVisitor::VisitTemplateTemplateParmDecl(TemplateTemplateParmDecl *D) { +@@ -4596,6 +4659,24 @@ unsigned clang_visitChildrenWithBlock(CX + return clang_visitChildren(parent, visitWithBlock, block); + } + ++unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options) { ++ CursorVisitor CursorVis( ++ getCursorTU(parent), visitor, client_data, ++ /*VisitPreprocessorLast=*/false, ++ /*VisitIncludedPreprocessingEntries=*/false, ++ /*RegionOfInterest=*/SourceRange(), ++ /*VisitDeclsOnly=*/false, ++ /*PostChildrenVisitor=*/nullptr, ++ /*VisitImplicitDeclarations=*/(options & CXVisitChildren_WithImplicit), ++ /*VisitTemplateInstantiations=*/ ++ (options & CXVisitChildren_WithTemplateInstantiations)); ++ ++ return CursorVis.VisitChildren(parent); ++} ++ + static CXString getDeclSpelling(const Decl *D) { + if (!D) + return cxstring::createEmpty(); +@@ -5983,6 +6064,22 @@ unsigned clang_isUnexposed(enum CXCursor + } + } + ++unsigned clang_isImplicit(CXCursor Cursor) { ++ if (clang_isInvalid(Cursor.kind)) ++ return false; ++ ++ 
if (!clang_isDeclaration(Cursor.kind)) ++ return false; ++ ++ const Decl *D = getCursorDecl(Cursor); ++ if (!D) { ++ assert(0 && "Invalid declaration cursor"); ++ return true; // abort. ++ } ++ ++ return D->isImplicit(); ++} ++ + CXCursorKind clang_getCursorKind(CXCursor C) { return C.kind; } + + CXSourceLocation clang_getCursorLocation(CXCursor C) { +diff -pur spack-src/clang/tools/libclang/CursorVisitor.h spack-src-new/clang/tools/libclang/CursorVisitor.h +--- spack-src/clang/tools/libclang/CursorVisitor.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CursorVisitor.h 2025-02-03 18:39:43.000219486 +0100 +@@ -104,6 +104,12 @@ private: + /// record entries. + bool VisitDeclsOnly; + ++ /// \brief Whether we should visit implicit declarations. ++ bool VisitImplicitDeclarations; ++ ++ /// \brief Whether we should recurse into template instantiations. ++ bool VisitTemplateInstantiations; ++ + // FIXME: Eventually remove. This part of a hack to support proper + // iteration over all Decls contained lexically within an ObjC container. 
+ DeclContext::decl_iterator *DI_current; +@@ -152,12 +158,16 @@ public: + bool VisitIncludedPreprocessingEntries = false, + SourceRange RegionOfInterest = SourceRange(), + bool VisitDeclsOnly = false, +- PostChildrenVisitorTy PostChildrenVisitor = nullptr) ++ PostChildrenVisitorTy PostChildrenVisitor = nullptr, ++ bool VisitImplicitDeclarations = false, ++ bool VisitTemplateInstantiations = false) + : TU(TU), AU(cxtu::getASTUnit(TU)), Visitor(Visitor), + PostChildrenVisitor(PostChildrenVisitor), ClientData(ClientData), + VisitPreprocessorLast(VisitPreprocessorLast), + VisitIncludedEntities(VisitIncludedPreprocessingEntries), + RegionOfInterest(RegionOfInterest), VisitDeclsOnly(VisitDeclsOnly), ++ VisitImplicitDeclarations(VisitImplicitDeclarations), ++ VisitTemplateInstantiations(VisitTemplateInstantiations), + DI_current(nullptr), FileDI_current(nullptr) { + Parent.kind = CXCursor_NoDeclFound; + Parent.data[0] = nullptr; +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:40:21.600380765 +0100 +@@ -375,6 +375,7 @@ LLVM_13 { + clang_isExpression; + clang_isFileMultipleIncludeGuarded; + clang_isFunctionTypeVariadic; ++ clang_isImplicit; + clang_isInvalid; + clang_isInvalidDeclaration; + clang_isPODType; +@@ -403,6 +404,7 @@ LLVM_13 { + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; ++ clang_visitChildrenWithOptions; + + local: *; + }; diff --git a/packages/llvm/package.py b/packages/llvm/package.py index 3be46cd0..80e72b37 100644 --- a/packages/llvm/package.py +++ b/packages/llvm/package.py @@ -395,20 +395,27 @@ class Llvm(CMakePackage, CudaPackage): "bindings generator") conflicts("@:8", when="+visionary") - conflicts("@13:", when="+visionary") + conflicts("@14:", when="+visionary") - 
patch('llvm9-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch', when='@9.0.0:12.0.999 +visionary', level=2) + patch('llvm9-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch', when='@9:12 +visionary', level=2) # 0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch from above - patch('llvm11-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch', when='@11.0.0:11.0.999 +visionary', level=2) - patch('llvm11-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@11.0.0:11.0.999 +visionary', level=2) - patch('llvm11-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@11.0.0:11.0.999 +visionary', level=2) - patch('llvm11-0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch', when='@11.0.0:12.0.999 +visionary', level=2) + patch('llvm11-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch', when='@11 +visionary', level=2) + patch('llvm11-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@11 +visionary', level=2) + patch('llvm11-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@11 +visionary', level=2) + patch('llvm11-0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch', when='@11: +visionary', level=2) # 0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch from above - patch('llvm11_1-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch', when='@11.1.0:12.0.999 +visionary', level=2) - patch('llvm11_1-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@11.1.0:12.0.999 +visionary', level=2) - patch('llvm11_1-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@11.1.0:12.0.999 +visionary', level=2) + patch('llvm11_1-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch', when='@11.1:12.0 +visionary', level=2) + patch('llvm11_1-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@11.1:12.0 +visionary', level=2) + 
patch('llvm11_1-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@11.1:12.0 +visionary', level=2) + # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above + + patch('llvm13-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch', when='@13: +visionary', level=1) + patch('llvm13-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch', when='@13:15 +visionary', level=1) + patch('llvm13-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@13 +visionary', level=1) + patch('llvm13-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@13 +visionary', level=1) + # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above # disable check for member `mode` size in `struct ipc_perm`; newer glibc changed width -- GitLab From 3b42c794901e4c10c15eafcf2dae30ab021e76c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Mon, 3 Feb 2025 19:09:15 +0100 Subject: [PATCH 028/111] fix(llvm+visionary): port patches to llvm@14:15 --- ...ion-to-keep-whitespace-when-tokenizi.patch | 238 ++++++++++ ...ow-visiting-of-implicit-declarations.patch | 416 ++++++++++++++++++ ...blic-ClangToolingCommonOptionsParser.patch | 12 + packages/llvm/package.py | 8 +- 4 files changed, 673 insertions(+), 1 deletion(-) create mode 100644 packages/llvm/llvm14-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch create mode 100644 packages/llvm/llvm14-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch create mode 100644 packages/llvm/llvm14-public-ClangToolingCommonOptionsParser.patch diff --git a/packages/llvm/llvm14-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch b/packages/llvm/llvm14-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch new file mode 100644 index 00000000..88250852 --- /dev/null +++ 
b/packages/llvm/llvm14-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch @@ -0,0 +1,238 @@ +diff -pur spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:37:54.447765317 +0100 +@@ -529,6 +529,13 @@ class TokenGroup(object): + + You should not instantiate this class outside of this module. + """ ++ ++ # Default tokenization mode. ++ TOKENIZE_NONE = 0 ++ ++ # Used to indicate that tokens for whitespace should be returned. ++ TOKENIZE_KEEP_WHITESPACE = 1 ++ + def __init__(self, tu, memory, count): + self._tu = tu + self._memory = memory +@@ -538,7 +545,7 @@ class TokenGroup(object): + conf.lib.clang_disposeTokens(self._tu, self._memory, self._count) + + @staticmethod +- def get_tokens(tu, extent): ++ def get_tokens(tu, extent, options=0): + """Helper method to return all tokens in an extent. + + This functionality is needed multiple places in this module. We define +@@ -547,8 +554,8 @@ class TokenGroup(object): + tokens_memory = POINTER(Token)() + tokens_count = c_uint() + +- conf.lib.clang_tokenize(tu, extent, byref(tokens_memory), +- byref(tokens_count)) ++ conf.lib.clang_tokenizeRange(tu, extent, byref(tokens_memory), ++ byref(tokens_count), options) + + count = int(tokens_count.value) + +@@ -1875,13 +1882,16 @@ class Cursor(Structure): + for descendant in child.walk_preorder(**kwargs): + yield descendant + +- def get_tokens(self): ++ def get_tokens(self, options=0): + """Obtain Token instances formulating that compose this Cursor. + + This is a generator for Token instances. It returns all tokens which + occupy the extent this cursor occupies. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. 
+ """ +- return TokenGroup.get_tokens(self._tu, self.extent) ++ return TokenGroup.get_tokens(self._tu, self.extent, options) + + def get_field_offsetof(self): + """Returns the offsetof the FIELD_DECL pointed by this Cursor.""" +@@ -3105,18 +3115,21 @@ class TranslationUnit(ClangObject): + return CodeCompletionResults(ptr) + return None + +- def get_tokens(self, locations=None, extent=None): ++ def get_tokens(self, locations=None, extent=None, options=0): + """Obtain tokens in this translation unit. + + This is a generator for Token instances. The caller specifies a range + of source code to obtain tokens for. The range can be specified as a + 2-tuple of SourceLocation or as a SourceRange. If both are defined, + behavior is undefined. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ + if locations is not None: + extent = SourceRange(start=locations[0], end=locations[1]) + +- return TokenGroup.get_tokens(self, extent) ++ return TokenGroup.get_tokens(self, extent, options) + + class File(ClangObject): + """ +@@ -3957,6 +3983,10 @@ functionList = [ + ("clang_tokenize", + [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint)]), + ++ ("clang_tokenizeRange", ++ [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint), ++ c_uint]), ++ + ("clang_visitChildren", + [Cursor, callbacks['cursor_visit'], py_object], + c_uint), +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:37:54.447765317 +0100 +@@ -10,6 +10,7 @@ import unittest + from clang.cindex import AvailabilityKind + from clang.cindex import CursorKind + from clang.cindex import TemplateArgumentKind ++from clang.cindex import TokenGroup + from clang.cindex 
import TranslationUnit + from clang.cindex import TypeKind + from .util import get_cursor +@@ -480,6 +489,14 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tokens[0].spelling, 'int') + self.assertEqual(tokens[1].spelling, 'foo') + ++ def test_get_tokens_with_whitespace(): ++ source = 'class C { void f(); }\nvoid C::f() { }' ++ tu = get_tu(source) ++ ++ tokens = list(tu.cursor.get_tokens(TokenGroup.TOKENIZE_KEEP_WHITESPACE)) ++ self.assertEqual(''.join(t.spelling for t in tokens), source) ++ self.assertEqual(len(tokens), 27, [t.spelling for t in tokens]) ++ + def test_get_token_cursor(self): + """Ensure we can map tokens to cursors.""" + tu = get_tu('class A {}; int foo(A var = A());', lang='cpp') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:38:17.919863604 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. + */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 63 ++#define CINDEX_VERSION_MINOR 64 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -5036,6 +5044,28 @@ CINDEX_LINKAGE CXSourceLocation clang_ge + */ + CINDEX_LINKAGE CXSourceRange clang_getTokenExtent(CXTranslationUnit, CXToken); + ++typedef enum { ++ /** ++ * \brief Used to indicate that no special tokenization options are needed. ++ */ ++ CXTokenize_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that tokens for whitespace should be returned. ++ */ ++ CXTokenize_KeepWhitespace = 0x1 ++} CXTokenize_Flags; ++ ++/** ++ * \brief Tokenize the source code described by the given range into raw ++ * lexical tokens. 
++ * ++ * \see clang_tokenizeRange ++ * ++ */ ++CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens); ++ + /** + * Tokenize the source code described by the given range into raw + * lexical tokens. +@@ -5052,9 +5082,13 @@ CINDEX_LINKAGE CXSourceRange clang_getTo + * \param NumTokens will be set to the number of tokens in the \c *Tokens + * array. + * ++ * \param options A bitmask of options that affects tokenization. This should be ++ * a bitwise OR of the CXTokenize_XXX flags. ++ * + */ +-CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, +- CXToken **Tokens, unsigned *NumTokens); ++CINDEX_LINKAGE void clang_tokenizeRange(CXTranslationUnit TU, ++ CXSourceRange Range, CXToken **Tokens, ++ unsigned *NumTokens, unsigned options); + + /** + * Annotate the given set of tokens by providing cursors for each token +diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/libclang/CIndex.cpp +--- spack-src/clang/tools/libclang/CIndex.cpp 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CIndex.cpp 2025-02-03 18:37:55.855771214 +0100 +@@ -6882,7 +6882,7 @@ CXSourceRange clang_getTokenExtent(CXTra + } + + static void getTokens(ASTUnit *CXXUnit, SourceRange Range, +- SmallVectorImpl<CXToken> &CXTokens) { ++ SmallVectorImpl<CXToken> &CXTokens, unsigned options) { + SourceManager &SourceMgr = CXXUnit->getSourceManager(); + std::pair<FileID, unsigned> BeginLocInfo = + SourceMgr.getDecomposedSpellingLoc(Range.getBegin()); +@@ -6903,6 +6903,9 @@ static void getTokens(ASTUnit *CXXUnit, + CXXUnit->getASTContext().getLangOpts(), Buffer.begin(), + Buffer.data() + BeginLocInfo.second, Buffer.end()); + Lex.SetCommentRetentionState(true); ++ if (options & CXTokenize_KeepWhitespace) { ++ Lex.SetKeepWhitespaceMode(true); ++ } + + // Lex tokens until we hit the end of the range. 
+ const char *EffectiveBufferEnd = Buffer.data() + EndLocInfo.second; +@@ -6973,7 +6976,7 @@ CXToken *clang_getToken(CXTranslationUni + SM.getComposedLoc(DecomposedEnd.first, DecomposedEnd.second); + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, SourceRange(Begin, End), CXTokens); ++ getTokens(CXXUnit, SourceRange(Begin, End), CXTokens, CXTokenize_None); + + if (CXTokens.empty()) + return NULL; +@@ -6987,6 +6990,12 @@ CXToken *clang_getToken(CXTranslationUni + + void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, CXToken **Tokens, + unsigned *NumTokens) { ++ return clang_tokenizeRange(TU, Range, Tokens, NumTokens, CXTokenize_None); ++} ++ ++void clang_tokenizeRange(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens, ++ unsigned options) { + LOG_FUNC_SECTION { *Log << TU << ' ' << Range; } + + if (Tokens) +@@ -7010,7 +7019,7 @@ void clang_tokenize(CXTranslationUnit TU + return; + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, R, CXTokens); ++ getTokens(CXXUnit, R, CXTokens, options); + + if (CXTokens.empty()) + return; +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:38:37.647946177 +0100 +@@ -398,6 +399,7 @@ LLVM_13 { + clang_suspendTranslationUnit; + clang_toggleCrashRecovery; + clang_tokenize; ++ clang_tokenizeRange; + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; diff --git a/packages/llvm/llvm14-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch b/packages/llvm/llvm14-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch new file mode 100644 index 00000000..1380b699 --- /dev/null +++ b/packages/llvm/llvm14-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch @@ -0,0 +1,416 @@ +diff -pur 
spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:39:41.616213701 +0100 +@@ -1426,6 +1426,15 @@ class Cursor(Structure): + """ + _fields_ = [("_kind_id", c_int), ("xdata", c_int), ("data", c_void_p * 3)] + ++ # Default behavior. ++ GET_CHILDREN_NONE = 0 ++ ++ # Used to indicate that implicit cursors should be visited. ++ GET_CHILDREN_WITH_IMPLICIT = 1 ++ ++ # Used to indicate that template instantiations should be visited. ++ GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS = 2 ++ + @staticmethod + def from_location(tu, location): + # We store a reference to the TU in the instance so the TU won't get +@@ -1515,6 +1524,10 @@ class Cursor(Structure): + """ + return conf.lib.clang_EnumDecl_isScoped(self) + ++ def is_implicit(self): ++ """Test whether the cursor refers to an implicit declaration.""" ++ return conf.lib.clang_isImplicit(self) ++ + def get_definition(self): + """ + If the cursor is a reference to a declaration or a declaration of +@@ -1831,8 +1844,12 @@ class Cursor(Structure): + """Returns the value of the indicated arg as an unsigned 64b integer.""" + return conf.lib.clang_Cursor_getTemplateArgumentUnsignedValue(self, num) + +- def get_children(self): +- """Return an iterator for accessing the children of this cursor.""" ++ def get_children(self, with_implicit=False, with_template_instantiations=False): ++ """Return an iterator for accessing the children of this cursor. ++ ++ By default, cursors representing implicit declarations or template instantiations ++ will be skipped. ++ """ + + # FIXME: Expose iteration from CIndex, PR6125. 
+ def visitor(child, parent, children): +@@ -1845,18 +1862,24 @@ class Cursor(Structure): + children.append(child) + return 1 # continue + children = [] +- conf.lib.clang_visitChildren(self, callbacks['cursor_visit'](visitor), +- children) ++ dispatch = conf.lib.clang_visitChildren ++ options = Cursor.GET_CHILDREN_NONE ++ if with_implicit: ++ options |= Cursor.GET_CHILDREN_WITH_IMPLICIT ++ if with_template_instantiations: ++ options |= Cursor.GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS ++ conf.lib.clang_visitChildrenWithOptions( ++ self, callbacks['cursor_visit'](visitor), children, options) + return iter(children) + +- def walk_preorder(self): ++ def walk_preorder(self, **kwargs): + """Depth-first preorder walk over the cursor and its descendants. + + Yields cursors. + """ + yield self +- for child in self.get_children(): +- for descendant in child.walk_preorder(): ++ for child in self.get_children(**kwargs): ++ for descendant in child.walk_preorder(**kwargs): + yield descendant + + def get_tokens(self, options=0): +@@ -3928,6 +3951,10 @@ functionList = [ + [Type], + bool), + ++ ("clang_isImplicit", ++ [Cursor], ++ bool), ++ + ("clang_isInvalid", + [CursorKind], + bool), +@@ -3991,6 +4018,10 @@ functionList = [ + [Cursor, callbacks['cursor_visit'], py_object], + c_uint), + ++ ("clang_visitChildrenWithOptions", ++ [Cursor, callbacks['cursor_visit'], py_object, c_uint], ++ c_uint), ++ + ("clang_Cursor_getNumArguments", + [Cursor], + c_int), +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:39:41.616213701 +0100 +@@ -94,6 +94,39 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tu_nodes[2].displayname, 'f0(int, int)') + self.assertEqual(tu_nodes[2].is_definition(), True) + ++ def 
test_get_children_with_implicit(): ++ tu = get_tu('struct X {}; X x;', lang='cpp') ++ cursor = get_cursor(tu, 'X') ++ ++ children = list(cursor.get_children()) ++ self.assertEqual(len(children), 0, [(c.kind, c.spelling) for c in children]) ++ ++ children = list(cursor.get_children(with_implicit=True)) ++ self.assertNotEqual(len(children), 0) ++ for child in children: ++ self.assertTrue(child.is_implicit()) ++ self.assertEqual(child.spelling, "X") ++ self.assertIn(child.kind, [CursorKind.CONSTRUCTOR, CursorKind.STRUCT_DECL]) ++ ++ def test_get_children_with_template_instantiations(): ++ tu = get_tu( ++ 'template <typename T> T frobnicate(T val);' ++ 'extern template int frobnicate<int>(int);', ++ lang='cpp') ++ cursor = get_cursor(tu, 'frobnicate') ++ self.assertEqual(cursor.kind, CursorKind.FUNCTION_TEMPLATE) ++ ++ for child in cursor.get_children(): ++ # should not return an instantiation: ++ self.assertNotEqual(child.kind, CursorKind.FUNCTION_DECL) ++ ++ for child in cursor.get_children(with_template_instantiations=True): ++ if child.kind == CursorKind.FUNCTION_DECL: ++ self.assertEqual(child.spelling, 'frobnicate') ++ break ++ else: ++ self.fail("Couldn't find template instantiation") ++ + def test_references(self): + """Ensure that references to TranslationUnit are kept.""" + tu = get_tu('int x;') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:40:48.276492163 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. 
+ */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 64 ++#define CINDEX_VERSION_MINOR 65 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -2817,6 +2817,11 @@ CINDEX_LINKAGE unsigned clang_isPreproce + */ + CINDEX_LINKAGE unsigned clang_isUnexposed(enum CXCursorKind); + ++/*** ++ * \brief Determine whether the given cursor represents an implicit declaration. ++ */ ++CINDEX_LINKAGE unsigned clang_isImplicit(CXCursor); ++ + /** + * Describe the linkage of the entity referred to by a cursor. + */ +@@ -4274,6 +4279,32 @@ clang_visitChildrenWithBlock(CXCursor pa + #endif + #endif + ++typedef enum { ++ /** ++ * \brief Default behavior. ++ */ ++ CXVisitChildren_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that implicit cursors should be visited. ++ */ ++ CXVisitChildren_WithImplicit = 0x1, ++ ++ /** ++ * \brief Used to indicate that template instantiations should be visited. ++ */ ++ CXVisitChildren_WithTemplateInstantiations = 0x2 ++} CXVisitChildren_Flags; ++ ++/** ++ * \brief Visits the children of a cursor, allowing to pass extra options. ++ * Behaves identically to clang_visitChildren() in all other respects. ++ */ ++CINDEX_LINKAGE unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options); ++ + /** + * @} + */ +diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/libclang/CIndex.cpp +--- spack-src/clang/tools/libclang/CIndex.cpp 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/tools/libclang/CIndex.cpp 2025-02-03 18:39:43.000219486 +0100 +@@ -203,10 +203,11 @@ bool CursorVisitor::Visit(CXCursor Curso + return true; // abort. + } + +- // Ignore implicit declarations, unless it's an objc method because +- // currently we should report implicit methods for properties when indexing. 
+- if (D->isImplicit() && !isa<ObjCMethodDecl>(D)) +- return false; ++ // Unless instructed otherwise we ignore implicit declarations. ++ // ObjC methods are currently visited in any case, because implicit methods ++ // for properties should be reported when indexing. ++ if (!VisitImplicitDeclarations && D->isImplicit() && !isa<ObjCMethodDecl>(D)) ++ return false; + } + + // If we have a range of interest, and this cursor doesn't intersect with it, +@@ -713,10 +715,13 @@ bool CursorVisitor::VisitTagDecl(TagDecl + + bool CursorVisitor::VisitClassTemplateSpecializationDecl( + ClassTemplateSpecializationDecl *D) { +- bool ShouldVisitBody = false; ++ bool ShouldVisitBody = VisitTemplateInstantiations; + switch (D->getSpecializationKind()) { +- case TSK_Undeclared: + case TSK_ImplicitInstantiation: ++ if (VisitTemplateInstantiations && VisitImplicitDeclarations) { ++ break; ++ } ++ case TSK_Undeclared: + // Nothing to visit + return false; + +@@ -725,6 +730,7 @@ bool CursorVisitor::VisitClassTemplateSp + break; + + case TSK_ExplicitSpecialization: ++ // Always visit body of explicit specializations + ShouldVisitBody = true; + break; + } +@@ -945,7 +951,31 @@ bool CursorVisitor::VisitFunctionTemplat + return true; + + auto *FD = D->getTemplatedDecl(); +- return VisitAttributes(FD) || VisitFunctionDecl(FD); ++ if (VisitAttributes(FD) || VisitFunctionDecl(FD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *FD : D->specializations()) { ++ for (auto *RD : FD->redecls()) { ++ switch (RD->getTemplateSpecializationKind()) { ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: ++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool 
CursorVisitor::VisitClassTemplateDecl(ClassTemplateDecl *D) { +@@ -955,7 +985,40 @@ bool CursorVisitor::VisitClassTemplateDe + return true; + + auto *CD = D->getTemplatedDecl(); +- return VisitAttributes(CD) || VisitCXXRecordDecl(CD); ++ if (VisitAttributes(CD) || VisitCXXRecordDecl(CD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *SD : D->specializations()) { ++ for (auto *RD : SD->redecls()) { ++ // We don't want to visit injected-class-names in this traversal. ++ if (cast<CXXRecordDecl>(RD)->isInjectedClassName()) ++ continue; ++ ++ switch ( ++ cast<ClassTemplateSpecializationDecl>(RD)->getSpecializationKind()) { ++ // Visit the implicit instantiations with the requested pattern. ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ // We don't need to do anything on an explicit instantiation ++ // or explicit specialization because there will be an explicit ++ // node for it elsewhere. 
++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool CursorVisitor::VisitTemplateTemplateParmDecl(TemplateTemplateParmDecl *D) { +@@ -4596,6 +4659,24 @@ unsigned clang_visitChildrenWithBlock(CX + return clang_visitChildren(parent, visitWithBlock, block); + } + ++unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options) { ++ CursorVisitor CursorVis( ++ getCursorTU(parent), visitor, client_data, ++ /*VisitPreprocessorLast=*/false, ++ /*VisitIncludedPreprocessingEntries=*/false, ++ /*RegionOfInterest=*/SourceRange(), ++ /*VisitDeclsOnly=*/false, ++ /*PostChildrenVisitor=*/nullptr, ++ /*VisitImplicitDeclarations=*/(options & CXVisitChildren_WithImplicit), ++ /*VisitTemplateInstantiations=*/ ++ (options & CXVisitChildren_WithTemplateInstantiations)); ++ ++ return CursorVis.VisitChildren(parent); ++} ++ + static CXString getDeclSpelling(const Decl *D) { + if (!D) + return cxstring::createEmpty(); +@@ -5983,6 +6064,22 @@ unsigned clang_isUnexposed(enum CXCursor + } + } + ++unsigned clang_isImplicit(CXCursor Cursor) { ++ if (clang_isInvalid(Cursor.kind)) ++ return false; ++ ++ if (!clang_isDeclaration(Cursor.kind)) ++ return false; ++ ++ const Decl *D = getCursorDecl(Cursor); ++ if (!D) { ++ assert(0 && "Invalid declaration cursor"); ++ return true; // abort. ++ } ++ ++ return D->isImplicit(); ++} ++ + CXCursorKind clang_getCursorKind(CXCursor C) { return C.kind; } + + CXSourceLocation clang_getCursorLocation(CXCursor C) { +diff -pur spack-src/clang/tools/libclang/CursorVisitor.h spack-src-new/clang/tools/libclang/CursorVisitor.h +--- spack-src/clang/tools/libclang/CursorVisitor.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CursorVisitor.h 2025-02-03 18:39:43.000219486 +0100 +@@ -104,6 +104,12 @@ private: + /// record entries. 
+ bool VisitDeclsOnly; + ++ /// \brief Whether we should visit implicit declarations. ++ bool VisitImplicitDeclarations; ++ ++ /// \brief Whether we should recurse into template instantiations. ++ bool VisitTemplateInstantiations; ++ + // FIXME: Eventually remove. This part of a hack to support proper + // iteration over all Decls contained lexically within an ObjC container. + DeclContext::decl_iterator *DI_current; +@@ -152,12 +158,16 @@ public: + bool VisitIncludedPreprocessingEntries = false, + SourceRange RegionOfInterest = SourceRange(), + bool VisitDeclsOnly = false, +- PostChildrenVisitorTy PostChildrenVisitor = nullptr) ++ PostChildrenVisitorTy PostChildrenVisitor = nullptr, ++ bool VisitImplicitDeclarations = false, ++ bool VisitTemplateInstantiations = false) + : TU(TU), AU(cxtu::getASTUnit(TU)), Visitor(Visitor), + PostChildrenVisitor(PostChildrenVisitor), ClientData(ClientData), + VisitPreprocessorLast(VisitPreprocessorLast), + VisitIncludedEntities(VisitIncludedPreprocessingEntries), + RegionOfInterest(RegionOfInterest), VisitDeclsOnly(VisitDeclsOnly), ++ VisitImplicitDeclarations(VisitImplicitDeclarations), ++ VisitTemplateInstantiations(VisitTemplateInstantiations), + DI_current(nullptr), FileDI_current(nullptr) { + Parent.kind = CXCursor_NoDeclFound; + Parent.data[0] = nullptr; +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:40:21.600380765 +0100 +@@ -375,6 +375,7 @@ LLVM_13 { + clang_isExpression; + clang_isFileMultipleIncludeGuarded; + clang_isFunctionTypeVariadic; ++ clang_isImplicit; + clang_isInvalid; + clang_isInvalidDeclaration; + clang_isPODType; +@@ -403,6 +404,7 @@ LLVM_13 { + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; ++ clang_visitChildrenWithOptions; + + local: *; + }; diff --git 
a/packages/llvm/llvm14-public-ClangToolingCommonOptionsParser.patch b/packages/llvm/llvm14-public-ClangToolingCommonOptionsParser.patch new file mode 100644 index 00000000..7641f8a3 --- /dev/null +++ b/packages/llvm/llvm14-public-ClangToolingCommonOptionsParser.patch @@ -0,0 +1,12 @@ +diff -pur spack-src/clang/include/clang/Tooling/CommonOptionsParser.h spack-src-new/clang/include/clang/Tooling/CommonOptionsParser.h +--- spack-src/clang/include/clang/Tooling/CommonOptionsParser.h 2025-02-17 12:35:27.296733912 +0100 ++++ spack-src-new/clang/include/clang/Tooling/CommonOptionsParser.h 2025-02-17 12:35:59.393070972 +0100 +@@ -64,7 +64,7 @@ namespace tooling { + /// \endcode + class CommonOptionsParser { + +-protected: ++public: + /// Parses command-line, initializes a compilation database. + /// + /// This constructor can change argc and argv contents, e.g. consume diff --git a/packages/llvm/package.py b/packages/llvm/package.py index 80e72b37..1af09da1 100644 --- a/packages/llvm/package.py +++ b/packages/llvm/package.py @@ -395,7 +395,7 @@ class Llvm(CMakePackage, CudaPackage): "bindings generator") conflicts("@:8", when="+visionary") - conflicts("@14:", when="+visionary") + conflicts("@16:", when="+visionary") patch('llvm9-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch', when='@9:12 +visionary', level=2) @@ -416,7 +416,13 @@ class Llvm(CMakePackage, CudaPackage): patch('llvm13-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@13 +visionary', level=1) patch('llvm13-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@13 +visionary', level=1) # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above + + # 0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch from above + # 0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch from above + patch('llvm14-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@14:15 +visionary', level=1) + 
patch('llvm14-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@14:15 +visionary', level=1) # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above + patch('llvm14-public-ClangToolingCommonOptionsParser.patch', when='@14: +visionary', level=1) # disable check for member `mode` size in `struct ipc_perm`; newer glibc changed width patch('llvm9-disable-check-for-ipc_perm-mode.patch', when='@9.0.0:9.0.999', level=2) -- GitLab From 7464c3964dafecb83564cd83e949c339686f8c4b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Mon, 3 Feb 2025 19:18:16 +0100 Subject: [PATCH 029/111] fix(llvm+visionary): port patches to llvm@16 --- ...port-for-obtaining-fully-qualified-n.patch | 136 ++++++ ...ion-to-keep-whitespace-when-tokenizi.patch | 238 ++++++++++ ...ow-visiting-of-implicit-declarations.patch | 413 ++++++++++++++++++ packages/llvm/package.py | 9 +- 4 files changed, 795 insertions(+), 1 deletion(-) create mode 100644 packages/llvm/llvm16-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch create mode 100644 packages/llvm/llvm16-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch create mode 100644 packages/llvm/llvm16-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch diff --git a/packages/llvm/llvm16-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch b/packages/llvm/llvm16-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch new file mode 100644 index 00000000..97e4293c --- /dev/null +++ b/packages/llvm/llvm16-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch @@ -0,0 +1,136 @@ +diff -pur spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:35:08.767069975 +0100 +@@ -2428,6 +2428,14 @@ class Type(Structure): + 
"""Retrieve the spelling of this Type.""" + return conf.lib.clang_getTypeSpelling(self) + ++ @property ++ def fully_qualified_name(self): ++ """Retrieve the fully qualified name of this Type.""" ++ if not hasattr(self, '_fully_qualified_name'): ++ self._fully_qualified_name = conf.lib.clang_getFullyQualifiedTypeName(self) ++ ++ return self._fully_qualified_name ++ + def __eq__(self, other): + if type(other) != type(self): + return False +@@ -3869,6 +3877,11 @@ functionList = [ + [Type], + _CXString, + _CXString.from_result), ++ ++ ("clang_getFullyQualifiedTypeName", ++ [Type], ++ _CXString, ++ _CXString.from_result), + + ("clang_hashCursor", + [Cursor], +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:35:08.767069975 +0100 +@@ -316,6 +316,14 @@ class TestCursor(unittest.TestCase): + underlying = typedef.underlying_typedef_type + self.assertEqual(underlying.kind, TypeKind.INT) + ++ def test_fully_qualified_type_name(): ++ source = 'namespace uiae { struct X { typedef int sometype; }; }' ++ tu = get_tu(source, lang='cpp') ++ ++ cls = get_cursor(tu, 'sometype') ++ fqn = cls.type.fully_qualified_name ++ self.assertTrue(fqn.endswith("uiae::X::sometype"), fqn) ++ + def test_semantic_parent(self): + tu = get_tu(kParentTest, 'cpp') + curs = get_cursors(tu, 'f') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:35:52.971255790 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. 
+ */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 63 ++#define CINDEX_VERSION_MINOR 64 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -3447,6 +3447,14 @@ CINDEX_LINKAGE CXType clang_getCursorTyp + CINDEX_LINKAGE CXString clang_getTypeSpelling(CXType CT); + + /** ++ * Retrieve the fully qualified name of the underlying type. ++ * This includes full qualification of all template parameters etc. ++ * ++ * If the type is invalid, an empty string is returned. ++ */ ++CINDEX_LINKAGE CXString clang_getFullyQualifiedTypeName(CXType CT); ++ ++/** + * Retrieve the underlying type of a typedef declaration. + * + * If the cursor does not reference a typedef declaration, an invalid type is +diff -pur spack-src/clang/tools/libclang/CMakeLists.txt spack-src-new/clang/tools/libclang/CMakeLists.txt +--- spack-src/clang/tools/libclang/CMakeLists.txt 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CMakeLists.txt 2025-02-03 18:35:11.263080474 +0100 +@@ -52,6 +52,7 @@ set(LIBS + clangSema + clangSerialization + clangTooling ++ clangToolingCore + ) + + if (CLANG_ENABLE_ARCMT) +diff -pur spack-src/clang/tools/libclang/CXType.cpp spack-src-new/clang/tools/libclang/CXType.cpp +--- spack-src/clang/tools/libclang/CXType.cpp 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CXType.cpp 2025-02-03 18:35:11.263080474 +0100 +@@ -19,6 +19,7 @@ + #include "clang/AST/DeclObjC.h" + #include "clang/AST/DeclTemplate.h" + #include "clang/AST/Expr.h" ++#include "clang/AST/QualTypeNames.h" + #include "clang/AST/Type.h" + #include "clang/Basic/AddressSpaces.h" + #include "clang/Frontend/ASTUnit.h" +@@ -303,6 +304,27 @@ CXString clang_getTypeSpelling(CXType CT + return cxstring::createDup(OS.str()); + } + ++CXString clang_getFullyQualifiedTypeName(CXType CT) { ++ QualType T = GetQualType(CT); ++ if (T.isNull()) ++ return cxstring::createEmpty(); ++ ++ // For builtin types (but not typedefs 
pointing to builtin types) return their ++ // spelling. Otherwise "bool" will be turned into "_Bool". ++ const Type *TP = T.getTypePtrOrNull(); ++ if (TP && TP->isBuiltinType() && T->getAs<TypedefType>() == nullptr) ++ return clang_getTypeSpelling(CT); ++ ++ CXTranslationUnit TU = GetTU(CT); ++ ASTContext &Ctx = cxtu::getASTUnit(TU)->getASTContext(); ++ PrintingPolicy Policy(Ctx.getPrintingPolicy()); ++ Policy.SuppressScope = false; ++ Policy.AnonymousTagLocations = false; ++ Policy.PolishForDeclaration = true; ++ std::string name = TypeName::getFullyQualifiedName(T, Ctx, Policy, /*WithGlobalNsPrefix=*/true); ++ return cxstring::createDup(name.c_str()); ++} ++ + CXType clang_getTypedefDeclUnderlyingType(CXCursor C) { + using namespace cxcursor; + CXTranslationUnit TU = cxcursor::getCursorTU(C); +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:36:14.531346336 +0100 +@@ -303,6 +303,7 @@ LLVM_13 { + clang_getFileName; + clang_getFileTime; + clang_getFileUniqueID; ++ clang_getFullyQualifiedTypeName; + clang_getFunctionTypeCallingConv; + clang_getIBOutletCollectionType; + clang_getIncludedFile; diff --git a/packages/llvm/llvm16-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch b/packages/llvm/llvm16-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch new file mode 100644 index 00000000..37a208e3 --- /dev/null +++ b/packages/llvm/llvm16-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch @@ -0,0 +1,238 @@ +diff -pur spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:37:54.447765317 +0100 +@@ -529,6 +529,13 @@ class 
TokenGroup(object): + + You should not instantiate this class outside of this module. + """ ++ ++ # Default tokenization mode. ++ TOKENIZE_NONE = 0 ++ ++ # Used to indicate that tokens for whitespace should be returned. ++ TOKENIZE_KEEP_WHITESPACE = 1 ++ + def __init__(self, tu, memory, count): + self._tu = tu + self._memory = memory +@@ -538,7 +545,7 @@ class TokenGroup(object): + conf.lib.clang_disposeTokens(self._tu, self._memory, self._count) + + @staticmethod +- def get_tokens(tu, extent): ++ def get_tokens(tu, extent, options=0): + """Helper method to return all tokens in an extent. + + This functionality is needed multiple places in this module. We define +@@ -547,8 +554,8 @@ class TokenGroup(object): + tokens_memory = POINTER(Token)() + tokens_count = c_uint() + +- conf.lib.clang_tokenize(tu, extent, byref(tokens_memory), +- byref(tokens_count)) ++ conf.lib.clang_tokenizeRange(tu, extent, byref(tokens_memory), ++ byref(tokens_count), options) + + count = int(tokens_count.value) + +@@ -1852,13 +1859,16 @@ class Cursor(Structure): + for descendant in child.walk_preorder(): + yield descendant + +- def get_tokens(self): ++ def get_tokens(self, options=0): + """Obtain Token instances formulating that compose this Cursor. + + This is a generator for Token instances. It returns all tokens which + occupy the extent this cursor occupies. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ +- return TokenGroup.get_tokens(self._tu, self.extent) ++ return TokenGroup.get_tokens(self._tu, self.extent, options) + + def get_field_offsetof(self): + """Returns the offsetof the FIELD_DECL pointed by this Cursor.""" +@@ -3073,18 +3091,21 @@ class TranslationUnit(ClangObject): + return CodeCompletionResults(ptr) + return None + +- def get_tokens(self, locations=None, extent=None): ++ def get_tokens(self, locations=None, extent=None, options=0): + """Obtain tokens in this translation unit. 
+ + This is a generator for Token instances. The caller specifies a range + of source code to obtain tokens for. The range can be specified as a + 2-tuple of SourceLocation or as a SourceRange. If both are defined, + behavior is undefined. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ + if locations is not None: + extent = SourceRange(start=locations[0], end=locations[1]) + +- return TokenGroup.get_tokens(self, extent) ++ return TokenGroup.get_tokens(self, extent, options) + + class File(ClangObject): + """ +@@ -3957,6 +3983,10 @@ functionList = [ + ("clang_tokenize", + [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint)]), + ++ ("clang_tokenizeRange", ++ [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint), ++ c_uint]), ++ + ("clang_visitChildren", + [Cursor, callbacks['cursor_visit'], py_object], + c_uint), +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:37:54.447765317 +0100 +@@ -10,6 +10,7 @@ import unittest + from clang.cindex import AvailabilityKind + from clang.cindex import CursorKind + from clang.cindex import TemplateArgumentKind ++from clang.cindex import TokenGroup + from clang.cindex import TranslationUnit + from clang.cindex import TypeKind + from .util import get_cursor +@@ -480,6 +489,14 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tokens[0].spelling, 'int') + self.assertEqual(tokens[1].spelling, 'foo') + ++ def test_get_tokens_with_whitespace(): ++ source = 'class C { void f(); }\nvoid C::f() { }' ++ tu = get_tu(source) ++ ++ tokens = list(tu.cursor.get_tokens(TokenGroup.TOKENIZE_KEEP_WHITESPACE)) ++ self.assertEqual(''.join(t.spelling for t in tokens), source) ++ 
self.assertEqual(len(tokens), 27, [t.spelling for t in tokens]) ++ + def test_get_token_cursor(self): + """Ensure we can map tokens to cursors.""" + tu = get_tu('class A {}; int foo(A var = A());', lang='cpp') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:38:17.919863604 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. + */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 64 ++#define CINDEX_VERSION_MINOR 65 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -5036,6 +5044,28 @@ CINDEX_LINKAGE CXSourceLocation clang_ge + */ + CINDEX_LINKAGE CXSourceRange clang_getTokenExtent(CXTranslationUnit, CXToken); + ++typedef enum { ++ /** ++ * \brief Used to indicate that no special tokenization options are needed. ++ */ ++ CXTokenize_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that tokens for whitespace should be returned. ++ */ ++ CXTokenize_KeepWhitespace = 0x1 ++} CXTokenize_Flags; ++ ++/** ++ * \brief Tokenize the source code described by the given range into raw ++ * lexical tokens. ++ * ++ * \see clang_tokenizeRange ++ * ++ */ ++CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens); ++ + /** + * Tokenize the source code described by the given range into raw + * lexical tokens. +@@ -5052,9 +5082,13 @@ CINDEX_LINKAGE CXSourceRange clang_getTo + * \param NumTokens will be set to the number of tokens in the \c *Tokens + * array. + * ++ * \param options A bitmask of options that affects tokenization. This should be ++ * a bitwise OR of the CXTokenize_XXX flags. 
++ * + */ +-CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, +- CXToken **Tokens, unsigned *NumTokens); ++CINDEX_LINKAGE void clang_tokenizeRange(CXTranslationUnit TU, ++ CXSourceRange Range, CXToken **Tokens, ++ unsigned *NumTokens, unsigned options); + + /** + * Annotate the given set of tokens by providing cursors for each token +diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/libclang/CIndex.cpp +--- spack-src/clang/tools/libclang/CIndex.cpp 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CIndex.cpp 2025-02-03 18:37:55.855771214 +0100 +@@ -6882,7 +6882,7 @@ CXSourceRange clang_getTokenExtent(CXTra + } + + static void getTokens(ASTUnit *CXXUnit, SourceRange Range, +- SmallVectorImpl<CXToken> &CXTokens) { ++ SmallVectorImpl<CXToken> &CXTokens, unsigned options) { + SourceManager &SourceMgr = CXXUnit->getSourceManager(); + std::pair<FileID, unsigned> BeginLocInfo = + SourceMgr.getDecomposedSpellingLoc(Range.getBegin()); +@@ -6903,6 +6903,9 @@ static void getTokens(ASTUnit *CXXUnit, + CXXUnit->getASTContext().getLangOpts(), Buffer.begin(), + Buffer.data() + BeginLocInfo.second, Buffer.end()); + Lex.SetCommentRetentionState(true); ++ if (options & CXTokenize_KeepWhitespace) { ++ Lex.SetKeepWhitespaceMode(true); ++ } + + // Lex tokens until we hit the end of the range. 
+ const char *EffectiveBufferEnd = Buffer.data() + EndLocInfo.second; +@@ -6973,7 +6976,7 @@ CXToken *clang_getToken(CXTranslationUni + SM.getComposedLoc(DecomposedEnd.first, DecomposedEnd.second); + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, SourceRange(Begin, End), CXTokens); ++ getTokens(CXXUnit, SourceRange(Begin, End), CXTokens, CXTokenize_None); + + if (CXTokens.empty()) + return NULL; +@@ -6987,6 +6990,12 @@ CXToken *clang_getToken(CXTranslationUni + + void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, CXToken **Tokens, + unsigned *NumTokens) { ++ return clang_tokenizeRange(TU, Range, Tokens, NumTokens, CXTokenize_None); ++} ++ ++void clang_tokenizeRange(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens, ++ unsigned options) { + LOG_FUNC_SECTION { *Log << TU << ' ' << Range; } + + if (Tokens) +@@ -7010,7 +7019,7 @@ void clang_tokenize(CXTranslationUnit TU + return; + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, R, CXTokens); ++ getTokens(CXXUnit, R, CXTokens, options); + + if (CXTokens.empty()) + return; +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:38:37.647946177 +0100 +@@ -398,6 +399,7 @@ LLVM_13 { + clang_suspendTranslationUnit; + clang_toggleCrashRecovery; + clang_tokenize; ++ clang_tokenizeRange; + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; diff --git a/packages/llvm/llvm16-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch b/packages/llvm/llvm16-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch new file mode 100644 index 00000000..c415eaa1 --- /dev/null +++ b/packages/llvm/llvm16-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch @@ -0,0 +1,413 @@ +diff -pur 
spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 18:39:41.616213701 +0100 +@@ -1426,6 +1426,15 @@ class Cursor(Structure): + """ + _fields_ = [("_kind_id", c_int), ("xdata", c_int), ("data", c_void_p * 3)] + ++ # Default behavior. ++ GET_CHILDREN_NONE = 0 ++ ++ # Used to indicate that implicit cursors should be visited. ++ GET_CHILDREN_WITH_IMPLICIT = 1 ++ ++ # Used to indicate that template instantiations should be visited. ++ GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS = 2 ++ + @staticmethod + def from_location(tu, location): + # We store a reference to the TU in the instance so the TU won't get +@@ -1515,6 +1524,10 @@ class Cursor(Structure): + """ + return conf.lib.clang_EnumDecl_isScoped(self) + ++ def is_implicit(self): ++ """Test whether the cursor refers to an implicit declaration.""" ++ return conf.lib.clang_isImplicit(self) ++ + def get_definition(self): + """ + If the cursor is a reference to a declaration or a declaration of +@@ -1831,8 +1844,12 @@ class Cursor(Structure): + """Returns the value of the indicated arg as an unsigned 64b integer.""" + return conf.lib.clang_Cursor_getTemplateArgumentUnsignedValue(self, num) + +- def get_children(self): +- """Return an iterator for accessing the children of this cursor.""" ++ def get_children(self, with_implicit=False, with_template_instantiations=False): ++ """Return an iterator for accessing the children of this cursor. ++ ++ By default, cursors representing implicit declarations or template instantiations ++ will be skipped. ++ """ + + # FIXME: Expose iteration from CIndex, PR6125. 
+ def visitor(child, parent, children): +@@ -1845,18 +1862,24 @@ class Cursor(Structure): + children.append(child) + return 1 # continue + children = [] +- conf.lib.clang_visitChildren(self, callbacks['cursor_visit'](visitor), +- children) ++ dispatch = conf.lib.clang_visitChildren ++ options = Cursor.GET_CHILDREN_NONE ++ if with_implicit: ++ options |= Cursor.GET_CHILDREN_WITH_IMPLICIT ++ if with_template_instantiations: ++ options |= Cursor.GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS ++ conf.lib.clang_visitChildrenWithOptions( ++ self, callbacks['cursor_visit'](visitor), children, options) + return iter(children) + +- def walk_preorder(self): ++ def walk_preorder(self, **kwargs): + """Depth-first preorder walk over the cursor and its descendants. + + Yields cursors. + """ + yield self +- for child in self.get_children(): +- for descendant in child.walk_preorder(): ++ for child in self.get_children(**kwargs): ++ for descendant in child.walk_preorder(**kwargs): + yield descendant + + def get_tokens(self, options=0): +@@ -3928,6 +3951,10 @@ functionList = [ + [Type], + bool), + ++ ("clang_isImplicit", ++ [Cursor], ++ bool), ++ + ("clang_isInvalid", + [CursorKind], + bool), +@@ -3991,6 +4018,10 @@ functionList = [ + [Cursor, callbacks['cursor_visit'], py_object], + c_uint), + ++ ("clang_visitChildrenWithOptions", ++ [Cursor, callbacks['cursor_visit'], py_object, c_uint], ++ c_uint), ++ + ("clang_Cursor_getNumArguments", + [Cursor], + c_int), +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 18:39:41.616213701 +0100 +@@ -94,6 +94,39 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tu_nodes[2].displayname, 'f0(int, int)') + self.assertEqual(tu_nodes[2].is_definition(), True) + ++ def 
test_get_children_with_implicit(): ++ tu = get_tu('struct X {}; X x;', lang='cpp') ++ cursor = get_cursor(tu, 'X') ++ ++ children = list(cursor.get_children()) ++ self.assertEqual(len(children), 0, [(c.kind, c.spelling) for c in children]) ++ ++ children = list(cursor.get_children(with_implicit=True)) ++ self.assertNotEqual(len(children), 0) ++ for child in children: ++ self.assertTrue(child.is_implicit()) ++ self.assertEqual(child.spelling, "X") ++ self.assertIn(child.kind, [CursorKind.CONSTRUCTOR, CursorKind.STRUCT_DECL]) ++ ++ def test_get_children_with_template_instantiations(): ++ tu = get_tu( ++ 'template <typename T> T frobnicate(T val);' ++ 'extern template int frobnicate<int>(int);', ++ lang='cpp') ++ cursor = get_cursor(tu, 'frobnicate') ++ self.assertEqual(cursor.kind, CursorKind.FUNCTION_TEMPLATE) ++ ++ for child in cursor.get_children(): ++ # should not return an instantiation: ++ self.assertNotEqual(child.kind, CursorKind.FUNCTION_DECL) ++ ++ for child in cursor.get_children(with_template_instantiations=True): ++ if child.kind == CursorKind.FUNCTION_DECL: ++ self.assertEqual(child.spelling, 'frobnicate') ++ break ++ else: ++ self.fail("Couldn't find template instantiation") ++ + def test_references(self): + """Ensure that references to TranslationUnit are kept.""" + tu = get_tu('int x;') +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 18:40:48.276492163 +0100 +@@ -33,7 +33,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. 
+ */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 65 ++#define CINDEX_VERSION_MINOR 66 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -2817,6 +2817,11 @@ CINDEX_LINKAGE unsigned clang_isPreproce + */ + CINDEX_LINKAGE unsigned clang_isUnexposed(enum CXCursorKind); + ++/*** ++ * \brief Determine whether the given cursor represents an implicit declaration. ++ */ ++CINDEX_LINKAGE unsigned clang_isImplicit(CXCursor); ++ + /** + * Describe the linkage of the entity referred to by a cursor. + */ +@@ -4274,6 +4279,32 @@ clang_visitChildrenWithBlock(CXCursor pa + #endif + #endif + ++typedef enum { ++ /** ++ * \brief Default behavior. ++ */ ++ CXVisitChildren_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that implicit cursors should be visited. ++ */ ++ CXVisitChildren_WithImplicit = 0x1, ++ ++ /** ++ * \brief Used to indicate that template instantiations should be visited. ++ */ ++ CXVisitChildren_WithTemplateInstantiations = 0x2 ++} CXVisitChildren_Flags; ++ ++/** ++ * \brief Visits the children of a cursor, allowing to pass extra options. ++ * Behaves identically to clang_visitChildren() in all other respects. ++ */ ++CINDEX_LINKAGE unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options); ++ + /** + * @} + */ +diff -pur spack-src/clang/tools/libclang/CIndex.cpp spack-src-new/clang/tools/libclang/CIndex.cpp +--- spack-src/clang/tools/libclang/CIndex.cpp 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/tools/libclang/CIndex.cpp 2025-02-03 18:39:43.000219486 +0100 +@@ -203,9 +203,10 @@ bool CursorVisitor::Visit(CXCursor Curso + return true; // abort. + } + +- // Ignore implicit declarations, unless it's an objc method because +- // currently we should report implicit methods for properties when indexing. +- if (D->isImplicit() && !isa<ObjCMethodDecl>(D)) ++ // Unless instructed otherwise we ignore implicit declarations. 
++ // ObjC methods are currently visited in any case, because implicit methods ++ // for properties should be reported when indexing. ++ if (!VisitImplicitDeclarations && D->isImplicit() && !isa<ObjCMethodDecl>(D)) + return false; + } + +@@ -713,10 +714,13 @@ bool CursorVisitor::VisitTagDecl(TagDecl + + bool CursorVisitor::VisitClassTemplateSpecializationDecl( + ClassTemplateSpecializationDecl *D) { +- bool ShouldVisitBody = false; ++ bool ShouldVisitBody = VisitTemplateInstantiations; + switch (D->getSpecializationKind()) { +- case TSK_Undeclared: + case TSK_ImplicitInstantiation: ++ if (VisitTemplateInstantiations && VisitImplicitDeclarations) { ++ break; ++ } ++ case TSK_Undeclared: + // Nothing to visit + return false; + +@@ -725,6 +729,7 @@ bool CursorVisitor::VisitClassTemplateSp + break; + + case TSK_ExplicitSpecialization: ++ // Always visit body of explicit specializations + ShouldVisitBody = true; + break; + } +@@ -945,7 +950,31 @@ bool CursorVisitor::VisitFunctionTemplat + return true; + + auto *FD = D->getTemplatedDecl(); +- return VisitAttributes(FD) || VisitFunctionDecl(FD); ++ if (VisitAttributes(FD) || VisitFunctionDecl(FD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *FD : D->specializations()) { ++ for (auto *RD : FD->redecls()) { ++ switch (RD->getTemplateSpecializationKind()) { ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: ++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool CursorVisitor::VisitClassTemplateDecl(ClassTemplateDecl *D) { +@@ -956,6 +985,40 @@ bool CursorVisitor::VisitClassTemplateDe + + auto *CD = D->getTemplatedDecl(); + return VisitAttributes(CD) || VisitCXXRecordDecl(CD); ++ if (VisitAttributes(CD) || 
VisitCXXRecordDecl(CD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *SD : D->specializations()) { ++ for (auto *RD : SD->redecls()) { ++ // We don't want to visit injected-class-names in this traversal. ++ if (cast<CXXRecordDecl>(RD)->isInjectedClassName()) ++ continue; ++ ++ switch ( ++ cast<ClassTemplateSpecializationDecl>(RD)->getSpecializationKind()) { ++ // Visit the implicit instantiations with the requested pattern. ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ // We don't need to do anything on an explicit instantiation ++ // or explicit specialization because there will be an explicit ++ // node for it elsewhere. ++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool CursorVisitor::VisitTemplateTemplateParmDecl(TemplateTemplateParmDecl *D) { +@@ -4596,6 +4659,24 @@ unsigned clang_visitChildrenWithBlock(CX + return clang_visitChildren(parent, visitWithBlock, block); + } + ++unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options) { ++ CursorVisitor CursorVis( ++ getCursorTU(parent), visitor, client_data, ++ /*VisitPreprocessorLast=*/false, ++ /*VisitIncludedPreprocessingEntries=*/false, ++ /*RegionOfInterest=*/SourceRange(), ++ /*VisitDeclsOnly=*/false, ++ /*PostChildrenVisitor=*/nullptr, ++ /*VisitImplicitDeclarations=*/(options & CXVisitChildren_WithImplicit), ++ /*VisitTemplateInstantiations=*/ ++ (options & CXVisitChildren_WithTemplateInstantiations)); ++ ++ return CursorVis.VisitChildren(parent); ++} ++ + static CXString getDeclSpelling(const Decl *D) { + if (!D) + return cxstring::createEmpty(); +@@ -5983,6 +6064,22 @@ unsigned 
clang_isUnexposed(enum CXCursor + } + } + ++unsigned clang_isImplicit(CXCursor Cursor) { ++ if (clang_isInvalid(Cursor.kind)) ++ return false; ++ ++ if (!clang_isDeclaration(Cursor.kind)) ++ return false; ++ ++ const Decl *D = getCursorDecl(Cursor); ++ if (!D) { ++ assert(0 && "Invalid declaration cursor"); ++ return true; // abort. ++ } ++ ++ return D->isImplicit(); ++} ++ + CXCursorKind clang_getCursorKind(CXCursor C) { return C.kind; } + + CXSourceLocation clang_getCursorLocation(CXCursor C) { +diff -pur spack-src/clang/tools/libclang/CursorVisitor.h spack-src-new/clang/tools/libclang/CursorVisitor.h +--- spack-src/clang/tools/libclang/CursorVisitor.h 2022-01-20 22:31:59.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CursorVisitor.h 2025-02-03 18:39:43.000219486 +0100 +@@ -104,6 +104,12 @@ private: + /// record entries. + bool VisitDeclsOnly; + ++ /// \brief Whether we should visit implicit declarations. ++ bool VisitImplicitDeclarations; ++ ++ /// \brief Whether we should recurse into template instantiations. ++ bool VisitTemplateInstantiations; ++ + // FIXME: Eventually remove. This part of a hack to support proper + // iteration over all Decls contained lexically within an ObjC container. 
+ DeclContext::decl_iterator *DI_current; +@@ -152,12 +158,16 @@ public: + bool VisitIncludedPreprocessingEntries = false, + SourceRange RegionOfInterest = SourceRange(), + bool VisitDeclsOnly = false, +- PostChildrenVisitorTy PostChildrenVisitor = nullptr) ++ PostChildrenVisitorTy PostChildrenVisitor = nullptr, ++ bool VisitImplicitDeclarations = false, ++ bool VisitTemplateInstantiations = false) + : TU(TU), AU(cxtu::getASTUnit(TU)), Visitor(Visitor), + PostChildrenVisitor(PostChildrenVisitor), ClientData(ClientData), + VisitPreprocessorLast(VisitPreprocessorLast), + VisitIncludedEntities(VisitIncludedPreprocessingEntries), + RegionOfInterest(RegionOfInterest), VisitDeclsOnly(VisitDeclsOnly), ++ VisitImplicitDeclarations(VisitImplicitDeclarations), ++ VisitTemplateInstantiations(VisitTemplateInstantiations), + DI_current(nullptr), FileDI_current(nullptr) { + Parent.kind = CXCursor_NoDeclFound; + Parent.data[0] = nullptr; +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2025-02-03 18:51:43.871219854 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 18:40:21.600380765 +0100 +@@ -375,6 +375,7 @@ LLVM_13 { + clang_isExpression; + clang_isFileMultipleIncludeGuarded; + clang_isFunctionTypeVariadic; ++ clang_isImplicit; + clang_isInvalid; + clang_isInvalidDeclaration; + clang_isPODType; +@@ -403,6 +404,7 @@ LLVM_13 { + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; ++ clang_visitChildrenWithOptions; + + local: *; + }; diff --git a/packages/llvm/package.py b/packages/llvm/package.py index 1af09da1..d1ab9de9 100644 --- a/packages/llvm/package.py +++ b/packages/llvm/package.py @@ -395,7 +395,7 @@ class Llvm(CMakePackage, CudaPackage): "bindings generator") conflicts("@:8", when="+visionary") - conflicts("@16:", when="+visionary") + conflicts("@17:", when="+visionary") 
patch('llvm9-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch', when='@9:12 +visionary', level=2) @@ -424,6 +424,13 @@ class Llvm(CMakePackage, CudaPackage): # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above patch('llvm14-public-ClangToolingCommonOptionsParser.patch', when='@14: +visionary', level=1) + # 0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch from above + patch('llvm16-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch', when='@16 +visionary', level=1) + patch('llvm16-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@16 +visionary', level=1) + patch('llvm16-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@16 +visionary', level=1) + # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above + # llvm14-public-ClangToolingCommonOptionsParser.patch from above + # disable check for member `mode` size in `struct ipc_perm`; newer glibc changed width patch('llvm9-disable-check-for-ipc_perm-mode.patch', when='@9.0.0:9.0.999', level=2) # end VISIONS -- GitLab From 4b2e48d592b8214b3a174fa1ba12df294b3522ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Mon, 3 Feb 2025 19:45:29 +0100 Subject: [PATCH 030/111] fix(llvm+visionary): port patches to llvm@17 --- ...port-for-obtaining-fully-qualified-n.patch | 135 +++++ ...ion-to-keep-whitespace-when-tokenizi.patch | 243 +++++++++ ...ow-visiting-of-implicit-declarations.patch | 490 ++++++++++++++++++ packages/llvm/package.py | 9 +- 4 files changed, 876 insertions(+), 1 deletion(-) create mode 100644 packages/llvm/llvm17-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch create mode 100644 packages/llvm/llvm17-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch create mode 100644 packages/llvm/llvm17-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch diff --git 
a/packages/llvm/llvm17-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch b/packages/llvm/llvm17-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch new file mode 100644 index 00000000..d4fd8347 --- /dev/null +++ b/packages/llvm/llvm17-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch @@ -0,0 +1,135 @@ +diff -pur spack-src/clang/bindings/python/clang/cindex.py spack-src-new/clang/bindings/python/clang/cindex.py +--- spack-src/clang/bindings/python/clang/cindex.py 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 19:31:25.893090799 +0100 +@@ -2589,6 +2589,14 @@ class Type(Structure): + """Retrieve the spelling of this Type.""" + return conf.lib.clang_getTypeSpelling(self) + ++ @property ++ def fully_qualified_name(self): ++ """Retrieve the fully qualified name of this Type.""" ++ if not hasattr(self, '_fully_qualified_name'): ++ self._fully_qualified_name = conf.lib.clang_getFullyQualifiedTypeName(self) ++ ++ return self._fully_qualified_name ++ + def __eq__(self, other): + if type(other) != type(self): + return False +@@ -3758,6 +3766,7 @@ functionList = [ + ("clang_getTypedefName", [Type], _CXString, _CXString.from_result), + ("clang_getTypeKindSpelling", [c_uint], _CXString, _CXString.from_result), + ("clang_getTypeSpelling", [Type], _CXString, _CXString.from_result), ++ ("clang_getFullyQualifiedTypeName", [Type], _CXString, _CXString.from_result), + ("clang_hashCursor", [Cursor], c_uint), + ("clang_isAttribute", [CursorKind], bool), + ("clang_isConstQualifiedType", [Type], bool), +diff -pur spack-src/clang/bindings/python/tests/cindex/test_cursor.py spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src/clang/bindings/python/tests/cindex/test_cursor.py 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 19:30:48.340935255 +0100 +@@ -432,6 +432,14 @@ class 
TestCursor(unittest.TestCase): + underlying = typedef.underlying_typedef_type + self.assertEqual(underlying.kind, TypeKind.INT) + ++ def test_fully_qualified_type_name(): ++ source = 'namespace uiae { struct X { typedef int sometype; }; }' ++ tu = get_tu(source, lang='cpp') ++ ++ cls = get_cursor(tu, 'sometype') ++ fqn = cls.type.fully_qualified_name ++ self.assertTrue(fqn.endswith("uiae::X::sometype"), fqn) ++ + def test_semantic_parent(self): + tu = get_tu(kParentTest, "cpp") + curs = get_cursors(tu, "f") +Only in spack-src-new/clang/bindings/python/tests/cindex: test_cursor.py.orig +diff -pur spack-src/clang/include/clang-c/Index.h spack-src-new/clang/include/clang-c/Index.h +--- spack-src/clang/include/clang-c/Index.h 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-new/clang/include/clang-c/Index.h 2025-02-03 19:31:47.829181660 +0100 +@@ -34,7 +34,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. + */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 64 ++#define CINDEX_VERSION_MINOR 65 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -3004,6 +3004,14 @@ CINDEX_LINKAGE CXType clang_getCursorTyp + CINDEX_LINKAGE CXString clang_getTypeSpelling(CXType CT); + + /** ++ * Retrieve the fully qualified name of the underlying type. ++ * This includes full qualification of all template parameters etc. ++ * ++ * If the type is invalid, an empty string is returned. ++ */ ++CINDEX_LINKAGE CXString clang_getFullyQualifiedTypeName(CXType CT); ++ ++/** + * Retrieve the underlying type of a typedef declaration. 
+ * + * If the cursor does not reference a typedef declaration, an invalid type is +diff -pur spack-src/clang/tools/libclang/CMakeLists.txt spack-src-new/clang/tools/libclang/CMakeLists.txt +--- spack-src/clang/tools/libclang/CMakeLists.txt 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CMakeLists.txt 2025-02-03 19:30:50.280943291 +0100 +@@ -69,6 +69,7 @@ set(LIBS + clangSema + clangSerialization + clangTooling ++ clangToolingCore + ) + + if (CLANG_ENABLE_ARCMT) +Only in spack-src-new/clang/tools/libclang: CMakeLists.txt.orig +diff -pur spack-src/clang/tools/libclang/CXType.cpp spack-src-new/clang/tools/libclang/CXType.cpp +--- spack-src/clang/tools/libclang/CXType.cpp 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/CXType.cpp 2025-02-03 19:30:50.280943291 +0100 +@@ -19,6 +19,7 @@ + #include "clang/AST/DeclObjC.h" + #include "clang/AST/DeclTemplate.h" + #include "clang/AST/Expr.h" ++#include "clang/AST/QualTypeNames.h" + #include "clang/AST/Type.h" + #include "clang/Basic/AddressSpaces.h" + #include "clang/Frontend/ASTUnit.h" +@@ -311,6 +312,27 @@ CXString clang_getTypeSpelling(CXType CT + return cxstring::createDup(OS.str()); + } + ++CXString clang_getFullyQualifiedTypeName(CXType CT) { ++ QualType T = GetQualType(CT); ++ if (T.isNull()) ++ return cxstring::createEmpty(); ++ ++ // For builtin types (but not typedefs pointing to builtin types) return their ++ // spelling. Otherwise "bool" will be turned into "_Bool". 
++ const Type *TP = T.getTypePtrOrNull(); ++ if (TP && TP->isBuiltinType() && T->getAs<TypedefType>() == nullptr) ++ return clang_getTypeSpelling(CT); ++ ++ CXTranslationUnit TU = GetTU(CT); ++ ASTContext &Ctx = cxtu::getASTUnit(TU)->getASTContext(); ++ PrintingPolicy Policy(Ctx.getPrintingPolicy()); ++ Policy.SuppressScope = false; ++ Policy.AnonymousTagLocations = false; ++ Policy.PolishForDeclaration = true; ++ std::string name = TypeName::getFullyQualifiedName(T, Ctx, Policy, /*WithGlobalNsPrefix=*/true); ++ return cxstring::createDup(name.c_str()); ++} ++ + CXType clang_getTypedefDeclUnderlyingType(CXCursor C) { + using namespace cxcursor; + CXTranslationUnit TU = cxcursor::getCursorTU(C); +Only in spack-src-new/clang/tools/libclang: CXType.cpp.orig +diff -pur spack-src/clang/tools/libclang/libclang.map spack-src-new/clang/tools/libclang/libclang.map +--- spack-src/clang/tools/libclang/libclang.map 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 19:30:50.280943291 +0100 +@@ -303,6 +303,7 @@ LLVM_13 { + clang_getFileName; + clang_getFileTime; + clang_getFileUniqueID; ++ clang_getFullyQualifiedTypeName; + clang_getFunctionTypeCallingConv; + clang_getIBOutletCollectionType; + clang_getIncludedFile; diff --git a/packages/llvm/llvm17-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch b/packages/llvm/llvm17-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch new file mode 100644 index 00000000..a89aaf1c --- /dev/null +++ b/packages/llvm/llvm17-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch @@ -0,0 +1,243 @@ +diff -pur spack-src-new/clang/bindings/python/clang/cindex.py spack-src-newer/clang/bindings/python/clang/cindex.py +--- spack-src-new/clang/bindings/python/clang/cindex.py 2025-02-03 19:31:25.893090799 +0100 ++++ spack-src-newer/clang/bindings/python/clang/cindex.py 2025-02-03 19:37:00.122475192 +0100 +@@ -571,7 +571,7 @@ class TokenGroup(object): + 
conf.lib.clang_disposeTokens(self._tu, self._memory, self._count) + + @staticmethod +- def get_tokens(tu, extent): ++ def get_tokens(tu, extent, options=0): + """Helper method to return all tokens in an extent. + + This functionality is needed multiple places in this module. We define +@@ -581,6 +581,8 @@ class TokenGroup(object): + tokens_count = c_uint() + + conf.lib.clang_tokenize(tu, extent, byref(tokens_memory), byref(tokens_count)) ++ conf.lib.clang_tokenizeRange( ++ tu, extent, byref(tokens_memory), byref(tokens_count), options) + + count = int(tokens_count.value) + +@@ -1991,13 +1993,16 @@ class Cursor(Structure): + for descendant in child.walk_preorder(): + yield descendant + +- def get_tokens(self): ++ def get_tokens(self, options=0): + """Obtain Token instances formulating that compose this Cursor. + + This is a generator for Token instances. It returns all tokens which + occupy the extent this cursor occupies. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. + """ +- return TokenGroup.get_tokens(self._tu, self.extent) ++ return TokenGroup.get_tokens(self._tu, self.extent, options) + + def get_field_offsetof(self): + """Returns the offsetof the FIELD_DECL pointed by this Cursor.""" +@@ -3283,18 +3288,21 @@ class TranslationUnit(ClangObject): + return CodeCompletionResults(ptr) + return None + +- def get_tokens(self, locations=None, extent=None): ++ def get_tokens(self, locations=None, extent=None, options=0): + """Obtain tokens in this translation unit. + + This is a generator for Token instances. The caller specifies a range + of source code to obtain tokens for. The range can be specified as a + 2-tuple of SourceLocation or as a SourceRange. If both are defined, + behavior is undefined. ++ ++ options is a bitwise or of TokenGroup.TOKENIZE_XXX flags which will ++ control tokenization behavior. 
+ """ + if locations is not None: + extent = SourceRange(start=locations[0], end=locations[1]) + +- return TokenGroup.get_tokens(self, extent) ++ return TokenGroup.get_tokens(self, extent, options) + + + class File(ClangObject): +@@ -3796,6 +3804,11 @@ functionList = [ + "clang_tokenize", + [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint)], + ), ++ ( ++ "clang_tokenizeRange", ++ [TranslationUnit, SourceRange, POINTER(POINTER(Token)), POINTER(c_uint), ++ c_uint] ++ ), + ("clang_visitChildren", [Cursor, callbacks["cursor_visit"], py_object], c_uint), + ("clang_Cursor_getNumArguments", [Cursor], c_int), + ("clang_Cursor_getArgument", [Cursor, c_uint], Cursor, Cursor.from_result), +diff -pur spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 19:30:48.340935255 +0100 ++++ spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 19:34:29.005849262 +0100 +@@ -11,6 +11,7 @@ import unittest + from clang.cindex import AvailabilityKind + from clang.cindex import CursorKind + from clang.cindex import TemplateArgumentKind ++from clang.cindex import TokenGroup + from clang.cindex import TranslationUnit + from clang.cindex import TypeKind + from .util import get_cursor +@@ -608,6 +609,14 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tokens[0].spelling, "int") + self.assertEqual(tokens[1].spelling, "foo") + ++ def test_get_tokens_with_whitespace(): ++ source = 'class C { void f(); }\nvoid C::f() { }' ++ tu = get_tu(source) ++ ++ tokens = list(tu.cursor.get_tokens(TokenGroup.TOKENIZE_KEEP_WHITESPACE)) ++ self.assertEqual(''.join(t.spelling for t in tokens), source) ++ self.assertEqual(len(tokens), 27, [t.spelling for t in tokens]) ++ + def test_get_token_cursor(self): + """Ensure we can map tokens to cursors.""" + tu = get_tu("class A {}; int foo(A var = A());", 
lang="cpp") +diff -pur spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py.orig spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py.orig +--- spack-src-new/clang/bindings/python/tests/cindex/test_cursor.py.orig 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py.orig 2025-02-03 19:30:48.340935255 +0100 +@@ -432,6 +432,14 @@ class TestCursor(unittest.TestCase): + underlying = typedef.underlying_typedef_type + self.assertEqual(underlying.kind, TypeKind.INT) + ++ def test_fully_qualified_type_name(): ++ source = 'namespace uiae { struct X { typedef int sometype; }; }' ++ tu = get_tu(source, lang='cpp') ++ ++ cls = get_cursor(tu, 'sometype') ++ fqn = cls.type.fully_qualified_name ++ self.assertTrue(fqn.endswith("uiae::X::sometype"), fqn) ++ + def test_semantic_parent(self): + tu = get_tu(kParentTest, "cpp") + curs = get_cursors(tu, "f") +diff -pur spack-src-new/clang/include/clang-c/Index.h spack-src-newer/clang/include/clang-c/Index.h +--- spack-src-new/clang/include/clang-c/Index.h 2025-02-03 19:31:47.829181660 +0100 ++++ spack-src-newer/clang/include/clang-c/Index.h 2025-02-03 19:37:22.846569316 +0100 +@@ -34,7 +34,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. + */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 65 ++#define CINDEX_VERSION_MINOR 66 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -4762,6 +4762,28 @@ CINDEX_LINKAGE CXSourceLocation clang_ge + */ + CINDEX_LINKAGE CXSourceRange clang_getTokenExtent(CXTranslationUnit, CXToken); + ++typedef enum { ++ /** ++ * \brief Used to indicate that no special tokenization options are needed. ++ */ ++ CXTokenize_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that tokens for whitespace should be returned. 
++ */ ++ CXTokenize_KeepWhitespace = 0x1 ++} CXTokenize_Flags; ++ ++/** ++ * \brief Tokenize the source code described by the given range into raw ++ * lexical tokens. ++ * ++ * \see clang_tokenizeRange ++ * ++ */ ++CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens); ++ + /** + * Tokenize the source code described by the given range into raw + * lexical tokens. +@@ -4778,9 +4800,13 @@ CINDEX_LINKAGE CXSourceRange clang_getTo + * \param NumTokens will be set to the number of tokens in the \c *Tokens + * array. + * ++ * \param options A bitmask of options that affects tokenization. This should be ++ * a bitwise OR of the CXTokenize_XXX flags. ++ * + */ +-CINDEX_LINKAGE void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, +- CXToken **Tokens, unsigned *NumTokens); ++CINDEX_LINKAGE void clang_tokenizeRange(CXTranslationUnit TU, ++ CXSourceRange Range, CXToken **Tokens, ++ unsigned *NumTokens, unsigned options); + + /** + * Annotate the given set of tokens by providing cursors for each token +diff -pur spack-src-new/clang/tools/libclang/CIndex.cpp spack-src-newer/clang/tools/libclang/CIndex.cpp +--- spack-src-new/clang/tools/libclang/CIndex.cpp 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-newer/clang/tools/libclang/CIndex.cpp 2025-02-03 19:34:29.861852808 +0100 +@@ -7199,7 +7199,7 @@ CXSourceRange clang_getTokenExtent(CXTra + } + + static void getTokens(ASTUnit *CXXUnit, SourceRange Range, +- SmallVectorImpl<CXToken> &CXTokens) { ++ SmallVectorImpl<CXToken> &CXTokens, unsigned options) { + SourceManager &SourceMgr = CXXUnit->getSourceManager(); + std::pair<FileID, unsigned> BeginLocInfo = + SourceMgr.getDecomposedSpellingLoc(Range.getBegin()); +@@ -7220,6 +7220,9 @@ static void getTokens(ASTUnit *CXXUnit, + CXXUnit->getASTContext().getLangOpts(), Buffer.begin(), + Buffer.data() + BeginLocInfo.second, Buffer.end()); + Lex.SetCommentRetentionState(true); ++ if (options & 
CXTokenize_KeepWhitespace) { ++ Lex.SetKeepWhitespaceMode(true); ++ } + + // Lex tokens until we hit the end of the range. + const char *EffectiveBufferEnd = Buffer.data() + EndLocInfo.second; +@@ -7290,7 +7293,7 @@ CXToken *clang_getToken(CXTranslationUni + SM.getComposedLoc(DecomposedEnd.first, DecomposedEnd.second); + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, SourceRange(Begin, End), CXTokens); ++ getTokens(CXXUnit, SourceRange(Begin, End), CXTokens, CXTokenize_None); + + if (CXTokens.empty()) + return nullptr; +@@ -7304,6 +7307,12 @@ CXToken *clang_getToken(CXTranslationUni + + void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, CXToken **Tokens, + unsigned *NumTokens) { ++ return clang_tokenizeRange(TU, Range, Tokens, NumTokens, CXTokenize_None); ++} ++ ++void clang_tokenizeRange(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens, ++ unsigned options) { + LOG_FUNC_SECTION { *Log << TU << ' ' << Range; } + + if (Tokens) +@@ -7327,7 +7336,7 @@ void clang_tokenize(CXTranslationUnit TU + return; + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, R, CXTokens); ++ getTokens(CXXUnit, R, CXTokens, options); + + if (CXTokens.empty()) + return; +Only in spack-src-newer/clang/tools/libclang: CIndex.cpp.orig +diff -pur spack-src-new/clang/tools/libclang/libclang.map spack-src-newer/clang/tools/libclang/libclang.map +--- spack-src-new/clang/tools/libclang/libclang.map 2025-02-03 19:30:50.280943291 +0100 ++++ spack-src-newer/clang/tools/libclang/libclang.map 2025-02-03 19:34:29.861852808 +0100 +@@ -399,6 +399,7 @@ LLVM_13 { + clang_suspendTranslationUnit; + clang_toggleCrashRecovery; + clang_tokenize; ++ clang_tokenizeRange; + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; +Only in spack-src-newer/clang/tools/libclang: libclang.map.orig diff --git a/packages/llvm/llvm17-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch 
b/packages/llvm/llvm17-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch new file mode 100644 index 00000000..2421b740 --- /dev/null +++ b/packages/llvm/llvm17-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch @@ -0,0 +1,490 @@ +diff -pur spack-src-newer/clang/bindings/python/clang/cindex.py spack-src-newerst/clang/bindings/python/clang/cindex.py +--- spack-src-newer/clang/bindings/python/clang/cindex.py 2025-02-03 19:37:00.122475192 +0100 ++++ spack-src-newerst/clang/bindings/python/clang/cindex.py 2025-02-03 19:42:19.199796814 +0100 +@@ -1463,6 +1463,15 @@ class Cursor(Structure): + + _fields_ = [("_kind_id", c_int), ("xdata", c_int), ("data", c_void_p * 3)] + ++ # Default behavior. ++ GET_CHILDREN_NONE = 0 ++ ++ # Used to indicate that implicit cursors should be visited. ++ GET_CHILDREN_WITH_IMPLICIT = 1 ++ ++ # Used to indicate that template instantiations should be visited. ++ GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS = 2 ++ + @staticmethod + def from_location(tu, location): + # We store a reference to the TU in the instance so the TU won't get +@@ -1648,6 +1657,10 @@ class Cursor(Structure): + """Returns True if the cursor refers to a scoped enum declaration.""" + return conf.lib.clang_EnumDecl_isScoped(self) + ++ def is_implicit(self): ++ """Test whether the cursor refers to an implicit declaration.""" ++ return conf.lib.clang_isImplicit(self) ++ + def get_definition(self): + """ + If the cursor is a reference to a declaration or a declaration of +@@ -1965,8 +1978,12 @@ class Cursor(Structure): + """Returns the value of the indicated arg as an unsigned 64b integer.""" + return conf.lib.clang_Cursor_getTemplateArgumentUnsignedValue(self, num) + +- def get_children(self): +- """Return an iterator for accessing the children of this cursor.""" ++ def get_children(self, with_implicit=False, with_template_instantiations=False): ++ """Return an iterator for accessing the children of this cursor. 
++ ++ By default, cursors representing implicit declarations or template instantiations ++ will be skipped. ++ """ + + # FIXME: Expose iteration from CIndex, PR6125. + def visitor(child, parent, children): +@@ -1980,17 +1997,24 @@ class Cursor(Structure): + return 1 # continue + + children = [] +- conf.lib.clang_visitChildren(self, callbacks["cursor_visit"](visitor), children) ++ dispatch = conf.lib.clang_visitChildren ++ options = Cursor.GET_CHILDREN_NONE ++ if with_implicit: ++ options |= Cursor.GET_CHILDREN_WITH_IMPLICIT ++ if with_template_instantiations: ++ options |= Cursor.GET_CHILDREN_WITH_TEMPLATE_INSTANTIATIONS ++ conf.lib.clang_visitChildrenWithOptions( ++ self, callbacks['cursor_visit'](visitor), children, options) + return iter(children) + +- def walk_preorder(self): ++ def walk_preorder(self, **kwargs): + """Depth-first preorder walk over the cursor and its descendants. + + Yields cursors. + """ + yield self +- for child in self.get_children(): +- for descendant in child.walk_preorder(): ++ for child in self.get_children(**kwargs): ++ for descendant in child.walk_preorder(**kwargs): + yield descendant + + def get_tokens(self, options=0): +@@ -3783,6 +3807,7 @@ functionList = [ + ("clang_isExpression", [CursorKind], bool), + ("clang_isFileMultipleIncludeGuarded", [TranslationUnit, File], bool), + ("clang_isFunctionTypeVariadic", [Type], bool), ++ ("clang_isImplicit", [Cursor], bool), + ("clang_isInvalid", [CursorKind], bool), + ("clang_isPODType", [Type], bool), + ("clang_isPreprocessing", [CursorKind], bool), +@@ -3810,6 +3835,11 @@ functionList = [ + c_uint] + ), + ("clang_visitChildren", [Cursor, callbacks["cursor_visit"], py_object], c_uint), ++ ( ++ "clang_visitChildrenWithOptions", ++ [Cursor, callbacks['cursor_visit'], py_object, c_uint], ++ c_uint ++ ), + ("clang_Cursor_getNumArguments", [Cursor], c_int), + ("clang_Cursor_getArgument", [Cursor, c_uint], Cursor, Cursor.from_result), + ("clang_Cursor_getNumTemplateArguments", [Cursor], c_int), 
+diff -pur spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py spack-src-newerst/clang/bindings/python/tests/cindex/test_cursor.py +--- spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 19:34:29.005849262 +0100 ++++ spack-src-newerst/clang/bindings/python/tests/cindex/test_cursor.py 2025-02-03 19:38:51.074934760 +0100 +@@ -96,6 +96,39 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tu_nodes[2].displayname, "f0(int, int)") + self.assertEqual(tu_nodes[2].is_definition(), True) + ++ def test_get_children_with_implicit(): ++ tu = get_tu('struct X {}; X x;', lang='cpp') ++ cursor = get_cursor(tu, 'X') ++ ++ children = list(cursor.get_children()) ++ self.assertEqual(len(children), 0, [(c.kind, c.spelling) for c in children]) ++ ++ children = list(cursor.get_children(with_implicit=True)) ++ self.assertNotEqual(len(children), 0) ++ for child in children: ++ self.assertTrue(child.is_implicit()) ++ self.assertEqual(child.spelling, "X") ++ self.assertIn(child.kind, [CursorKind.CONSTRUCTOR, CursorKind.STRUCT_DECL]) ++ ++ def test_get_children_with_template_instantiations(): ++ tu = get_tu( ++ 'template <typename T> T frobnicate(T val);' ++ 'extern template int frobnicate<int>(int);', ++ lang='cpp') ++ cursor = get_cursor(tu, 'frobnicate') ++ self.assertEqual(cursor.kind, CursorKind.FUNCTION_TEMPLATE) ++ ++ for child in cursor.get_children(): ++ # should not return an instantiation: ++ self.assertNotEqual(child.kind, CursorKind.FUNCTION_DECL) ++ ++ for child in cursor.get_children(with_template_instantiations=True): ++ if child.kind == CursorKind.FUNCTION_DECL: ++ self.assertEqual(child.spelling, 'frobnicate') ++ break ++ else: ++ self.fail("Couldn't find template instantiation") ++ + def test_references(self): + """Ensure that references to TranslationUnit are kept.""" + tu = get_tu("int x;") +diff -pur spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py.orig 
spack-src-newerst/clang/bindings/python/tests/cindex/test_cursor.py.orig +--- spack-src-newer/clang/bindings/python/tests/cindex/test_cursor.py.orig 2025-02-03 19:30:48.340935255 +0100 ++++ spack-src-newerst/clang/bindings/python/tests/cindex/test_cursor.py.orig 2025-02-03 19:34:29.005849262 +0100 +@@ -11,6 +11,7 @@ import unittest + from clang.cindex import AvailabilityKind + from clang.cindex import CursorKind + from clang.cindex import TemplateArgumentKind ++from clang.cindex import TokenGroup + from clang.cindex import TranslationUnit + from clang.cindex import TypeKind + from .util import get_cursor +@@ -608,6 +609,14 @@ class TestCursor(unittest.TestCase): + self.assertEqual(tokens[0].spelling, "int") + self.assertEqual(tokens[1].spelling, "foo") + ++ def test_get_tokens_with_whitespace(): ++ source = 'class C { void f(); }\nvoid C::f() { }' ++ tu = get_tu(source) ++ ++ tokens = list(tu.cursor.get_tokens(TokenGroup.TOKENIZE_KEEP_WHITESPACE)) ++ self.assertEqual(''.join(t.spelling for t in tokens), source) ++ self.assertEqual(len(tokens), 27, [t.spelling for t in tokens]) ++ + def test_get_token_cursor(self): + """Ensure we can map tokens to cursors.""" + tu = get_tu("class A {}; int foo(A var = A());", lang="cpp") +diff -pur spack-src-newer/clang/include/clang-c/Index.h spack-src-newerst/clang/include/clang-c/Index.h +--- spack-src-newer/clang/include/clang-c/Index.h 2025-02-03 19:37:22.846569316 +0100 ++++ spack-src-newerst/clang/include/clang-c/Index.h 2025-02-03 19:39:20.307055840 +0100 +@@ -34,7 +34,7 @@ + * compatible, thus CINDEX_VERSION_MAJOR is expected to remain stable. 
+ */ + #define CINDEX_VERSION_MAJOR 0 +-#define CINDEX_VERSION_MINOR 66 ++#define CINDEX_VERSION_MINOR 67 + + #define CINDEX_VERSION_ENCODE(major, minor) (((major)*10000) + ((minor)*1)) + +@@ -2366,6 +2366,11 @@ CINDEX_LINKAGE unsigned clang_isPreproce + */ + CINDEX_LINKAGE unsigned clang_isUnexposed(enum CXCursorKind); + ++/*** ++ * \brief Determine whether the given cursor represents an implicit declaration. ++ */ ++CINDEX_LINKAGE unsigned clang_isImplicit(CXCursor); ++ + /** + * Describe the linkage of the entity referred to by a cursor. + */ +@@ -3827,6 +3832,32 @@ enum CXChildVisitResult { + CXChildVisit_Recurse + }; + ++typedef enum { ++ /** ++ * \brief Default behavior. ++ */ ++ CXVisitChildren_None = 0x0, ++ ++ /** ++ * \brief Used to indicate that implicit cursors should be visited. ++ */ ++ CXVisitChildren_WithImplicit = 0x1, ++ ++ /** ++ * \brief Used to indicate that template instantiations should be visited. ++ */ ++ CXVisitChildren_WithTemplateInstantiations = 0x2 ++} CXVisitChildren_Flags; ++ ++/** ++ * \brief Visits the children of a cursor, allowing to pass extra options. ++ * Behaves identically to clang_visitChildren() in all other respects. ++ */ ++CINDEX_LINKAGE unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options); ++ + /** + * Visitor invoked for each cursor found by a traversal. + * +diff -pur spack-src-newer/clang/tools/libclang/CIndex.cpp spack-src-newerst/clang/tools/libclang/CIndex.cpp +--- spack-src-newer/clang/tools/libclang/CIndex.cpp 2025-02-03 19:34:29.861852808 +0100 ++++ spack-src-newerst/clang/tools/libclang/CIndex.cpp 2025-02-03 19:38:52.634941222 +0100 +@@ -207,9 +207,10 @@ bool CursorVisitor::Visit(CXCursor Curso + return true; // abort. + } + +- // Ignore implicit declarations, unless it's an objc method because +- // currently we should report implicit methods for properties when indexing. 
+- if (D->isImplicit() && !isa<ObjCMethodDecl>(D)) ++ // Unless instructed otherwise we ignore implicit declarations. ++ // ObjC methods are currently visited in any case, because implicit methods ++ // for properties should be reported when indexing. ++ if (!VisitImplicitDeclarations && D->isImplicit() && !isa<ObjCMethodDecl>(D)) + return false; + } + +@@ -717,10 +718,13 @@ bool CursorVisitor::VisitTagDecl(TagDecl + + bool CursorVisitor::VisitClassTemplateSpecializationDecl( + ClassTemplateSpecializationDecl *D) { +- bool ShouldVisitBody = false; ++ bool ShouldVisitBody = VisitTemplateInstantiations; + switch (D->getSpecializationKind()) { +- case TSK_Undeclared: + case TSK_ImplicitInstantiation: ++ if (VisitTemplateInstantiations && VisitImplicitDeclarations) { ++ break; ++ } ++ case TSK_Undeclared: + // Nothing to visit + return false; + +@@ -729,6 +733,7 @@ bool CursorVisitor::VisitClassTemplateSp + break; + + case TSK_ExplicitSpecialization: ++ // Always visit body of explicit specializations + ShouldVisitBody = true; + break; + } +@@ -954,7 +959,31 @@ bool CursorVisitor::VisitFunctionTemplat + return true; + + auto *FD = D->getTemplatedDecl(); +- return VisitAttributes(FD) || VisitFunctionDecl(FD); ++ if (VisitAttributes(FD) || VisitFunctionDecl(FD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *FD : D->specializations()) { ++ for (auto *RD : FD->redecls()) { ++ switch (RD->getTemplateSpecializationKind()) { ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: ++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool CursorVisitor::VisitClassTemplateDecl(ClassTemplateDecl *D) { +@@ -965,6 +994,40 @@ bool CursorVisitor::VisitClassTemplateDe + + 
auto *CD = D->getTemplatedDecl(); + return VisitAttributes(CD) || VisitCXXRecordDecl(CD); ++ if (VisitAttributes(CD) || VisitCXXRecordDecl(CD)) ++ return true; ++ ++ if (VisitTemplateInstantiations && D == D->getCanonicalDecl()) { ++ for (auto *SD : D->specializations()) { ++ for (auto *RD : SD->redecls()) { ++ // We don't want to visit injected-class-names in this traversal. ++ if (cast<CXXRecordDecl>(RD)->isInjectedClassName()) ++ continue; ++ ++ switch ( ++ cast<ClassTemplateSpecializationDecl>(RD)->getSpecializationKind()) { ++ // Visit the implicit instantiations with the requested pattern. ++ case TSK_Undeclared: ++ case TSK_ImplicitInstantiation: { ++ const Optional<bool> V = handleDeclForVisitation(RD); ++ if (!V.hasValue()) ++ continue; ++ return V.getValue(); ++ } ++ ++ // We don't need to do anything on an explicit instantiation ++ // or explicit specialization because there will be an explicit ++ // node for it elsewhere. ++ case TSK_ExplicitInstantiationDeclaration: ++ case TSK_ExplicitInstantiationDefinition: ++ case TSK_ExplicitSpecialization: ++ break; ++ } ++ } ++ } ++ } ++ ++ return false; + } + + bool CursorVisitor::VisitTemplateTemplateParmDecl(TemplateTemplateParmDecl *D) { +@@ -4877,6 +4940,24 @@ unsigned clang_visitChildrenWithBlock(CX + return clang_visitChildren(parent, visitWithBlock, block); + } + ++unsigned clang_visitChildrenWithOptions(CXCursor parent, ++ CXCursorVisitor visitor, ++ CXClientData client_data, ++ unsigned options) { ++ CursorVisitor CursorVis( ++ getCursorTU(parent), visitor, client_data, ++ /*VisitPreprocessorLast=*/false, ++ /*VisitIncludedPreprocessingEntries=*/false, ++ /*RegionOfInterest=*/SourceRange(), ++ /*VisitDeclsOnly=*/false, ++ /*PostChildrenVisitor=*/nullptr, ++ /*VisitImplicitDeclarations=*/(options & CXVisitChildren_WithImplicit), ++ /*VisitTemplateInstantiations=*/ ++ (options & CXVisitChildren_WithTemplateInstantiations)); ++ ++ return CursorVis.VisitChildren(parent); ++} ++ + static CXString 
getDeclSpelling(const Decl *D) { + if (!D) + return cxstring::createEmpty(); +@@ -6296,6 +6377,22 @@ unsigned clang_isUnexposed(enum CXCursor + } + } + ++unsigned clang_isImplicit(CXCursor Cursor) { ++ if (clang_isInvalid(Cursor.kind)) ++ return false; ++ ++ if (!clang_isDeclaration(Cursor.kind)) ++ return false; ++ ++ const Decl *D = getCursorDecl(Cursor); ++ if (!D) { ++ assert(0 && "Invalid declaration cursor"); ++ return true; // abort. ++ } ++ ++ return D->isImplicit(); ++} ++ + CXCursorKind clang_getCursorKind(CXCursor C) { return C.kind; } + + CXSourceLocation clang_getCursorLocation(CXCursor C) { +diff -pur spack-src-newer/clang/tools/libclang/CIndex.cpp.orig spack-src-newerst/clang/tools/libclang/CIndex.cpp.orig +--- spack-src-newer/clang/tools/libclang/CIndex.cpp.orig 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-newerst/clang/tools/libclang/CIndex.cpp.orig 2025-02-03 19:34:29.861852808 +0100 +@@ -7199,7 +7199,7 @@ CXSourceRange clang_getTokenExtent(CXTra + } + + static void getTokens(ASTUnit *CXXUnit, SourceRange Range, +- SmallVectorImpl<CXToken> &CXTokens) { ++ SmallVectorImpl<CXToken> &CXTokens, unsigned options) { + SourceManager &SourceMgr = CXXUnit->getSourceManager(); + std::pair<FileID, unsigned> BeginLocInfo = + SourceMgr.getDecomposedSpellingLoc(Range.getBegin()); +@@ -7220,6 +7220,9 @@ static void getTokens(ASTUnit *CXXUnit, + CXXUnit->getASTContext().getLangOpts(), Buffer.begin(), + Buffer.data() + BeginLocInfo.second, Buffer.end()); + Lex.SetCommentRetentionState(true); ++ if (options & CXTokenize_KeepWhitespace) { ++ Lex.SetKeepWhitespaceMode(true); ++ } + + // Lex tokens until we hit the end of the range. 
+ const char *EffectiveBufferEnd = Buffer.data() + EndLocInfo.second; +@@ -7290,7 +7293,7 @@ CXToken *clang_getToken(CXTranslationUni + SM.getComposedLoc(DecomposedEnd.first, DecomposedEnd.second); + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, SourceRange(Begin, End), CXTokens); ++ getTokens(CXXUnit, SourceRange(Begin, End), CXTokens, CXTokenize_None); + + if (CXTokens.empty()) + return nullptr; +@@ -7304,6 +7307,12 @@ CXToken *clang_getToken(CXTranslationUni + + void clang_tokenize(CXTranslationUnit TU, CXSourceRange Range, CXToken **Tokens, + unsigned *NumTokens) { ++ return clang_tokenizeRange(TU, Range, Tokens, NumTokens, CXTokenize_None); ++} ++ ++void clang_tokenizeRange(CXTranslationUnit TU, CXSourceRange Range, ++ CXToken **Tokens, unsigned *NumTokens, ++ unsigned options) { + LOG_FUNC_SECTION { *Log << TU << ' ' << Range; } + + if (Tokens) +@@ -7327,7 +7336,7 @@ void clang_tokenize(CXTranslationUnit TU + return; + + SmallVector<CXToken, 32> CXTokens; +- getTokens(CXXUnit, R, CXTokens); ++ getTokens(CXXUnit, R, CXTokens, options); + + if (CXTokens.empty()) + return; +diff -pur spack-src-newer/clang/tools/libclang/CursorVisitor.h spack-src-newerst/clang/tools/libclang/CursorVisitor.h +--- spack-src-newer/clang/tools/libclang/CursorVisitor.h 2023-10-31 09:00:30.000000000 +0100 ++++ spack-src-newerst/clang/tools/libclang/CursorVisitor.h 2025-02-03 19:38:52.634941222 +0100 +@@ -111,6 +111,12 @@ private: + /// record entries. + bool VisitDeclsOnly; + ++ /// \brief Whether we should visit implicit declarations. ++ bool VisitImplicitDeclarations; ++ ++ /// \brief Whether we should recurse into template instantiations. ++ bool VisitTemplateInstantiations; ++ + // FIXME: Eventually remove. This part of a hack to support proper + // iteration over all Decls contained lexically within an ObjC container. 
+ DeclContext::decl_iterator *DI_current; +@@ -159,12 +165,16 @@ public: + bool VisitIncludedPreprocessingEntries = false, + SourceRange RegionOfInterest = SourceRange(), + bool VisitDeclsOnly = false, +- PostChildrenVisitorTy PostChildrenVisitor = nullptr) ++ PostChildrenVisitorTy PostChildrenVisitor = nullptr, ++ bool VisitImplicitDeclarations = false, ++ bool VisitTemplateInstantiations = false) + : TU(TU), AU(cxtu::getASTUnit(TU)), Visitor(Visitor), + PostChildrenVisitor(PostChildrenVisitor), ClientData(ClientData), + VisitPreprocessorLast(VisitPreprocessorLast), + VisitIncludedEntities(VisitIncludedPreprocessingEntries), + RegionOfInterest(RegionOfInterest), VisitDeclsOnly(VisitDeclsOnly), ++ VisitImplicitDeclarations(VisitImplicitDeclarations), ++ VisitTemplateInstantiations(VisitTemplateInstantiations), + DI_current(nullptr), FileDI_current(nullptr) { + Parent.kind = CXCursor_NoDeclFound; + Parent.data[0] = nullptr; +Only in spack-src-newerst/clang/tools/libclang: CursorVisitor.h.orig +diff -pur spack-src-newer/clang/tools/libclang/libclang.map spack-src-newerst/clang/tools/libclang/libclang.map +--- spack-src-newer/clang/tools/libclang/libclang.map 2025-02-03 19:34:29.861852808 +0100 ++++ spack-src-newerst/clang/tools/libclang/libclang.map 2025-02-03 19:38:52.634941222 +0100 +@@ -375,6 +375,7 @@ LLVM_13 { + clang_isExpression; + clang_isFileMultipleIncludeGuarded; + clang_isFunctionTypeVariadic; ++ clang_isImplicit; + clang_isInvalid; + clang_isInvalidDeclaration; + clang_isPODType; +@@ -403,6 +404,7 @@ LLVM_13 { + clang_uninstall_llvm_fatal_error_handler; + clang_visitChildren; + clang_visitChildrenWithBlock; ++ clang_visitChildrenWithOptions; + + local: *; + }; diff --git a/packages/llvm/package.py b/packages/llvm/package.py index d1ab9de9..c6cf4897 100644 --- a/packages/llvm/package.py +++ b/packages/llvm/package.py @@ -395,7 +395,7 @@ class Llvm(CMakePackage, CudaPackage): "bindings generator") conflicts("@:8", when="+visionary") - conflicts("@17:", 
when="+visionary") + conflicts("@18:", when="+visionary") patch('llvm9-0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch', when='@9:12 +visionary', level=2) @@ -431,6 +431,13 @@ class Llvm(CMakePackage, CudaPackage): # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above # llvm14-public-ClangToolingCommonOptionsParser.patch from above + # 0001-Tooling-Fully-qualify-template-parameters-of-nested-.patch from above + patch('llvm17-0002-libclang-Add-support-for-obtaining-fully-qualified-n.patch', when='@17: +visionary', level=1) + patch('llvm17-0003-libclang-Add-option-to-keep-whitespace-when-tokenizi.patch', when='@17: +visionary', level=1) + patch('llvm17-0004-libclang-WIP-Allow-visiting-of-implicit-declarations.patch', when='@17: +visionary', level=1) + # 0005-libclang-WIP-Fix-get_tokens-in-macro-expansion.patch from above + # llvm14-public-ClangToolingCommonOptionsParser.patch from above + # disable check for member `mode` size in `struct ipc_perm`; newer glibc changed width patch('llvm9-disable-check-for-ipc_perm-mode.patch', when='@9.0.0:9.0.999', level=2) # end VISIONS -- GitLab From cc9a1e3f5449863a90b94332ffefd3565b0cc920 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Thu, 27 Feb 2025 16:18:01 +0200 Subject: [PATCH 031/111] chore: yashchiki is dead, long live dedal! 
--- .gitmodules | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitmodules b/.gitmodules index 25bfff6a..30354655 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,5 +4,5 @@ shallow = true [submodule "vendor/yashchiki"] path = vendor/yashchiki - url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/yashchiki + url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal shallow = true -- GitLab From a4f214e684aebaa6f7974ffe82bb1649f529f41b Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 21 Feb 2025 12:04:43 +0100 Subject: [PATCH 032/111] fix(CI): find correct upstream prefix when it's padded --- install_spack_env.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index 3bb01936..bb7fb9be 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -52,10 +52,11 @@ fi if [[ $UPSTREAM_INSTANCE ]] then + UPSTREAM_PREFIX=$(find $UPSTREAM_INSTANCE/spack/opt/spack/ -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {}) cat <<EOF > ${CI_SPACK_ROOT}/etc/spack/defaults/upstreams.yaml upstreams: upstream-spack-instance: - install_tree: $UPSTREAM_INSTANCE/spack/opt/spack + install_tree: $UPSTREAM_PREFIX EOF fi -- GitLab From d3bc5d9491eb8e3d4e213799581dfd6df67aff55 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Sat, 22 Feb 2025 15:47:45 +0000 Subject: [PATCH 033/111] fix(CI): only trigger CI build jobs on push --- .gitlab-ci.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 766dfb9d..be25c7c8 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -102,7 +102,7 @@ variables: SPACK_ENV: test RELEASE_NAME: EBRAINS-test rules: - - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE != "schedule" + - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && 
$CI_PIPELINE_SOURCE == "push" # deploy the experimental release of tools once a week from latest working version of int release .deploy-exp-release: @@ -210,7 +210,7 @@ build-spack-env-on-runner: when: always timeout: 2 days rules: - - if: $CI_PIPELINE_SOURCE != "schedule" && $CI_PIPELINE_SOURCE != "merge_request_event" + - if: $CI_PIPELINE_SOURCE == "push" # this one fills the spack caches and updates the ESD (ebrainslab-variant) images on harbor sync-esd-image: @@ -262,7 +262,7 @@ sync-esd-image: resource_group: registry-esd-master-image rules: # branches that update the gitlab-runner upstream (read-only) installation and the spack OCI caches - - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE != "schedule" + - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_PATH =~ /platform\/esd\/ebrains-spack-builds/ && $CI_PIPELINE_SOURCE == "push" when: manual # update gitlab-runner upstream (read-only) installation @@ -307,7 +307,7 @@ sync-gitlab-spack-instance: when: always rules: # branches that update the gitlab-runner upstream (read-only) installation - - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE != "schedule" + - if: ($CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH || $CI_COMMIT_BRANCH == "experimental_rel" || $CI_COMMIT_BRANCH =~ /^ebrains/) && $CI_PROJECT_NAMESPACE =~ /platform\/esd/ && $CI_PIPELINE_SOURCE == "push" when: manual # run (scheduled) standalone tests for environment -- GitLab From 13bff2c40533a2264de5ddace89ffd609d2eb9bb Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 18 Feb 2025 07:34:08 +0000 Subject: [PATCH 034/111] feat: update 
Spack to v0.23.0 --- .gitlab-ci.yml | 6 +++--- README.md | 4 ++-- install_spack_env.sh | 4 ---- vendor/spack | 2 +- 4 files changed, 6 insertions(+), 10 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index be25c7c8..d76a5319 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,7 +4,7 @@ stages: variables: BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:24.12 - SPACK_PATH_GITLAB: /mnt/spack_v0.21.1 + SPACK_PATH_GITLAB: /mnt/spack_v0.23.0 SYSTEMNAME: ebrainslab GIT_SUBMODULE_STRATEGY: recursive GIT_CLEAN_FLAGS: -ffdxq @@ -62,14 +62,14 @@ variables: extends: .deploy-build-environment variables: LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/int - INSTALLATION_ROOT: /srv/test-build-2402 + INSTALLATION_ROOT: /srv/test-build-2502 # deploy to a prod lab environment .deploy-prod-server: extends: .deploy-build-environment variables: LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/prod - INSTALLATION_ROOT: /srv/main-spack-instance-2402 + INSTALLATION_ROOT: /srv/main-spack-instance-2502 # deploy to the dev lab environment at CINECA .deploy-dev-server-cineca: diff --git a/README.md b/README.md index 1f567131..36bce891 100644 --- a/README.md +++ b/README.md @@ -38,9 +38,9 @@ Clone this repository. You can use the `ebrains-yy-mm` branches to install the E git clone --branch {branch-name} https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git ``` -Clone Spack. We currently use version v0.21.1: +Clone Spack. We currently use version v0.23.0: ``` -git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.21.1 https://github.com/spack/spack +git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.23.0 https://github.com/spack/spack ``` Activate Spack: diff --git a/install_spack_env.sh b/install_spack_env.sh index bb7fb9be..faf7ef8b 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -44,10 +44,6 @@ SPACK_ROOT_EXISTED=1 if [ ! 
-d ${CI_SPACK_ROOT} ]; then ln -s ${EBRAINS_REPO}/vendor/spack ${CI_SPACK_ROOT} SPACK_ROOT_EXISTED=0 - - # SPACK PATCH: the post-build logs on install-time-test-logs.txt gets ovewritten by the post-install logs. - # quick fix for that: (TODO: investigate more and open PR) - sed -i "s/self.file_like, \"w\"/self.file_like, \"a\"/g" ${CI_SPACK_ROOT}/lib/spack/llnl/util/tty/log.py fi if [[ $UPSTREAM_INSTANCE ]] diff --git a/vendor/spack b/vendor/spack index 904e1a73..65abf4d1 160000 --- a/vendor/spack +++ b/vendor/spack @@ -1 +1 @@ -Subproject commit 904e1a73567bc17d43fe0e4615ca0d7f1d50e2ed +Subproject commit 65abf4d14071280c6d4a183e20c0f6991ed49986 -- GitLab From 1b33064ba357250db73ec862f24df637ed180c30 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 18 Feb 2025 07:35:13 +0000 Subject: [PATCH 035/111] feat: align packages with Spack v0.23.0 upstream --- packages/ambertools/package.py | 78 -- packages/bazel/bazelconfiguration-0.3.patch | 16 - .../bazel/bazelruleclassprovider-0.14.patch | 16 - packages/bazel/build-0.29.1.patch | 61 - packages/bazel/cc_configure-0.3.0.patch | 24 - packages/bazel/cc_configure-0.5.0.patch | 24 - packages/bazel/compile-0.13.patch | 11 - packages/bazel/compile-0.16.patch | 11 - packages/bazel/compile-0.21.patch | 11 - packages/bazel/compile-0.3.patch | 11 - packages/bazel/compile-0.9.patch | 11 - packages/bazel/cppcompileaction-0.3.0.patch | 11 - packages/bazel/cppcompileaction-7.0.0.patch | 12 + packages/bazel/package.py | 514 ++------ packages/bazel/unix_cc_configure-0.10.patch | 22 - packages/bazel/unix_cc_configure-0.5.3.patch | 24 - packages/gcc/darwin/apfs.patch | 12 - packages/gcc/darwin/clang13.patch | 32 - packages/gcc/darwin/gcc-4.9.patch1 | 42 - packages/gcc/darwin/gcc-4.9.patch2 | 28 - packages/gcc/darwin/gcc-6.1.0-jit.patch | 21 - packages/gcc/darwin/gcc-7.1.0-headerpad.patch | 19 - packages/gcc/darwin/headers-10.13-fix.patch | 127 -- packages/gcc/detection_test.yaml | 38 - 
packages/gcc/gcc-backport.patch | 138 -- .../gcc/glibc-2.31-libsanitizer-1-gcc-6.patch | 39 - packages/gcc/glibc-2.31-libsanitizer-1.patch | 37 - .../gcc/glibc-2.31-libsanitizer-2-gcc-6.patch | 69 - .../gcc/glibc-2.31-libsanitizer-2-gcc-7.patch | 69 - packages/gcc/glibc-2.31-libsanitizer-2.patch | 73 -- .../gcc/glibc-2.31-libsanitizer-3-gcc-5.patch | 81 -- .../glibc-2.36-libsanitizer-gcc-10-12.patch | 27 - .../gcc/glibc-2.36-libsanitizer-gcc-5-9.patch | 27 - packages/gcc/package.py | 1114 ----------------- ...941d23b1570cdd90083b58fa0f66aa58c86e.patch | 121 -- ...ae5923aba02982563481d75a21595df22ff8.patch | 123 -- ...b74046e0feb0596b93bbb822fae02940a90e.patch | 133 -- ...0b3010bd0de899a3da3209eab20664ddb703.patch | 133 -- packages/gcc/piclibs.patch | 62 - packages/gcc/signal.patch | 28 - packages/gcc/stack_t-4.9.patch | 80 -- packages/gcc/stack_t.patch | 88 -- packages/gcc/sys_ustat-4.9.patch | 34 - packages/gcc/sys_ustat.h.patch | 63 - packages/gcc/ucontext_t-java.patch | 60 - packages/gcc/ucontext_t.patch | 189 --- packages/gcc/zstd.patch | 43 - packages/libvips/package.py | 23 +- packages/libxcb/package.py | 54 +- packages/llvm/constexpr_longdouble.patch | 28 - packages/llvm/constexpr_longdouble_9.0.patch | 38 - packages/llvm/detection_test.yaml | 104 ++ packages/llvm/libomp-libflags-as-list.patch | 14 - packages/llvm/lldb_external_ncurses-10.patch | 31 - packages/llvm/llvm-gcc11.patch | 9 - packages/llvm/llvm13-thread.patch | 19 - packages/llvm/llvm14-hwloc-ompd.patch | 13 - packages/llvm/llvm17-18-thread.patch | 22 + packages/llvm/llvm17-fujitsu.patch | 28 + packages/llvm/llvm4-lld-ELF-Symbols.patch | 112 -- packages/llvm/llvm5-lld-ELF-Symbols.patch | 33 - packages/llvm/llvm5-sanitizer-ustat.patch | 25 - packages/llvm/llvm_py37.patch | 37 - packages/llvm/llvm_python_path.patch | 14 - packages/llvm/missing-includes.patch | 23 - packages/llvm/no_cyclades.patch | 81 -- packages/llvm/package.py | 369 ++++-- ...izer-platform-limits-posix-xdr-macos.patch | 11 + 
packages/llvm/thread-p9.patch | 16 - packages/log4cxx/package.py | 35 +- packages/nanoflann/package.py | 26 - packages/netlib-xblas/package.py | 74 -- packages/nglview/package.py | 36 - packages/open3d/package.py | 35 +- packages/openbabel/gcc12-cmake.patch | 37 - packages/openbabel/package.py | 97 -- .../openbabel/python-3.6-rtld-global.patch | 42 - .../testpdbformat-tabs-to-spaces.patch | 47 - packages/py-astropy/package.py | 25 +- packages/py-autopep8/package.py | 2 + packages/py-bokeh/package.py | 23 +- packages/py-chex/package.py | 34 - packages/py-dash/package.py | 29 - packages/py-flit-core/package.py | 40 - packages/py-ipycanvas/package.py | 2 + packages/py-ipympl/package.py | 53 +- packages/py-jax/package.py | 70 -- packages/py-jaxlib/package.py | 125 -- packages/py-numba/package.py | 30 +- packages/py-optax/package.py | 24 - packages/py-pycuda/package.py | 4 + packages/py-pyvista/package.py | 44 - packages/py-pyviz-comms/package.py | 2 + packages/py-ray/package.py | 6 + packages/py-sympy/package.py | 9 +- packages/py-tree-math/package.py | 26 - packages/sbml/package.py | 5 + packages/simpletraj/package.py | 32 - packages/sleef/package.py | 63 - packages/wf-biobb/package.py | 4 +- 100 files changed, 772 insertions(+), 5420 deletions(-) delete mode 100644 packages/ambertools/package.py delete mode 100644 packages/bazel/bazelconfiguration-0.3.patch delete mode 100644 packages/bazel/bazelruleclassprovider-0.14.patch delete mode 100644 packages/bazel/build-0.29.1.patch delete mode 100644 packages/bazel/cc_configure-0.3.0.patch delete mode 100644 packages/bazel/cc_configure-0.5.0.patch delete mode 100644 packages/bazel/compile-0.13.patch delete mode 100644 packages/bazel/compile-0.16.patch delete mode 100644 packages/bazel/compile-0.21.patch delete mode 100644 packages/bazel/compile-0.3.patch delete mode 100644 packages/bazel/compile-0.9.patch delete mode 100644 packages/bazel/cppcompileaction-0.3.0.patch create mode 100644 
packages/bazel/cppcompileaction-7.0.0.patch delete mode 100644 packages/bazel/unix_cc_configure-0.10.patch delete mode 100644 packages/bazel/unix_cc_configure-0.5.3.patch delete mode 100644 packages/gcc/darwin/apfs.patch delete mode 100644 packages/gcc/darwin/clang13.patch delete mode 100644 packages/gcc/darwin/gcc-4.9.patch1 delete mode 100644 packages/gcc/darwin/gcc-4.9.patch2 delete mode 100644 packages/gcc/darwin/gcc-6.1.0-jit.patch delete mode 100644 packages/gcc/darwin/gcc-7.1.0-headerpad.patch delete mode 100644 packages/gcc/darwin/headers-10.13-fix.patch delete mode 100644 packages/gcc/detection_test.yaml delete mode 100644 packages/gcc/gcc-backport.patch delete mode 100644 packages/gcc/glibc-2.31-libsanitizer-1-gcc-6.patch delete mode 100644 packages/gcc/glibc-2.31-libsanitizer-1.patch delete mode 100644 packages/gcc/glibc-2.31-libsanitizer-2-gcc-6.patch delete mode 100644 packages/gcc/glibc-2.31-libsanitizer-2-gcc-7.patch delete mode 100644 packages/gcc/glibc-2.31-libsanitizer-2.patch delete mode 100644 packages/gcc/glibc-2.31-libsanitizer-3-gcc-5.patch delete mode 100644 packages/gcc/glibc-2.36-libsanitizer-gcc-10-12.patch delete mode 100644 packages/gcc/glibc-2.36-libsanitizer-gcc-5-9.patch delete mode 100644 packages/gcc/package.py delete mode 100644 packages/gcc/patch-2b40941d23b1570cdd90083b58fa0f66aa58c86e.patch delete mode 100644 packages/gcc/patch-745dae5923aba02982563481d75a21595df22ff8.patch delete mode 100644 packages/gcc/patch-f1feb74046e0feb0596b93bbb822fae02940a90e.patch delete mode 100644 packages/gcc/patch-fc930b3010bd0de899a3da3209eab20664ddb703.patch delete mode 100644 packages/gcc/piclibs.patch delete mode 100644 packages/gcc/signal.patch delete mode 100644 packages/gcc/stack_t-4.9.patch delete mode 100644 packages/gcc/stack_t.patch delete mode 100644 packages/gcc/sys_ustat-4.9.patch delete mode 100644 packages/gcc/sys_ustat.h.patch delete mode 100644 packages/gcc/ucontext_t-java.patch delete mode 100644 packages/gcc/ucontext_t.patch 
delete mode 100644 packages/gcc/zstd.patch delete mode 100644 packages/llvm/constexpr_longdouble.patch delete mode 100644 packages/llvm/constexpr_longdouble_9.0.patch create mode 100644 packages/llvm/detection_test.yaml delete mode 100644 packages/llvm/libomp-libflags-as-list.patch delete mode 100644 packages/llvm/lldb_external_ncurses-10.patch delete mode 100644 packages/llvm/llvm-gcc11.patch delete mode 100644 packages/llvm/llvm13-thread.patch delete mode 100644 packages/llvm/llvm14-hwloc-ompd.patch create mode 100644 packages/llvm/llvm17-18-thread.patch create mode 100644 packages/llvm/llvm17-fujitsu.patch delete mode 100644 packages/llvm/llvm4-lld-ELF-Symbols.patch delete mode 100644 packages/llvm/llvm5-lld-ELF-Symbols.patch delete mode 100644 packages/llvm/llvm5-sanitizer-ustat.patch delete mode 100644 packages/llvm/llvm_py37.patch delete mode 100644 packages/llvm/llvm_python_path.patch delete mode 100644 packages/llvm/missing-includes.patch delete mode 100644 packages/llvm/no_cyclades.patch create mode 100644 packages/llvm/sanitizer-platform-limits-posix-xdr-macos.patch delete mode 100644 packages/llvm/thread-p9.patch delete mode 100644 packages/nanoflann/package.py delete mode 100644 packages/netlib-xblas/package.py delete mode 100644 packages/nglview/package.py delete mode 100644 packages/openbabel/gcc12-cmake.patch delete mode 100644 packages/openbabel/package.py delete mode 100644 packages/openbabel/python-3.6-rtld-global.patch delete mode 100644 packages/openbabel/testpdbformat-tabs-to-spaces.patch delete mode 100644 packages/py-chex/package.py delete mode 100644 packages/py-dash/package.py delete mode 100644 packages/py-flit-core/package.py delete mode 100644 packages/py-jax/package.py delete mode 100644 packages/py-jaxlib/package.py delete mode 100644 packages/py-optax/package.py delete mode 100644 packages/py-pyvista/package.py delete mode 100644 packages/py-tree-math/package.py delete mode 100644 packages/simpletraj/package.py delete mode 100644 
packages/sleef/package.py diff --git a/packages/ambertools/package.py b/packages/ambertools/package.py deleted file mode 100644 index 4e4552cd..00000000 --- a/packages/ambertools/package.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class Ambertools (CMakePackage): - """AmberTools is a free, useful standalone package and a prerequisite for installing Amber itself. - The AmberTools suite is free of charge, and its components are mostly released under the GNU General Public License (GPL). - A few components are included that are in the public domain or which have other, open-source, licenses. - The libsander and libpbsa libraries use the LGPL license.""" - - # Set the homepage and download url - homepage = "http://ambermd.org/AmberTools.php" - url = "http://ambermd.org/downloads/AmberTools22jlmrcc.tar.bz2" - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran', 'elmath'] - - version('22jlmrcc', sha256='1571d4e0f7d45b2a71dce5999fa875aea8c90ee219eb218d7916bf30ea229121') - - # Dependencies - depends_on("flex", type="build") # This is necessary for sure (experimentally tested) - depends_on("bison", type="build") # This is necessary for sure (experimentally tested) - depends_on("tcsh", type="build") - depends_on("zlib", type=("build", "link", "run")) - depends_on("bzip2", type=("build", "run")) - depends_on("blas", type=("build", "run")) - depends_on("lapack", type=("build", "run")) - depends_on("arpack-ng", type=("build", "run")) - depends_on("netcdf-c", type=("build", "run")) - depends_on("netcdf-fortran", type=("build", "run")) - depends_on("fftw", type=("build", "run")) - depends_on("readline", type=("build", "run")) - depends_on("netlib-xblas~plain_blas", type=("build", "run")) - # specific variants needed for boost - from 
the build log "Could NOT find Boost (missing: thread system program_options iostreams regex timer chrono filesystem graph)" - depends_on("boost+thread+system+program_options+iostreams+regex+timer+chrono+filesystem+graph", type=("build", "run")) - - # Python dependencies - # WARNING: If a python 3.8 version is already installed in spack then the '+tkinter' variant makes spack ignore the version - # WARNING: Spack may try to install the preferred python version (i.e. python 3.10.8) - # WARNING: The soultion is uninstall python and reinstall with this variant - depends_on('python@3.8: +tkinter', type=('build', 'run')) - depends_on("py-numpy", type=("build", "run")) - depends_on("py-matplotlib", type=("build", "run")) - depends_on("py-scipy", type=("build", "run")) - - def cmake_args(self): - # Translated from ambertools build/run_cmake script - # We also add the TRUST_SYSTEM_LIBS argument that is mentioned in the ambertools CMake guide - # https://ambermd.org/pmwiki/pmwiki.php/Main/CMake-Guide-to-Options - args = [ - self.define("COMPILER", "GNU"), - self.define("MPI", False), - self.define("CUDA", False), - self.define("INSTALL_TESTS", True), - self.define("DOWNLOAD_MINICONDA", False), - self.define("TRUST_SYSTEM_LIBS", True), - # This is to avoid the x11 (X11_Xext_LIB) error - # It is equivalent to the '-noX11' flag accoridng to the docs: - # https://ambermd.org/pmwiki/pmwiki.php/Main/CMake-Common-Options - self.define("BUILD_GUI", False) - ] - return args - - def setup_run_environment(self, env): - env.set("AMBER_PREFIX", self.prefix) - env.set("AMBERHOME", self.prefix) - - def setup_build_environment(self, env): - env.set("AMBER_PREFIX", self.prefix) - env.set("AMBERHOME", self.prefix) - - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install(self): - make("test.serial") diff --git a/packages/bazel/bazelconfiguration-0.3.patch b/packages/bazel/bazelconfiguration-0.3.patch deleted file mode 100644 index e6a974a9..00000000 --- 
a/packages/bazel/bazelconfiguration-0.3.patch +++ /dev/null @@ -1,16 +0,0 @@ ---- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java -+++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java -@@ -150,6 +150,13 @@ - builder.put("PATH", null); - builder.put("LD_LIBRARY_PATH", null); - } -+ -+ Map<String, String> spackEnv = System.getenv(); -+ for (String envName : spackEnv.keySet()) { -+ if (envName.startsWith("SPACK_")) { -+ builder.put(envName, spackEnv.get(envName)); -+ } -+ } - } - - private static PathFragment determineShellExecutable(OS os, PathFragment fromOption) { diff --git a/packages/bazel/bazelruleclassprovider-0.14.patch b/packages/bazel/bazelruleclassprovider-0.14.patch deleted file mode 100644 index b0aebdf3..00000000 --- a/packages/bazel/bazelruleclassprovider-0.14.patch +++ /dev/null @@ -1,16 +0,0 @@ ---- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java -+++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java -@@ -168,6 +168,13 @@ public class BazelRuleClassProvider { - env.put("PATH", null); - } - -+ Map<String, String> spackEnv = System.getenv(); -+ for (String envName : spackEnv.keySet()) { -+ if (envName.startsWith("SPACK_")) { -+ env.put(envName, spackEnv.get(envName)); -+ } -+ } -+ - // Shell environment variables specified via options take precedence over the - // ones inherited from the fragments. In the long run, these fragments will - // be replaced by appropriate default rc files anyway. 
diff --git a/packages/bazel/build-0.29.1.patch b/packages/bazel/build-0.29.1.patch deleted file mode 100644 index c3ed9ab2..00000000 --- a/packages/bazel/build-0.29.1.patch +++ /dev/null @@ -1,61 +0,0 @@ -From 9c9d27561780bc56d9f0867e325c7421a94ee1cb Mon Sep 17 00:00:00 2001 -From: Harsh Bhatia <bhatia4@llnl.gov> -Date: Tue, 15 Dec 2020 15:56:10 -0800 -Subject: [PATCH] https://github.com/bazelbuild/bazel/commit/ab62a6e097590dac5ec946ad7a796ea0e8593ae0 - ---- - src/conditions/BUILD | 6 ++++++ - third_party/BUILD | 8 ++++++-- - 2 files changed, 12 insertions(+), 2 deletions(-) - -diff --git a/src/conditions/BUILD b/src/conditions/BUILD -index 2b28e28057..faa41a439d 100644 ---- a/src/conditions/BUILD -+++ b/src/conditions/BUILD -@@ -10,6 +10,12 @@ filegroup( - visibility = ["//src:__pkg__"], - ) - -+config_setting( -+ name = "linux_ppc", -+ values = {"cpu": "ppc"}, -+ visibility = ["//visibility:public"], -+) -+ - config_setting( - name = "linux_x86_64", - values = {"cpu": "k8"}, -diff --git a/third_party/BUILD b/third_party/BUILD -index 159006d741..4fcae54c00 100644 ---- a/third_party/BUILD -+++ b/third_party/BUILD -@@ -523,12 +523,13 @@ UNNECESSARY_DYNAMIC_LIBRARIES = select({ - "//src/conditions:darwin": "*.so *.dll", - "//src/conditions:darwin_x86_64": "*.so *.dll", - "//src/conditions:linux_x86_64": "*.jnilib *.dll", -+ "//src/conditions:linux_ppc": "*.so *.jnilib *.dll", - # The .so file is an x86 one, so we can just remove it if the CPU is not x86 - "//src/conditions:arm": "*.so *.jnilib *.dll", - "//src/conditions:linux_aarch64": "*.so *.jnilib *.dll", - # Play it safe -- better have a big binary than a slow binary - # zip -d does require an argument. Supply something bogus. 
-- "//conditions:default": "*.bogusextension", -+ "//conditions:default": "", - }) - - # Remove native libraries that are for a platform different from the one we are -@@ -537,7 +538,10 @@ genrule( - name = "filter_netty_dynamic_libs", - srcs = ["netty_tcnative/netty-tcnative-boringssl-static-2.0.24.Final.jar"], - outs = ["netty_tcnative/netty-tcnative-filtered.jar"], -- cmd = "cp $< $@ && zip -qd $@ " + UNNECESSARY_DYNAMIC_LIBRARIES, -+ cmd = "cp $< $@ && " + -+ # End successfully if there is nothing to be deleted from the archive -+ "if [ -n '" + UNNECESSARY_DYNAMIC_LIBRARIES + "' ]; then " + -+ "zip -qd $@ " + UNNECESSARY_DYNAMIC_LIBRARIES + "; fi", - ) - - java_import( --- -2.21.0 (Apple Git-122.2) - diff --git a/packages/bazel/cc_configure-0.3.0.patch b/packages/bazel/cc_configure-0.3.0.patch deleted file mode 100644 index 79e12269..00000000 --- a/packages/bazel/cc_configure-0.3.0.patch +++ /dev/null @@ -1,24 +0,0 @@ ---- a/tools/cpp/cc_configure.bzl -+++ b/tools/cpp/cc_configure.bzl -@@ -173,8 +173,19 @@ - else: - inc_dirs = result.stderr[index1 + 1:index2].strip() - -- return [repository_ctx.path(_cxx_inc_convert(p)) -- for p in inc_dirs.split("\n")] -+ default_inc_directories = [ -+ repository_ctx.path(_cxx_inc_convert(p)) -+ for p in inc_dirs.split("\n") -+ ] -+ -+ env = repository_ctx.os.environ -+ if "SPACK_INCLUDE_DIRS" in env: -+ for path in env["SPACK_INCLUDE_DIRS"].split(":"): -+ default_inc_directories.append( -+ repository_ctx.path(_cxx_inc_convert(path)) -+ ) -+ -+ return default_inc_directories - - def _add_option_if_supported(repository_ctx, cc, option): - """Checks that `option` is supported by the C compiler.""" diff --git a/packages/bazel/cc_configure-0.5.0.patch b/packages/bazel/cc_configure-0.5.0.patch deleted file mode 100644 index 470986be..00000000 --- a/packages/bazel/cc_configure-0.5.0.patch +++ /dev/null @@ -1,24 +0,0 @@ ---- a/tools/cpp/cc_configure.bzl -+++ b/tools/cpp/cc_configure.bzl -@@ -200,8 +200,19 @@ - else: - inc_dirs = 
result.stderr[index1 + 1:index2].strip() - -- return [_escape_string(repository_ctx.path(_cxx_inc_convert(p))) -- for p in inc_dirs.split("\n")] -+ default_inc_directories = [ -+ _escape_string(repository_ctx.path(_cxx_inc_convert(p))) -+ for p in inc_dirs.split("\n") -+ ] -+ -+ env = repository_ctx.os.environ -+ if "SPACK_INCLUDE_DIRS" in env: -+ for path in env["SPACK_INCLUDE_DIRS"].split(":"): -+ default_inc_directories.append( -+ repository_ctx.path(_cxx_inc_convert(path)) -+ ) -+ -+ return default_inc_directories - - - def _add_option_if_supported(repository_ctx, cc, option): diff --git a/packages/bazel/compile-0.13.patch b/packages/bazel/compile-0.13.patch deleted file mode 100644 index 13c82e76..00000000 --- a/packages/bazel/compile-0.13.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/compile.sh -+++ b/compile.sh -@@ -92,7 +92,7 @@ - log "Building output/bazel" - # We set host and target platform directly since the defaults in @bazel_tools - # have not yet been generated. --bazel_build "src:bazel${EXE_EXT}" \ -+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" \ - --host_platform=@bazel_tools//platforms:host_platform \ - --platforms=@bazel_tools//platforms:target_platform \ - || fail "Could not build Bazel" diff --git a/packages/bazel/compile-0.16.patch b/packages/bazel/compile-0.16.patch deleted file mode 100644 index f61f521a..00000000 --- a/packages/bazel/compile-0.16.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/compile.sh -+++ b/compile.sh -@@ -92,7 +92,7 @@ display "." - log "Building output/bazel" - # We set host and target platform directly since the defaults in @bazel_tools - # have not yet been generated. 
--bazel_build "src:bazel_nojdk${EXE_EXT}" \ -+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel_nojdk${EXE_EXT}" \ - --host_platform=@bazel_tools//platforms:host_platform \ - --platforms=@bazel_tools//platforms:target_platform \ - || fail "Could not build Bazel" diff --git a/packages/bazel/compile-0.21.patch b/packages/bazel/compile-0.21.patch deleted file mode 100644 index d666a0f0..00000000 --- a/packages/bazel/compile-0.21.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/compile.sh -+++ b/compile.sh -@@ -92,7 +92,7 @@ display "." - log "Building output/bazel" - # We set host and target platform directly since the defaults in @bazel_tools - # have not yet been generated. --bazel_build "src:bazel_nojdk${EXE_EXT}" \ -+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel_nojdk${EXE_EXT}" \ - --action_env=PATH \ - --host_platform=@bazel_tools//platforms:host_platform \ - --platforms=@bazel_tools//platforms:target_platform \ diff --git a/packages/bazel/compile-0.3.patch b/packages/bazel/compile-0.3.patch deleted file mode 100644 index 82db6efb..00000000 --- a/packages/bazel/compile-0.3.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/compile.sh -+++ b/compile.sh -@@ -99,7 +99,7 @@ - new_step 'Building Bazel with Bazel' - display "." - log "Building output/bazel" -- bazel_build "src:bazel${EXE_EXT}" -+ CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" - cp -f "bazel-bin/src/bazel${EXE_EXT}" "output/bazel${EXE_EXT}" - chmod 0755 "output/bazel${EXE_EXT}" - BAZEL="$(pwd)/output/bazel${EXE_EXT}" diff --git a/packages/bazel/compile-0.9.patch b/packages/bazel/compile-0.9.patch deleted file mode 100644 index 135de3a0..00000000 --- a/packages/bazel/compile-0.9.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/compile.sh -+++ b/compile.sh -@@ -92,7 +92,7 @@ - log "Building output/bazel" - # We set host and target platform directly since the defaults in @bazel_tools - # have not yet been generated. 
--bazel_build "src:bazel${EXE_EXT}" \ -+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" \ - --host_platform=//tools/platforms:host_platform \ - --platforms=//tools/platforms:target_platform \ - || fail "Could not build Bazel" diff --git a/packages/bazel/cppcompileaction-0.3.0.patch b/packages/bazel/cppcompileaction-0.3.0.patch deleted file mode 100644 index dd23972d..00000000 --- a/packages/bazel/cppcompileaction-0.3.0.patch +++ /dev/null @@ -1,11 +0,0 @@ ---- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java.orig 2020-06-08 13:42:14.035342560 -0400 -+++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java 2020-06-08 13:42:25.149375458 -0400 -@@ -963,7 +963,7 @@ - // are, it's probably due to a non-hermetic #include, & we should stop - // the build with an error. - if (execPath.startsWith(execRoot)) { -- execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path -+ // execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path - } else { - problems.add(execPathFragment.getPathString()); - continue; diff --git a/packages/bazel/cppcompileaction-7.0.0.patch b/packages/bazel/cppcompileaction-7.0.0.patch new file mode 100644 index 00000000..b182f98f --- /dev/null +++ b/packages/bazel/cppcompileaction-7.0.0.patch @@ -0,0 +1,12 @@ +diff --color=auto --color=auto -Naur a/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java +--- a/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java 1980-01-01 00:00:00 ++++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java 2024-02-15 13:36:37 +@@ -143,7 +143,7 @@ + LabelConstants.EXPERIMENTAL_EXTERNAL_PATH_PREFIX.getRelative( + execPath.relativeTo(execRoot.getParentDirectory())); + } else { +- absolutePathProblems.add(execPathFragment.getPathString()); ++ // 
absolutePathProblems.add(execPathFragment.getPathString()); + continue; + } + } diff --git a/packages/bazel/package.py b/packages/bazel/package.py index 102bb5c5..9d548b61 100644 --- a/packages/bazel/package.py +++ b/packages/bazel/package.py @@ -18,8 +18,23 @@ class Bazel(Package): homepage = "https://bazel.build/" url = "https://github.com/bazelbuild/bazel/releases/download/3.1.0/bazel-3.1.0-dist.zip" + maintainers("LydDeb") + tags = ["build-tools"] + license("Apache-2.0") + + version("7.0.2", sha256="dea2b90575d43ef3e41c402f64c2481844ecbf0b40f8548b75a204a4d504e035") + version("7.0.1", sha256="596b13e071d27c43343ec8f5d263cb5312fafe7ef8702401f7ed492f182f4e6c") + version("7.0.0", sha256="477e54f6374001f439a9471ba1de9d7824daf129db95510849ecc5e19ce88170") + version("6.5.0", sha256="fc89da919415289f29e4ff18a5e01270ece9a6fe83cb60967218bac4a3bb3ed2") + version("6.4.0", sha256="bd88ff602c8bbb29ee82ba2a6b12ad092d51ec668c6577f9628f18e48ff4e51e") + version("6.3.2", sha256="8cd7feac58193be2bcba451ba6688a46824d37ca6359ff58e0d44eb98f042948") + version("6.3.1", sha256="2676319e86c5aeab142dccd42434364a33aa330a091c13562b7de87a10e68775") + version("6.3.0", sha256="902198981b1d26112fc05913e79f1b3e9772c3f95594caf85619d041ba06ede0") + version("6.2.1", sha256="4cf4d264bff388ee0012735728630d23832d3c9d021383b2fadceadb0775dd6b") + version("6.2.0", sha256="f1e8f788637ac574d471d619d2096baaca04a19b57a034399e079633db441945") + version("6.1.2", sha256="6fb3ee22fe9fa86d82e173572d504c089f10825d749725592626e090b38c9679") version("6.1.1", sha256="6b900f26d676c7eca1d2e7dff9b71890dabd3ff59cab2a2d2178bc8a0395342a") version("6.1.0", sha256="c4b85675541cf66ee7cb71514097fdd6c5fc0e02527243617a4f20ca6b4f2932") version("6.0.0", sha256="7bc0c5145c19a56d82a08fce6908c5e1a0e75e4fbfb3b6f12b4deae7f4b38cbc") @@ -31,6 +46,7 @@ class Bazel(Package): version("5.1.1", sha256="7f5d3bc1d344692b2400f3765fd4b5c0b636eb4e7a8a7b17923095c7b56a4f78") version("5.1.0", 
sha256="4de301f509fc6d0cbc697b2017384ecdc94df8f36245bbcbedc7ea6780acc9f5") version("5.0.0", sha256="072dd62d237dbc11e0bac02e118d8c2db4d0ba3ba09f1a0eb1e2a460fb8419db") + version("4.2.4", sha256="d5ba2ef28da5275f22e832aaa7f9319c61ea5db9b6a3e23b28a6a64ad03078f3") version("4.2.3", sha256="b0e84d0538f3ec2b95a49bae31a5066f0967281a3ca99965016fbe178acd2d3d") version("4.2.2", sha256="9981d0d53a356c4e87962847750a97c9e8054e460854748006c80f0d7e2b2d33") version("4.2.1", sha256="12ea7aa11e2bdb12de1dceb9939a22e96f5a480437cb17c123379d8e0fdf5e82") @@ -55,356 +71,6 @@ class Bazel(Package): version("2.1.0", sha256="3371cd9050989173a3b27364668328653a65653a50a85c320adc53953b4d5f46") version("2.0.1", sha256="a863ed9e6fc420fbd92e63a12fe1a5b9be1a7a36f11f61f1fdc582c813bbe543") version("2.0.0", sha256="724da3c656f68e787a86ebb9844773aa1c2e3a873cc39462a8f1b336153d6cbb") - version( - "1.2.1", - sha256="255da49d0f012bc4f2c1d6d3ccdbe578e22fe97b8d124e1629a486fe2a09d3e1", - deprecated=True, - ) - version( - "1.2.0", - sha256="9cb46b0a18b9166730307a0e82bf4c02281a1cc6da0fb11239e6fe4147bdee6e", - deprecated=True, - ) - version( - "1.1.0", - sha256="4b66a8c93af7832ed32e7236cf454a05f3aa06d25a8576fc3f83114f142f95ab", - deprecated=True, - ) - version( - "1.0.1", - sha256="f4d2dfad011ff03a5fae41b9b02cd96cd7297c1205d496603d66516934fbcfee", - deprecated=True, - ) - version( - "1.0.0", - sha256="c61daf0b69dd95205c695b2f9022d296d052c727062cfd396d54ffb2154f8cac", - deprecated=True, - ) - version( - "0.29.1", - sha256="872a52cff208676e1169b3e1cae71b1fe572c4109cbd66eab107d8607c378de5", - deprecated=True, - ) - version( - "0.29.0", - sha256="01cb6f2e808bd016cf0e217e12373c9efb808123e58b37885be8364458d3a40a", - deprecated=True, - ) - version( - "0.28.1", - sha256="2cea463d611f5255d2f3d41c8de5dcc0961adccb39cf0ac036f07070ba720314", - deprecated=True, - ) - version( - "0.28.0", - sha256="26ad8cdadd413b8432cf46d9fc3801e8db85d9922f85dd8a7f5a92fec876557f", - deprecated=True, - ) - version( - "0.27.2", - 
sha256="5e1bf2b48e54eb7e518430667d29aef53695d6dd7c718665a52131ab27aadab2", - deprecated=True, - ) - version( - "0.27.1", - sha256="8051d77da4ec338acd91770f853e4c25f4407115ed86fd35a6de25921673e779", - deprecated=True, - ) - version( - "0.27.0", - sha256="c3080d3b959ac08502ad5c84a51608c291accb1481baad88a628bbf79b30c67a", - deprecated=True, - ) - version( - "0.26.1", - sha256="c0e94f8f818759f3f67af798c38683520c540f469cb41aea8f5e5a0e43f11600", - deprecated=True, - ) - version( - "0.26.0", - sha256="d26dadf62959255d58e523da3448a6222af768fe1224e321b120c1d5bbe4b4f2", - deprecated=True, - ) - version( - "0.25.3", - sha256="23eafd3e439bc71baba9c592b52cb742dabc8640a13b9da1751fec090a2dda99", - deprecated=True, - ) - version( - "0.25.2", - sha256="7456032199852c043e6c5b3e4c71dd8089c1158f72ec554e6ec1c77007f0ab51", - deprecated=True, - ) - version( - "0.25.1", - sha256="a52bb31aeb1f821e649d25ef48023cfb54a12887aff875c6349ebcac36c2f056", - deprecated=True, - ) - version( - "0.25.0", - sha256="f624fe9ca8d51de192655369ac538c420afb7cde16e1ad052554b582fff09287", - deprecated=True, - ) - version( - "0.24.1", - sha256="56ea1b199003ad832813621744178e42b39e6206d34fbae342562c287da0cd54", - deprecated=True, - ) - version( - "0.24.0", - sha256="621d2a97899a88850a913eabf9285778331a309fd4658b225b1377f80060fa85", - deprecated=True, - ) - version( - "0.23.2", - sha256="293a5a7d851e0618eeb5e6958d94a11d45b6a00f2ba9376de61ac2bd5f917439", - deprecated=True, - ) - version( - "0.23.1", - sha256="dd47199f92452bf67b2c5d60ad4b7143554eaf2c6196ab6e8713449d81a0491d", - deprecated=True, - ) - version( - "0.23.0", - sha256="2daf9c2c6498836ed4ebae7706abb809748b1350cacd35b9f89452f31ac0acc1", - deprecated=True, - ) - version( - "0.22.0", - sha256="6860a226c8123770b122189636fb0c156c6e5c9027b5b245ac3b2315b7b55641", - deprecated=True, - ) - version( - "0.21.0", - sha256="6ccb831e683179e0cfb351cb11ea297b4db48f9eab987601c038aa0f83037db4", - deprecated=True, - ) - version( - "0.20.0", - 
sha256="1945afa84fd8858b0a3c68c09915a4bc81065c61df2591387b2985e2297d30bd", - deprecated=True, - ) - version( - "0.19.2", - sha256="11234cce4f6bdc62c3ac688f41c7b5c178eecb6f7e2c4ba0bcf00ba8565b1d19", - deprecated=True, - ) - version( - "0.19.1", - sha256="c9405f7b8c79ebc81f9f0e49bb656df4a0da246771d010c2cdd6bb30e2500ac0", - deprecated=True, - ) - version( - "0.19.0", - sha256="ee6135c5c47306c8421d43ad83aabc4f219cb065376ee37797f2c8ba9a615315", - deprecated=True, - ) - version( - "0.18.1", - sha256="baed9f28c317000a4ec1ad2571b3939356d22746ca945ac2109148d7abb860d4", - deprecated=True, - ) - version( - "0.18.0", - sha256="d0e86d2f7881ec8742a9823a986017452d2da0dfe4e989111da787cb89257155", - deprecated=True, - ) - version( - "0.17.2", - sha256="b6e87acfa0a405bb8b3417c58477b66d5bc27dc0d31ba6fa12bc255b9278d33b", - deprecated=True, - ) - version( - "0.17.1", - sha256="23e4281c3628cbd746da3f51330109bbf69780bd64461b63b386efae37203f20", - deprecated=True, - ) - version( - "0.16.1", - sha256="09c66b94356c82c52f212af52a81ac28eb06de1313755a2f23eeef84d167b36c", - deprecated=True, - ) - version( - "0.16.0", - sha256="c730593916ef0ba62f3d113cc3a268e45f7e8039daf7b767c8641b6999bd49b1", - deprecated=True, - ) - version( - "0.15.2", - sha256="bf53ec73be3a6d412d85ef612cec6e9c85db45da42001fab0cf1dad44cfc03f1", - deprecated=True, - ) - version( - "0.15.1", - sha256="c62b351fa4c1ba5aeb34d0a137176f8e8f1d89a32f548a10e96c11df176ffc6c", - deprecated=True, - ) - version( - "0.15.0", - sha256="c3b716e6625e6b8c323350c95cd3ae0f56aeb00458dddd10544d5bead8a7b602", - deprecated=True, - ) - version( - "0.14.1", - sha256="d49cdcd82618ae7a7a190e6f0a80d9bf85c1a66b732f994f37732dc14ffb0025", - deprecated=True, - ) - version( - "0.14.0", - sha256="259627de8b9d415cc80904523facf3d50e6e8e68448ab968eb1c9cb8ca1ef843", - deprecated=True, - ) - version( - "0.13.1", - sha256="b0269e75b40d87ff87886e5f3432cbf88f70c96f907ab588e6c21b2922d72db0", - deprecated=True, - ) - version( - "0.13.0", - 
sha256="82e9035084660b9c683187618a29aa896f8b05b5f16ae4be42a80b5e5b6a7690", - deprecated=True, - ) - version( - "0.12.0", - sha256="3b3e7dc76d145046fdc78db7cac9a82bc8939d3b291e53a7ce85315feb827754", - deprecated=True, - ) - version( - "0.11.1", - sha256="e8d762bcc01566fa50952c8028e95cfbe7545a39b8ceb3a0d0d6df33b25b333f", - deprecated=True, - ) - version( - "0.11.0", - sha256="abfeccc94728cb46be8dbb3507a23ccffbacef9fbda96a977ef4ea8d6ab0d384", - deprecated=True, - ) - version( - "0.10.1", - sha256="708248f6d92f2f4d6342006c520f22dffa2f8adb0a9dc06a058e3effe7fee667", - deprecated=True, - ) - version( - "0.10.0", - sha256="47e0798caaac4df499bce5fe554a914abd884a855a27085a4473de1d737d9548", - deprecated=True, - ) - version( - "0.9.0", - sha256="efb28fed4ffcfaee653e0657f6500fc4cbac61e32104f4208da385676e76312a", - deprecated=True, - ) - version( - "0.8.1", - sha256="dfd0761e0b7e36c1d74c928ad986500c905be5ebcfbc29914d574af1db7218cf", - deprecated=True, - ) - version( - "0.8.0", - sha256="aa840321d056abd3c6be10c4a1e98a64f9f73fff9aa89c468dae8c003974a078", - deprecated=True, - ) - version( - "0.7.0", - sha256="a084a9c5d843e2343bf3f319154a48abe3d35d52feb0ad45dec427a1c4ffc416", - deprecated=True, - ) - version( - "0.6.1", - sha256="dada1f60a512789747011184b2767d2b44136ef3b036d86947f1896d200d2ba7", - deprecated=True, - ) - version( - "0.6.0", - sha256="a0e53728a9541ef87934831f3d05f2ccfdc3b8aeffe3e037be2b92b12400598e", - deprecated=True, - ) - version( - "0.5.4", - sha256="2157b05309614d6af0e4bbc6065987aede590822634a0522161f3af5d647abc9", - deprecated=True, - ) - version( - "0.5.3", - sha256="76b5c5880a0b15f5b91f7d626c5bc3b76ce7e5d21456963c117ab711bf1c5333", - deprecated=True, - ) - version( - "0.5.2", - sha256="2418c619bdd44257a170b85b9d2ecb75def29e751b725e27186468ada2e009ea", - deprecated=True, - ) - version( - "0.5.1", - sha256="85e6a18b111afeea2e475fe991db2a441ec3824211d659bee7b0012c36be9a40", - deprecated=True, - ) - version( - "0.5.0", - 
sha256="ebba7330a8715e96a6d6dc0aa085125d529d0740d788f0544c6169d892e4f861", - deprecated=True, - ) - version( - "0.4.5", - sha256="2b737be42678900470ae9e48c975ac5b2296d9ae23c007bf118350dbe7c0552b", - deprecated=True, - ) - version( - "0.4.4", - sha256="d52a21dda271ae645711ce99c70cf44c5d3a809138e656bbff00998827548ebb", - deprecated=True, - ) - version( - "0.4.3", - sha256="cbd2ab580181c17317cf18b2bf825bcded2d97cab01cd5b5fe4f4d520b64f90f", - deprecated=True, - ) - version( - "0.4.2", - sha256="8e6f41252abadcdb2cc7a07f910ec4b45fb12c46f0a578672c6a186c7efcdb36", - deprecated=True, - ) - version( - "0.4.1", - sha256="008c648d3c46ece063ae8b5008480d8ae6d359d35967356685d1c09da07e1064", - deprecated=True, - ) - version( - "0.4.0", - sha256="6474714eee72ba2d4e271ed00ce8c05d67a9d15327bc03962b821b2af2c5ca36", - deprecated=True, - ) - version( - "0.3.2", - sha256="ca5caf7b2b48c7639f45d815b32e76d69650f3199eb8caa541d402722e3f6c10", - deprecated=True, - ) - version( - "0.3.1", - sha256="218d0e28b4d1ee34585f2ac6b18d169c81404d93958815e73e60cc0368efcbb7", - deprecated=True, - ) - version( - "0.3.0", - sha256="357fd8bdf86034b93902616f0844bd52e9304cccca22971ab7007588bf9d5fb3", - deprecated=True, - ) - version( - "0.2.0", - sha256="e9ba2740d9727ae6d0f9b1ac0c5df331814fd03518fe4b511396ed10780d5272", - deprecated=True, - ) - version( - "0.1.4", - sha256="f3c395f5cd78cfef96f4008fe842f327bc8b03b77f46999387bc0ad223b5d970", - deprecated=True, - ) - version( - "0.1.1", - sha256="c6ae19610b936a0aa940b44a3626d6e660fc457a8187d295cdf0b21169453d20", - deprecated=True, - ) variant( "nodepfail", @@ -418,45 +84,30 @@ class Bazel(Package): # end EBRAINS # https://bazel.build/install/compile-source#bootstrap-unix-prereq - depends_on("java", type=("build", "run")) depends_on("java@11", when="@5.3:", type=("build", "run")) depends_on("java@8,11", when="@3.3:5.2", type=("build", "run")) depends_on("java@8", when="@0.6:3.2", type=("build", "run")) - depends_on("java@7:8", when="@:0.5", type=("build", "run")) 
depends_on("python+pythoncmd", type=("build", "run")) depends_on("zip", when="platform=linux", type=("build", "run")) # Pass Spack environment variables to the build - patch("bazelruleclassprovider-0.25.patch", when="@0.25:") - patch("bazelruleclassprovider-0.14.patch", when="@0.14:0.24") - patch("bazelconfiguration-0.3.patch", when="@:0.13") + patch("bazelruleclassprovider-0.25.patch") # Inject include paths patch("unix_cc_configure-3.0.patch", when="@3:") - patch("unix_cc_configure-0.15.patch", when="@0.15:2") - patch("unix_cc_configure-0.10.patch", when="@0.10:0.14") - patch("unix_cc_configure-0.5.3.patch", when="@0.5.3:0.9") - patch("cc_configure-0.5.0.patch", when="@0.5.0:0.5.2") - patch("cc_configure-0.3.0.patch", when="@0.3:0.4") + patch("unix_cc_configure-0.15.patch", when="@:2") # Set CC and CXX # begin EBRAINS (modified): bring upstream after checking since when this is breaking the build - patch("compile-0.29.patch", when="@0.29:5.3") + patch("compile-0.29.patch", when="@:5.3") # end EBRAINS - patch("compile-0.21.patch", when="@0.21:0.28") - patch("compile-0.16.patch", when="@0.16:0.20") - patch("compile-0.13.patch", when="@0.13:0.15") - patch("compile-0.9.patch", when="@0.9:0.12") - patch("compile-0.6.patch", when="@0.6:0.8") - patch("compile-0.4.patch", when="@0.4:0.5") - patch("compile-0.3.patch", when="@0.2:0.3") # Disable dependency search - patch("cppcompileaction-0.3.2.patch", when="@0.3.2:+nodepfail") - patch("cppcompileaction-0.3.0.patch", when="@0.3.0:0.3.1+nodepfail") + patch("cppcompileaction-7.0.0.patch", when="@7: +nodepfail") + patch("cppcompileaction-0.3.2.patch", when="@:6 +nodepfail") # https://github.com/bazelbuild/bazel/issues/17956 - patch("apple-clang-14.0.3.patch", when="@0.3:5.4.0,6.0:6.1") + patch("apple-clang-14.0.3.patch", when="@:4.2.3,5:6.1.1") # https://github.com/bazelbuild/bazel/issues/17958 patch( @@ -465,56 +116,85 @@ class Bazel(Package): when="@5.0:5.4.0,6.0", ) - # Fix build on power9 (2x commits) - # 
https://github.com/bazelbuild/bazel/commit/5cff4f1edf8b95bf0612791632255852332f72b5 - # https://github.com/bazelbuild/bazel/commit/ab62a6e097590dac5ec946ad7a796ea0e8593ae0 - patch("build-0.29.1.patch", when="@0.29.1") - # Fix build with Fujitsu compiler - patch("blaze_util_posix-0.29.1.patch", when="@0.29.1:%fj") + patch("blaze_util_posix-0.29.1.patch", when="%fj") patch("unix_cc_configure_fj-5.2.patch", when="@5.2:%fj") patch("unix_cc_configure_fj-5.0.patch", when="@5.0:5.1%fj") - patch("unix_cc_configure_fj-0.29.1.patch", when="@0.29.1:4%fj") - patch("bazelruleclassprovider_fj-0.25.patch", when="@0.25:%fj") - conflicts( - "%fj", - when="@:0.24.1", - msg="Fujitsu Compiler cannot build 0.24.1 or less, " "please use a newer release.", - ) + patch("unix_cc_configure_fj-0.29.1.patch", when="@:4%fj") + patch("bazelruleclassprovider_fj-0.25.patch", when="%fj") # https://blog.bazel.build/2021/05/21/bazel-4-1.html conflicts("platform=darwin target=aarch64:", when="@:4.0") - # patches for compiling various older bazels which had ICWYU - # violations revealed by (but not unique to) GCC 11 header changes. - # these are derived from + # https://github.com/bazelbuild/bazel/issues/18642 + patch( + "https://github.com/bazelbuild/bazel/pull/20785.patch?full_index=1", + sha256="85dde31d129bbd31e004c5c87f23cdda9295fbb22946dc6d362f23d83bae1fd8", + when="@6.0:6.4", + ) + conflicts("%gcc@13:", when="@:5") + + # Patches for compiling various older bazels which had ICWYU violations revealed by + # (but not unique to) GCC 11 header changes. 
These are derived from # https://gitlab.alpinelinux.org/alpine/aports/-/merge_requests/29084/ - patch("gcc11_1.patch", when="@0.3.2:4") - patch("gcc11_2.patch", when="@0.3.2:4") - patch("gcc11_3.patch", when="@0.3:4") + patch("gcc11_1.patch", when="@:4") + patch("gcc11_2.patch", when="@:4") + patch("gcc11_3.patch", when="@:4") patch("gcc11_4.patch", when="@4.1:4") - # bazel-4.0.0 does not compile with gcc-11 - # newer versions of grpc and abseil dependencies are needed but are not in - # bazel-4.0.0 - conflicts("@:0.2,4.0.0", when="%gcc@11:") + # Bazel-4.0.0 does not compile with gcc-11 + # Newer versions of grpc and abseil dependencies are needed but are not in bazel-4.0.0 + conflicts("@4.0.0", when="%gcc@11:") + + # https://github.com/bazelbuild/bazel/pull/23667 + conflicts("%apple-clang@16:", when="@:7.3") executables = ["^bazel$"] + # Download resources to perform offline build with bazel. + # The following URLs and sha256 are in the file distdir_deps.bzl at the root of bazel sources. 
+ resource_dictionary = {} + resource_dictionary["bazel_skylib"] = { + "url": "https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.1/bazel-skylib-1.0.1.tar.gz", + "sha256": "f1c8360c01fcf276778d3519394805dc2a71a64274a3a0908bc9edff7b5aebc8", + "when": "@4:6", + } + resource_dictionary["com_google_absl"] = { + "url": "https://github.com/abseil/abseil-cpp/archive/refs/tags/20230802.0.tar.gz", + "sha256": "59d2976af9d6ecf001a81a35749a6e551a335b949d34918cfade07737b9d93c5", + "when": "@6.0:6.4", + } + resource_dictionary["zulu_11_56_19"] = { + "url": "https://mirror.bazel.build/cdn.azul.com/zulu/bin/zulu11.56.19-ca-jdk11.0.15-linux_x64.tar.gz", + "sha256": "e064b61d93304012351242bf0823c6a2e41d9e28add7ea7f05378b7243d34247", + "when": "@6", + } + resource_dictionary["zulu_11_50_19"] = { + "url": "https://mirror.bazel.build/openjdk/azul-zulu11.50.19-ca-jdk11.0.12/zulu11.50.19-ca-jdk11.0.12-linux_x64.tar.gz", + "sha256": "b8e8a63b79bc312aa90f3558edbea59e71495ef1a9c340e38900dd28a1c579f3", + "when": "@5", + } + resource_dictionary["zulu_11_37_17"] = { + "url": "https://mirror.bazel.build/openjdk/azul-zulu11.37.17-ca-jdk11.0.6/zulu11.37.17-ca-jdk11.0.6-linux_x64.tar.gz", + "sha256": "360626cc19063bc411bfed2914301b908a8f77a7919aaea007a977fa8fb3cde1", + "when": "@4", + } + for resource_name in resource_dictionary.keys(): + resource( + when=resource_dictionary[resource_name]["when"], + name=resource_name, + url=resource_dictionary[resource_name]["url"], + sha256=resource_dictionary[resource_name]["sha256"], + destination="archive", + expand=False, + ) + @classmethod def determine_version(cls, exe): output = Executable(exe)("version", output=str, error=str) match = re.search(r"Build label: ([\d.]+)", output) return match.group(1) if match else None - def url_for_version(self, version): - if version >= Version("0.4.1"): - url = "https://github.com/bazelbuild/bazel/releases/download/{0}/bazel-{0}-dist.zip" - else: - url = 
"https://github.com/bazelbuild/bazel/archive/{0}.tar.gz" - - return url.format(version) - # begin EBRAINS (added) def setup_dependent_build_environment(self, env, dependent_spec): env.prepend_path("GCC_HOST_COMPILER_PREFIX", self.spec['binutils'].prefix.bin) @@ -526,19 +206,25 @@ class Bazel(Package): env.set("BAZEL_LINKOPTS", "") env.set("BAZEL_LINKLIBS", "-lstdc++") - env.set( - "EXTRA_BAZEL_ARGS", - # Spack's logs don't handle colored output well - "--color=no --host_javabase=@local_jdk//:jdk" - # Enable verbose output for failures - " --verbose_failures" - # begin EBRAINS (modified): bring upstream after checking since when the compile-x.patch is breaking the build - # expose CC and CXX (the spack compiler wrappers) - " --action_env=CC" - " --action_env=CXX" - # end EBRAINS - " --jobs={0}".format(make_jobs), + # .WARNING: Option 'host_javabase' is deprecated + # Use local java installation + # begin EBRAINS (modified): expose CC and CXX (the spack compiler wrappers) + args = "--color=no --define=ABSOLUTE_JAVABASE={0} --verbose_failures --action_env=CC --action_env=CXX --jobs={1}".format( + self.spec["java"].prefix, make_jobs ) + # end EBRAINS + + resource_stages = self.stage[1:] + for _resource in resource_stages: + try: + resource_name = _resource.resource.name + if self.spec.satisfies(self.resource_dictionary[resource_name]["when"]): + archive_path = _resource.source_path + args += " --distdir={0}".format(archive_path) + except AttributeError: + continue + + env.set("EXTRA_BAZEL_ARGS", args) @run_before("install") def bootstrap(self): diff --git a/packages/bazel/unix_cc_configure-0.10.patch b/packages/bazel/unix_cc_configure-0.10.patch deleted file mode 100644 index bdd24e11..00000000 --- a/packages/bazel/unix_cc_configure-0.10.patch +++ /dev/null @@ -1,22 +0,0 @@ ---- a/tools/cpp/unix_cc_configure.bzl -+++ b/tools/cpp/unix_cc_configure.bzl -@@ -147,9 +147,18 @@ def get_escaped_cxx_inc_directories(repository_ctx, cc, additional_flags = []): - else: - inc_dirs 
= result.stderr[index1 + 1:index2].strip() - -- return [_prepare_include_path(repository_ctx, _cxx_inc_convert(p)) -- for p in inc_dirs.split("\n")] -+ default_inc_directories = [ -+ _prepare_include_path(repository_ctx, _cxx_inc_convert(p)) -+ for p in inc_dirs.split("\n") -+ ] -+ -+ env = repository_ctx.os.environ -+ if "SPACK_INCLUDE_DIRS" in env: -+ for path in env["SPACK_INCLUDE_DIRS"].split(":"): -+ default_inc_directories.append(path) -+ -+ return default_inc_directories - - - def _is_option_supported(repository_ctx, cc, option): diff --git a/packages/bazel/unix_cc_configure-0.5.3.patch b/packages/bazel/unix_cc_configure-0.5.3.patch deleted file mode 100644 index 79b583ba..00000000 --- a/packages/bazel/unix_cc_configure-0.5.3.patch +++ /dev/null @@ -1,24 +0,0 @@ ---- a/tools/cpp/unix_cc_configure.bzl -+++ b/tools/cpp/unix_cc_configure.bzl -@@ -117,9 +117,19 @@ def get_escaped_cxx_inc_directories(repository_ctx, cc): - else: - inc_dirs = result.stderr[index1 + 1:index2].strip() - -- return [escape_string(repository_ctx.path(_cxx_inc_convert(p))) -- for p in inc_dirs.split("\n")] -+ default_inc_directories = [ -+ escape_string(repository_ctx.path(_cxx_inc_convert(p))) -+ for p in inc_dirs.split("\n") -+ ] - -+ env = repository_ctx.os.environ -+ if "SPACK_INCLUDE_DIRS" in env: -+ for path in env["SPACK_INCLUDE_DIRS"].split(":"): -+ default_inc_directories.append( -+ repository_ctx.path(_cxx_inc_convert(path)) -+ ) -+ -+ return default_inc_directories - - def _add_option_if_supported(repository_ctx, cc, option): - """Checks that `option` is supported by the C compiler. 
Doesn't %-escape the option.""" diff --git a/packages/gcc/darwin/apfs.patch b/packages/gcc/darwin/apfs.patch deleted file mode 100644 index d1f9d566..00000000 --- a/packages/gcc/darwin/apfs.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff -uNr gcc-7.2.0.orig/libstdc++-v3/include/Makefile.in gcc-7.2.0/libstdc++-v3/include/Makefile.in ---- gcc-7.2.0.orig/libstdc++-v3/include/Makefile.in 2017-07-25 14:05:07.000000000 -0400 -+++ gcc-7.2.0/libstdc++-v3/include/Makefile.in 2017-09-02 12:22:08.000000000 -0400 -@@ -1764,6 +1764,8 @@ - @GLIBCXX_HOSTED_TRUE@install-data-local: install-headers - @GLIBCXX_HOSTED_FALSE@install-data-local: install-freestanding-headers - -+.NOTPARALLEL: install-headers -+ - # This is a subset of the full install-headers rule. We only need <ciso646>, - # <cstddef>, <cfloat>, <limits>, <climits>, <cstdint>, <cstdlib>, <new>, - # <typeinfo>, <exception>, <initializer_list>, <cstdalign>, <cstdarg>, diff --git a/packages/gcc/darwin/clang13.patch b/packages/gcc/darwin/clang13.patch deleted file mode 100644 index ac3bfcbe..00000000 --- a/packages/gcc/darwin/clang13.patch +++ /dev/null @@ -1,32 +0,0 @@ ---- a/gcc/genconditions.c 2019-01-01 12:37:19.064943662 +0100 -+++ b/gcc/genconditions.c 2019-10-11 10:57:11.464595789 +0200 -@@ -57,8 +57,9 @@ write_header (void) - \n\ - /* It is necessary, but not entirely safe, to include the headers below\n\ - in a generator program. As a defensive measure, don't do so when the\n\ -- table isn't going to have anything in it. */\n\ --#if GCC_VERSION >= 3001\n\ -+ table isn't going to have anything in it.\n\ -+ Clang 9 is buggy and doesn't handle __builtin_constant_p correctly. */\n\ -+#if GCC_VERSION >= 3001 && __clang_major__ < 9\n\ - \n\ - /* Do not allow checking to confuse the issue. */\n\ - #undef CHECKING_P\n\ -@@ -170,7 +171,7 @@ struct c_test\n\ - vary at run time. It works in 3.0.1 and later; 3.0 only when not\n\ - optimizing. 
*/\n\ - \n\ --#if GCC_VERSION >= 3001\n\ -+#if GCC_VERSION >= 3001 && __clang_major__ < 9\n\ - static const struct c_test insn_conditions[] = {\n"); - - traverse_c_tests (write_one_condition, 0); -@@ -191,7 +192,7 @@ write_writer (void) - " unsigned int i;\n" - " const char *p;\n" - " puts (\"(define_conditions [\");\n" -- "#if GCC_VERSION >= 3001\n" -+ "#if GCC_VERSION >= 3001 && __clang_major__ < 9\n" - " for (i = 0; i < ARRAY_SIZE (insn_conditions); i++)\n" - " {\n" - " printf (\" (%d \\\"\", insn_conditions[i].value);\n" diff --git a/packages/gcc/darwin/gcc-4.9.patch1 b/packages/gcc/darwin/gcc-4.9.patch1 deleted file mode 100644 index 444e2927..00000000 --- a/packages/gcc/darwin/gcc-4.9.patch1 +++ /dev/null @@ -1,42 +0,0 @@ -diff --git a/gcc/configure b/gcc/configure -index 9523773..52b0bf7 100755 ---- a/gcc/configure -+++ b/gcc/configure -@@ -24884,7 +24884,7 @@ if test "${gcc_cv_as_ix86_filds+set}" = set; then : - else - gcc_cv_as_ix86_filds=no - if test x$gcc_cv_as != x; then -- $as_echo 'filds mem; fists mem' > conftest.s -+ $as_echo 'filds (%ebp); fists (%ebp)' > conftest.s - if { ac_try='$gcc_cv_as $gcc_cv_as_flags -o conftest.o conftest.s >&5' - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 - (eval $ac_try) 2>&5 -@@ -24915,7 +24915,7 @@ if test "${gcc_cv_as_ix86_fildq+set}" = set; then : - else - gcc_cv_as_ix86_fildq=no - if test x$gcc_cv_as != x; then -- $as_echo 'fildq mem; fistpq mem' > conftest.s -+ $as_echo 'fildq (%ebp); fistpq (%ebp)' > conftest.s - if { ac_try='$gcc_cv_as $gcc_cv_as_flags -o conftest.o conftest.s >&5' - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_try\""; } >&5 - (eval $ac_try) 2>&5 -diff --git a/gcc/configure.ac b/gcc/configure.ac -index 68b0ee8..bd53978 100644 ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -3869,13 +3869,13 @@ foo: nop - - gcc_GAS_CHECK_FEATURE([filds and fists mnemonics], - gcc_cv_as_ix86_filds,,, -- [filds mem; fists mem],, -+ [filds (%ebp); fists (%ebp)],, - 
[AC_DEFINE(HAVE_AS_IX86_FILDS, 1, - [Define if your assembler uses filds and fists mnemonics.])]) - - gcc_GAS_CHECK_FEATURE([fildq and fistpq mnemonics], - gcc_cv_as_ix86_fildq,,, -- [fildq mem; fistpq mem],, -+ [fildq (%ebp); fistpq (%ebp)],, - [AC_DEFINE(HAVE_AS_IX86_FILDQ, 1, - [Define if your assembler uses fildq and fistq mnemonics.])]) - diff --git a/packages/gcc/darwin/gcc-4.9.patch2 b/packages/gcc/darwin/gcc-4.9.patch2 deleted file mode 100644 index b065997f..00000000 --- a/packages/gcc/darwin/gcc-4.9.patch2 +++ /dev/null @@ -1,28 +0,0 @@ -From 82f81877458ea372176eabb5de36329431dce99b Mon Sep 17 00:00:00 2001 -From: Iain Sandoe <iain@codesourcery.com> -Date: Sat, 21 Dec 2013 00:30:18 +0000 -Subject: [PATCH] don't try to mark local symbols as no-dead-strip - ---- - gcc/config/darwin.c | 5 +++++ - 1 file changed, 5 insertions(+) - -diff --git a/gcc/config/darwin.c b/gcc/config/darwin.c -index 40804b8..0080299 100644 ---- a/gcc/config/darwin.c -+++ b/gcc/config/darwin.c -@@ -1259,6 +1259,11 @@ darwin_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED) - void - darwin_mark_decl_preserved (const char *name) - { -+ /* Actually we shouldn't mark any local symbol this way, but for now -+ this only happens with ObjC meta-data. 
*/ -+ if (darwin_label_is_anonymous_local_objc_name (name)) -+ return; -+ - fprintf (asm_out_file, "\t.no_dead_strip "); - assemble_name (asm_out_file, name); - fputc ('\n', asm_out_file); --- -2.2.1 - diff --git a/packages/gcc/darwin/gcc-6.1.0-jit.patch b/packages/gcc/darwin/gcc-6.1.0-jit.patch deleted file mode 100644 index 8cc4405d..00000000 --- a/packages/gcc/darwin/gcc-6.1.0-jit.patch +++ /dev/null @@ -1,21 +0,0 @@ -# Fix for libgccjit.so linkage on Darwin -# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=64089 -# https://github.com/Homebrew/homebrew-core/issues/1872#issuecomment-225625332 -# https://github.com/Homebrew/homebrew-core/issues/1872#issuecomment-225626490 - -# Stolen from Homebrew: -# https://raw.githubusercontent.com/Homebrew/formula-patches/e9e0ee09389a54cc4c8fe1c24ebca3cd765ed0ba/gcc/6.1.0-jit.patch -diff --git a/gcc/jit/Make-lang.in b/gcc/jit/Make-lang.in -index 44d0750..4df2a9c 100644 ---- a/gcc/jit/Make-lang.in -+++ b/gcc/jit/Make-lang.in -@@ -85,8 +85,7 @@ $(LIBGCCJIT_FILENAME): $(jit_OBJS) \ - $(jit_OBJS) libbackend.a libcommon-target.a libcommon.a \ - $(CPPLIB) $(LIBDECNUMBER) $(LIBS) $(BACKENDLIBS) \ - $(EXTRA_GCC_OBJS) \ -- -Wl,--version-script=$(srcdir)/jit/libgccjit.map \ -- -Wl,-soname,$(LIBGCCJIT_SONAME) -+ -Wl,-install_name,$(LIBGCCJIT_SONAME) - - $(LIBGCCJIT_SONAME_SYMLINK): $(LIBGCCJIT_FILENAME) - ln -sf $(LIBGCCJIT_FILENAME) $(LIBGCCJIT_SONAME_SYMLINK) diff --git a/packages/gcc/darwin/gcc-7.1.0-headerpad.patch b/packages/gcc/darwin/gcc-7.1.0-headerpad.patch deleted file mode 100644 index 11ca2d0e..00000000 --- a/packages/gcc/darwin/gcc-7.1.0-headerpad.patch +++ /dev/null @@ -1,19 +0,0 @@ -# Use -headerpad_max_install_names in the build, -# otherwise lto1 load commands cannot be edited on El Capitan - -# Stolen from Homebrew: -# https://raw.githubusercontent.com/Homebrew/formula-patches/32cf103/gcc/7.1.0-headerpad.patch - -diff --git a/config/mh-darwin b/config/mh-darwin -index 148b730..c2318b5 100644 ---- a/config/mh-darwin -+++ 
b/config/mh-darwin -@@ -16,7 +16,7 @@ DARWIN_GCC_MDYNAMIC_NO_PIC := \ - DARWIN_NO_PIE := `case ${host} in *-*-darwin[1][1-9]*) echo -Wl,-no_pie ;; esac;` - - BOOT_CFLAGS += $(DARWIN_MDYNAMIC_NO_PIC) --BOOT_LDFLAGS += $(DARWIN_NO_PIE) -+BOOT_LDFLAGS += $(DARWIN_NO_PIE) -Wl,-headerpad_max_install_names - - # Similarly, for cross-compilation. - STAGE1_CFLAGS += $(DARWIN_MDYNAMIC_NO_PIC) diff --git a/packages/gcc/darwin/headers-10.13-fix.patch b/packages/gcc/darwin/headers-10.13-fix.patch deleted file mode 100644 index 3dbfde02..00000000 --- a/packages/gcc/darwin/headers-10.13-fix.patch +++ /dev/null @@ -1,127 +0,0 @@ -diff -pur fixincludes/fixincl.x gcc-5.5.0/fixincludes/fixincl.x ---- a/fixincludes/fixincl.x 2017-01-30 17:08:42.000000000 +0100 -+++ b/fixincludes/fixincl.x 2017-12-21 14:34:35.000000000 +0100 -@@ -1,12 +1,12 @@ - /* -*- buffer-read-only: t -*- vi: set ro: -- * -+ * - * DO NOT EDIT THIS FILE (fixincl.x) -- * -- * It has been AutoGen-ed January 5, 2017 at 06:05:06 PM by AutoGen 5.16.2 -+ * -+ * It has been AutoGen-ed December 21, 2017 at 02:34:35 PM by AutoGen 5.18.7 - * From the definitions inclhack.def - * and the template file fixincl - */ --/* DO NOT SVN-MERGE THIS FILE, EITHER Thu Jan 5 18:05:06 CET 2017 -+/* DO NOT SVN-MERGE THIS FILE, EITHER Thu Dec 21 14:34:35 CET 2017 - * - * You must regenerate it. Use the ./genfixes script. - * -@@ -15,7 +15,7 @@ - * certain ANSI-incompatible system header files which are fixed to work - * correctly with ANSI C and placed in a directory that GNU C will search. - * -- * This file contains 240 fixup descriptions. -+ * This file contains 241 fixup descriptions. - * - * See README for more information. 
- * -@@ -2579,6 +2579,43 @@ extern \"C\" {\n\ - - /* * * * * * * * * * * * * * * * * * * * * * * * * * - * -+ * Description of Darwin_Osavailability fix -+ */ -+tSCC zDarwin_OsavailabilityName[] = -+ "darwin_osavailability"; -+ -+/* -+ * File name selection pattern -+ */ -+tSCC zDarwin_OsavailabilityList[] = -+ "os/availability.h\0"; -+/* -+ * Machine/OS name selection pattern -+ */ -+tSCC* apzDarwin_OsavailabilityMachs[] = { -+ "*-*-darwin*", -+ (const char*)NULL }; -+ -+/* -+ * content selection pattern - do fix if pattern found -+ */ -+tSCC zDarwin_OsavailabilitySelect0[] = -+ "#define[ \t]+__(API_[A-Z_]*)\\(\\.\\.\\.\\)"; -+ -+#define DARWIN_OSAVAILABILITY_TEST_CT 1 -+static tTestDesc aDarwin_OsavailabilityTests[] = { -+ { TT_EGREP, zDarwin_OsavailabilitySelect0, (regex_t*)NULL }, }; -+ -+/* -+ * Fix Command Arguments for Darwin_Osavailability -+ */ -+static const char* apzDarwin_OsavailabilityPatch[] = { -+ "format", -+ "#define %1(...)", -+ (char*)NULL }; -+ -+/* * * * * * * * * * * * * * * * * * * * * * * * * * -+ * - * Description of Darwin_9_Long_Double_Funcs_2 fix - */ - tSCC zDarwin_9_Long_Double_Funcs_2Name[] = -@@ -9818,9 +9855,9 @@ static const char* apzX11_SprintfPatch[] - * - * List of all fixes - */ --#define REGEX_COUNT 276 -+#define REGEX_COUNT 277 - #define MACH_LIST_SIZE_LIMIT 187 --#define FIX_COUNT 240 -+#define FIX_COUNT 241 - - /* - * Enumerate the fixes -@@ -9885,6 +9922,7 @@ typedef enum { - CTRL_QUOTES_DEF_FIXIDX, - CTRL_QUOTES_USE_FIXIDX, - CXX_UNREADY_FIXIDX, -+ DARWIN_OSAVAILABILITY_FIXIDX, - DARWIN_9_LONG_DOUBLE_FUNCS_2_FIXIDX, - DARWIN_EXTERNC_FIXIDX, - DARWIN_GCC4_BREAKAGE_FIXIDX, -@@ -10364,6 +10402,11 @@ tFixDesc fixDescList[ FIX_COUNT ] = { - CXX_UNREADY_TEST_CT, FD_MACH_ONLY | FD_SUBROUTINE, - aCxx_UnreadyTests, apzCxx_UnreadyPatch, 0 }, - -+ { zDarwin_OsavailabilityName, zDarwin_OsavailabilityList, -+ apzDarwin_OsavailabilityMachs, -+ DARWIN_OSAVAILABILITY_TEST_CT, FD_MACH_ONLY | FD_SUBROUTINE, -+ aDarwin_OsavailabilityTests, 
apzDarwin_OsavailabilityPatch, 0 }, -+ - { zDarwin_9_Long_Double_Funcs_2Name, zDarwin_9_Long_Double_Funcs_2List, - apzDarwin_9_Long_Double_Funcs_2Machs, - DARWIN_9_LONG_DOUBLE_FUNCS_2_TEST_CT, FD_MACH_ONLY | FD_SUBROUTINE, -diff -pur fixincludes/inclhack.def gcc-5.5.0/fixincludes/inclhack.def ---- a/fixincludes/inclhack.def 2017-01-30 17:08:42.000000000 +0100 -+++ b/fixincludes/inclhack.def 2017-12-21 14:34:28.000000000 +0100 -@@ -1337,6 +1337,18 @@ fix = { - test_text = "extern void* malloc( size_t );"; - }; - -+fix = { -+ hackname = darwin_osavailability; -+ mach = "*-*-darwin*"; -+ files = os/availability.h; -+ select = "#define[ \t]+__(API_[A-Z_]*)\\(\\.\\.\\.\\)"; -+ c_fix = format; -+ c_fix_arg = "#define %1(...)"; -+ -+ test_text = "#define __API_AVAILABLE(...)\n" -+ "#define API_AVAILABLE(...)\n"; -+}; -+ - /* - * For the AAB_darwin7_9_long_double_funcs fix to be useful, - * you have to not use "" includes. diff --git a/packages/gcc/detection_test.yaml b/packages/gcc/detection_test.yaml deleted file mode 100644 index 5e53101a..00000000 --- a/packages/gcc/detection_test.yaml +++ /dev/null @@ -1,38 +0,0 @@ -paths: - # Ubuntu 18.04, system compilers without Fortran - - layout: - - executables: - - "bin/gcc" - - "bin/g++" - script: "echo 7.5.0" - results: - - spec: "gcc@7.5.0 languages=c,c++" - # Mock a version < 7 of GCC that requires -dumpversion and - # errors with -dumpfullversion - - layout: - - executables: - - "bin/gcc-5" - - "bin/g++-5" - - "bin/gfortran-5" - script: | - if [[ "$1" == "-dumpversion" ]] ; then - echo "5.5.0" - else - echo "gcc-5: fatal error: no input files" - echo "compilation terminated." 
- exit 1 - fi - results: - - spec: "gcc@5.5.0 languages=c,c++,fortran" - # Multiple compilers present at the same time - - layout: - - executables: - - "bin/x86_64-linux-gnu-gcc-6" - script: 'echo 6.5.0' - - executables: - - "bin/x86_64-linux-gnu-gcc-10" - - "bin/x86_64-linux-gnu-g++-10" - script: "echo 10.1.0" - results: - - spec: "gcc@6.5.0 languages=c" - - spec: "gcc@10.1.0 languages=c,c++" \ No newline at end of file diff --git a/packages/gcc/gcc-backport.patch b/packages/gcc/gcc-backport.patch deleted file mode 100644 index f9fab68f..00000000 --- a/packages/gcc/gcc-backport.patch +++ /dev/null @@ -1,138 +0,0 @@ -2016-02-20 Bernd Edlinger <bernd.edlinger@hotmail.de> - - Backported from mainline - 2016-02-19 Jakub Jelinek <jakub@redhat.com> - Bernd Edlinger <bernd.edlinger@hotmail.de> - - * Make-lang.in: Invoke gperf with -L C++. - * cfns.gperf: Remove prototypes for hash and libc_name_p - inlines. - * cfns.h: Regenerated. - * except.c (nothrow_libfn_p): Adjust. - -Index: gcc/cp/Make-lang.in -=================================================================== ---- a/gcc/cp/Make-lang.in (revision 233574) -+++ b/gcc/cp/Make-lang.in (working copy) -@@ -111,7 +111,7 @@ else - # deleting the $(srcdir)/cp/cfns.h file. - $(srcdir)/cp/cfns.h: - endif -- gperf -o -C -E -k '1-6,$$' -j1 -D -N 'libc_name_p' -L ANSI-C \ -+ gperf -o -C -E -k '1-6,$$' -j1 -D -N 'libc_name_p' -L C++ \ - $(srcdir)/cp/cfns.gperf --output-file $(srcdir)/cp/cfns.h - - # -Index: gcc/cp/cfns.gperf -=================================================================== ---- a/gcc/cp/cfns.gperf (revision 233574) -+++ b/gcc/cp/cfns.gperf (working copy) -@@ -1,3 +1,5 @@ -+%language=C++ -+%define class-name libc_name - %{ - /* Copyright (C) 2000-2015 Free Software Foundation, Inc. - -@@ -16,14 +18,6 @@ for more details. - You should have received a copy of the GNU General Public License - along with GCC; see the file COPYING3. If not see - <http://www.gnu.org/licenses/>. 
*/ --#ifdef __GNUC__ --__inline --#endif --static unsigned int hash (const char *, unsigned int); --#ifdef __GNUC__ --__inline --#endif --const char * libc_name_p (const char *, unsigned int); - %} - %% - # The standard C library functions, for feeding to gperf; the result is used -Index: gcc/cp/cfns.h -=================================================================== ---- a/gcc/cp/cfns.h (revision 233574) -+++ b/gcc/cp/cfns.h (working copy) -@@ -1,5 +1,5 @@ --/* ANSI-C code produced by gperf version 3.0.3 */ --/* Command-line: gperf -o -C -E -k '1-6,$' -j1 -D -N libc_name_p -L ANSI-C cfns.gperf */ -+/* C++ code produced by gperf version 3.0.4 */ -+/* Command-line: gperf -o -C -E -k '1-6,$' -j1 -D -N libc_name_p -L C++ --output-file cfns.h cfns.gperf */ - - #if !((' ' == 32) && ('!' == 33) && ('"' == 34) && ('#' == 35) \ - && ('%' == 37) && ('&' == 38) && ('\'' == 39) && ('(' == 40) \ -@@ -28,7 +28,7 @@ - #error "gperf generated tables don't work with this execution character set. Please report a bug to <bug-gnu-gperf@gnu.org>." - #endif - --#line 1 "cfns.gperf" -+#line 3 "cfns.gperf" - - /* Copyright (C) 2000-2015 Free Software Foundation, Inc. - -@@ -47,26 +47,19 @@ for more details. - You should have received a copy of the GNU General Public License - along with GCC; see the file COPYING3. If not see - <http://www.gnu.org/licenses/>. 
*/ --#ifdef __GNUC__ --__inline --#endif --static unsigned int hash (const char *, unsigned int); --#ifdef __GNUC__ --__inline --#endif --const char * libc_name_p (const char *, unsigned int); - /* maximum key range = 391, duplicates = 0 */ - --#ifdef __GNUC__ --__inline --#else --#ifdef __cplusplus --inline --#endif --#endif --static unsigned int --hash (register const char *str, register unsigned int len) -+class libc_name - { -+private: -+ static inline unsigned int hash (const char *str, unsigned int len); -+public: -+ static const char *libc_name_p (const char *str, unsigned int len); -+}; -+ -+inline unsigned int -+libc_name::hash (register const char *str, register unsigned int len) -+{ - static const unsigned short asso_values[] = - { - 400, 400, 400, 400, 400, 400, 400, 400, 400, 400, -@@ -122,14 +115,8 @@ along with GCC; see the file COPYING3. If not see - return hval + asso_values[(unsigned char)str[len - 1]]; - } - --#ifdef __GNUC__ --__inline --#ifdef __GNUC_STDC_INLINE__ --__attribute__ ((__gnu_inline__)) --#endif --#endif - const char * --libc_name_p (register const char *str, register unsigned int len) -+libc_name::libc_name_p (register const char *str, register unsigned int len) - { - enum - { -Index: gcc/cp/except.c -=================================================================== ---- a/gcc/cp/except.c (revision 233574) -+++ b/gcc/cp/except.c (working copy) -@@ -1040,7 +1040,8 @@ nothrow_libfn_p (const_tree fn) - unless the system headers are playing rename tricks, and if - they are, we don't want to be confused by them. 
*/ - id = DECL_NAME (fn); -- return !!libc_name_p (IDENTIFIER_POINTER (id), IDENTIFIER_LENGTH (id)); -+ return !!libc_name::libc_name_p (IDENTIFIER_POINTER (id), -+ IDENTIFIER_LENGTH (id)); - } - - /* Returns nonzero if an exception of type FROM will be caught by a diff --git a/packages/gcc/glibc-2.31-libsanitizer-1-gcc-6.patch b/packages/gcc/glibc-2.31-libsanitizer-1-gcc-6.patch deleted file mode 100644 index 4187b812..00000000 --- a/packages/gcc/glibc-2.31-libsanitizer-1-gcc-6.patch +++ /dev/null @@ -1,39 +0,0 @@ -From ce9568e9e9cf6094be30e748821421e703754ffc Mon Sep 17 00:00:00 2001 -From: Jakub Jelinek <jakub@redhat.com> -Date: Fri, 8 Nov 2019 19:53:18 +0100 -Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm - bootstrap due to libsanitizer) - - Backported from mainline - 2019-10-22 Tamar Christina <tamar.christina@arm.com> - Backported for version 5.3.0 <= gcc <= 6.5.0 - 2020-06-05 John L. Jolly <john.jolly@gmail.com> - - PR sanitizer/92154 - * sanitizer_common/sanitizer_platform_limits_posix.cc: - Cherry-pick compiler-rt revision r375220. - -From-SVN: r277981 ---- - libsanitizer/ChangeLog | 9 +++++++++ - .../sanitizer_common/sanitizer_platform_limits_posix.cc | 6 +++++- - 2 files changed, 14 insertions(+), 1 deletion(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index 6cd4a5bac8b0..06a605ff4670 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -1130,8 +1130,12 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); - #ifndef __GLIBC_PREREQ - #define __GLIBC_PREREQ(x, y) 0 - #endif --#if !defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21) -+#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ -+ !defined(__arm__) - /* On aarch64 glibc 2.20 and earlier provided incorrect mode field. 
*/ -+/* On Arm glibc 2.31 and later provide a different mode field, this field is -+ never used by libsanitizer so we can simply ignore this assert for all glibc -+ versions. */ - CHECK_SIZE_AND_OFFSET(ipc_perm, mode); - #endif - diff --git a/packages/gcc/glibc-2.31-libsanitizer-1.patch b/packages/gcc/glibc-2.31-libsanitizer-1.patch deleted file mode 100644 index 96037707..00000000 --- a/packages/gcc/glibc-2.31-libsanitizer-1.patch +++ /dev/null @@ -1,37 +0,0 @@ -From ce9568e9e9cf6094be30e748821421e703754ffc Mon Sep 17 00:00:00 2001 -From: Jakub Jelinek <jakub@redhat.com> -Date: Fri, 8 Nov 2019 19:53:18 +0100 -Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm - bootstrap due to libsanitizer) - - Backported from mainline - 2019-10-22 Tamar Christina <tamar.christina@arm.com> - - PR sanitizer/92154 - * sanitizer_common/sanitizer_platform_limits_posix.cc: - Cherry-pick compiler-rt revision r375220. - -From-SVN: r277981 ---- - libsanitizer/ChangeLog | 9 +++++++++ - .../sanitizer_common/sanitizer_platform_limits_posix.cc | 6 +++++- - 2 files changed, 14 insertions(+), 1 deletion(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index 6cd4a5bac8b0..06a605ff4670 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -1156,8 +1156,12 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); - CHECK_SIZE_AND_OFFSET(ipc_perm, gid); - CHECK_SIZE_AND_OFFSET(ipc_perm, cuid); - CHECK_SIZE_AND_OFFSET(ipc_perm, cgid); --#if !defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21) -+#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ -+ !defined(__arm__) - /* On aarch64 glibc 2.20 and earlier provided incorrect mode field. 
*/ -+/* On Arm glibc 2.31 and later provide a different mode field, this field is -+ never used by libsanitizer so we can simply ignore this assert for all glibc -+ versions. */ - CHECK_SIZE_AND_OFFSET(ipc_perm, mode); - #endif - diff --git a/packages/gcc/glibc-2.31-libsanitizer-2-gcc-6.patch b/packages/gcc/glibc-2.31-libsanitizer-2-gcc-6.patch deleted file mode 100644 index 755db173..00000000 --- a/packages/gcc/glibc-2.31-libsanitizer-2-gcc-6.patch +++ /dev/null @@ -1,69 +0,0 @@ -From 75003cdd23c310ec385344e8040d490e8dd6d2be Mon Sep 17 00:00:00 2001 -From: Jakub Jelinek <jakub@redhat.com> -Date: Fri, 20 Dec 2019 17:58:35 +0100 -Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm - bootstrap due to libsanitizer) - - Backported from mainline - 2019-11-26 Jakub Jelinek <jakub@redhat.com> - Backported for version 5.3.0 <= gcc <= 6.5.0 - 2020-06-05 John L. Jolly <john.jolly@gmail.com> - - PR sanitizer/92154 - * sanitizer_common/sanitizer_platform_limits_posix.h: Cherry-pick - llvm-project revision 947f9692440836dcb8d88b74b69dd379d85974ce. - * sanitizer_common/sanitizer_platform_limits_posix.cc: Likewise. 
- -From-SVN: r279653 ---- - libsanitizer/ChangeLog | 10 ++++++++++ - .../sanitizer_platform_limits_posix.cc | 9 +++------ - .../sanitizer_platform_limits_posix.h | 15 +-------------- - 3 files changed, 14 insertions(+), 20 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index 06a605ff4670..d823a12190c0 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -1130,12 +1130,9 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); - #ifndef __GLIBC_PREREQ - #define __GLIBC_PREREQ(x, y) 0 - #endif --#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ -- !defined(__arm__) --/* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ --/* On Arm glibc 2.31 and later provide a different mode field, this field is -- never used by libsanitizer so we can simply ignore this assert for all glibc -- versions. */ -+#if !SANITIZER_LINUX || __GLIBC_PREREQ (2, 31) -+/* glibc 2.30 and earlier provided 16-bit mode field instead of 32-bit -+ on many architectures. 
*/ - CHECK_SIZE_AND_OFFSET(ipc_perm, mode); - #endif - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -index 73af92af1e8f..6a673a7c9959 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -@@ -211,20 +211,13 @@ namespace __sanitizer { - unsigned long __unused1; - unsigned long __unused2; - #elif defined(__sparc__) --# if defined(__arch64__) - unsigned mode; -- unsigned short __pad1; --# else -- unsigned short __pad1; -- unsigned short mode; - unsigned short __pad2; --# endif - unsigned short __seq; - unsigned long long __unused1; - unsigned long long __unused2; - #else -- unsigned short mode; -- unsigned short __pad1; -+ unsigned int mode; - unsigned short __seq; - unsigned short __pad2; - #if defined(__x86_64__) && !defined(_LP64) diff --git a/packages/gcc/glibc-2.31-libsanitizer-2-gcc-7.patch b/packages/gcc/glibc-2.31-libsanitizer-2-gcc-7.patch deleted file mode 100644 index 07cbb3fd..00000000 --- a/packages/gcc/glibc-2.31-libsanitizer-2-gcc-7.patch +++ /dev/null @@ -1,69 +0,0 @@ -From 75003cdd23c310ec385344e8040d490e8dd6d2be Mon Sep 17 00:00:00 2001 -From: Jakub Jelinek <jakub@redhat.com> -Date: Fri, 20 Dec 2019 17:58:35 +0100 -Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm - bootstrap due to libsanitizer) - - Backported from mainline - 2019-11-26 Jakub Jelinek <jakub@redhat.com> - Backported for version 7.1.0 <= gcc <= 7.4.0 - 2020-06-05 John L. Jolly <john.jolly@gmail.com> - - PR sanitizer/92154 - * sanitizer_common/sanitizer_platform_limits_posix.h: Cherry-pick - llvm-project revision 947f9692440836dcb8d88b74b69dd379d85974ce. - * sanitizer_common/sanitizer_platform_limits_posix.cc: Likewise. 
- -From-SVN: r279653 ---- - libsanitizer/ChangeLog | 10 ++++++++++ - .../sanitizer_platform_limits_posix.cc | 9 +++------ - .../sanitizer_platform_limits_posix.h | 15 +-------------- - 3 files changed, 14 insertions(+), 20 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index 06a605ff4670..d823a12190c0 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -1156,12 +1156,9 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); - CHECK_SIZE_AND_OFFSET(ipc_perm, gid); - CHECK_SIZE_AND_OFFSET(ipc_perm, cuid); - CHECK_SIZE_AND_OFFSET(ipc_perm, cgid); --#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ -- !defined(__arm__) --/* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ --/* On Arm glibc 2.31 and later provide a different mode field, this field is -- never used by libsanitizer so we can simply ignore this assert for all glibc -- versions. */ -+#if !SANITIZER_LINUX || __GLIBC_PREREQ (2, 31) -+/* glibc 2.30 and earlier provided 16-bit mode field instead of 32-bit -+ on many architectures. 
*/ - CHECK_SIZE_AND_OFFSET(ipc_perm, mode); - #endif - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -index 73af92af1e8f..6a673a7c9959 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -@@ -211,20 +211,13 @@ namespace __sanitizer { - unsigned long __unused1; - unsigned long __unused2; - #elif defined(__sparc__) --# if defined(__arch64__) - unsigned mode; -- unsigned short __pad1; --# else -- unsigned short __pad1; -- unsigned short mode; - unsigned short __pad2; --# endif - unsigned short __seq; - unsigned long long __unused1; - unsigned long long __unused2; - #else -- unsigned short mode; -- unsigned short __pad1; -+ unsigned int mode; - unsigned short __seq; - unsigned short __pad2; - #if defined(__x86_64__) && !defined(_LP64) diff --git a/packages/gcc/glibc-2.31-libsanitizer-2.patch b/packages/gcc/glibc-2.31-libsanitizer-2.patch deleted file mode 100644 index 75234436..00000000 --- a/packages/gcc/glibc-2.31-libsanitizer-2.patch +++ /dev/null @@ -1,73 +0,0 @@ -From 75003cdd23c310ec385344e8040d490e8dd6d2be Mon Sep 17 00:00:00 2001 -From: Jakub Jelinek <jakub@redhat.com> -Date: Fri, 20 Dec 2019 17:58:35 +0100 -Subject: [PATCH] backport: re PR sanitizer/92154 (new glibc breaks arm - bootstrap due to libsanitizer) - - Backported from mainline - 2019-11-26 Jakub Jelinek <jakub@redhat.com> - - PR sanitizer/92154 - * sanitizer_common/sanitizer_platform_limits_posix.h: Cherry-pick - llvm-project revision 947f9692440836dcb8d88b74b69dd379d85974ce. - * sanitizer_common/sanitizer_platform_limits_posix.cc: Likewise. 
- -From-SVN: r279653 ---- - libsanitizer/ChangeLog | 10 ++++++++++ - .../sanitizer_platform_limits_posix.cc | 9 +++------ - .../sanitizer_platform_limits_posix.h | 15 +-------------- - 3 files changed, 14 insertions(+), 20 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index 06a605ff4670..d823a12190c0 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -1156,12 +1156,9 @@ CHECK_SIZE_AND_OFFSET(ipc_perm, uid); - CHECK_SIZE_AND_OFFSET(ipc_perm, gid); - CHECK_SIZE_AND_OFFSET(ipc_perm, cuid); - CHECK_SIZE_AND_OFFSET(ipc_perm, cgid); --#if (!defined(__aarch64__) || !SANITIZER_LINUX || __GLIBC_PREREQ (2, 21)) && \ -- !defined(__arm__) --/* On aarch64 glibc 2.20 and earlier provided incorrect mode field. */ --/* On Arm glibc 2.31 and later provide a different mode field, this field is -- never used by libsanitizer so we can simply ignore this assert for all glibc -- versions. */ -+#if !SANITIZER_LINUX || __GLIBC_PREREQ (2, 31) -+/* glibc 2.30 and earlier provided 16-bit mode field instead of 32-bit -+ on many architectures. 
*/ - CHECK_SIZE_AND_OFFSET(ipc_perm, mode); - #endif - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -index 73af92af1e8f..6a673a7c9959 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -@@ -211,26 +211,13 @@ namespace __sanitizer { - u64 __unused1; - u64 __unused2; - #elif defined(__sparc__) --#if defined(__arch64__) - unsigned mode; -- unsigned short __pad1; --#else -- unsigned short __pad1; -- unsigned short mode; - unsigned short __pad2; --#endif - unsigned short __seq; - unsigned long long __unused1; - unsigned long long __unused2; --#elif defined(__mips__) || defined(__aarch64__) || defined(__s390x__) -- unsigned int mode; -- unsigned short __seq; -- unsigned short __pad1; -- unsigned long __unused1; -- unsigned long __unused2; - #else -- unsigned short mode; -- unsigned short __pad1; -+ unsigned int mode; - unsigned short __seq; - unsigned short __pad2; - #if defined(__x86_64__) && !defined(_LP64) diff --git a/packages/gcc/glibc-2.31-libsanitizer-3-gcc-5.patch b/packages/gcc/glibc-2.31-libsanitizer-3-gcc-5.patch deleted file mode 100644 index e1df265a..00000000 --- a/packages/gcc/glibc-2.31-libsanitizer-3-gcc-5.patch +++ /dev/null @@ -1,81 +0,0 @@ -diff -ru a/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc b/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc ---- a/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc 2014-09-23 17:59:53.000000000 +0000 -+++ b/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc 2021-10-30 19:48:38.690007561 +0000 -@@ -358,15 +358,6 @@ - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - // _(SIOCDEVPLIP, WRITE, struct_ifreq_sz); // the same as EQL_ENSLAVE -- _(CYGETDEFTHRESH, WRITE, sizeof(int)); -- _(CYGETDEFTIMEOUT, WRITE, sizeof(int)); -- _(CYGETMON, WRITE, 
struct_cyclades_monitor_sz); -- _(CYGETTHRESH, WRITE, sizeof(int)); -- _(CYGETTIMEOUT, WRITE, sizeof(int)); -- _(CYSETDEFTHRESH, NONE, 0); -- _(CYSETDEFTIMEOUT, NONE, 0); -- _(CYSETTHRESH, NONE, 0); -- _(CYSETTIMEOUT, NONE, 0); - _(EQL_EMANCIPATE, WRITE, struct_ifreq_sz); - _(EQL_ENSLAVE, WRITE, struct_ifreq_sz); - _(EQL_GETMASTRCFG, WRITE, struct_ifreq_sz); -diff -ru a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc 2021-10-30 19:40:51.805824323 +0000 -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc 2021-10-30 19:51:13.640403192 +0000 -@@ -143,7 +143,6 @@ - #include <sys/statvfs.h> - #include <sys/timex.h> - #include <sys/user.h> --#include <linux/cyclades.h> - #include <linux/if_eql.h> - #include <linux/if_plip.h> - #include <linux/lp.h> -@@ -392,7 +391,6 @@ - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - unsigned struct_ax25_parms_struct_sz = sizeof(struct ax25_parms_struct); -- unsigned struct_cyclades_monitor_sz = sizeof(struct cyclades_monitor); - #if EV_VERSION > (0x010000) - unsigned struct_input_keymap_entry_sz = sizeof(struct input_keymap_entry); - #else -@@ -759,15 +757,6 @@ - #endif // SANITIZER_LINUX || SANITIZER_FREEBSD - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- unsigned IOCTL_CYGETDEFTHRESH = CYGETDEFTHRESH; -- unsigned IOCTL_CYGETDEFTIMEOUT = CYGETDEFTIMEOUT; -- unsigned IOCTL_CYGETMON = CYGETMON; -- unsigned IOCTL_CYGETTHRESH = CYGETTHRESH; -- unsigned IOCTL_CYGETTIMEOUT = CYGETTIMEOUT; -- unsigned IOCTL_CYSETDEFTHRESH = CYSETDEFTHRESH; -- unsigned IOCTL_CYSETDEFTIMEOUT = CYSETDEFTIMEOUT; -- unsigned IOCTL_CYSETTHRESH = CYSETTHRESH; -- unsigned IOCTL_CYSETTIMEOUT = CYSETTIMEOUT; - unsigned IOCTL_EQL_EMANCIPATE = EQL_EMANCIPATE; - unsigned IOCTL_EQL_ENSLAVE = EQL_ENSLAVE; - unsigned IOCTL_EQL_GETMASTRCFG = EQL_GETMASTRCFG; -diff -ru 
a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h 2021-10-30 19:40:51.698824053 +0000 -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h 2021-10-30 19:51:39.680469814 +0000 -@@ -875,7 +875,6 @@ - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - extern unsigned struct_ax25_parms_struct_sz; -- extern unsigned struct_cyclades_monitor_sz; - extern unsigned struct_input_keymap_entry_sz; - extern unsigned struct_ipx_config_data_sz; - extern unsigned struct_kbdiacrs_sz; -@@ -1220,15 +1219,6 @@ - #endif // SANITIZER_LINUX || SANITIZER_FREEBSD - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- extern unsigned IOCTL_CYGETDEFTHRESH; -- extern unsigned IOCTL_CYGETDEFTIMEOUT; -- extern unsigned IOCTL_CYGETMON; -- extern unsigned IOCTL_CYGETTHRESH; -- extern unsigned IOCTL_CYGETTIMEOUT; -- extern unsigned IOCTL_CYSETDEFTHRESH; -- extern unsigned IOCTL_CYSETDEFTIMEOUT; -- extern unsigned IOCTL_CYSETTHRESH; -- extern unsigned IOCTL_CYSETTIMEOUT; - extern unsigned IOCTL_EQL_EMANCIPATE; - extern unsigned IOCTL_EQL_ENSLAVE; - extern unsigned IOCTL_EQL_GETMASTRCFG; diff --git a/packages/gcc/glibc-2.36-libsanitizer-gcc-10-12.patch b/packages/gcc/glibc-2.36-libsanitizer-gcc-10-12.patch deleted file mode 100644 index 90df6b04..00000000 --- a/packages/gcc/glibc-2.36-libsanitizer-gcc-10-12.patch +++ /dev/null @@ -1,27 +0,0 @@ -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp -index badf6a401cc..b43733033a6 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp -@@ -72,7 +72,6 @@ - #include <sys/vt.h> - #include <linux/cdrom.h> - #include <linux/fd.h> --#include <linux/fs.h> - #include <linux/hdreg.h> - #include <linux/input.h> - 
#include <linux/ioctl.h> -@@ -822,10 +821,10 @@ unsigned struct_ElfW_Phdr_sz = sizeof(Elf_Phdr); - unsigned IOCTL_EVIOCGPROP = IOCTL_NOT_PRESENT; - unsigned IOCTL_EVIOCSKEYCODE_V2 = IOCTL_NOT_PRESENT; - #endif -- unsigned IOCTL_FS_IOC_GETFLAGS = FS_IOC_GETFLAGS; -- unsigned IOCTL_FS_IOC_GETVERSION = FS_IOC_GETVERSION; -- unsigned IOCTL_FS_IOC_SETFLAGS = FS_IOC_SETFLAGS; -- unsigned IOCTL_FS_IOC_SETVERSION = FS_IOC_SETVERSION; -+ unsigned IOCTL_FS_IOC_GETFLAGS = _IOR('f', 1, long); -+ unsigned IOCTL_FS_IOC_GETVERSION = _IOR('v', 1, long); -+ unsigned IOCTL_FS_IOC_SETFLAGS = _IOW('f', 2, long); -+ unsigned IOCTL_FS_IOC_SETVERSION = _IOW('v', 2, long); - unsigned IOCTL_GIO_CMAP = GIO_CMAP; - unsigned IOCTL_GIO_FONT = GIO_FONT; - unsigned IOCTL_GIO_UNIMAP = GIO_UNIMAP; diff --git a/packages/gcc/glibc-2.36-libsanitizer-gcc-5-9.patch b/packages/gcc/glibc-2.36-libsanitizer-gcc-5-9.patch deleted file mode 100644 index 2cd2bc4e..00000000 --- a/packages/gcc/glibc-2.36-libsanitizer-gcc-5-9.patch +++ /dev/null @@ -1,27 +0,0 @@ -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index e8fce8a02..5122baa46 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -65,7 +65,6 @@ - #include <sys/vt.h> - #include <linux/cdrom.h> - #include <linux/fd.h> --#include <linux/fs.h> - #include <linux/hdreg.h> - #include <linux/input.h> - #include <linux/ioctl.h> -@@ -846,10 +845,10 @@ unsigned struct_ElfW_Phdr_sz = sizeof(Elf_Phdr); - unsigned IOCTL_EVIOCGPROP = IOCTL_NOT_PRESENT; - unsigned IOCTL_EVIOCSKEYCODE_V2 = IOCTL_NOT_PRESENT; - #endif -- unsigned IOCTL_FS_IOC_GETFLAGS = FS_IOC_GETFLAGS; -- unsigned IOCTL_FS_IOC_GETVERSION = FS_IOC_GETVERSION; -- unsigned IOCTL_FS_IOC_SETFLAGS = FS_IOC_SETFLAGS; -- unsigned IOCTL_FS_IOC_SETVERSION = FS_IOC_SETVERSION; -+ unsigned IOCTL_FS_IOC_GETFLAGS = 
_IOR('f', 1, long); -+ unsigned IOCTL_FS_IOC_GETVERSION = _IOR('v', 1, long); -+ unsigned IOCTL_FS_IOC_SETFLAGS = _IOW('f', 2, long); -+ unsigned IOCTL_FS_IOC_SETVERSION = _IOW('v', 2, long); - unsigned IOCTL_GIO_CMAP = GIO_CMAP; - unsigned IOCTL_GIO_FONT = GIO_FONT; - unsigned IOCTL_GIO_UNIMAP = GIO_UNIMAP; diff --git a/packages/gcc/package.py b/packages/gcc/package.py deleted file mode 100644 index 4eb88a25..00000000 --- a/packages/gcc/package.py +++ /dev/null @@ -1,1114 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import glob -import itertools -import os -import re -import sys - -from archspec.cpu import UnsupportedMicroarchitecture - -import llnl.util.tty as tty -from llnl.util.lang import classproperty - -import spack.platforms -import spack.util.executable -from spack.build_environment import dso_suffix -from spack.operating_systems.mac_os import macos_sdk_path, macos_version -from spack.package import * - - -class Gcc(AutotoolsPackage, GNUMirrorPackage): - """The GNU Compiler Collection includes front ends for C, C++, Objective-C, - Fortran, Ada, and Go, as well as libraries for these languages.""" - - homepage = "https://gcc.gnu.org" - gnu_mirror_path = "gcc/gcc-9.2.0/gcc-9.2.0.tar.xz" - git = "git://gcc.gnu.org/git/gcc.git" - list_url = "https://ftp.gnu.org/gnu/gcc/" - list_depth = 1 - keep_werror = "all" - - maintainers("michaelkuhn", "alalazo") - - version("master", branch="master") - - version("13.2.0", sha256="e275e76442a6067341a27f04c5c6b83d8613144004c0413528863dc6b5c743da") - version("13.1.0", sha256="61d684f0aa5e76ac6585ad8898a2427aade8979ed5e7f85492286c4dfc13ee86") - - version("12.3.0", sha256="949a5d4f99e786421a93b532b22ffab5578de7321369975b91aec97adfda8c3b") - version("12.2.0", sha256="e549cf9cf3594a00e27b6589d4322d70e0720cdd213f39beb4181e06926230ff") - version("12.1.0", 
sha256="62fd634889f31c02b64af2c468f064b47ad1ca78411c45abe6ac4b5f8dd19c7b") - - version("11.4.0", sha256="3f2db222b007e8a4a23cd5ba56726ef08e8b1f1eb2055ee72c1402cea73a8dd9") - version("11.3.0", sha256="b47cf2818691f5b1e21df2bb38c795fac2cfbd640ede2d0a5e1c89e338a3ac39") - version("11.2.0", sha256="d08edc536b54c372a1010ff6619dd274c0f1603aa49212ba20f7aa2cda36fa8b") - version("11.1.0", sha256="4c4a6fb8a8396059241c2e674b85b351c26a5d678274007f076957afa1cc9ddf") - - version("10.5.0", sha256="25109543fdf46f397c347b5d8b7a2c7e5694a5a51cce4b9c6e1ea8a71ca307c1") - version("10.4.0", sha256="c9297d5bcd7cb43f3dfc2fed5389e948c9312fd962ef6a4ce455cff963ebe4f1") - version("10.3.0", sha256="64f404c1a650f27fc33da242e1f2df54952e3963a49e06e73f6940f3223ac344") - version("10.2.0", sha256="b8dd4368bb9c7f0b98188317ee0254dd8cc99d1e3a18d0ff146c855fe16c1d8c") - version("10.1.0", sha256="b6898a23844b656f1b68691c5c012036c2e694ac4b53a8918d4712ad876e7ea2") - - version("9.5.0", sha256="27769f64ef1d4cd5e2be8682c0c93f9887983e6cfd1a927ce5a0a2915a95cf8f") - version("9.4.0", sha256="c95da32f440378d7751dd95533186f7fc05ceb4fb65eb5b85234e6299eb9838e") - version("9.3.0", sha256="71e197867611f6054aa1119b13a0c0abac12834765fe2d81f35ac57f84f742d1") - version("9.2.0", sha256="ea6ef08f121239da5695f76c9b33637a118dcf63e24164422231917fa61fb206") - version("9.1.0", sha256="79a66834e96a6050d8fe78db2c3b32fb285b230b855d0a66288235bc04b327a0") - - version("8.5.0", sha256="d308841a511bb830a6100397b0042db24ce11f642dab6ea6ee44842e5325ed50") - version("8.4.0", sha256="e30a6e52d10e1f27ed55104ad233c30bd1e99cfb5ff98ab022dc941edd1b2dd4") - version("8.3.0", sha256="64baadfe6cc0f4947a84cb12d7f0dfaf45bb58b7e92461639596c21e02d97d2c") - version("8.2.0", sha256="196c3c04ba2613f893283977e6011b2345d1cd1af9abeac58e916b1aab3e0080") - version("8.1.0", sha256="1d1866f992626e61349a1ccd0b8d5253816222cdc13390dcfaa74b093aa2b153") - - version("7.5.0", sha256="b81946e7f01f90528a1f7352ab08cc602b9ccc05d4e44da4bd501c5a189ee661") - version("7.4.0", 
sha256="eddde28d04f334aec1604456e536416549e9b1aa137fc69204e65eb0c009fe51") - version("7.3.0", sha256="832ca6ae04636adbb430e865a1451adf6979ab44ca1c8374f61fba65645ce15c") - version("7.2.0", sha256="1cf7adf8ff4b5aa49041c8734bbcf1ad18cc4c94d0029aae0f4e48841088479a") - version("7.1.0", sha256="8a8136c235f64c6fef69cac0d73a46a1a09bb250776a050aec8f9fc880bebc17") - - version("6.5.0", sha256="7ef1796ce497e89479183702635b14bb7a46b53249209a5e0f999bebf4740945") - version("6.4.0", sha256="850bf21eafdfe5cd5f6827148184c08c4a0852a37ccf36ce69855334d2c914d4") - version("6.3.0", sha256="f06ae7f3f790fbf0f018f6d40e844451e6bc3b7bc96e128e63b09825c1f8b29f") - version("6.2.0", sha256="9944589fc722d3e66308c0ce5257788ebd7872982a718aa2516123940671b7c5") - version("6.1.0", sha256="09c4c85cabebb971b1de732a0219609f93fc0af5f86f6e437fd8d7f832f1a351") - - version("5.5.0", sha256="530cea139d82fe542b358961130c69cfde8b3d14556370b65823d2f91f0ced87") - version("5.4.0", sha256="608df76dec2d34de6558249d8af4cbee21eceddbcb580d666f7a5a583ca3303a") - version("5.3.0", sha256="b84f5592e9218b73dbae612b5253035a7b34a9a1f7688d2e1bfaaf7267d5c4db") - version("5.2.0", sha256="5f835b04b5f7dd4f4d2dc96190ec1621b8d89f2dc6f638f9f8bc1b1014ba8cad") - version("5.1.0", sha256="b7dafdf89cbb0e20333dbf5b5349319ae06e3d1a30bf3515b5488f7e89dca5ad") - - version("4.9.4", sha256="6c11d292cd01b294f9f84c9a59c230d80e9e4a47e5c6355f046bb36d4f358092") - version("4.9.3", sha256="2332b2a5a321b57508b9031354a8503af6fdfb868b8c1748d33028d100a8b67e") - version("4.9.2", sha256="2020c98295856aa13fda0f2f3a4794490757fc24bcca918d52cc8b4917b972dd") - version("4.9.1", sha256="d334781a124ada6f38e63b545e2a3b8c2183049515a1abab6d513f109f1d717e") - version("4.8.5", sha256="22fb1e7e0f68a63cee631d85b20461d1ea6bda162f03096350e38c8d427ecf23") - version("4.8.4", sha256="4a80aa23798b8e9b5793494b8c976b39b8d9aa2e53cd5ed5534aff662a7f8695") - version("4.7.4", sha256="92e61c6dc3a0a449e62d72a38185fda550168a86702dea07125ebd3ec3996282") - version("4.6.4", 
sha256="35af16afa0b67af9b8eb15cafb76d2bc5f568540552522f5dc2c88dd45d977e8") - version("4.5.4", sha256="eef3f0456db8c3d992cbb51d5d32558190bc14f3bc19383dd93acc27acc6befc") - - # We specifically do not add 'all' variant here because: - # (i) Ada, D, Go, Jit, and Objective-C++ are not default languages. - # In that respect, the name 'all' is rather misleading. - # (ii) Languages other than c,c++,fortran are prone to configure bug in GCC - # For example, 'java' appears to ignore custom location of zlib - # (iii) meaning of 'all' changes with GCC version, i.e. 'java' is not part - # of gcc7. Correctly specifying conflicts() and depends_on() in such a - # case is a PITA. - # - # Also note that some languages get enabled by the configure scripts even if not listed in the - # arguments. For example, c++ is enabled when the bootstrapping is enabled and lto is enabled - # when the link time optimization support is enabled. - variant( - "languages", - default="c,c++,fortran", - values=( - "ada", - "brig", - "c", - "c++", - "d", - "fortran", - "go", - "java", - "jit", - "lto", - "objc", - "obj-c++", - ), - multi=True, - description="Compilers and runtime libraries to build", - ) - variant("binutils", default=False, description="Build via binutils") - variant( - "piclibs", default=False, description="Build PIC versions of libgfortran.a and libstdc++.a" - ) - variant("strip", default=False, description="Strip executables to reduce installation size") - variant("nvptx", default=False, description="Target nvptx offloading to NVIDIA GPUs") - variant("bootstrap", default=True, description="Enable 3-stage bootstrap") - variant( - "graphite", default=False, description="Enable Graphite loop optimizations (requires ISL)" - ) - variant( - "build_type", - default="RelWithDebInfo", - values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"), - description="CMake-like build type. 
" - "Debug: -O0 -g; Release: -O3; " - "RelWithDebInfo: -O2 -g; MinSizeRel: -Os", - ) - variant( - "profiled", - default=False, - description="Use Profile Guided Optimization", - when="+bootstrap %gcc", - ) - - depends_on("flex", type="build", when="@master") - - # https://gcc.gnu.org/install/prerequisites.html - depends_on("gmp@4.3.2:") - # mawk is not sufficient for go support - depends_on("gawk@3.1.5:", type="build") - depends_on("texinfo@4.7:", type="build") - depends_on("libtool", type="build") - # dependencies required for git versions - depends_on("m4@1.4.6:", when="@master", type="build") - depends_on("automake@1.15.1:", when="@master", type="build") - depends_on("autoconf@2.69:", when="@master", type="build") - - depends_on("gmake@3.80:", type="build") - depends_on("perl@5", type="build") - - # GCC 7.3 does not compile with newer releases on some platforms, see - # https://github.com/spack/spack/issues/6902#issuecomment-433030376 - depends_on("mpfr@2.4.2:3.1.6", when="@:9.9") - depends_on("mpfr@3.1.0:", when="@10:") - depends_on("mpc@1.0.1:", when="@4.5:") - # Already released GCC versions do not support any newer version of ISL - # GCC 5.4 https://github.com/spack/spack/issues/6902#issuecomment-433072097 - # GCC 7.3 https://github.com/spack/spack/issues/6902#issuecomment-433030376 - # GCC 9+ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=86724 - with when("+graphite"): - depends_on("isl@0.14", when="@5.0:5.2") - depends_on("isl@0.15", when="@5.3:5.9") - depends_on("isl@0.15:0.18", when="@6:8.9") - depends_on("isl@0.15:0.20", when="@9:9.9") - depends_on("isl@0.15:", when="@10:") - - depends_on("zlib-api", when="@6:") - depends_on("zstd", when="@10:") - depends_on("diffutils", type="build") - depends_on("iconv", when="platform=darwin") - depends_on("gnat", when="languages=ada") - depends_on( - "binutils+gas+ld+plugins~libiberty", when="+binutils", type=("build", "link", "run") - ) - depends_on("zip", type="build", when="languages=java") - - # The server is 
sometimes a bit slow to respond - timeout = {"timeout": 60} - - # TODO: integrate these libraries. - # depends_on('ppl') - # depends_on('cloog') - - # https://gcc.gnu.org/install/test.html - depends_on("dejagnu@1.4.4", type="test") - depends_on("expect", type="test") - depends_on("tcl", type="test") - depends_on("autogen@5.5.4:", type="test") - depends_on("guile@1.4.1:", type="test") - - # See https://go.dev/doc/install/gccgo#Releases - with when("languages=go"): - provides("go-or-gccgo-bootstrap@:1.0", when="@4.7.1:") - provides("go-or-gccgo-bootstrap@:1.2", when="@4.9:") - provides("go-or-gccgo-bootstrap@:1.4", when="@5:") - provides("go-or-gccgo-bootstrap@:1.6.1", when="@6:") - provides("go-or-gccgo-bootstrap@:1.8.1", when="@7:") - provides("go-or-gccgo-bootstrap@:1.10.1", when="@8:") - provides("go-or-gccgo-bootstrap@:1.12.2", when="@9:") - provides("go-or-gccgo-bootstrap@:1.14.6", when="@10:") - provides("go-or-gccgo-bootstrap@1.16.3:1.16.5", when="@11:") - - provides("golang@:1.0", when="@4.7.1:") - provides("golang@:1.2", when="@4.9:") - provides("golang@:1.4", when="@5:") - provides("golang@:1.6.1", when="@6:") - provides("golang@:1.8.1", when="@7:") - provides("golang@:1.10.1", when="@8:") - provides("golang@:1.12.2", when="@9:") - provides("golang@:1.14.6", when="@10:") - provides("golang@1.16.3:1.16.5", when="@11:") - - # GCC 4.7.1 added full support for the Go 1.x programming language. - conflicts("@:4.7.0") - - # Go is not supported on macOS - conflicts("platform=darwin", msg="GCC cannot build Go support on MacOS") - - # For a list of valid languages for a specific release, - # run the following command in the GCC source directory: - # $ grep ^language= gcc/*/config-lang.in - # See https://gcc.gnu.org/install/configure.html - - # Support for processing BRIG 1.0 files was added in GCC 7 - # BRIG is a binary format for HSAIL: - # (Heterogeneous System Architecture Intermediate Language). 
- # See https://gcc.gnu.org/gcc-7/changes.html - conflicts("languages=brig", when="@:6") - - # BRIG does not seem to be supported on macOS - conflicts("languages=brig", when="platform=darwin") - - # GCC 4.8 added a 'c' language. I'm sure C was always built, - # but this is the first version that accepts 'c' as a valid language. - conflicts("languages=c", when="@:4.7") - - # The GCC Java frontend and associated libjava runtime library - # have been removed from GCC as of GCC 7. - # See https://gcc.gnu.org/gcc-7/changes.html - conflicts("languages=java", when="@7:") - - # GCC 5 added the ability to build GCC as a Just-In-Time compiler. - # See https://gcc.gnu.org/gcc-5/changes.html - conflicts("languages=jit", when="@:4") - - with when("languages=d"): - # The very first version of GDC that became part of GCC already supported version 2.076 of - # the language and runtime. - # See https://wiki.dlang.org/GDC#Status - provides("D@2") - - # Support for the D programming language has been added to GCC 9. - # See https://gcc.gnu.org/gcc-9/changes.html#d - conflicts("@:8", msg="support for D has been added in GCC 9.1") - - # Versions of GDC prior to 12 can be built with an ISO C++11 compiler. Starting version 12, - # the D frontend requires a working GDC. Moreover, it is strongly recommended to use an - # older version of GDC to build GDC. - # See https://gcc.gnu.org/install/prerequisites.html#GDC-prerequisite - with when("@12:"): - # All versions starting 12 have to be built GCC: - requires("%gcc") - - # And it has to be GCC older than the version we build: - vv = ["11", "12.1.0", "12.2.0"] - for prev_v, curr_v in zip(vv, vv[1:]): - conflicts( - "%gcc@{0}:".format(curr_v), - when="@{0}".format(curr_v), - msg="'gcc@{0} languages=d' requires '%gcc@:{1}' " - "with the D language support".format(curr_v, prev_v), - ) - - # In principle, it is possible to have GDC even with GCC 5. 
- # See https://github.com/D-Programming-GDC/gdc - # We, however, require at least the oldest version that officially supports GDC. It is - # also a good opportunity to tell the users that they need a working GDC: - conflicts( - "%gcc@:8", - msg="'gcc@12: languages=d' requires '%gcc@9:' with the D language support", - ) - - with when("+nvptx"): - depends_on("cuda") - resource( - name="newlib", - url="ftp://sourceware.org/pub/newlib/newlib-3.0.0.20180831.tar.gz", - sha256="3ad3664f227357df15ff34e954bfd9f501009a647667cd307bf0658aefd6eb5b", - destination="newlibsource", - fetch_options=timeout, - ) - # nvptx-tools does not seem to work as a dependency, - # but does fine when the source is inside the gcc build directory - # nvptx-tools doesn't have any releases, so grabbing the last commit - resource( - name="nvptx-tools", - git="https://github.com/MentorEmbedded/nvptx-tools", - commit="d0524fbdc86dfca068db5a21cc78ac255b335be5", - ) - # NVPTX offloading supported in 7 and later by limited languages - conflicts("@:6", msg="NVPTX only supported in gcc 7 and above") - conflicts("languages=ada") - conflicts("languages=brig") - conflicts("languages=go") - conflicts("languages=java") - conflicts("languages=jit") - conflicts("languages=objc") - conflicts("languages=obj-c++") - conflicts("languages=d") - # NVPTX build disables bootstrap - conflicts("+bootstrap") - - # Binutils can't build ld on macOS - conflicts("+binutils", when="platform=darwin") - - # Bootstrap comparison failure: - # see https://github.com/spack/spack/issues/23296 - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100340 - # on XCode 12.5 - conflicts("+bootstrap", when="@:11.1 %apple-clang@12.0.5") - - # aarch64/M1 is supported in GCC 11.3-12.2 and 13 - requires( - "@11.3,12.2,13.1:", - when="target=aarch64: platform=darwin", - msg="Only GCC 11.3-12.2, 13.1+ support macOS M1 (aarch64)", - ) - - # Newer binutils than RHEL's is required to run `as` on some instructions - # generated by new GCC (see 
https://github.com/spack/spack/issues/12235) - conflicts("~binutils", when="@7: os=rhel6", msg="New GCC cannot use system assembler on RHEL6") - # Ditto for RHEL7/8: OpenBLAS uses flags which the RHEL system-binutils don't have: - # https://github.com/xianyi/OpenBLAS/issues/3805#issuecomment-1319878852 - conflicts( - "~binutils", when="@10: os=rhel7", msg="gcc: Add +binutils - preinstalled as might be old" - ) - conflicts( - "~binutils", when="@10: os=rhel8", msg="gcc: Add +binutils - preinstalled as might be old" - ) - - # GCC 11 requires GCC 4.8 or later (https://gcc.gnu.org/gcc-11/changes.html) - conflicts("%gcc@:4.7", when="@11:") - - # https://github.com/iains/gcc-12-branch/issues/6 - conflicts("@:12", when="%apple-clang@14:14.0") - - if sys.platform == "darwin": - # Fix parallel build on APFS filesystem - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81797 - if macos_version() >= Version("10.13"): - patch("darwin/apfs.patch", when="@5.5.0,6.1:6.4,7.1:7.3") - # from homebrew via macports - # https://trac.macports.org/ticket/56502#no1 - # see also: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=83531 - patch("darwin/headers-10.13-fix.patch", when="@5.5.0") - if macos_version() >= Version("10.14"): - # Fix system headers for Mojave SDK: - # https://github.com/Homebrew/homebrew-core/pull/39041 - patch( - "https://raw.githubusercontent.com/Homebrew/formula-patches/b8b8e65e/gcc/8.3.0-xcode-bug-_Atomic-fix.patch", - sha256="33ee92bf678586357ee8ab9d2faddf807e671ad37b97afdd102d5d153d03ca84", - when="@6:8.3", - ) - if macos_version() >= Version("10.15"): - # Fix system headers for Catalina SDK - # (otherwise __OSX_AVAILABLE_STARTING ends up undefined) - patch( - "https://raw.githubusercontent.com/Homebrew/formula-patches/b8b8e65e/gcc/9.2.0-catalina.patch", - sha256="0b8d14a7f3c6a2f0d2498526e86e088926671b5da50a554ffa6b7f73ac4f132b", - when="@9.2.0", - ) - - # See 
https://raw.githubusercontent.com/Homebrew/homebrew-core/3b7db4457ac64a31e3bbffc54b04c4bd824a4a4a/Formula/gcc.rb - patch( - "https://github.com/iains/gcc-darwin-arm64/commit/20f61faaed3b335d792e38892d826054d2ac9f15.patch?full_index=1", - sha256="c0605179a856ca046d093c13cea4d2e024809ec2ad4bf3708543fc3d2e60504b", - when="@11.2.0", - ) - - # Apple M1 support, created from branch of Darwin maintainer for GCC: - # https://github.com/iains/gcc-11-branch - patch( - "https://raw.githubusercontent.com/Homebrew/formula-patches/22dec3fc/gcc/gcc-11.3.0-arm.diff", - sha256="e02006b7ec917cc1390645d95735a6a866caed0dfe506d5bef742f7862cab218", - when="@11.3.0 target=aarch64:", - ) - # https://github.com/iains/gcc-12-branch - patch( - "https://raw.githubusercontent.com/Homebrew/formula-patches/76677f2b/gcc/gcc-12.1.0-arm.diff", - sha256="a000f1d9cb1dd98c7c4ef00df31435cd5d712d2f9d037ddc044f8bf82a16cf35", - when="@12.1.0 target=aarch64:", - ) - patch( - "https://raw.githubusercontent.com/Homebrew/formula-patches/1d184289/gcc/gcc-12.2.0-arm.diff", - sha256="a7843b5c6bf1401e40c20c72af69c8f6fc9754ae980bb4a5f0540220b3dcb62d", - when="@12.2.0 target=aarch64:", - ) - patch( - "https://raw.githubusercontent.com/Homebrew/formula-patches/5c206c47/gcc/gcc-13.1.0.diff", - sha256="cb4e8a89387f748a744da0273025d0dc2e3c76780cc390b18ada704676afea11", - when="@13.1.0 target=aarch64:", - ) - patch( - "https://raw.githubusercontent.com/Homebrew/formula-patches/3c5cbc8e9cf444a1967786af48e430588e1eb481/gcc/gcc-13.2.0.diff", - sha256="2df7ef067871a30b2531a2013b3db661ec9e61037341977bfc451e30bf2c1035", - when="@13.2.0 target=aarch64:", - ) - conflicts("+bootstrap", when="@11.3.0,13.1: target=aarch64:") - - # Use -headerpad_max_install_names in the build, - # otherwise updated load commands won't fit in the Mach-O header. - # This is needed because `gcc` avoids the superenv shim. 
- patch("darwin/gcc-7.1.0-headerpad.patch", when="@5:11.2") - patch("darwin/gcc-6.1.0-jit.patch", when="@5:7") - patch("darwin/gcc-4.9.patch1", when="@4.9.0:4.9.3") - patch("darwin/gcc-4.9.patch2", when="@4.9.0:4.9.3") - - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=92061 - patch("darwin/clang13.patch", when="@:11.1 %apple-clang@13") - - patch("piclibs.patch", when="+piclibs") - patch("gcc-backport.patch", when="@4.7:4.9.3,5:5.3") - - # Backport libsanitizer patch for glibc >= 2.31 and 5.3.0 <= gcc <= 9.2.0 - # https://bugs.gentoo.org/708346 - patch("glibc-2.31-libsanitizer-1.patch", when="@7.1.0:7.5.0,8.1.0:8.3.0,9.0.0:9.2.0") - patch("glibc-2.31-libsanitizer-1-gcc-6.patch", when="@5.3.0:5.5.0,6.1.0:6.5.0") - patch("glibc-2.31-libsanitizer-2.patch", when="@8.1.0:8.3.0,9.0.0:9.2.0") - patch("glibc-2.31-libsanitizer-2-gcc-6.patch", when="@5.3.0:5.5.0,6.1.0:6.5.0") - patch("glibc-2.31-libsanitizer-2-gcc-7.patch", when="@7.1.0:7.5.0") - patch( - "patch-2b40941d23b1570cdd90083b58fa0f66aa58c86e.patch", - when="@6.5.0,7.4.0:7.5.0,8.2.0:9.3.0", - ) - patch("patch-745dae5923aba02982563481d75a21595df22ff8.patch", when="@10.1.0:10.3.0,11.1.0") - - # Backport libsanitizer patch for glibc >= 2.36 - # https://reviews.llvm.org/D129471 - patch("glibc-2.36-libsanitizer-gcc-5-9.patch", when="@5.1:5.5,6.1:6.5,7.1:7.5,8.1:8.5,9.1:9.5") - patch("glibc-2.36-libsanitizer-gcc-10-12.patch", when="@10.1:10.4,11.1:11.3,12.1.0") - - # Older versions do not compile with newer versions of glibc - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81712 - patch("ucontext_t.patch", when="@4.9,5.1:5.4,6.1:6.4,7.1") - patch("ucontext_t-java.patch", when="@4.9,5.1:5.4,6.1:6.4 languages=java") - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81066 - patch("stack_t-4.9.patch", when="@4.9") - patch("stack_t.patch", when="@5.1:5.4,6.1:6.4,7.1") - # https://bugs.busybox.net/show_bug.cgi?id=10061 - patch("signal.patch", when="@4.9,5.1:5.4") - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=85835 - 
patch("sys_ustat.h.patch", when="@5.0:6.4,7.0:7.3,8.1") - patch("sys_ustat-4.9.patch", when="@4.9") - - # this patch removes cylades support from gcc-5 and allows gcc-5 to be built - # with newer glibc versions. - patch("glibc-2.31-libsanitizer-3-gcc-5.patch", when="@5.3.0:5.5.0") - - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=95005 - patch("zstd.patch", when="@10") - - # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100102 - patch("patch-fc930b3010bd0de899a3da3209eab20664ddb703.patch", when="@10.1:10.3") - patch("patch-f1feb74046e0feb0596b93bbb822fae02940a90e.patch", when="@11.1") - - # libstdc++: Fix inconsistent noexcept-specific for valarray begin/end - # begin EBRAINS (modified) - patch( - "https://github.com/gcc-mirror/gcc/commit/423cd47cfc9640ba3d6811b780e8a0b94b704dcb.patch?full_index=1", - sha256="0d136226eb07bc43f1b15284f48bd252e3748a0426b5d7ac9084ebc406e15490", - when="@9.5.0,10.4.0", - ) - # end EBRAINS - - build_directory = "spack-build" - - @classproperty - def executables(cls): - names = [r"gcc", r"[^\w]?g\+\+", r"gfortran", r"gdc", r"gccgo"] - suffixes = [r"", r"-mp-\d+\.\d", r"-\d+\.\d", r"-\d+", r"\d\d"] - return [r"".join(x) for x in itertools.product(names, suffixes)] - - @classmethod - def filter_detected_exes(cls, prefix, exes_in_prefix): - result = [] - for exe in exes_in_prefix: - # On systems like Ubuntu we might get multiple executables - # with the string "gcc" in them. 
See: - # https://helpmanual.io/packages/apt/gcc/ - basename = os.path.basename(exe) - substring_to_be_filtered = [ - "c99-gcc", - "c89-gcc", - "-nm", - "-ar", - "ranlib", - "clang", # clang++ matches g++ -> clan[g++] - ] - if any(x in basename for x in substring_to_be_filtered): - continue - # Filter out links in favor of real executables on - # all systems but Cray - host_platform = str(spack.platforms.host()) - if os.path.islink(exe) and host_platform != "cray": - continue - - result.append(exe) - - return result - - @classmethod - def determine_version(cls, exe): - try: - output = spack.compiler.get_compiler_version_output(exe, "--version") - except Exception: - output = "" - # Apple's gcc is actually apple clang, so skip it. - # Users can add it manually to compilers.yaml at their own risk. - if "Apple" in output: - return None - - version_regex = re.compile(r"([\d\.]+)") - for vargs in ("-dumpfullversion", "-dumpversion"): - try: - output = spack.compiler.get_compiler_version_output(exe, vargs) - match = version_regex.search(output) - if match: - return match.group(1) - except spack.util.executable.ProcessError: - pass - except Exception as e: - tty.debug(e) - - return None - - @classmethod - def determine_variants(cls, exes, version_str): - languages, compilers = set(), {} - # There are often at least two copies (not symlinks) of each compiler executable in the - # same directory: one with a canonical name, e.g. "gfortran", and another one with the - # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc" - # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency - # of values in the "compilers" dictionary (i.e. 
we prefer all of them to reference copies - # with canonical names if possible), we iterate over the executables in the reversed sorted - # order: - for exe in sorted(exes, reverse=True): - basename = os.path.basename(exe) - if "g++" in basename: - languages.add("c++") - compilers["cxx"] = exe - elif "gfortran" in basename: - languages.add("fortran") - compilers["fortran"] = exe - elif "gcc" in basename: - languages.add("c") - compilers["c"] = exe - elif "gccgo" in basename: - languages.add("go") - compilers["go"] = exe - elif "gdc" in basename: - languages.add("d") - compilers["d"] = exe - variant_str = "languages={0}".format(",".join(languages)) - return variant_str, {"compilers": compilers} - - @classmethod - def validate_detected_spec(cls, spec, extra_attributes): - # For GCC 'compilers' is a mandatory attribute - msg = 'the extra attribute "compilers" must be set for ' 'the detected spec "{0}"'.format( - spec - ) - assert "compilers" in extra_attributes, msg - - compilers = extra_attributes["compilers"] - for constraint, key in { - "languages=c": "c", - "languages=c++": "cxx", - "languages=d": "d", - "languages=fortran": "fortran", - }.items(): - if spec.satisfies(constraint): - msg = "{0} not in {1}" - assert key in compilers, msg.format(key, spec) - - @property - def cc(self): - msg = "cannot retrieve C compiler [spec is not concrete]" - assert self.spec.concrete, msg - if self.spec.external: - return self.spec.extra_attributes["compilers"].get("c", None) - result = None - if "languages=c" in self.spec: - result = str(self.spec.prefix.bin.gcc) - return result - - @property - def cxx(self): - msg = "cannot retrieve C++ compiler [spec is not concrete]" - assert self.spec.concrete, msg - if self.spec.external: - return self.spec.extra_attributes["compilers"].get("cxx", None) - result = None - if "languages=c++" in self.spec: - result = os.path.join(self.spec.prefix.bin, "g++") - return result - - @property - def fortran(self): - msg = "cannot retrieve Fortran 
compiler [spec is not concrete]" - assert self.spec.concrete, msg - if self.spec.external: - return self.spec.extra_attributes["compilers"].get("fortran", None) - result = None - if "languages=fortran" in self.spec: - result = str(self.spec.prefix.bin.gfortran) - return result - - def url_for_version(self, version): - # This function will be called when trying to fetch from url, before - # mirrors are tried. It takes care of modifying the suffix of gnu - # mirror path so that Spack will also look for the correct file in - # the mirrors - if (version < Version("6.4.0") and version != Version("5.5.0")) or version == Version( - "7.1.0" - ): - self.gnu_mirror_path = self.gnu_mirror_path.replace("xz", "bz2") - return super().url_for_version(version) - - def patch(self): - spec = self.spec - prefix = self.spec.prefix - - # Fix a standard header file for OS X Yosemite that - # is GCC incompatible by replacing non-GCC compliant macros - if "yosemite" in spec.architecture: - if os.path.isfile("/usr/include/dispatch/object.h"): - new_dispatch_dir = join_path(prefix, "include", "dispatch") - mkdirp(new_dispatch_dir) - new_header = join_path(new_dispatch_dir, "object.h") - install("/usr/include/dispatch/object.h", new_header) - filter_file( - r"typedef void \(\^dispatch_block_t\)\(void\)", - "typedef void* dispatch_block_t", - new_header, - ) - - # Use installed libz - if self.version >= Version("6"): - filter_file( - "@zlibdir@", "-L{0}".format(spec["zlib-api"].prefix.lib), "gcc/Makefile.in" - ) - filter_file( - "@zlibinc@", "-I{0}".format(spec["zlib-api"].prefix.include), "gcc/Makefile.in" - ) - - if spec.satisfies("+nvptx"): - # backport of 383400a6078d upstream to allow support of cuda@11: - filter_file( - '#define ASM_SPEC "%{misa=*:-m %*}"', - '#define ASM_SPEC "%{misa=*:-m %*; :-m sm_35}"', - "gcc/config/nvptx/nvptx.h", - string=True, - ) - filter_file( - "Target RejectNegative ToLower Joined " - "Enum(ptx_isa) Var(ptx_isa_option) Init(PTX_ISA_SM30)", - "Target 
RejectNegative ToLower Joined " - "Enum(ptx_isa) Var(ptx_isa_option) Init(PTX_ISA_SM35)", - "gcc/config/nvptx/nvptx.opt", - string=True, - ) - self.build_optimization_config() - - def get_common_target_flags(self, spec): - """Get the right (but pessimistic) architecture specific flags supported by - both host gcc and to-be-built gcc. For example: gcc@7 %gcc@12 target=znver3 - should pick -march=znver1, since that's what gcc@7 supports.""" - archs = [spec.target] + spec.target.ancestors - for arch in archs: - try: - return arch.optimization_flags("gcc", spec.version) - except UnsupportedMicroarchitecture: - pass - # no arch specific flags in common, unlikely to happen. - return "" - - def build_optimization_config(self): - """Write a config/spack.mk file with sensible optimization flags, taking into - account bootstrapping subtleties.""" - build_type_flags = { - "Debug": "-O0 -g", - "Release": "-O3", - "RelWithDebInfo": "-O2 -g", - "MinSizeRel": "-Os", - } - - # Generic optimization flags. - flags = build_type_flags[self.spec.variants["build_type"].value] - - # Pessimistic target specific flags. For example, when building - # gcc@11 %gcc@7 on znver3, Spack will fix the target to znver1 during - # concretization, so we'll stick to that. The other way around however can - # result in compilation errors, when gcc@7 is built with gcc@11, and znver3 - # is taken as a the target, which gcc@7 doesn't support. 
- # Note we're not adding this for aarch64 because of - # https://github.com/spack/spack/issues/31184 - if "+bootstrap %gcc" in self.spec and self.spec.target.family != "aarch64": - flags += " " + self.get_common_target_flags(self.spec) - - if "+bootstrap" in self.spec: - variables = ["BOOT_CFLAGS", "CFLAGS_FOR_TARGET", "CXXFLAGS_FOR_TARGET"] - else: - variables = ["CFLAGS", "CXXFLAGS"] - - # Redefine a few variables without losing other defaults: - # BOOT_CFLAGS = $(filter-out -O% -g%, $(BOOT_CFLAGS)) -O3 - # This makes sure that build_type=Release is really -O3, not -O3 -g. - fmt_string = "{} := $(filter-out -O% -g%, $({})) {}\n" - with open("config/spack.mk", "w") as f: - for var in variables: - f.write(fmt_string.format(var, var, flags)) - # Improve the build time for stage 2 a bit by enabling -O1 in stage 1. - # Note: this is ignored under ~bootstrap. - f.write("STAGE1_CFLAGS += -O1\n") - - # https://gcc.gnu.org/install/configure.html - def configure_args(self): - spec = self.spec - - # Generic options to compile GCC - options = [ - # Distributor options - "--with-pkgversion=Spack GCC", - "--with-bugurl=https://github.com/spack/spack/issues", - # Xcode 10 dropped 32-bit support - "--disable-multilib", - "--enable-languages={0}".format(",".join(spec.variants["languages"].value)), - # Drop gettext dependency - "--disable-nls", - ] - - # Avoid excessive realpath/stat calls for every system header - # by making -fno-canonical-system-headers the default. - if self.version >= Version("4.8.0"): - options.append("--disable-canonical-system-headers") - - # Use installed libz - if self.version >= Version("6"): - options.append("--with-system-zlib") - - if "zstd" in spec: - options.append("--with-zstd-include={0}".format(spec["zstd"].headers.directories[0])) - options.append("--with-zstd-lib={0}".format(spec["zstd"].libs.directories[0])) - - # Enabling language "jit" requires --enable-host-shared. 
- if "languages=jit" in spec: - options.append("--enable-host-shared") - - # Binutils - if spec.satisfies("+binutils"): - binutils = spec["binutils"].prefix.bin - options.extend( - [ - "--with-gnu-ld", - "--with-ld=" + binutils.ld, - "--with-gnu-as", - "--with-as=" + binutils.join("as"), - ] - ) - elif spec.satisfies("%apple-clang@15:"): - # https://github.com/iains/gcc-darwin-arm64/issues/117 - # https://github.com/iains/gcc-12-branch/issues/22 - # https://github.com/iains/gcc-13-branch/issues/8 - options.append("--with-ld=/Library/Developer/CommandLineTools/usr/bin/ld-classic") - - # enable_bootstrap - if spec.satisfies("+bootstrap"): - options.extend(["--enable-bootstrap"]) - else: - options.extend(["--disable-bootstrap"]) - - # Configure include and lib directories explicitly for these - # dependencies since the short GCC option assumes that libraries - # are installed in "/lib" which might not be true on all OS - # (see #10842) - # - # More info at: https://gcc.gnu.org/install/configure.html - for dep_str in ("mpfr", "gmp", "mpc", "isl"): - if dep_str not in spec: - options.append("--without-{0}".format(dep_str)) - continue - - dep_spec = spec[dep_str] - include_dir = dep_spec.headers.directories[0] - lib_dir = dep_spec.libs.directories[0] - options.extend( - [ - "--with-{0}-include={1}".format(dep_str, include_dir), - "--with-{0}-lib={1}".format(dep_str, lib_dir), - ] - ) - - # nvptx-none offloading for host compiler - if spec.satisfies("+nvptx"): - options.extend( - [ - "--enable-offload-targets=nvptx-none", - "--with-cuda-driver-include={0}".format(spec["cuda"].prefix.include), - "--with-cuda-driver-lib={0}".format(spec["cuda"].libs.directories[0]), - "--disable-bootstrap", - "--disable-multilib", - ] - ) - - if sys.platform == "darwin": - options.extend( - [ - "--with-native-system-header-dir=/usr/include", - "--with-sysroot={0}".format(macos_sdk_path()), - "--with-libiconv-prefix={0}".format(spec["iconv"].prefix), - ] - ) - - # enable appropriate 
bootstrapping flags - stage1_ldflags = str(self.rpath_args) - boot_ldflags = stage1_ldflags + " -static-libstdc++ -static-libgcc" - options.append("--with-stage1-ldflags=" + stage1_ldflags) - options.append("--with-boot-ldflags=" + boot_ldflags) - options.append("--with-build-config=spack") - - if "languages=d" in spec: - # Phobos is the standard library for the D Programming Language. The documentation says - # that on some targets, 'libphobos' is not enabled by default, but compiles and works - # if '--enable-libphobos' is used. Specifics are documented for affected targets. - # See https://gcc.gnu.org/install/prerequisites.html#GDC-prerequisite - # Unfortunately, it is unclear where exactly the aforementioned specifics are - # documented but GDC seems to be unusable without the library, therefore we enable it - # explicitly: - options.append("--enable-libphobos") - if spec.satisfies("@12:"): - options.append("GDC={0}".format(self.detect_gdc())) - - return options - - # run configure/make/make(install) for the nvptx-none target - # before running the host compiler phases - @run_before("configure") - def nvptx_install(self): - spec = self.spec - prefix = self.prefix - - if not spec.satisfies("+nvptx"): - return - - # config.guess returns the host triple, e.g. 
"x86_64-pc-linux-gnu" - guess = Executable("./config.guess") - targetguess = guess(output=str).rstrip("\n") - - options = getattr(self, "configure_flag_args", []) - options += ["--prefix={0}".format(prefix)] - - options += [ - "--with-cuda-driver-include={0}".format(spec["cuda"].prefix.include), - "--with-cuda-driver-lib={0}".format(spec["cuda"].libs.directories[0]), - ] - - with working_dir("nvptx-tools"): - configure = Executable("./configure") - configure(*options) - make() - make("install") - - pattern = join_path(self.stage.source_path, "newlibsource", "*") - files = glob.glob(pattern) - - if files: - symlink(join_path(files[0], "newlib"), "newlib") - - # self.build_directory = 'spack-build-nvptx' - with working_dir("spack-build-nvptx", create=True): - options = [ - "--prefix={0}".format(prefix), - "--enable-languages={0}".format(",".join(spec.variants["languages"].value)), - "--with-mpfr={0}".format(spec["mpfr"].prefix), - "--with-gmp={0}".format(spec["gmp"].prefix), - "--target=nvptx-none", - "--with-build-time-tools={0}".format(join_path(prefix, "nvptx-none", "bin")), - "--enable-as-accelerator-for={0}".format(targetguess), - "--disable-sjlj-exceptions", - "--enable-newlib-io-long-long", - ] - - configure = Executable("../configure") - configure(*options) - make() - make("install") - - @property - def build_targets(self): - if "+profiled" in self.spec: - return ["profiledbootstrap"] - return [] - - @property - def install_targets(self): - if "+strip" in self.spec: - return ["install-strip"] - return ["install"] - - @property - def spec_dir(self): - # e.g. lib/gcc/x86_64-unknown-linux-gnu/4.9.2 - spec_dir = glob.glob("{0}/gcc/*/*".format(self.prefix.lib)) - return spec_dir[0] if spec_dir else None - - @run_after("install") - def write_rpath_specs(self): - """Generate a spec file so the linker adds a rpath to the libs - the compiler used to build the executable. - - .. 
caution:: - - The custom spec file by default with *always* pass ``-Wl,-rpath - ...`` to the linker, which will cause the linker to *ignore* the - value of ``LD_RUN_PATH``, which otherwise would be saved to the - binary as the default rpath. See the mitigation below for how to - temporarily disable this behavior. - - Structure the specs file so that users can define a custom spec file - to suppress the spack-linked rpaths to facilitate rpath adjustment - for relocatable binaries. The custom spec file - :file:`{norpath}.spec` will have a single - line followed by two blanks lines:: - - *link_libgcc_rpath: - - - - It can be passed to the GCC linker using the argument - ``--specs=norpath.spec`` to disable the automatic rpath and restore - the behavior of ``LD_RUN_PATH``.""" - if not self.spec_dir: - tty.warn( - "Could not install specs for {0}.".format(self.spec.format("{name}{@version}")) - ) - return - - gcc = self.spec["gcc"].command - lines = gcc("-dumpspecs", output=str).splitlines(True) - specs_file = join_path(self.spec_dir, "specs") - - # Save a backup - with open(specs_file + ".orig", "w") as out: - out.writelines(lines) - - # Find which directories have shared libraries - rpath_libdirs = [] - for dir in ["lib", "lib64"]: - libdir = join_path(self.prefix, dir) - if glob.glob(join_path(libdir, "*." 
+ dso_suffix)): - rpath_libdirs.append(libdir) - - if not rpath_libdirs: - # No shared libraries - tty.warn("No dynamic libraries found in lib/lib64") - return - - # Overwrite the specs file - with open(specs_file, "w") as out: - for line in lines: - out.write(line) - if line.startswith("*link_libgcc:"): - # Insert at start of line following link_libgcc, which gets - # inserted into every call to the linker - out.write("%(link_libgcc_rpath) ") - - # Add easily-overridable rpath string at the end - out.write("*link_libgcc_rpath:\n") - out.write(" ".join("-rpath " + lib for lib in rpath_libdirs)) - out.write("\n") - set_install_permissions(specs_file) - tty.info("Wrote new spec file to {0}".format(specs_file)) - - def setup_run_environment(self, env): - # Search prefix directory for possibly modified compiler names - from spack.compilers.gcc import Gcc as Compiler - - # Get the contents of the installed binary directory - bin_path = self.spec.prefix.bin - - if not os.path.isdir(bin_path): - return - - bin_contents = os.listdir(bin_path) - - # Find the first non-symlink compiler binary present for each language - for lang in ["cc", "cxx", "fc", "f77"]: - for filename, regexp in itertools.product(bin_contents, Compiler.search_regexps(lang)): - if not regexp.match(filename): - continue - - abspath = os.path.join(bin_path, filename) - if os.path.islink(abspath): - continue - - # Set the proper environment variable - env.set(lang.upper(), abspath) - # Stop searching filename/regex combos for this language - break - - def detect_gdc(self): - """Detect and return the path to GDC that belongs to the same instance of GCC that is used - by self.compiler. - - If the path cannot be detected, raise InstallError with recommendations for the users on - how to circumvent the problem. 
- - Should be use only if self.spec.satisfies("@12: languages=d") - """ - # Detect GCC package in the directory of the GCC compiler - # or in the $PATH if self.compiler.cc is not an absolute path: - from spack.detection import by_path - - compiler_dir = os.path.dirname(self.compiler.cc) - detected_packages = by_path( - [self.name], path_hints=([compiler_dir] if os.path.isdir(compiler_dir) else None) - ) - - # We consider only packages that satisfy the following constraint: - required_spec = Spec("languages=c,c++,d") - candidate_specs = [ - p.spec - for p in filter( - lambda p: p.spec.satisfies(required_spec), detected_packages.get(self.name, ()) - ) - ] - - if candidate_specs: - # We now need to filter specs that match the compiler version: - compiler_spec = Spec(repr(self.compiler.spec)) - - # First, try to filter specs that satisfy the compiler spec: - new_candidate_specs = list( - filter(lambda s: s.satisfies(compiler_spec), candidate_specs) - ) - - # The compiler version might be more specific than what we can detect. For example, the - # user might have "gcc@10.2.1-sys" as the compiler spec in compilers.yaml. In that - # case, we end up with an empty list of candidates. To circumvent the problem, we try - # to filter specs that are satisfied by the compiler spec: - if not new_candidate_specs: - new_candidate_specs = list( - filter(lambda s: compiler_spec.satisfies(s), candidate_specs) - ) - - candidate_specs = new_candidate_specs - - error_nl = "\n " # see SpackError.__str__() - - if not candidate_specs: - raise InstallError( - "Cannot detect GDC", - long_msg="Starting version 12, the D frontend requires a working GDC." - "{0}You can install it with Spack by running:" - "{0}{0}spack install gcc@9:11 languages=c,c++,d" - "{0}{0}Once that has finished, you will need to add it to your compilers.yaml file" - "{0}and use it to install this spec (i.e. 
{1} ...).".format( - error_nl, self.spec.format("{name}{@version} {variants.languages}") - ), - ) - elif len(candidate_specs) == 0: - return candidate_specs[0].extra_attributes["compilers"]["d"] - else: - # It is rather unlikely to end up here but let us try to resolve the ambiguity: - candidate_gdc = candidate_specs[0].extra_attributes["compilers"]["d"] - if all( - candidate_gdc == s.extra_attributes["compilers"]["d"] for s in candidate_specs[1:] - ): - # It does not matter which one we take if they are all the same: - return candidate_gdc - else: - raise InstallError( - "Cannot resolve ambiguity when detecting GDC that belongs to " - "%{0}".format(self.compiler.spec), - long_msg="The candidates are:{0}{0}{1}{0}".format( - error_nl, - error_nl.join( - "{0} (cc: {1})".format( - s.extra_attributes["compilers"]["d"], - s.extra_attributes["compilers"]["c"], - ) - for s in candidate_specs - ), - ), - ) diff --git a/packages/gcc/patch-2b40941d23b1570cdd90083b58fa0f66aa58c86e.patch b/packages/gcc/patch-2b40941d23b1570cdd90083b58fa0f66aa58c86e.patch deleted file mode 100644 index 3d76c47b..00000000 --- a/packages/gcc/patch-2b40941d23b1570cdd90083b58fa0f66aa58c86e.patch +++ /dev/null @@ -1,121 +0,0 @@ -From 2b40941d23b1570cdd90083b58fa0f66aa58c86e Mon Sep 17 00:00:00 2001 -From: Tamar Christina <tamar.christina@arm.com> -Date: Fri, 21 May 2021 12:16:56 +0100 -Subject: [PATCH] libsanitizer: Remove cyclades from libsanitizer - -The Linux kernel has removed the interface to cyclades from -the latest kernel headers[1] due to them being orphaned for the -past 13 years. - -libsanitizer uses this header when compiling against glibc, but -glibcs itself doesn't seem to have any references to cyclades. - -Further more it seems that the driver is broken in the kernel and -the firmware doesn't seem to be available anymore. - -As such since this is breaking the build of libsanitizer (and so the -GCC bootstrap[2]) I propose to remove this. 
- -[1] https://lkml.org/lkml/2021/3/2/153 -[2] https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100379 - -libsanitizer/ChangeLog: - - PR sanitizer/100379 - * sanitizer_common/sanitizer_common_interceptors_ioctl.inc: Cherry-pick - llvm-project revision f7c5351552387bd43f6ca3631016d7f0dfe0f135. - * sanitizer_common/sanitizer_platform_limits_posix.cc: Likewise. - * sanitizer_common/sanitizer_platform_limits_posix.h: Likewise. ---- - .../sanitizer_common_interceptors_ioctl.inc | 9 --------- - .../sanitizer_platform_limits_posix.cc | 11 ----------- - .../sanitizer_platform_limits_posix.h | 10 ---------- - 3 files changed, 30 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc b/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -index 5408ea17c59..7a9cd3f5968 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -+++ b/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -@@ -365,15 +365,6 @@ static void ioctl_table_fill() { - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - // _(SIOCDEVPLIP, WRITE, struct_ifreq_sz); // the same as EQL_ENSLAVE -- _(CYGETDEFTHRESH, WRITE, sizeof(int)); -- _(CYGETDEFTIMEOUT, WRITE, sizeof(int)); -- _(CYGETMON, WRITE, struct_cyclades_monitor_sz); -- _(CYGETTHRESH, WRITE, sizeof(int)); -- _(CYGETTIMEOUT, WRITE, sizeof(int)); -- _(CYSETDEFTHRESH, NONE, 0); -- _(CYSETDEFTIMEOUT, NONE, 0); -- _(CYSETTHRESH, NONE, 0); -- _(CYSETTIMEOUT, NONE, 0); - _(EQL_EMANCIPATE, WRITE, struct_ifreq_sz); - _(EQL_ENSLAVE, WRITE, struct_ifreq_sz); - _(EQL_GETMASTRCFG, WRITE, struct_ifreq_sz); -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index d823a12190c..e8fce8a0287 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -157,7 +157,6 @@ typedef struct 
user_fpregs elf_fpregset_t; - # include <sys/procfs.h> - #endif - #include <sys/user.h> --#include <linux/cyclades.h> - #include <linux/if_eql.h> - #include <linux/if_plip.h> - #include <linux/lp.h> -@@ -466,7 +465,6 @@ unsigned struct_ElfW_Phdr_sz = sizeof(Elf_Phdr); - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - unsigned struct_ax25_parms_struct_sz = sizeof(struct ax25_parms_struct); -- unsigned struct_cyclades_monitor_sz = sizeof(struct cyclades_monitor); - #if EV_VERSION > (0x010000) - unsigned struct_input_keymap_entry_sz = sizeof(struct input_keymap_entry); - #else -@@ -833,15 +831,6 @@ unsigned struct_ElfW_Phdr_sz = sizeof(Elf_Phdr); - #endif // SANITIZER_LINUX || SANITIZER_FREEBSD - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- unsigned IOCTL_CYGETDEFTHRESH = CYGETDEFTHRESH; -- unsigned IOCTL_CYGETDEFTIMEOUT = CYGETDEFTIMEOUT; -- unsigned IOCTL_CYGETMON = CYGETMON; -- unsigned IOCTL_CYGETTHRESH = CYGETTHRESH; -- unsigned IOCTL_CYGETTIMEOUT = CYGETTIMEOUT; -- unsigned IOCTL_CYSETDEFTHRESH = CYSETDEFTHRESH; -- unsigned IOCTL_CYSETDEFTIMEOUT = CYSETDEFTIMEOUT; -- unsigned IOCTL_CYSETTHRESH = CYSETTHRESH; -- unsigned IOCTL_CYSETTIMEOUT = CYSETTIMEOUT; - unsigned IOCTL_EQL_EMANCIPATE = EQL_EMANCIPATE; - unsigned IOCTL_EQL_ENSLAVE = EQL_ENSLAVE; - unsigned IOCTL_EQL_GETMASTRCFG = EQL_GETMASTRCFG; -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -index 6a673a7c995..f921bf2b5b5 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -@@ -1040,7 +1040,6 @@ struct __sanitizer_cookie_io_functions_t { - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - extern unsigned struct_ax25_parms_struct_sz; -- extern unsigned struct_cyclades_monitor_sz; - extern unsigned struct_input_keymap_entry_sz; - extern unsigned struct_ipx_config_data_sz; - extern unsigned struct_kbdiacrs_sz; -@@ -1385,15 
+1384,6 @@ struct __sanitizer_cookie_io_functions_t { - #endif // SANITIZER_LINUX || SANITIZER_FREEBSD - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- extern unsigned IOCTL_CYGETDEFTHRESH; -- extern unsigned IOCTL_CYGETDEFTIMEOUT; -- extern unsigned IOCTL_CYGETMON; -- extern unsigned IOCTL_CYGETTHRESH; -- extern unsigned IOCTL_CYGETTIMEOUT; -- extern unsigned IOCTL_CYSETDEFTHRESH; -- extern unsigned IOCTL_CYSETDEFTIMEOUT; -- extern unsigned IOCTL_CYSETTHRESH; -- extern unsigned IOCTL_CYSETTIMEOUT; - extern unsigned IOCTL_EQL_EMANCIPATE; - extern unsigned IOCTL_EQL_ENSLAVE; - extern unsigned IOCTL_EQL_GETMASTRCFG; --- -2.31.1 - diff --git a/packages/gcc/patch-745dae5923aba02982563481d75a21595df22ff8.patch b/packages/gcc/patch-745dae5923aba02982563481d75a21595df22ff8.patch deleted file mode 100644 index 57c51eb2..00000000 --- a/packages/gcc/patch-745dae5923aba02982563481d75a21595df22ff8.patch +++ /dev/null @@ -1,123 +0,0 @@ -From 745dae5923aba02982563481d75a21595df22ff8 Mon Sep 17 00:00:00 2001 -From: Tamar Christina <tamar.christina@arm.com> -Date: Fri, 21 May 2021 10:30:59 +0100 -Subject: [PATCH] libsanitizer: Remove cyclades from libsanitizer - -The Linux kernel has removed the interface to cyclades from -the latest kernel headers[1] due to them being orphaned for the -past 13 years. - -libsanitizer uses this header when compiling against glibc, but -glibcs itself doesn't seem to have any references to cyclades. - -Further more it seems that the driver is broken in the kernel and -the firmware doesn't seem to be available anymore. - -As such since this is breaking the build of libsanitizer (and so the -GCC bootstrap[2]) I propose to remove this. 
- -[1] https://lkml.org/lkml/2021/3/2/153 -[2] https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100379 - -(cherry picked from commit f7c5351552387bd43f6ca3631016d7f0dfe0f135) - -libsanitizer/ChangeLog: - - PR sanitizer/100379 - * sanitizer_common/sanitizer_common_interceptors_ioctl.inc: Cherry-pick - llvm-project revision f7c5351552387bd43f6ca3631016d7f0dfe0f135. - * sanitizer_common/sanitizer_platform_limits_posix.cpp: Likewise. - * sanitizer_common/sanitizer_platform_limits_posix.h: Likewise. ---- - .../sanitizer_common_interceptors_ioctl.inc | 9 --------- - .../sanitizer_platform_limits_posix.cpp | 11 ----------- - .../sanitizer_platform_limits_posix.h | 10 ---------- - 3 files changed, 30 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc b/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -index 7f181258eab..b7da6598755 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -+++ b/libsanitizer/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -@@ -370,15 +370,6 @@ static void ioctl_table_fill() { - - #if SANITIZER_GLIBC - // _(SIOCDEVPLIP, WRITE, struct_ifreq_sz); // the same as EQL_ENSLAVE -- _(CYGETDEFTHRESH, WRITE, sizeof(int)); -- _(CYGETDEFTIMEOUT, WRITE, sizeof(int)); -- _(CYGETMON, WRITE, struct_cyclades_monitor_sz); -- _(CYGETTHRESH, WRITE, sizeof(int)); -- _(CYGETTIMEOUT, WRITE, sizeof(int)); -- _(CYSETDEFTHRESH, NONE, 0); -- _(CYSETDEFTIMEOUT, NONE, 0); -- _(CYSETTHRESH, NONE, 0); -- _(CYSETTIMEOUT, NONE, 0); - _(EQL_EMANCIPATE, WRITE, struct_ifreq_sz); - _(EQL_ENSLAVE, WRITE, struct_ifreq_sz); - _(EQL_GETMASTRCFG, WRITE, struct_ifreq_sz); -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp -index 35a690cba5c..6e5c330b98e 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp -+++ 
b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cpp -@@ -143,7 +143,6 @@ typedef struct user_fpregs elf_fpregset_t; - # include <sys/procfs.h> - #endif - #include <sys/user.h> --#include <linux/cyclades.h> - #include <linux/if_eql.h> - #include <linux/if_plip.h> - #include <linux/lp.h> -@@ -460,7 +459,6 @@ unsigned struct_ElfW_Phdr_sz = sizeof(Elf_Phdr); - - #if SANITIZER_GLIBC - unsigned struct_ax25_parms_struct_sz = sizeof(struct ax25_parms_struct); -- unsigned struct_cyclades_monitor_sz = sizeof(struct cyclades_monitor); - #if EV_VERSION > (0x010000) - unsigned struct_input_keymap_entry_sz = sizeof(struct input_keymap_entry); - #else -@@ -824,15 +822,6 @@ unsigned struct_ElfW_Phdr_sz = sizeof(Elf_Phdr); - #endif // SANITIZER_LINUX - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- unsigned IOCTL_CYGETDEFTHRESH = CYGETDEFTHRESH; -- unsigned IOCTL_CYGETDEFTIMEOUT = CYGETDEFTIMEOUT; -- unsigned IOCTL_CYGETMON = CYGETMON; -- unsigned IOCTL_CYGETTHRESH = CYGETTHRESH; -- unsigned IOCTL_CYGETTIMEOUT = CYGETTIMEOUT; -- unsigned IOCTL_CYSETDEFTHRESH = CYSETDEFTHRESH; -- unsigned IOCTL_CYSETDEFTIMEOUT = CYSETDEFTIMEOUT; -- unsigned IOCTL_CYSETTHRESH = CYSETTHRESH; -- unsigned IOCTL_CYSETTIMEOUT = CYSETTIMEOUT; - unsigned IOCTL_EQL_EMANCIPATE = EQL_EMANCIPATE; - unsigned IOCTL_EQL_ENSLAVE = EQL_ENSLAVE; - unsigned IOCTL_EQL_GETMASTRCFG = EQL_GETMASTRCFG; -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -index ad358eef8b7..cba41ba5494 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.h -@@ -983,7 +983,6 @@ extern unsigned struct_vt_mode_sz; - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - extern unsigned struct_ax25_parms_struct_sz; --extern unsigned struct_cyclades_monitor_sz; - extern unsigned struct_input_keymap_entry_sz; - extern unsigned struct_ipx_config_data_sz; - 
extern unsigned struct_kbdiacrs_sz; -@@ -1328,15 +1327,6 @@ extern unsigned IOCTL_VT_WAITACTIVE; - #endif // SANITIZER_LINUX - - #if SANITIZER_LINUX && !SANITIZER_ANDROID --extern unsigned IOCTL_CYGETDEFTHRESH; --extern unsigned IOCTL_CYGETDEFTIMEOUT; --extern unsigned IOCTL_CYGETMON; --extern unsigned IOCTL_CYGETTHRESH; --extern unsigned IOCTL_CYGETTIMEOUT; --extern unsigned IOCTL_CYSETDEFTHRESH; --extern unsigned IOCTL_CYSETDEFTIMEOUT; --extern unsigned IOCTL_CYSETTHRESH; --extern unsigned IOCTL_CYSETTIMEOUT; - extern unsigned IOCTL_EQL_EMANCIPATE; - extern unsigned IOCTL_EQL_ENSLAVE; - extern unsigned IOCTL_EQL_GETMASTRCFG; --- -2.31.1 - diff --git a/packages/gcc/patch-f1feb74046e0feb0596b93bbb822fae02940a90e.patch b/packages/gcc/patch-f1feb74046e0feb0596b93bbb822fae02940a90e.patch deleted file mode 100644 index 2448be07..00000000 --- a/packages/gcc/patch-f1feb74046e0feb0596b93bbb822fae02940a90e.patch +++ /dev/null @@ -1,133 +0,0 @@ -From f1feb74046e0feb0596b93bbb822fae02940a90e Mon Sep 17 00:00:00 2001 -From: Patrick Palka <ppalka@redhat.com> -Date: Fri, 4 Jun 2021 13:46:53 -0400 -Subject: [PATCH] c++: tsubst_function_decl and excess arg levels [PR100102] - -Here, when instantiating the dependent alias template -duration::__is_harmonic with args={{T,U},{int}}, we find ourselves -substituting the function decl _S_gcd. Since we have more arg levels -than _S_gcd has parm levels, an old special case in tsubst_function_decl -causes us to unwantedly reduce args to its innermost level, yielding -args={int}, which leads to a nonsensical substitution into the decl -context and eventually a crash. - -The comment for this special case refers to three examples for which we -ought to see more arg levels than parm levels here, but none of the -examples actually demonstrate this. In the first example, when -defining S<int>::f(U) parms_depth is 2 and args_depth is 1, and -later when instantiating say S<int>::f<char> both depths are 2. 
In the -second example, when substituting the template friend declaration -parms_depth is 2 and args_depth is 1, and later when instantiating f -both depths are 1. Finally, the third example is invalid since we can't -specialize a member template of an unspecialized class template like -that. - -Given that this reduction code seems no longer relevant for its -documented purpose and that it causes problems as in the PR, this patch -just removes it. Note that as far as bootstrap/regtest is concerned, -this code is dead; the below two tests would be the first to reach it. - - PR c++/100102 - -gcc/cp/ChangeLog: - - * pt.c (tsubst_function_decl): Remove old code for reducing - args when it has excess levels. - -gcc/testsuite/ChangeLog: - - * g++.dg/cpp0x/alias-decl-72.C: New test. - * g++.dg/cpp0x/alias-decl-72a.C: New test. - -(cherry picked from commit 5357ab75dedef403b0eebf9277d61d1cbeb5898f) ---- - gcc/cp/pt.c | 39 --------------------- - gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C | 9 +++++ - gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C | 9 +++++ - 3 files changed, 18 insertions(+), 39 deletions(-) - create mode 100644 gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C - create mode 100644 gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C - -diff --git a/gcc/cp/pt.c b/gcc/cp/pt.c -index 1434beb80f4..1761a902218 100644 ---- a/gcc/cp/pt.c -+++ b/gcc/cp/pt.c -@@ -13954,45 +13954,6 @@ tsubst_function_decl (tree t, tree args, tsubst_flags_t complain, - if (tree spec = retrieve_specialization (gen_tmpl, argvec, hash)) - return spec; - } -- -- /* We can see more levels of arguments than parameters if -- there was a specialization of a member template, like -- this: -- -- template <class T> struct S { template <class U> void f(); } -- template <> template <class U> void S<int>::f(U); -- -- Here, we'll be substituting into the specialization, -- because that's where we can find the code we actually -- want to generate, but we'll have enough arguments for -- the most general template. 
-- -- We also deal with the peculiar case: -- -- template <class T> struct S { -- template <class U> friend void f(); -- }; -- template <class U> void f() {} -- template S<int>; -- template void f<double>(); -- -- Here, the ARGS for the instantiation of will be {int, -- double}. But, we only need as many ARGS as there are -- levels of template parameters in CODE_PATTERN. We are -- careful not to get fooled into reducing the ARGS in -- situations like: -- -- template <class T> struct S { template <class U> void f(U); } -- template <class T> template <> void S<T>::f(int) {} -- -- which we can spot because the pattern will be a -- specialization in this case. */ -- int args_depth = TMPL_ARGS_DEPTH (args); -- int parms_depth = -- TMPL_PARMS_DEPTH (DECL_TEMPLATE_PARMS (DECL_TI_TEMPLATE (t))); -- -- if (args_depth > parms_depth && !DECL_TEMPLATE_SPECIALIZATION (t)) -- args = get_innermost_template_args (args, parms_depth); - } - else - { -diff --git a/gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C -new file mode 100644 -index 00000000000..8009756dcba ---- /dev/null -+++ b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C -@@ -0,0 +1,9 @@ -+// PR c++/100102 -+// { dg-do compile { target c++11 } } -+ -+template<int()> struct ratio; -+template<class T, class U> struct duration { -+ static constexpr int _S_gcd(); -+ template<class> using __is_harmonic = ratio<_S_gcd>; -+ using type = __is_harmonic<int>; -+}; -diff --git a/gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C -new file mode 100644 -index 00000000000..a4443e18f9d ---- /dev/null -+++ b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C -@@ -0,0 +1,9 @@ -+// PR c++/100102 -+// { dg-do compile { target c++11 } } -+ -+template<int> struct ratio; -+template<class T> struct duration { -+ static constexpr int _S_gcd(); -+ template<class> using __is_harmonic = ratio<(duration::_S_gcd)()>; -+ using type = __is_harmonic<int>; -+}; --- -2.31.1 - diff --git 
a/packages/gcc/patch-fc930b3010bd0de899a3da3209eab20664ddb703.patch b/packages/gcc/patch-fc930b3010bd0de899a3da3209eab20664ddb703.patch deleted file mode 100644 index 27e5a2b5..00000000 --- a/packages/gcc/patch-fc930b3010bd0de899a3da3209eab20664ddb703.patch +++ /dev/null @@ -1,133 +0,0 @@ -From fc930b3010bd0de899a3da3209eab20664ddb703 Mon Sep 17 00:00:00 2001 -From: Patrick Palka <ppalka@redhat.com> -Date: Fri, 4 Jun 2021 13:46:53 -0400 -Subject: [PATCH] c++: tsubst_function_decl and excess arg levels [PR100102] - -Here, when instantiating the dependent alias template -duration::__is_harmonic with args={{T,U},{int}}, we find ourselves -substituting the function decl _S_gcd. Since we have more arg levels -than _S_gcd has parm levels, an old special case in tsubst_function_decl -causes us to unwantedly reduce args to its innermost level, yielding -args={int}, which leads to a nonsensical substitution into the decl -context and eventually a crash. - -The comment for this special case refers to three examples for which we -ought to see more arg levels than parm levels here, but none of the -examples actually demonstrate this. In the first example, when -defining S<int>::f(U) parms_depth is 2 and args_depth is 1, and -later when instantiating say S<int>::f<char> both depths are 2. In the -second example, when substituting the template friend declaration -parms_depth is 2 and args_depth is 1, and later when instantiating f -both depths are 1. Finally, the third example is invalid since we can't -specialize a member template of an unspecialized class template like -that. - -Given that this reduction code seems no longer relevant for its -documented purpose and that it causes problems as in the PR, this patch -just removes it. Note that as far as bootstrap/regtest is concerned, -this code is dead; the below two tests would be the first to reach it. 
- - PR c++/100102 - -gcc/cp/ChangeLog: - - * pt.c (tsubst_function_decl): Remove old code for reducing - args when it has excess levels. - -gcc/testsuite/ChangeLog: - - * g++.dg/cpp0x/alias-decl-72.C: New test. - * g++.dg/cpp0x/alias-decl-72a.C: New test. - -(cherry picked from commit 5357ab75dedef403b0eebf9277d61d1cbeb5898f) ---- - gcc/cp/pt.c | 39 --------------------- - gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C | 9 +++++ - gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C | 9 +++++ - 3 files changed, 18 insertions(+), 39 deletions(-) - create mode 100644 gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C - create mode 100644 gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C - -diff --git a/gcc/cp/pt.c b/gcc/cp/pt.c -index 5a957141ba3..7ce9ac234f8 100644 ---- a/gcc/cp/pt.c -+++ b/gcc/cp/pt.c -@@ -13811,45 +13811,6 @@ tsubst_function_decl (tree t, tree args, tsubst_flags_t complain, - if (tree spec = retrieve_specialization (gen_tmpl, argvec, hash)) - return spec; - } -- -- /* We can see more levels of arguments than parameters if -- there was a specialization of a member template, like -- this: -- -- template <class T> struct S { template <class U> void f(); } -- template <> template <class U> void S<int>::f(U); -- -- Here, we'll be substituting into the specialization, -- because that's where we can find the code we actually -- want to generate, but we'll have enough arguments for -- the most general template. -- -- We also deal with the peculiar case: -- -- template <class T> struct S { -- template <class U> friend void f(); -- }; -- template <class U> void f() {} -- template S<int>; -- template void f<double>(); -- -- Here, the ARGS for the instantiation of will be {int, -- double}. But, we only need as many ARGS as there are -- levels of template parameters in CODE_PATTERN. 
We are -- careful not to get fooled into reducing the ARGS in -- situations like: -- -- template <class T> struct S { template <class U> void f(U); } -- template <class T> template <> void S<T>::f(int) {} -- -- which we can spot because the pattern will be a -- specialization in this case. */ -- int args_depth = TMPL_ARGS_DEPTH (args); -- int parms_depth = -- TMPL_PARMS_DEPTH (DECL_TEMPLATE_PARMS (DECL_TI_TEMPLATE (t))); -- -- if (args_depth > parms_depth && !DECL_TEMPLATE_SPECIALIZATION (t)) -- args = get_innermost_template_args (args, parms_depth); - } - else - { -diff --git a/gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C -new file mode 100644 -index 00000000000..8009756dcba ---- /dev/null -+++ b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72.C -@@ -0,0 +1,9 @@ -+// PR c++/100102 -+// { dg-do compile { target c++11 } } -+ -+template<int()> struct ratio; -+template<class T, class U> struct duration { -+ static constexpr int _S_gcd(); -+ template<class> using __is_harmonic = ratio<_S_gcd>; -+ using type = __is_harmonic<int>; -+}; -diff --git a/gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C -new file mode 100644 -index 00000000000..a4443e18f9d ---- /dev/null -+++ b/gcc/testsuite/g++.dg/cpp0x/alias-decl-72a.C -@@ -0,0 +1,9 @@ -+// PR c++/100102 -+// { dg-do compile { target c++11 } } -+ -+template<int> struct ratio; -+template<class T> struct duration { -+ static constexpr int _S_gcd(); -+ template<class> using __is_harmonic = ratio<(duration::_S_gcd)()>; -+ using type = __is_harmonic<int>; -+}; --- -2.31.1 - diff --git a/packages/gcc/piclibs.patch b/packages/gcc/piclibs.patch deleted file mode 100644 index 0ecb7930..00000000 --- a/packages/gcc/piclibs.patch +++ /dev/null @@ -1,62 +0,0 @@ -diff --git a/libgfortran/Makefile.in b/libgfortran/Makefile.in -index 62b9f7a..7666fdb 100644 ---- a/libgfortran/Makefile.in -+++ b/libgfortran/Makefile.in -@@ -357,11 +357,11 @@ AUTOMAKE = @AUTOMAKE@ - 
AWK = @AWK@ - CC = @CC@ - CCDEPMODE = @CCDEPMODE@ --CFLAGS = @CFLAGS@ -+CFLAGS = @CFLAGS@ -fPIC - CPP = @CPP@ --CPPFLAGS = @CPPFLAGS@ -+CPPFLAGS = @CPPFLAGS@ -fPIC - CYGPATH_W = @CYGPATH_W@ --DEFS = @DEFS@ -+DEFS = @DEFS@ -fPIC - DEPDIR = @DEPDIR@ - DSYMUTIL = @DSYMUTIL@ - DUMPBIN = @DUMPBIN@ -@@ -371,7 +371,7 @@ ECHO_T = @ECHO_T@ - EGREP = @EGREP@ - EXEEXT = @EXEEXT@ - FC = @FC@ --FCFLAGS = @FCFLAGS@ -+FCFLAGS = @FCFLAGS@ -fPIC - FGREP = @FGREP@ - FPU_HOST_HEADER = @FPU_HOST_HEADER@ - GREP = @GREP@ -diff --git a/libstdc++-v3/Makefile.in b/libstdc++-v3/Makefile.in -index bede542..9b3e442 100644 ---- a/libstdc++-v3/Makefile.in -+++ b/libstdc++-v3/Makefile.in -@@ -115,7 +115,7 @@ CC = @CC@ - CCODECVT_CC = @CCODECVT_CC@ - CCOLLATE_CC = @CCOLLATE_CC@ - CCTYPE_CC = @CCTYPE_CC@ --CFLAGS = @CFLAGS@ -+CFLAGS = @CFLAGS@ -fPIC - CLOCALE_CC = @CLOCALE_CC@ - CLOCALE_H = @CLOCALE_H@ - CLOCALE_INTERNAL_H = @CLOCALE_INTERNAL_H@ -@@ -124,7 +124,7 @@ CMESSAGES_H = @CMESSAGES_H@ - CMONEY_CC = @CMONEY_CC@ - CNUMERIC_CC = @CNUMERIC_CC@ - CPP = @CPP@ --CPPFLAGS = @CPPFLAGS@ -+CPPFLAGS = @CPPFLAGS@ -fPIC - CPU_DEFINES_SRCDIR = @CPU_DEFINES_SRCDIR@ - CPU_OPT_BITS_RANDOM = @CPU_OPT_BITS_RANDOM@ - CPU_OPT_EXT_RANDOM = @CPU_OPT_EXT_RANDOM@ -@@ -139,7 +139,7 @@ CYGPATH_W = @CYGPATH_W@ - C_INCLUDE_DIR = @C_INCLUDE_DIR@ - DBLATEX = @DBLATEX@ - DEBUG_FLAGS = @DEBUG_FLAGS@ --DEFS = @DEFS@ -+DEFS = @DEFS@ -fPIC - DOT = @DOT@ - DOXYGEN = @DOXYGEN@ - DSYMUTIL = @DSYMUTIL@ --- -2.8.3 - diff --git a/packages/gcc/signal.patch b/packages/gcc/signal.patch deleted file mode 100644 index 21bf9e03..00000000 --- a/packages/gcc/signal.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 6c709b6262e8b6441b1e94526d6d65d4ce7a7dec Mon Sep 17 00:00:00 2001 -From: doko <doko@138bc75d-0d04-0410-961f-82ee72b054a4> -Date: Thu, 7 Sep 2017 07:18:57 +0000 -Subject: [PATCH] 2017-09-07 Matthias Klose <doko@ubuntu.com> - - * asan/asan_linux.cc: Include <signal.h> - - -git-svn-id: 
svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-5-branch@251830 138bc75d-0d04-0410-961f-82ee72b054a4 ---- - libsanitizer/asan/asan_linux.cc | 1 + - 2 files changed, 5 insertions(+) - -diff --git a/libsanitizer/asan/asan_linux.cc b/libsanitizer/asan/asan_linux.cc -index c504168..59087b9 100644 ---- a/libsanitizer/asan/asan_linux.cc -+++ b/libsanitizer/asan/asan_linux.cc -@@ -29,6 +29,7 @@ - #include <dlfcn.h> - #include <fcntl.h> - #include <pthread.h> -+#include <signal.h> - #include <stdio.h> - #include <unistd.h> - #include <unwind.h> --- -2.9.3 - diff --git a/packages/gcc/stack_t-4.9.patch b/packages/gcc/stack_t-4.9.patch deleted file mode 100644 index b894557c..00000000 --- a/packages/gcc/stack_t-4.9.patch +++ /dev/null @@ -1,80 +0,0 @@ -From 833e00c01e96f61e24cd7ec97b93fad212dc914b Mon Sep 17 00:00:00 2001 -From: doko <doko@138bc75d-0d04-0410-961f-82ee72b054a4> -Date: Thu, 7 Sep 2017 07:17:17 +0000 -Subject: [PATCH] 2017-09-07 Matthias Klose <doko@ubuntu.com> - - Backported from mainline - 2017-07-14 Jakub Jelinek <jakub@redhat.com> - - PR sanitizer/81066 - * sanitizer_common/sanitizer_linux.h: Cherry-pick upstream r307969. - * sanitizer_common/sanitizer_linux.cc: Likewise. - * sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc: Likewise. - * tsan/tsan_platform_linux.cc: Likewise. 
- - -git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-5-branch@251829 138bc75d-0d04-0410-961f-82ee72b054a4 ---- - libsanitizer/sanitizer_common/sanitizer_linux.cc | 3 +-- - libsanitizer/sanitizer_common/sanitizer_linux.h | 4 +--- - .../sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc | 2 +- - libsanitizer/tsan/tsan_platform_linux.cc | 2 +- - 5 files changed, 15 insertions(+), 7 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_linux.cc b/libsanitizer/sanitizer_common/sanitizer_linux.cc -index 9feb307..821b26d 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_linux.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_linux.cc -@@ -514,8 +514,7 @@ uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5) { - } - #endif - --uptr internal_sigaltstack(const struct sigaltstack *ss, -- struct sigaltstack *oss) { -+uptr internal_sigaltstack(const void *ss, void *oss) { - return internal_syscall(__NR_sigaltstack, (uptr)ss, (uptr)oss); - } - -diff --git a/libsanitizer/sanitizer_common/sanitizer_linux.h b/libsanitizer/sanitizer_common/sanitizer_linux.h -index 086834c..3a6f4cd 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_linux.h -+++ b/libsanitizer/sanitizer_common/sanitizer_linux.h -@@ -27,8 +26,7 @@ struct linux_dirent; - // Syscall wrappers. 
- uptr internal_getdents(fd_t fd, struct linux_dirent *dirp, unsigned int count); - uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5); --uptr internal_sigaltstack(const struct sigaltstack* ss, -- struct sigaltstack* oss); -+uptr internal_sigaltstack(const void* ss, void* oss); - uptr internal_sigaction(int signum, const __sanitizer_kernel_sigaction_t *act, - __sanitizer_kernel_sigaction_t *oldact); - uptr internal_sigprocmask(int how, __sanitizer_kernel_sigset_t *set, -diff --git a/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc b/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc -index 5881202..c54894d 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc -@@ -234,7 +234,7 @@ static int TracerThread(void* argument) { - - // Alternate stack for signal handling. - InternalScopedBuffer<char> handler_stack_memory(kHandlerStackSize); -- struct sigaltstack handler_stack; -+ stack_t handler_stack; - internal_memset(&handler_stack, 0, sizeof(handler_stack)); - handler_stack.ss_sp = handler_stack_memory.data(); - handler_stack.ss_size = kHandlerStackSize; -diff --git a/libsanitizer/tsan/tsan_platform_linux.cc b/libsanitizer/tsan/tsan_platform_linux.cc -index 3259131..b8e9078 100644 ---- a/libsanitizer/tsan/tsan_platform_linux.cc -+++ b/libsanitizer/tsan/tsan_platform_linux.cc -@@ -377,7 +377,7 @@ bool IsGlobalVar(uptr addr) { - int ExtractResolvFDs(void *state, int *fds, int nfd) { - #if SANITIZER_LINUX - int cnt = 0; -- __res_state *statp = (__res_state*)state; -+ struct __res_state *statp = (struct __res_state*)state; - for (int i = 0; i < MAXNS && cnt < nfd; i++) { - if (statp->_u._ext.nsaddrs[i] && statp->_u._ext.nssocks[i] != -1) - fds[cnt++] = statp->_u._ext.nssocks[i]; --- -2.9.3 - diff --git a/packages/gcc/stack_t.patch b/packages/gcc/stack_t.patch deleted file mode 100644 index 
48a5a47a..00000000 --- a/packages/gcc/stack_t.patch +++ /dev/null @@ -1,88 +0,0 @@ -From 833e00c01e96f61e24cd7ec97b93fad212dc914b Mon Sep 17 00:00:00 2001 -From: doko <doko@138bc75d-0d04-0410-961f-82ee72b054a4> -Date: Thu, 7 Sep 2017 07:17:17 +0000 -Subject: [PATCH] 2017-09-07 Matthias Klose <doko@ubuntu.com> - - Backported from mainline - 2017-07-14 Jakub Jelinek <jakub@redhat.com> - - PR sanitizer/81066 - * sanitizer_common/sanitizer_linux.h: Cherry-pick upstream r307969. - * sanitizer_common/sanitizer_linux.cc: Likewise. - * sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc: Likewise. - * tsan/tsan_platform_linux.cc: Likewise. - - -git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-5-branch@251829 138bc75d-0d04-0410-961f-82ee72b054a4 ---- - libsanitizer/sanitizer_common/sanitizer_linux.cc | 3 +-- - libsanitizer/sanitizer_common/sanitizer_linux.h | 4 +--- - .../sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc | 2 +- - libsanitizer/tsan/tsan_platform_linux.cc | 2 +- - 5 files changed, 15 insertions(+), 7 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_linux.cc b/libsanitizer/sanitizer_common/sanitizer_linux.cc -index 9feb307..821b26d 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_linux.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_linux.cc -@@ -514,8 +514,7 @@ uptr internal_prctl(int option, uptr arg2, uptr arg3, uptr arg4, uptr arg5) { - } - #endif - --uptr internal_sigaltstack(const struct sigaltstack *ss, -- struct sigaltstack *oss) { -+uptr internal_sigaltstack(const void *ss, void *oss) { - return internal_syscall(SYSCALL(sigaltstack), (uptr)ss, (uptr)oss); - } - -diff --git a/libsanitizer/sanitizer_common/sanitizer_linux.h b/libsanitizer/sanitizer_common/sanitizer_linux.h -index 086834c..3a6f4cd 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_linux.h -+++ b/libsanitizer/sanitizer_common/sanitizer_linux.h -@@ -18,7 +18,6 @@ - #include "sanitizer_platform_limits_posix.h" - - struct link_map; // Opaque 
type returned by dlopen(). --struct sigaltstack; - - namespace __sanitizer { - // Dirent structure for getdents(). Note that this structure is different from -@@ -27,8 +26,7 @@ struct linux_dirent; - - // Syscall wrappers. - uptr internal_getdents(fd_t fd, struct linux_dirent *dirp, unsigned int count); --uptr internal_sigaltstack(const struct sigaltstack* ss, -- struct sigaltstack* oss); -+uptr internal_sigaltstack(const void* ss, void* oss); - uptr internal_sigprocmask(int how, __sanitizer_sigset_t *set, - __sanitizer_sigset_t *oldset); - void internal_sigfillset(__sanitizer_sigset_t *set); -diff --git a/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc b/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc -index 5881202..c54894d 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_stoptheworld_linux_libcdep.cc -@@ -234,7 +234,7 @@ static int TracerThread(void* argument) { - - // Alternate stack for signal handling. 
- InternalScopedBuffer<char> handler_stack_memory(kHandlerStackSize); -- struct sigaltstack handler_stack; -+ stack_t handler_stack; - internal_memset(&handler_stack, 0, sizeof(handler_stack)); - handler_stack.ss_sp = handler_stack_memory.data(); - handler_stack.ss_size = kHandlerStackSize; -diff --git a/libsanitizer/tsan/tsan_platform_linux.cc b/libsanitizer/tsan/tsan_platform_linux.cc -index 3259131..b8e9078 100644 ---- a/libsanitizer/tsan/tsan_platform_linux.cc -+++ b/libsanitizer/tsan/tsan_platform_linux.cc -@@ -377,7 +377,7 @@ bool IsGlobalVar(uptr addr) { - int ExtractResolvFDs(void *state, int *fds, int nfd) { - #if SANITIZER_LINUX - int cnt = 0; -- __res_state *statp = (__res_state*)state; -+ struct __res_state *statp = (struct __res_state*)state; - for (int i = 0; i < MAXNS && cnt < nfd; i++) { - if (statp->_u._ext.nsaddrs[i] && statp->_u._ext.nssocks[i] != -1) - fds[cnt++] = statp->_u._ext.nssocks[i]; --- -2.9.3 - diff --git a/packages/gcc/sys_ustat-4.9.patch b/packages/gcc/sys_ustat-4.9.patch deleted file mode 100644 index 75453af3..00000000 --- a/packages/gcc/sys_ustat-4.9.patch +++ /dev/null @@ -1,34 +0,0 @@ -The sys_ustat.h patch modified for gcc 4.9.x. 
- -diff -Naurb gcc-4.9.4.orig/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc gcc-4.9.4/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc ---- gcc-4.9.4.orig/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc 2013-12-19 06:54:11.000000000 -0600 -+++ gcc-4.9.4/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc 2018-12-11 15:57:46.901800462 -0600 -@@ -81,7 +81,6 @@ - #include <sys/statvfs.h> - #include <sys/timex.h> - #include <sys/user.h> --#include <sys/ustat.h> - #include <linux/cyclades.h> - #include <linux/if_eql.h> - #include <linux/if_plip.h> -@@ -163,7 +162,19 @@ - unsigned struct_old_utsname_sz = sizeof(struct old_utsname); - unsigned struct_oldold_utsname_sz = sizeof(struct oldold_utsname); - unsigned struct_itimerspec_sz = sizeof(struct itimerspec); -- unsigned struct_ustat_sz = sizeof(struct ustat); -+ // Use pre-computed size of struct ustat to avoid <sys/ustat.h> which -+ // has been removed from glibc 2.28. -+#if defined(__aarch64__) || defined(__s390x__) || defined (__mips64) \ -+ || defined(__powerpc64__) || defined(__arch64__) || defined(__sparcv9) \ -+ || defined(__x86_64__) -+#define SIZEOF_STRUCT_USTAT 32 -+#elif defined(__arm__) || defined(__i386__) || defined(__mips__) \ -+ || defined(__powerpc__) || defined(__s390__) -+#define SIZEOF_STRUCT_USTAT 20 -+#else -+#error Unknown size of struct ustat -+#endif -+ unsigned struct_ustat_sz = SIZEOF_STRUCT_USTAT; - #endif // SANITIZER_LINUX - - #if SANITIZER_LINUX && !SANITIZER_ANDROID diff --git a/packages/gcc/sys_ustat.h.patch b/packages/gcc/sys_ustat.h.patch deleted file mode 100644 index c65757b4..00000000 --- a/packages/gcc/sys_ustat.h.patch +++ /dev/null @@ -1,63 +0,0 @@ -From 9569b61168b963a6cea7b782fd350dee489ad42c Mon Sep 17 00:00:00 2001 -From: "H.J. 
Lu" <hjl.tools@gmail.com> -Date: Mon, 21 May 2018 13:17:55 -0700 -Subject: [PATCH] libsanitizer: Use pre-computed size of struct ustat for Linux - -Cherry-pick compiler-rt revision 333213: - -<sys/ustat.h> has been removed from glibc 2.28 by: - -commit cf2478d53ad7071e84c724a986b56fe17f4f4ca7 -Author: Adhemerval Zanella <adhemerval.zanella@linaro.org> -Date: Sun Mar 18 11:28:59 2018 +0800 - - Deprecate ustat syscall interface - -This patch uses pre-computed size of struct ustat for Linux. - - PR sanitizer/85835 - * sanitizer_common/sanitizer_platform_limits_posix.cc: Don't - include <sys/ustat.h> for Linux. - (SIZEOF_STRUCT_USTAT): New. - (struct_ustat_sz): Use SIZEOF_STRUCT_USTAT for Linux. ---- - .../sanitizer_platform_limits_posix.cc | 15 +++++++++++++-- - 1 file changed, 13 insertions(+), 2 deletions(-) - -diff --git a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -index 858bb218450..de18e56d11c 100644 ---- a/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/libsanitizer/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -157,7 +157,6 @@ typedef struct user_fpregs elf_fpregset_t; - # include <sys/procfs.h> - #endif - #include <sys/user.h> --#include <sys/ustat.h> - #include <linux/cyclades.h> - #include <linux/if_eql.h> - #include <linux/if_plip.h> -@@ -250,7 +249,19 @@ namespace __sanitizer { - #endif // SANITIZER_LINUX || SANITIZER_FREEBSD - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- unsigned struct_ustat_sz = sizeof(struct ustat); -+ // Use pre-computed size of struct ustat to avoid <sys/ustat.h> which -+ // has been removed from glibc 2.28. 
-+#if defined(__aarch64__) || defined(__s390x__) || defined (__mips64) \ -+ || defined(__powerpc64__) || defined(__arch64__) || defined(__sparcv9) \ -+ || defined(__x86_64__) -+#define SIZEOF_STRUCT_USTAT 32 -+#elif defined(__arm__) || defined(__i386__) || defined(__mips__) \ -+ || defined(__powerpc__) || defined(__s390__) -+#define SIZEOF_STRUCT_USTAT 20 -+#else -+#error Unknown size of struct ustat -+#endif -+ unsigned struct_ustat_sz = SIZEOF_STRUCT_USTAT; - unsigned struct_rlimit64_sz = sizeof(struct rlimit64); - unsigned struct_statvfs64_sz = sizeof(struct statvfs64); - #endif // SANITIZER_LINUX && !SANITIZER_ANDROID --- -2.17.0 - - diff --git a/packages/gcc/ucontext_t-java.patch b/packages/gcc/ucontext_t-java.patch deleted file mode 100644 index a43e0b5a..00000000 --- a/packages/gcc/ucontext_t-java.patch +++ /dev/null @@ -1,60 +0,0 @@ -From 9b9287cde20ea57578cf07efb2a96ed4cc0da36f Mon Sep 17 00:00:00 2001 -From: doko <doko@138bc75d-0d04-0410-961f-82ee72b054a4> -Date: Thu, 7 Sep 2017 07:22:07 +0000 -Subject: [PATCH] 2017-09-07 Matthias Klose <doko@ubuntu.com> - - * include/x86_64-signal.h (HANDLE_DIVIDE_OVERFLOW): Replace - 'struct ucontext' with ucontext_t. - * include/i386-signal.h (HANDLE_DIVIDE_OVERFLOW): Likewise. - * include/s390-signal.h (HANDLE_DIVIDE_OVERFLOW): Likewise. 
- - -git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-5-branch@251832 138bc75d-0d04-0410-961f-82ee72b054a4 ---- - libjava/include/i386-signal.h | 2 +- - libjava/include/s390-signal.h | 2 +- - libjava/include/x86_64-signal.h | 2 +- - 4 files changed, 10 insertions(+), 3 deletions(-) - -diff --git a/libjava/include/i386-signal.h b/libjava/include/i386-signal.h -index c2409b0..ef77e7e 100644 ---- a/libjava/include/i386-signal.h -+++ b/libjava/include/i386-signal.h -@@ -29,7 +29,7 @@ static void _Jv_##_name (int, siginfo_t *, \ - #define HANDLE_DIVIDE_OVERFLOW \ - do \ - { \ -- struct ucontext *_uc = (struct ucontext *)_p; \ -+ ucontext_t *_uc = (ucontext_t *)_p; \ - gregset_t &_gregs = _uc->uc_mcontext.gregs; \ - unsigned char *_eip = (unsigned char *)_gregs[REG_EIP]; \ - \ -diff --git a/libjava/include/s390-signal.h b/libjava/include/s390-signal.h -index 4ca4c10..9261b52 100644 ---- a/libjava/include/s390-signal.h -+++ b/libjava/include/s390-signal.h -@@ -51,7 +51,7 @@ do \ - struct \ - { \ - unsigned long int uc_flags; \ -- struct ucontext *uc_link; \ -+ ucontext_t *uc_link; \ - stack_t uc_stack; \ - mcontext_t uc_mcontext; \ - unsigned long sigmask[2]; \ -diff --git a/libjava/include/x86_64-signal.h b/libjava/include/x86_64-signal.h -index 12383b5..e36c5a3 100644 ---- a/libjava/include/x86_64-signal.h -+++ b/libjava/include/x86_64-signal.h -@@ -28,7 +28,7 @@ static void _Jv_##_name (int, siginfo_t *, \ - #define HANDLE_DIVIDE_OVERFLOW \ - do \ - { \ -- struct ucontext *_uc = (struct ucontext *)_p; \ -+ ucontext_t *_uc = (ucontext_t *)_p; \ - gregset_t &_gregs = _uc->uc_mcontext.gregs; \ - unsigned char *_rip = (unsigned char *)_gregs[REG_RIP]; \ - \ --- -2.9.3 - diff --git a/packages/gcc/ucontext_t.patch b/packages/gcc/ucontext_t.patch deleted file mode 100644 index a4f04b47..00000000 --- a/packages/gcc/ucontext_t.patch +++ /dev/null @@ -1,189 +0,0 @@ -From ecf0d1a107133c715763940c2b197aa814710e1b Mon Sep 17 00:00:00 2001 -From: jsm28 
<jsm28@138bc75d-0d04-0410-961f-82ee72b054a4> -Date: Tue, 4 Jul 2017 10:25:10 +0000 -Subject: [PATCH] Use ucontext_t not struct ucontext in linux-unwind.h files. - -Current glibc no longer gives the ucontext_t type the tag struct -ucontext, to conform with POSIX namespace rules. This requires -various linux-unwind.h files in libgcc, that were previously using -struct ucontext, to be fixed to use ucontext_t instead. This is -similar to the removal of the struct siginfo tag from siginfo_t some -years ago. - -This patch changes those files to use ucontext_t instead. As the -standard name that should be unconditionally safe, so this is not -restricted to architectures supported by glibc, or conditioned on the -glibc version. - -Tested compilation together with current glibc with glibc's -build-many-glibcs.py. - - * config/aarch64/linux-unwind.h (aarch64_fallback_frame_state), - config/alpha/linux-unwind.h (alpha_fallback_frame_state), - config/bfin/linux-unwind.h (bfin_fallback_frame_state), - config/i386/linux-unwind.h (x86_64_fallback_frame_state, - x86_fallback_frame_state), config/m68k/linux-unwind.h (struct - uw_ucontext), config/nios2/linux-unwind.h (struct nios2_ucontext), - config/pa/linux-unwind.h (pa32_fallback_frame_state), - config/sh/linux-unwind.h (sh_fallback_frame_state), - config/tilepro/linux-unwind.h (tile_fallback_frame_state), - config/xtensa/linux-unwind.h (xtensa_fallback_frame_state): Use - ucontext_t instead of struct ucontext. 
- - -git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/gcc-5-branch@249958 138bc75d-0d04-0410-961f-82ee72b054a4 ---- - libgcc/config/aarch64/linux-unwind.h | 2 +- - libgcc/config/alpha/linux-unwind.h | 2 +- - libgcc/config/bfin/linux-unwind.h | 2 +- - libgcc/config/i386/linux-unwind.h | 4 ++-- - libgcc/config/m68k/linux-unwind.h | 2 +- - libgcc/config/nios2/linux-unwind.h | 2 +- - libgcc/config/pa/linux-unwind.h | 2 +- - libgcc/config/sh/linux-unwind.h | 2 +- - libgcc/config/tilepro/linux-unwind.h | 2 +- - libgcc/config/xtensa/linux-unwind.h | 2 +- - 11 files changed, 25 insertions(+), 11 deletions(-) - -diff --git a/libgcc/config/aarch64/linux-unwind.h b/libgcc/config/aarch64/linux-unwind.h -index 86d17b1..909f68f 100644 ---- a/libgcc/config/aarch64/linux-unwind.h -+++ b/libgcc/config/aarch64/linux-unwind.h -@@ -52,7 +52,7 @@ aarch64_fallback_frame_state (struct _Unwind_Context *context, - struct rt_sigframe - { - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - }; - - struct rt_sigframe *rt_; -diff --git a/libgcc/config/alpha/linux-unwind.h b/libgcc/config/alpha/linux-unwind.h -index d65474f..9a226b1 100644 ---- a/libgcc/config/alpha/linux-unwind.h -+++ b/libgcc/config/alpha/linux-unwind.h -@@ -51,7 +51,7 @@ alpha_fallback_frame_state (struct _Unwind_Context *context, - { - struct rt_sigframe { - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - } *rt_ = context->cfa; - sc = &rt_->uc.uc_mcontext; - } -diff --git a/libgcc/config/bfin/linux-unwind.h b/libgcc/config/bfin/linux-unwind.h -index 0c270e4..7fa95d2 100644 ---- a/libgcc/config/bfin/linux-unwind.h -+++ b/libgcc/config/bfin/linux-unwind.h -@@ -52,7 +52,7 @@ bfin_fallback_frame_state (struct _Unwind_Context *context, - void *puc; - char retcode[8]; - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - } *rt_ = context->cfa; - - /* The void * cast is necessary to avoid an aliasing warning. 
-diff --git a/libgcc/config/i386/linux-unwind.h b/libgcc/config/i386/linux-unwind.h -index e54bf73..d35fc45 100644 ---- a/libgcc/config/i386/linux-unwind.h -+++ b/libgcc/config/i386/linux-unwind.h -@@ -58,7 +58,7 @@ x86_64_fallback_frame_state (struct _Unwind_Context *context, - if (*(unsigned char *)(pc+0) == 0x48 - && *(unsigned long long *)(pc+1) == RT_SIGRETURN_SYSCALL) - { -- struct ucontext *uc_ = context->cfa; -+ ucontext_t *uc_ = context->cfa; - /* The void * cast is necessary to avoid an aliasing warning. - The aliasing warning is correct, but should not be a problem - because it does not alias anything. */ -@@ -138,7 +138,7 @@ x86_fallback_frame_state (struct _Unwind_Context *context, - siginfo_t *pinfo; - void *puc; - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - } *rt_ = context->cfa; - /* The void * cast is necessary to avoid an aliasing warning. - The aliasing warning is correct, but should not be a problem -diff --git a/libgcc/config/m68k/linux-unwind.h b/libgcc/config/m68k/linux-unwind.h -index fb79a4d..b2f5ea4 100644 ---- a/libgcc/config/m68k/linux-unwind.h -+++ b/libgcc/config/m68k/linux-unwind.h -@@ -33,7 +33,7 @@ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see - /* <sys/ucontext.h> is unfortunately broken right now. 
*/ - struct uw_ucontext { - unsigned long uc_flags; -- struct ucontext *uc_link; -+ ucontext_t *uc_link; - stack_t uc_stack; - mcontext_t uc_mcontext; - unsigned long uc_filler[80]; -diff --git a/libgcc/config/nios2/linux-unwind.h b/libgcc/config/nios2/linux-unwind.h -index dff1c20..1d88afe 100644 ---- a/libgcc/config/nios2/linux-unwind.h -+++ b/libgcc/config/nios2/linux-unwind.h -@@ -38,7 +38,7 @@ struct nios2_mcontext { - - struct nios2_ucontext { - unsigned long uc_flags; -- struct ucontext *uc_link; -+ ucontext_t *uc_link; - stack_t uc_stack; - struct nios2_mcontext uc_mcontext; - sigset_t uc_sigmask; /* mask last for extensibility */ -diff --git a/libgcc/config/pa/linux-unwind.h b/libgcc/config/pa/linux-unwind.h -index 0149468..9157535 100644 ---- a/libgcc/config/pa/linux-unwind.h -+++ b/libgcc/config/pa/linux-unwind.h -@@ -80,7 +80,7 @@ pa32_fallback_frame_state (struct _Unwind_Context *context, - struct sigcontext *sc; - struct rt_sigframe { - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - } *frame; - - /* rt_sigreturn trampoline: -diff --git a/libgcc/config/sh/linux-unwind.h b/libgcc/config/sh/linux-unwind.h -index e63091f..67033f0 100644 ---- a/libgcc/config/sh/linux-unwind.h -+++ b/libgcc/config/sh/linux-unwind.h -@@ -180,7 +180,7 @@ sh_fallback_frame_state (struct _Unwind_Context *context, - { - struct rt_sigframe { - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - } *rt_ = context->cfa; - /* The void * cast is necessary to avoid an aliasing warning. 
- The aliasing warning is correct, but should not be a problem -diff --git a/libgcc/config/tilepro/linux-unwind.h b/libgcc/config/tilepro/linux-unwind.h -index fd83ba7..e3c9ef0 100644 ---- a/libgcc/config/tilepro/linux-unwind.h -+++ b/libgcc/config/tilepro/linux-unwind.h -@@ -61,7 +61,7 @@ tile_fallback_frame_state (struct _Unwind_Context *context, - struct rt_sigframe { - unsigned char save_area[C_ABI_SAVE_AREA_SIZE]; - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - } *rt_; - - /* Return if this is not a signal handler. */ -diff --git a/libgcc/config/xtensa/linux-unwind.h b/libgcc/config/xtensa/linux-unwind.h -index 9a67b5d..98b7ea6 100644 ---- a/libgcc/config/xtensa/linux-unwind.h -+++ b/libgcc/config/xtensa/linux-unwind.h -@@ -67,7 +67,7 @@ xtensa_fallback_frame_state (struct _Unwind_Context *context, - - struct rt_sigframe { - siginfo_t info; -- struct ucontext uc; -+ ucontext_t uc; - } *rt_; - - /* movi a2, __NR_rt_sigreturn; syscall */ --- -2.9.3 - diff --git a/packages/gcc/zstd.patch b/packages/gcc/zstd.patch deleted file mode 100644 index 8fb7583a..00000000 --- a/packages/gcc/zstd.patch +++ /dev/null @@ -1,43 +0,0 @@ ---- a/gcc/Makefile.in -+++ b/gcc/Makefile.in -@@ -1075,7 +1075,8 @@ GNATMAKE = @GNATMAKE@ - # Libs needed (at present) just for jcf-dump. - LDEXP_LIB = @LDEXP_LIB@ - --ZSTD_LIB = @ZSTD_LIB@ -+ZSTD_INC = @ZSTD_CPPFLAGS@ -+ZSTD_LIB = @ZSTD_LDFLAGS@ @ZSTD_LIB@ - - # Likewise, for use in the tools that must run on this machine - # even if we are cross-building GCC. -@@ -2275,7 +2276,7 @@ CFLAGS-version.o += -DBASEVER=$(BASEVER_s) -DDATESTAMP=$(DATESTAMP_s) \ - version.o: $(REVISION) $(DATESTAMP) $(BASEVER) $(DEVPHASE) - - # lto-compress.o needs $(ZLIBINC) added to the include flags. 
--CFLAGS-lto-compress.o += $(ZLIBINC) -+CFLAGS-lto-compress.o += $(ZLIBINC) $(ZSTD_INC) - - CFLAGS-lto-streamer-in.o += -DTARGET_MACHINE=\"$(target_noncanonical)\" - ---- a/gcc/configure -+++ b/gcc/configure -@@ -786,6 +786,8 @@ LTLIBICONV - LIBICONV - ZSTD_LIB - ZSTD_INCLUDE -+ZSTD_LDFLAGS -+ZSTD_CPPFLAGS - DL_LIB - LDEXP_LIB - EXTRA_GCC_LIBS ---- a/gcc/configure.ac -+++ b/gcc/configure.ac -@@ -1339,6 +1339,8 @@ AC_SUBST(ZSTD_INCLUDE) - AC_SUBST(ZSTD_LIB) - ZSTD_CPPFLAGS= - ZSTD_LDFLAGS= -+AC_SUBST(ZSTD_CPPFLAGS) -+AC_SUBST(ZSTD_LDFLAGS) - AC_ARG_WITH(zstd, - [AS_HELP_STRING([--with-zstd=PATH], - [specify prefix directory for installed zstd library. diff --git a/packages/libvips/package.py b/packages/libvips/package.py index ab8a8849..19db301d 100644 --- a/packages/libvips/package.py +++ b/packages/libvips/package.py @@ -12,14 +12,26 @@ class Libvips(AutotoolsPackage): little memory.""" homepage = "https://libvips.github.io/libvips/" - url = "https://github.com/libvips/libvips/releases/download/v8.9.0/vips-8.9.0.tar.gz" + url = "https://github.com/libvips/libvips/releases/download/v8.15.3/vips-8.15.3.tar.xz" git = "https://github.com/libvips/libvips.git" + license("LGPL-2.1-or-later", checked_by="wdconinc") + + version("8.15.3", sha256="3e27d9f536eafad64013958fe9e8a1964c90b564c731d49db7c1a1c11b1052a0") version("8.13.3", sha256="4eff5cdc8dbe1a05a926290a99014e20ba386f5dcca38d9774bef61413435d4c") version("8.10.5", sha256="a4eef2f5334ab6dbf133cd3c6d6394d5bdb3e76d5ea4d578b02e1bc3d9e1cfd8") version("8.9.1", sha256="45633798877839005016c9d3494e98dee065f5cb9e20f4552d3b315b8e8bce91") version("8.9.0", sha256="97334a5e70aff343d2587f23cb8068fc846a58cd937c89a446142ccf00ea0349") + build_system( + conditional("autotools", when="@:8.13"), + conditional("meson", when="@8.13:"), + default="meson", + ) + + depends_on("c", type="build") # generated + depends_on("cxx", type="build") # generated + variant("fftw", default=True, description="Uses FFTW3 for fourier transforms.") 
variant("jpeg", default=False, description="Enable JPEG support") @@ -32,10 +44,7 @@ class Libvips(AutotoolsPackage): # TODO: Add more variants! - # begin EBRAINS (added) depends_on("pkgconfig", type="build") - # end EBRAINS - depends_on("glib") depends_on("expat") @@ -43,7 +52,7 @@ class Libvips(AutotoolsPackage): depends_on("libjpeg", when="+jpeg") depends_on("libtiff", when="+tiff") depends_on("libpng", when="+png") - depends_on("poppler", when="+poppler") + depends_on("poppler +glib", when="+poppler") # begin EBRAINS (added): to fix build errors depends_on("gobject-introspection") @@ -51,3 +60,7 @@ class Libvips(AutotoolsPackage): depends_on("pango") depends_on("libtiff") # end EBRAINS + + def url_for_version(self, version): + ext = "xz" if version >= Version("8.14") else "gz" + return f"https://github.com/libvips/libvips/releases/download/v{version}/vips-{version}.tar.{ext}" diff --git a/packages/libxcb/package.py b/packages/libxcb/package.py index 2cb1418c..dcaff208 100644 --- a/packages/libxcb/package.py +++ b/packages/libxcb/package.py @@ -6,26 +6,44 @@ from spack.package import * -class Libxcb(AutotoolsPackage): +class Libxcb(AutotoolsPackage, XorgPackage): """The X protocol C-language Binding (XCB) is a replacement for Xlib featuring a small footprint, latency hiding, direct access to the protocol, improved threading support, and extensibility.""" homepage = "https://xcb.freedesktop.org/" - url = "https://xorg.freedesktop.org/archive/individual/lib/libxcb-1.14.tar.xz" + xorg_mirror_path = "lib/libxcb-1.14.tar.xz" + license("MIT") + + maintainers("wdconinc") + + version("1.17.0", sha256="599ebf9996710fea71622e6e184f3a8ad5b43d0e5fa8c4e407123c88a59a6d55") + version("1.16.1", sha256="f24d187154c8e027b358fc7cb6588e35e33e6a92f11c668fe77396a7ae66e311") + version("1.16", sha256="4348566aa0fbf196db5e0a576321c65966189210cb51328ea2bb2be39c711d71") + version("1.15", sha256="cc38744f817cf6814c847e2df37fcb8997357d72fa4bcbc228ae0fe47219a059") version("1.14", 
sha256="a55ed6db98d43469801262d81dc2572ed124edc3db31059d4e9916eb9f844c34") - version("1.13", sha256="0bb3cfd46dbd90066bf4d7de3cad73ec1024c7325a4a0cbf5f4a0d4fa91155fb") + version( + "1.13", + sha256="0bb3cfd46dbd90066bf4d7de3cad73ec1024c7325a4a0cbf5f4a0d4fa91155fb", + url="https://xcb.freedesktop.org/dist/libxcb-1.13.tar.gz", + deprecated=True, + ) + + depends_on("c", type="build") # generated depends_on("libpthread-stubs") depends_on("libxau@0.99.2:") depends_on("libxdmcp") # libxcb 1.X requires xcb-proto >= 1.X - depends_on("xcb-proto") - depends_on("xcb-proto@1.14:", when="@1.14") - depends_on("xcb-proto@1.13:", when="@1.13") + depends_on("xcb-proto", type="build") + depends_on("xcb-proto@1.17:", when="@1.17", type="build") + depends_on("xcb-proto@1.16:", when="@1.16", type="build") + depends_on("xcb-proto@1.15:", when="@1.15", type="build") + depends_on("xcb-proto@1.14:", when="@1.14", type="build") + depends_on("xcb-proto@1.13:", when="@1.13", type="build") # begin EBRAINS (deleted): break cyclic dependency in python+tkinter # depends_on("python", type="build") @@ -33,14 +51,6 @@ class Libxcb(AutotoolsPackage): depends_on("pkgconfig", type="build") depends_on("util-macros", type="build") - def url_for_version(self, version): - if version >= Version("1.14"): - url = "https://xorg.freedesktop.org/archive/individual/lib/libxcb-{0}.tar.xz" - else: - url = "https://xcb.freedesktop.org/dist/libxcb-{0}.tar.gz" - - return url.format(version) - def configure_args(self): config_args = [] @@ -51,4 +61,18 @@ class Libxcb(AutotoolsPackage): return config_args def patch(self): - filter_file("typedef struct xcb_auth_info_t {", "typedef struct {", "src/xcb.h") \ No newline at end of file + filter_file("typedef struct xcb_auth_info_t {", "typedef struct {", "src/xcb.h") + + # libxcb fails to build with non-UTF-8 locales, see: + # https://www.linuxfromscratch.org/blfs/view/git/x/libxcb.html + # https://gitlab.freedesktop.org/xorg/lib/libxcb/-/merge_requests/53 (merged in 1.17.0) 
+ # https://gitlab.freedesktop.org/xorg/lib/libxcb/-/merge_requests/60 + # If a newer release can be verified to build with LC_ALL=en_US.ISO-8859-1, + # then we can limit the following function, e.g. + # when("@:1.17") + def setup_build_environment(self, env): + env.set("LC_ALL", "C.UTF-8") + + # begin EBRAINS (deleted): break cyclic dependency in python+tkinter + # depends_on("python", type="build") + # end EBRAINS diff --git a/packages/llvm/constexpr_longdouble.patch b/packages/llvm/constexpr_longdouble.patch deleted file mode 100644 index 8b90001d..00000000 --- a/packages/llvm/constexpr_longdouble.patch +++ /dev/null @@ -1,28 +0,0 @@ -From 3bf63cf3b366d3a57cf5cbad4112a6abf6c0c3b1 Mon Sep 17 00:00:00 2001 -From: Marshall Clow <mclow.lists@gmail.com> -Date: Tue, 2 Apr 2019 14:46:36 +0000 -Subject: [PATCH] Special case some duration arithmetic for GCC and PPC because - their long double constant folding is broken. Fixes PR#39696. - -llvm-svn: 357478 ---- - libcxx/include/thread | 5 +++++ - 1 file changed, 5 insertions(+) - -diff --git a/libcxx/include/thread b/libcxx/include/thread -index df06ff70f8e37f22f4108be8e5e79a38052a11dd..400459ae7f32c4d7cd24b2d85c49d789500e432d 100644 ---- a/libcxx/include/thread -+++ b/libcxx/include/thread -@@ -434,7 +434,12 @@ sleep_for(const chrono::duration<_Rep, _Period>& __d) - using namespace chrono; - if (__d > duration<_Rep, _Period>::zero()) - { -+#if defined(_LIBCPP_COMPILER_GCC) && (__powerpc__ || __POWERPC__) -+ // GCC's long double const folding is incomplete for IBM128 long doubles. 
-+ _LIBCPP_CONSTEXPR duration<long double> _Max = duration<long double>(ULLONG_MAX/1000000000ULL) ; -+#else - _LIBCPP_CONSTEXPR duration<long double> _Max = nanoseconds::max(); -+#endif - nanoseconds __ns; - if (__d < _Max) - { diff --git a/packages/llvm/constexpr_longdouble_9.0.patch b/packages/llvm/constexpr_longdouble_9.0.patch deleted file mode 100644 index 9a62f270..00000000 --- a/packages/llvm/constexpr_longdouble_9.0.patch +++ /dev/null @@ -1,38 +0,0 @@ -From d9a42ec98adcb1ebc0c3837715df4e5a50c7ccc0 Mon Sep 17 00:00:00 2001 -From: "Joel E. Denny" <jdenny.ornl@gmail.com> -Date: Wed, 10 Jun 2020 12:40:43 -0400 -Subject: [PATCH] [libc++] Work around gcc/Power9 bug in `include/thread` - -This fixes PR39696, which breaks the libcxx build with gcc (I tested -7.5.0) on Power9. This fix was suggested at - -https://bugs.llvm.org/show_bug.cgi?id=39696#c38 - -but never applied. It just reverts 0583d9ea8d5e, which reverses -components of the original fix in 3bf63cf3b366, which is correct. - -Fixes https://llvm.org/PR39696 - -Reviewed By: ldionne - -Differential Revision: https://reviews.llvm.org/D81438 ---- - libcxx/include/thread | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/libcxx/include/thread b/libcxx/include/thread -index 22aa4f201295867cff57b7a944e6b7bd67b22ad3..6eff1800acdbef09eae4417eee977fa350c596ea 100644 ---- a/libcxx/include/thread -+++ b/libcxx/include/thread -@@ -365,9 +365,9 @@ sleep_for(const chrono::duration<_Rep, _Period>& __d) - { - #if defined(_LIBCPP_COMPILER_GCC) && (__powerpc__ || __POWERPC__) - // GCC's long double const folding is incomplete for IBM128 long doubles. 
-- _LIBCPP_CONSTEXPR duration<long double> _Max = nanoseconds::max(); --#else - _LIBCPP_CONSTEXPR duration<long double> _Max = duration<long double>(ULLONG_MAX/1000000000ULL) ; -+#else -+ _LIBCPP_CONSTEXPR duration<long double> _Max = nanoseconds::max(); - #endif - nanoseconds __ns; - if (__d < _Max) diff --git a/packages/llvm/detection_test.yaml b/packages/llvm/detection_test.yaml new file mode 100644 index 00000000..860b3061 --- /dev/null +++ b/packages/llvm/detection_test.yaml @@ -0,0 +1,104 @@ +paths: +- layout: + - executables: + - "bin/clang-3.9" + - "bin/clang++-3.9" + script: | + echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + platforms: ["darwin", "linux"] + results: + - spec: 'llvm@3.9.1 +clang~lld~lldb' + extra_attributes: + compilers: + c: ".*/bin/clang-3.9$" + cxx: ".*/bin/clang[+][+]-3.9$" + +# `~` and other weird characters in the version string +- layout: + - executables: + - "bin/clang-6.0" + - "bin/clang++-6.0" + script: | + echo "clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin", + + platforms: ["darwin", "linux"] + results: + - spec: 'llvm@6.0.1 +clang~lld~lldb' + extra_attributes: + compilers: + c: ".*/bin/clang-6.0$" + cxx: ".*/bin/clang[+][+]-6.0$" +- layout: + - executables: + - "bin/clang-9.0" + - "bin/clang++-9.0" + script: | + echo "clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + + platforms: ["darwin", "linux"] + results: + - spec: 'llvm@9.0.1 +clang~lld~lldb' + extra_attributes: + compilers: + c: ".*/bin/clang-9.0$" + cxx: ".*/bin/clang[+][+]-9.0$" + +# Multiple LLVM packages in the same prefix +- layout: + - executables: + - "bin/clang-8" + - "bin/clang++-8" + script: | + 
echo "clang version 8.0.0-3~ubuntu18.04.2 (tags/RELEASE_800/final)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + - executables: + - "bin/ld.lld-8" + script: 'echo "LLD 8.0.0 (compatible with GNU linkers)"' + - executables: + - "bin/lldb" + script: 'echo "lldb version 8.0.0"' + - executables: + - "bin/clang-3.9" + - "bin/clang++-3.9" + script: | + echo "clang version 3.9.1-19ubuntu1 (tags/RELEASE_391/rc2)" + echo "Target: x86_64-pc-linux-gnu" + echo "Thread model: posix" + echo "InstalledDir: /usr/bin" + platforms: ["darwin", "linux"] + results: + - spec: 'llvm@8.0.0+clang+lld+lldb' + extra_attributes: + compilers: + c: ".*/bin/clang-8$" + cxx: ".*/bin/clang[+][+]-8$" + + - spec: 'llvm@3.9.1+clang~lld~lldb' + extra_attributes: + compilers: + c: ".*/bin/clang-3.9$" + cxx: ".*/bin/clang[+][+]-3.9$" + +# Apple Clang should not be detected +- layout: + - executables: + - "bin/clang" + - "bin/clang++" + script: | + echo "Apple clang version 11.0.0 (clang-1100.0.33.8)" + echo "Target: x86_64-apple-darwin19.5.0" + echo "Thread model: posix" + echo "InstalledDir: /Library/Developer/CommandLineTools/usr/bin" + platforms: ["darwin"] + results: [] diff --git a/packages/llvm/libomp-libflags-as-list.patch b/packages/llvm/libomp-libflags-as-list.patch deleted file mode 100644 index 995f55a4..00000000 --- a/packages/llvm/libomp-libflags-as-list.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff --git a/openmp/runtime/cmake/LibompHandleFlags.cmake b/openmp/runtime/cmake/LibompHandleFlags.cmake -index 9e19e59ba17d..f92fa12d851a 100644 ---- a/openmp/runtime/cmake/LibompHandleFlags.cmake -+++ b/openmp/runtime/cmake/LibompHandleFlags.cmake -@@ -144,7 +144,8 @@ function(libomp_get_libflags libflags) - endif() - set(libflags_local ${libflags_local} ${LIBOMP_LIBFLAGS}) - libomp_setup_flags(libflags_local) -- set(${libflags} ${libflags_local} PARENT_SCOPE) -+ libomp_string_to_list("${libflags_local}" libflags_local_list) -+ set(${libflags} 
${libflags_local_list} PARENT_SCOPE) - endfunction() - - # Fortran flags diff --git a/packages/llvm/lldb_external_ncurses-10.patch b/packages/llvm/lldb_external_ncurses-10.patch deleted file mode 100644 index 34ed0e3c..00000000 --- a/packages/llvm/lldb_external_ncurses-10.patch +++ /dev/null @@ -1,31 +0,0 @@ -diff --git a/lldb/include/lldb/Host/Config.h.cmake b/lldb/include/lldb/Host/Config.h.cmake ---- a/lldb/include/lldb/Host/Config.h.cmake -+++ b/lldb/include/lldb/Host/Config.h.cmake -@@ -38,6 +38,8 @@ - - #cmakedefine01 LLDB_ENABLE_CURSES - -+#cmakedefine01 CURSES_HAVE_NCURSES_CURSES_H -+ - #cmakedefine01 LLDB_ENABLE_LIBEDIT - - #cmakedefine01 LLDB_ENABLE_LIBXML2 -diff --git a/lldb/source/Core/IOHandlerCursesGUI.cpp b/lldb/source/Core/IOHandlerCursesGUI.cpp ---- a/lldb/source/Core/IOHandlerCursesGUI.cpp -+++ b/lldb/source/Core/IOHandlerCursesGUI.cpp -@@ -10,9 +10,14 @@ - #include "lldb/Host/Config.h" - - #if LLDB_ENABLE_CURSES -+#if CURSES_HAVE_NCURSES_CURSES_H -+#include <ncurses/curses.h> -+#include <ncurses/panel.h> -+#else - #include <curses.h> - #include <panel.h> - #endif -+#endif - - #if defined(__APPLE__) - #include <deque> - diff --git a/packages/llvm/llvm-gcc11.patch b/packages/llvm/llvm-gcc11.patch deleted file mode 100644 index 8e081e87..00000000 --- a/packages/llvm/llvm-gcc11.patch +++ /dev/null @@ -1,9 +0,0 @@ ---- a/llvm/utils/benchmark/src/benchmark_register.h -+++ b/llvm/utils/benchmark/src/benchmark_register.h -@@ -2,6 +2,7 @@ - #define BENCHMARK_REGISTER_H - - #include <vector> -+#include <limits> - - #include "check.h" diff --git a/packages/llvm/llvm13-thread.patch b/packages/llvm/llvm13-thread.patch deleted file mode 100644 index 0067a930..00000000 --- a/packages/llvm/llvm13-thread.patch +++ /dev/null @@ -1,19 +0,0 @@ ---- spack-src/openmp/libomptarget/cmake/Modules/LibomptargetGetDependencies.cmake.org 2022-02-08 14:58:13.000000000 +0900 -+++ spack-src/openmp/libomptarget/cmake/Modules/LibomptargetGetDependencies.cmake 2022-02-08 
13:58:53.000000000 +0900 -@@ -276,4 +276,5 @@ - endif() - endif() - --set(OPENMP_PTHREAD_LIB ${LLVM_PTHREAD_LIB}) -+find_package(Threads REQUIRED) -+set(OPENMP_PTHREAD_LIB ${CMAKE_THREAD_LIBS_INIT}) ---- spack-src/openmp/libomptarget/src/CMakeLists.txt.org 2022-02-09 08:49:35.000000000 +0900 -+++ spack-src/openmp/libomptarget/src/CMakeLists.txt 2022-02-09 08:50:18.000000000 +0900 -@@ -36,6 +36,7 @@ - endif() - target_link_libraries(omptarget PRIVATE - ${CMAKE_DL_LIBS} -+ ${OPENMP_PTHREAD_LIB} - "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/exports") - - # Install libomptarget under the lib destination folder. diff --git a/packages/llvm/llvm14-hwloc-ompd.patch b/packages/llvm/llvm14-hwloc-ompd.patch deleted file mode 100644 index bdae92e0..00000000 --- a/packages/llvm/llvm14-hwloc-ompd.patch +++ /dev/null @@ -1,13 +0,0 @@ ---- a/openmp/libompd/src/CMakeLists.txt -+++ b/openmp/libompd/src/CMakeLists.txt -@@ -44,6 +44,10 @@ - ${LIBOMP_SRC_DIR} - ) - -+if(${LIBOMP_USE_HWLOC}) -+ include_directories(${LIBOMP_HWLOC_INSTALL_DIR}/include) -+endif() -+ - INSTALL( TARGETS ompd - LIBRARY DESTINATION ${OPENMP_INSTALL_LIBDIR} - ARCHIVE DESTINATION ${OPENMP_INSTALL_LIBDIR} diff --git a/packages/llvm/llvm17-18-thread.patch b/packages/llvm/llvm17-18-thread.patch new file mode 100644 index 00000000..7e337433 --- /dev/null +++ b/packages/llvm/llvm17-18-thread.patch @@ -0,0 +1,22 @@ +diff --git a/openmp/libomptarget/cmake/Modules/LibomptargetGetDependencies.cmake b/openmp/libomptarget/cmake/Modules/LibomptargetGetDependencies.cmake +index 1f2a50667c4f..d3ff232f6bd3 100644 +--- a/openmp/libomptarget/cmake/Modules/LibomptargetGetDependencies.cmake ++++ b/openmp/libomptarget/cmake/Modules/LibomptargetGetDependencies.cmake +@@ -280,4 +280,5 @@ if (NOT LIBOMPTARGET_CUDA_TOOLKIT_ROOT_DIR_PRESET AND + endif() + endif() + +-set(OPENMP_PTHREAD_LIB ${LLVM_PTHREAD_LIB}) ++find_package(Threads REQUIRED) ++set(OPENMP_PTHREAD_LIB Threads::Threads) +diff --git 
a/openmp/libomptarget/src/CMakeLists.txt b/openmp/libomptarget/src/CMakeLists.txt +index 071ec61889a2..b782c3b07e6f 100644 +--- a/openmp/libomptarget/src/CMakeLists.txt.orig 2024-03-26 14:30:52.000000000 +0900 ++++ b/openmp/libomptarget/src/CMakeLists.txt 2024-03-26 14:34:02.000000000 +0900 +@@ -41,5 +41,6 @@ + + if (LIBOMP_HAVE_VERSION_SCRIPT_FLAG) + target_link_libraries(omptarget PRIVATE ++ ${OPENMP_PTHREAD_LIB} + "-Wl,--version-script=${CMAKE_CURRENT_SOURCE_DIR}/exports") + endif() diff --git a/packages/llvm/llvm17-fujitsu.patch b/packages/llvm/llvm17-fujitsu.patch new file mode 100644 index 00000000..f960830c --- /dev/null +++ b/packages/llvm/llvm17-fujitsu.patch @@ -0,0 +1,28 @@ +diff --git a/lldb/include/lldb/Utility/LLDBAssert.h_org b/lldb/include/lldb/Utility/LLDBAssert.h +index aeef3e5..2f14ff3 100644 +--- a/lldb/include/lldb/Utility/LLDBAssert.h_org ++++ b/lldb/include/lldb/Utility/LLDBAssert.h +@@ -14,7 +14,7 @@ + #ifndef NDEBUG + #define lldbassert(x) assert(x) + #else +-#if defined(__clang__) ++#if defined(__clang__) && !defined(__CLANG_FUJITSU) + // __FILE_NAME__ is a Clang-specific extension that functions similar to + // __FILE__ but only renders the last path component (the filename) instead of + // an invocation dependent full path to that file. 
+ +diff --git runtimes/CMakeLists.txt_org runtimes/CMakeLists.txt +--- a/runtimes/CMakeLists.txt_org ++++ b/runtimes/CMakeLists.txt +@@ -6,2 +6,2 @@ + include(${LLVM_COMMON_CMAKE_UTILS}/Modules/CMakePolicy.cmake + NO_POLICY_SCOPE) + ++string(REPLACE "-Nclang" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") ++string(REPLACE "-Nnofjprof" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") ++string(REPLACE "-Nfjprof" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") ++ + project(Runtimes C CXX ASM) + + list(INSERT CMAKE_MODULE_PATH 0 diff --git a/packages/llvm/llvm4-lld-ELF-Symbols.patch b/packages/llvm/llvm4-lld-ELF-Symbols.patch deleted file mode 100644 index 1a86cda3..00000000 --- a/packages/llvm/llvm4-lld-ELF-Symbols.patch +++ /dev/null @@ -1,112 +0,0 @@ ---- a/lldb/include/lldb/Utility/TaskPool.h -+++ b/lldb/include/lldb/Utility/TaskPool.h -@@ -33,6 +33,7 @@ - #include <queue> - #include <thread> - #include <vector> -+#include <functional> - - // Global TaskPool class for running tasks in parallel on a set of worker thread - // created the first -# Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463 ---- a/lld/ELF/LTO.cpp -+++ b/lld/ELF/LTO.cpp -@@ -158,7 +158,7 @@ - return Ret; - } - --template void BitcodeCompiler::template add<ELF32LE>(BitcodeFile &); --template void BitcodeCompiler::template add<ELF32BE>(BitcodeFile &); --template void BitcodeCompiler::template add<ELF64LE>(BitcodeFile &); --template void BitcodeCompiler::template add<ELF64BE>(BitcodeFile &); -+template void BitcodeCompiler::add<ELF32LE>(BitcodeFile &); -+template void BitcodeCompiler::add<ELF32BE>(BitcodeFile &); -+template void BitcodeCompiler::add<ELF64LE>(BitcodeFile &); -+template void BitcodeCompiler::add<ELF64BE>(BitcodeFile &); ---- a/lld/ELF/Symbols.cpp -+++ b/lld/ELF/Symbols.cpp -@@ -343,45 +343,45 @@ - template bool SymbolBody::hasThunk<ELF64LE>() const; - template bool SymbolBody::hasThunk<ELF64BE>() const; - --template uint32_t SymbolBody::template getVA<ELF32LE>(uint32_t) const; 
--template uint32_t SymbolBody::template getVA<ELF32BE>(uint32_t) const; --template uint64_t SymbolBody::template getVA<ELF64LE>(uint64_t) const; --template uint64_t SymbolBody::template getVA<ELF64BE>(uint64_t) const; -- --template uint32_t SymbolBody::template getGotVA<ELF32LE>() const; --template uint32_t SymbolBody::template getGotVA<ELF32BE>() const; --template uint64_t SymbolBody::template getGotVA<ELF64LE>() const; --template uint64_t SymbolBody::template getGotVA<ELF64BE>() const; -- --template uint32_t SymbolBody::template getGotOffset<ELF32LE>() const; --template uint32_t SymbolBody::template getGotOffset<ELF32BE>() const; --template uint64_t SymbolBody::template getGotOffset<ELF64LE>() const; --template uint64_t SymbolBody::template getGotOffset<ELF64BE>() const; -- --template uint32_t SymbolBody::template getGotPltVA<ELF32LE>() const; --template uint32_t SymbolBody::template getGotPltVA<ELF32BE>() const; --template uint64_t SymbolBody::template getGotPltVA<ELF64LE>() const; --template uint64_t SymbolBody::template getGotPltVA<ELF64BE>() const; -- --template uint32_t SymbolBody::template getThunkVA<ELF32LE>() const; --template uint32_t SymbolBody::template getThunkVA<ELF32BE>() const; --template uint64_t SymbolBody::template getThunkVA<ELF64LE>() const; --template uint64_t SymbolBody::template getThunkVA<ELF64BE>() const; -- --template uint32_t SymbolBody::template getGotPltOffset<ELF32LE>() const; --template uint32_t SymbolBody::template getGotPltOffset<ELF32BE>() const; --template uint64_t SymbolBody::template getGotPltOffset<ELF64LE>() const; --template uint64_t SymbolBody::template getGotPltOffset<ELF64BE>() const; -- --template uint32_t SymbolBody::template getPltVA<ELF32LE>() const; --template uint32_t SymbolBody::template getPltVA<ELF32BE>() const; --template uint64_t SymbolBody::template getPltVA<ELF64LE>() const; --template uint64_t SymbolBody::template getPltVA<ELF64BE>() const; -- --template uint32_t SymbolBody::template getSize<ELF32LE>() 
const; --template uint32_t SymbolBody::template getSize<ELF32BE>() const; --template uint64_t SymbolBody::template getSize<ELF64LE>() const; --template uint64_t SymbolBody::template getSize<ELF64BE>() const; -+template uint32_t SymbolBody::getVA<ELF32LE>(uint32_t) const; -+template uint32_t SymbolBody::getVA<ELF32BE>(uint32_t) const; -+template uint64_t SymbolBody::getVA<ELF64LE>(uint64_t) const; -+template uint64_t SymbolBody::getVA<ELF64BE>(uint64_t) const; -+ -+template uint32_t SymbolBody::getGotVA<ELF32LE>() const; -+template uint32_t SymbolBody::getGotVA<ELF32BE>() const; -+template uint64_t SymbolBody::getGotVA<ELF64LE>() const; -+template uint64_t SymbolBody::getGotVA<ELF64BE>() const; -+ -+template uint32_t SymbolBody::getGotOffset<ELF32LE>() const; -+template uint32_t SymbolBody::getGotOffset<ELF32BE>() const; -+template uint64_t SymbolBody::getGotOffset<ELF64LE>() const; -+template uint64_t SymbolBody::getGotOffset<ELF64BE>() const; -+ -+template uint32_t SymbolBody::getGotPltVA<ELF32LE>() const; -+template uint32_t SymbolBody::getGotPltVA<ELF32BE>() const; -+template uint64_t SymbolBody::getGotPltVA<ELF64LE>() const; -+template uint64_t SymbolBody::getGotPltVA<ELF64BE>() const; -+ -+template uint32_t SymbolBody::getThunkVA<ELF32LE>() const; -+template uint32_t SymbolBody::getThunkVA<ELF32BE>() const; -+template uint64_t SymbolBody::getThunkVA<ELF64LE>() const; -+template uint64_t SymbolBody::getThunkVA<ELF64BE>() const; -+ -+template uint32_t SymbolBody::getGotPltOffset<ELF32LE>() const; -+template uint32_t SymbolBody::getGotPltOffset<ELF32BE>() const; -+template uint64_t SymbolBody::getGotPltOffset<ELF64LE>() const; -+template uint64_t SymbolBody::getGotPltOffset<ELF64BE>() const; -+ -+template uint32_t SymbolBody::getPltVA<ELF32LE>() const; -+template uint32_t SymbolBody::getPltVA<ELF32BE>() const; -+template uint64_t SymbolBody::getPltVA<ELF64LE>() const; -+template uint64_t SymbolBody::getPltVA<ELF64BE>() const; -+ -+template uint32_t 
SymbolBody::getSize<ELF32LE>() const; -+template uint32_t SymbolBody::getSize<ELF32BE>() const; -+template uint64_t SymbolBody::getSize<ELF64LE>() const; -+template uint64_t SymbolBody::getSize<ELF64BE>() const; - - template class elf::Undefined<ELF32LE>; - template class elf::Undefined<ELF32BE>; diff --git a/packages/llvm/llvm5-lld-ELF-Symbols.patch b/packages/llvm/llvm5-lld-ELF-Symbols.patch deleted file mode 100644 index 727647d3..00000000 --- a/packages/llvm/llvm5-lld-ELF-Symbols.patch +++ /dev/null @@ -1,33 +0,0 @@ -# Fix lld templates: https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=230463 ---- a/lld/ELF/Symbols.cpp -+++ b/lld/ELF/Symbols.cpp -@@ -383,17 +383,17 @@ - return B.getName(); - } - --template uint32_t SymbolBody::template getSize<ELF32LE>() const; --template uint32_t SymbolBody::template getSize<ELF32BE>() const; --template uint64_t SymbolBody::template getSize<ELF64LE>() const; --template uint64_t SymbolBody::template getSize<ELF64BE>() const; -+template uint32_t SymbolBody::getSize<ELF32LE>() const; -+template uint32_t SymbolBody::getSize<ELF32BE>() const; -+template uint64_t SymbolBody::getSize<ELF64LE>() const; -+template uint64_t SymbolBody::getSize<ELF64BE>() const; - --template bool DefinedRegular::template isMipsPIC<ELF32LE>() const; --template bool DefinedRegular::template isMipsPIC<ELF32BE>() const; --template bool DefinedRegular::template isMipsPIC<ELF64LE>() const; --template bool DefinedRegular::template isMipsPIC<ELF64BE>() const; -+template bool DefinedRegular::isMipsPIC<ELF32LE>() const; -+template bool DefinedRegular::isMipsPIC<ELF32BE>() const; -+template bool DefinedRegular::isMipsPIC<ELF64LE>() const; -+template bool DefinedRegular::isMipsPIC<ELF64BE>() const; - --template uint32_t SharedSymbol::template getAlignment<ELF32LE>() const; --template uint32_t SharedSymbol::template getAlignment<ELF32BE>() const; --template uint32_t SharedSymbol::template getAlignment<ELF64LE>() const; --template uint32_t SharedSymbol::template 
getAlignment<ELF64BE>() const; -+template uint32_t SharedSymbol::getAlignment<ELF32LE>() const; -+template uint32_t SharedSymbol::getAlignment<ELF32BE>() const; -+template uint32_t SharedSymbol::getAlignment<ELF64LE>() const; -+template uint32_t SharedSymbol::getAlignment<ELF64BE>() const; diff --git a/packages/llvm/llvm5-sanitizer-ustat.patch b/packages/llvm/llvm5-sanitizer-ustat.patch deleted file mode 100644 index 531a3c5d..00000000 --- a/packages/llvm/llvm5-sanitizer-ustat.patch +++ /dev/null @@ -1,25 +0,0 @@ -# <sys/ustat.h> has been removed from glibc 2.28, -# backport fix from llvm-6.0.1: ---- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc -+++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cc -@@ -159,1 +159,0 @@ --#include <sys/ustat.h> -@@ -252,5 +252,17 @@ - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- unsigned struct_ustat_sz = sizeof(struct ustat); -+ // Use pre-computed size of struct ustat to avoid <sys/ustat.h> which -+ // has been removed from glibc 2.28. 
-+#if defined(__aarch64__) || defined(__s390x__) || defined (__mips64) \ -+ || defined(__powerpc64__) || defined(__arch64__) || defined(__sparcv9) \ -+ || defined(__x86_64__) -+#define SIZEOF_STRUCT_USTAT 32 -+#elif defined(__arm__) || defined(__i386__) || defined(__mips__) \ -+ || defined(__powerpc__) || defined(__s390__) -+#define SIZEOF_STRUCT_USTAT 20 -+#else -+#error Unknown size of struct ustat -+#endif -+ unsigned struct_ustat_sz = SIZEOF_STRUCT_USTAT; - unsigned struct_rlimit64_sz = sizeof(struct rlimit64); - unsigned struct_statvfs64_sz = sizeof(struct statvfs64); diff --git a/packages/llvm/llvm_py37.patch b/packages/llvm/llvm_py37.patch deleted file mode 100644 index 478be879..00000000 --- a/packages/llvm/llvm_py37.patch +++ /dev/null @@ -1,37 +0,0 @@ -From ecdefed7f6ba11421fe1ecc6c13a135ab7bcda73 Mon Sep 17 00:00:00 2001 -From: Pavel Labath <labath@google.com> -Date: Mon, 23 Jul 2018 11:37:36 +0100 -Subject: [PATCH] Fix PythonString::GetString for >=python-3.7 - -The return value of PyUnicode_AsUTF8AndSize is now "const char *". 
---- - .../Plugins/ScriptInterpreter/Python/PythonDataObjects.cpp | 6 ++++-- - 1 file changed, 4 insertions(+), 2 deletions(-) - -diff --git a/tools/lldb/source/Plugins/ScriptInterpreter/Python/PythonDataObjects.cpp b/tools/lldb/source/Plugins/ScriptInterpreter/Python/PythonDataObjects.cpp -index 6a9d57d5a..94f16b2c7 100644 ---- a/lldb/source/Plugins/ScriptInterpreter/Python/PythonDataObjects.cpp -+++ b/lldb/source/Plugins/ScriptInterpreter/Python/PythonDataObjects.cpp -@@ -404,14 +404,16 @@ llvm::StringRef PythonString::GetString() const { - return llvm::StringRef(); - - Py_ssize_t size; -- char *c; -+ const char *data; - - #if PY_MAJOR_VERSION >= 3 -- c = PyUnicode_AsUTF8AndSize(m_py_obj, &size); -+ data = PyUnicode_AsUTF8AndSize(m_py_obj, &size); - #else -+ char *c; - PyString_AsStringAndSize(m_py_obj, &c, &size); -+ data = c; - #endif -- return llvm::StringRef(c, size); -+ return llvm::StringRef(data, size); - } - - size_t PythonString::GetSize() const { --- -2.18.0.233.g985f88cf7e-goog - diff --git a/packages/llvm/llvm_python_path.patch b/packages/llvm/llvm_python_path.patch deleted file mode 100644 index 9f821cc3..00000000 --- a/packages/llvm/llvm_python_path.patch +++ /dev/null @@ -1,14 +0,0 @@ -diff --git a/compiler-rt/cmake/Modules/AddCompilerRT.cmake b/compiler-rt/cmake/Modules/AddCompilerRT.cmake -index dab55707338..6f4c6791141 100644 ---- a/compiler-rt/cmake/Modules/AddCompilerRT.cmake -+++ b/compiler-rt/cmake/Modules/AddCompilerRT.cmake -@@ -612,6 +612,9 @@ macro(add_custom_libcxx name prefix) - CMAKE_OBJDUMP - CMAKE_STRIP - CMAKE_SYSROOT -+ PYTHON_EXECUTABLE -+ Python3_EXECUTABLE -+ Python2_EXECUTABLE - CMAKE_SYSTEM_NAME) - foreach(variable ${PASSTHROUGH_VARIABLES}) - get_property(is_value_set CACHE ${variable} PROPERTY VALUE SET) diff --git a/packages/llvm/missing-includes.patch b/packages/llvm/missing-includes.patch deleted file mode 100644 index e88b8fcf..00000000 --- a/packages/llvm/missing-includes.patch +++ /dev/null @@ -1,23 +0,0 @@ -# 
https://github.com/spack/spack/issues/24270 (This hunk is upstream since llvm-10) ---- a/llvm/include/llvm/Demangle/MicrosoftDemangleNodes.h -+++ b/llvm/include/llvm/Demangle/MicrosoftDemangleNodes.h -@@ -4,6 +4,8 @@ - #include "llvm/Demangle/Compiler.h" - #include "llvm/Demangle/StringView.h" - #include <array> -+#include <cstdint> -+#include <string> - - class OutputStream; - -# https://github.com/spack/spack/pull/27233 ---- a/llvm/utils/benchmark/src/benchmark_register.h -+++ b/llvm/utils/benchmark/src/benchmark_register.h -@@ -2,6 +2,7 @@ - #define BENCHMARK_REGISTER_H - - #include <vector> -+#include <limits> - - #include "check.h" - diff --git a/packages/llvm/no_cyclades.patch b/packages/llvm/no_cyclades.patch deleted file mode 100644 index 10f9d079..00000000 --- a/packages/llvm/no_cyclades.patch +++ /dev/null @@ -1,81 +0,0 @@ -diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc b/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc ---- a/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -+++ b/compiler-rt/lib/sanitizer_common/sanitizer_common_interceptors_ioctl.inc -@@ -370,15 +370,6 @@ - - #if SANITIZER_GLIBC - // _(SIOCDEVPLIP, WRITE, struct_ifreq_sz); // the same as EQL_ENSLAVE -- _(CYGETDEFTHRESH, WRITE, sizeof(int)); -- _(CYGETDEFTIMEOUT, WRITE, sizeof(int)); -- _(CYGETMON, WRITE, struct_cyclades_monitor_sz); -- _(CYGETTHRESH, WRITE, sizeof(int)); -- _(CYGETTIMEOUT, WRITE, sizeof(int)); -- _(CYSETDEFTHRESH, NONE, 0); -- _(CYSETDEFTIMEOUT, NONE, 0); -- _(CYSETTHRESH, NONE, 0); -- _(CYSETTIMEOUT, NONE, 0); - _(EQL_EMANCIPATE, WRITE, struct_ifreq_sz); - _(EQL_ENSLAVE, WRITE, struct_ifreq_sz); - _(EQL_GETMASTRCFG, WRITE, struct_ifreq_sz); -diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h ---- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h -+++ 
b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.h -@@ -983,7 +983,6 @@ - - #if SANITIZER_LINUX && !SANITIZER_ANDROID - extern unsigned struct_ax25_parms_struct_sz; --extern unsigned struct_cyclades_monitor_sz; - extern unsigned struct_input_keymap_entry_sz; - extern unsigned struct_ipx_config_data_sz; - extern unsigned struct_kbdiacrs_sz; -@@ -1328,15 +1327,6 @@ - #endif // SANITIZER_LINUX - - #if SANITIZER_LINUX && !SANITIZER_ANDROID --extern unsigned IOCTL_CYGETDEFTHRESH; --extern unsigned IOCTL_CYGETDEFTIMEOUT; --extern unsigned IOCTL_CYGETMON; --extern unsigned IOCTL_CYGETTHRESH; --extern unsigned IOCTL_CYGETTIMEOUT; --extern unsigned IOCTL_CYSETDEFTHRESH; --extern unsigned IOCTL_CYSETDEFTIMEOUT; --extern unsigned IOCTL_CYSETTHRESH; --extern unsigned IOCTL_CYSETTIMEOUT; - extern unsigned IOCTL_EQL_EMANCIPATE; - extern unsigned IOCTL_EQL_ENSLAVE; - extern unsigned IOCTL_EQL_GETMASTRCFG; -diff --git a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp ---- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp -+++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp -@@ -143,7 +143,6 @@ - # include <sys/procfs.h> - #endif - #include <sys/user.h> --#include <linux/cyclades.h> - #include <linux/if_eql.h> - #include <linux/if_plip.h> - #include <linux/lp.h> -@@ -460,7 +459,6 @@ - - #if SANITIZER_GLIBC - unsigned struct_ax25_parms_struct_sz = sizeof(struct ax25_parms_struct); -- unsigned struct_cyclades_monitor_sz = sizeof(struct cyclades_monitor); - #if EV_VERSION > (0x010000) - unsigned struct_input_keymap_entry_sz = sizeof(struct input_keymap_entry); - #else -@@ -824,15 +822,6 @@ - #endif // SANITIZER_LINUX - - #if SANITIZER_LINUX && !SANITIZER_ANDROID -- unsigned IOCTL_CYGETDEFTHRESH = CYGETDEFTHRESH; -- unsigned IOCTL_CYGETDEFTIMEOUT = CYGETDEFTIMEOUT; -- unsigned IOCTL_CYGETMON = CYGETMON; -- unsigned IOCTL_CYGETTHRESH = 
CYGETTHRESH; -- unsigned IOCTL_CYGETTIMEOUT = CYGETTIMEOUT; -- unsigned IOCTL_CYSETDEFTHRESH = CYSETDEFTHRESH; -- unsigned IOCTL_CYSETDEFTIMEOUT = CYSETDEFTIMEOUT; -- unsigned IOCTL_CYSETTHRESH = CYSETTHRESH; -- unsigned IOCTL_CYSETTIMEOUT = CYSETTIMEOUT; - unsigned IOCTL_EQL_EMANCIPATE = EQL_EMANCIPATE; - unsigned IOCTL_EQL_ENSLAVE = EQL_ENSLAVE; - unsigned IOCTL_EQL_GETMASTRCFG = EQL_GETMASTRCFG; diff --git a/packages/llvm/package.py b/packages/llvm/package.py index c6cf4897..b437b0ee 100644 --- a/packages/llvm/package.py +++ b/packages/llvm/package.py @@ -2,20 +2,40 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import os import os.path import re import sys import llnl.util.tty as tty +from llnl.util.lang import classproperty -import spack.build_environment import spack.util.executable +from spack.build_systems.cmake import get_cmake_prefix_path from spack.package import * +from spack.package_base import PackageBase + + +class LlvmDetection(PackageBase): + """Base class to detect LLVM based compilers""" + + compiler_version_argument = "--version" + c_names = ["clang"] + cxx_names = ["clang++"] + + @classmethod + def filter_detected_exes(cls, prefix, exes_in_prefix): + # Executables like lldb-vscode-X are daemon listening on some port and would hang Spack + # during detection. clang-cl, clang-cpp, etc. are dev tools that we don't need to test + reject = re.compile( + r"-(vscode|cpp|cl|ocl|gpu|tidy|rename|scan-deps|format|refactor|offload|" + r"check|query|doc|move|extdef|apply|reorder|change-namespace|" + r"include-fixer|import-test|dap|server)" + ) + return [x for x in exes_in_prefix if not reject.search(x)] -class Llvm(CMakePackage, CudaPackage): +class Llvm(CMakePackage, CudaPackage, LlvmDetection, CompilerPackage): """The LLVM Project is a collection of modular and reusable compiler and toolchain technologies. 
Despite its name, LLVM has little to do with traditional virtual machines, though it does provide helpful @@ -29,13 +49,28 @@ class Llvm(CMakePackage, CudaPackage): git = "https://github.com/llvm/llvm-project" maintainers("trws", "haampie", "skosukhin") - tags = ["e4s"] + tags = ["e4s", "compiler"] generator("ninja") - family = "compiler" # Used by lmod + license("Apache-2.0") version("main", branch="main") + version("19.1.3", sha256="e5106e2bef341b3f5e41340e4b6c6a58259f4021ad801acf14e88f1a84567b05") + version("19.1.2", sha256="622cb6c5e95a3bb7e9876c4696a65671f235bd836cfd0c096b272f6c2ada41e7") + version("19.1.1", sha256="115dfd98a353d05bffdab3f80db22f159da48aca0124e8c416f437adcd54b77f") + version("19.1.0", sha256="0a08341036ca99a106786f50f9c5cb3fbe458b3b74cab6089fd368d0edb2edfe") + version("18.1.8", sha256="09c08693a9afd6236f27a2ebae62cda656eba19021ef3f94d59e931d662d4856") + version("18.1.7", sha256="b60df7cbe02cef2523f7357120fb0d46cbb443791cde3a5fb36b82c335c0afc9") + version("18.1.6", sha256="01390edfae5b809e982b530ff9088e674c62b13aa92cb9dc1e067fa2cf501083") + version("18.1.5", sha256="d543309f55ae3f9b422108302b45c40f5696c96862f4bda8f5526955daa54284") + version("18.1.4", sha256="deca5a29e8b1d103ecc4badb3c304aca50d5cac6453364d88ee415dc55699dfb") + version("18.1.3", sha256="fc5a2fd176d73ceb17f4e522f8fe96d8dde23300b8c233476d3609f55d995a7a") + version("18.1.2", sha256="8d686d5ece6f12b09985cb382a3a530dc06bb6e7eb907f57c7f8bf2d868ebb0b") + version("18.1.1", sha256="62439f733311869dbbaf704ce2e02141d2a07092d952fc87ef52d1d636a9b1e4") + version("18.1.0", sha256="eb18f65a68981e94ea1a5aae4f02321b17da9e99f76bfdb983b953f4ba2d3550") + version("17.0.6", sha256="81494d32e6f12ea6f73d6d25424dbd2364646011bb8f7e345ca870750aa27de1") + version("17.0.5", sha256="432c1eda3d1c9379cd52a9bee8e0ea6f7b204bff5075895f963fd8e575aa4fb8") version("17.0.4", sha256="46200b79f52a02fe26d0a43fd856ab6ceff49ab2a0b7c240ac4b700a6ada700c") version("17.0.3", 
sha256="1e3d9d04fb5fbd8d0080042ad72c7e2a5c68788b014b186647a604dbbdd625d2") version("17.0.2", sha256="dcba3eb486973dce45b6edfe618f3f29b703ae7e6ef9df65182fb50fb6fe4235") @@ -83,6 +118,9 @@ class Llvm(CMakePackage, CudaPackage): version("5.0.1", sha256="84ca454abf262579814a2a2b846569f6e0cb3e16dc33ca3642b4f1dff6fbafd3") version("5.0.0", sha256="1f1843315657a4371d8ca37f01265fa9aae17dbcf46d2d0a95c1fdb3c6a4bab6") + depends_on("c", type="build") + depends_on("cxx", type="build") + variant( "clang", default=True, description="Build the LLVM C/C++/Objective-C compiler frontend" ) @@ -131,8 +169,12 @@ class Llvm(CMakePackage, CudaPackage): "or as a project (with the compiler in use)", ) + variant("offload", default=True, when="@19:", description="Build the Offload subproject") + conflicts("+offload", when="~clang") + variant("libomptarget", default=True, description="Build the OpenMP offloading library") conflicts("+libomptarget", when="~clang") + conflicts("+libomptarget", when="~offload @19:") for _p in ["darwin", "windows"]: conflicts("+libomptarget", when="platform={0}".format(_p)) del _p @@ -239,6 +281,8 @@ class Llvm(CMakePackage, CudaPackage): conflicts("+z3", when="~clang") conflicts("+lua", when="@:10") conflicts("+lua", when="~lldb") + # Python distutils were removed with 3.12 and are required to build LLVM <= 14 + conflicts("^python@3.12:", when="@:14") variant( "zstd", @@ -247,6 +291,9 @@ class Llvm(CMakePackage, CudaPackage): description="Enable zstd support for static analyzer / lld", ) + provides("libllvm@19", when="@19.0.0:19") + provides("libllvm@18", when="@18.0.0:18") + provides("libllvm@17", when="@17.0.0:17") provides("libllvm@16", when="@16.0.0:16") provides("libllvm@15", when="@15.0.0:15") provides("libllvm@14", when="@14.0.0:14") @@ -308,19 +355,15 @@ class Llvm(CMakePackage, CudaPackage): depends_on("swig", when="+lua") depends_on("swig", when="+python") depends_on("xz") - # begin VISIONS: modified, bring upstream FIXME: maybe no longer needed - 
depends_on("swig@2:4.0", when="@10:") - depends_on("swig@3:4.0", when="@12:") - # end VISIONS - - # Use ^swig cause it's triggered by both python & lua scripting in lldb - with when("^swig"): - depends_on("swig@2:", when="@10:") - depends_on("swig@3:", when="@12:") - depends_on("swig@4:", when="@17:") - # Commits f0a25fe0b746f56295d5c02116ba28d2f965c175 and - # 81fc5f7909a4ef5a8d4b5da2a10f77f7cb01ba63 fixed swig 4.1 support - depends_on("swig@:4.0", when="@:15") + + for _when_spec in ("+lldb+python", "+lldb+lua"): + with when(_when_spec): + depends_on("swig@2:", when="@10:") + depends_on("swig@3:", when="@12:") + depends_on("swig@4:", when="@17:") + # Commits f0a25fe0b746f56295d5c02116ba28d2f965c175 and + # 81fc5f7909a4ef5a8d4b5da2a10f77f7cb01ba63 fixed swig 4.1 support + depends_on("swig@:4.0", when="@:15") # gold support, required for some features depends_on("binutils+gold+ld+plugins+headers", when="+gold") @@ -384,6 +427,19 @@ class Llvm(CMakePackage, CudaPackage): # cuda_arch value must be specified conflicts("cuda_arch=none", when="+cuda", msg="A value for cuda_arch must be specified.") + # clang/test/Misc/target-invalid-cpu-note.c + conflicts("cuda_arch=10") + conflicts("cuda_arch=11") + conflicts("cuda_arch=12") + conflicts("cuda_arch=13") + conflicts("cuda_arch=75", when="@:13") + conflicts("cuda_arch=80", when="@:13") + conflicts("cuda_arch=86", when="@:13") + conflicts("cuda_arch=87", when="@:15") + conflicts("cuda_arch=89", when="@:15") + conflicts("cuda_arch=90", when="@:15") + conflicts("cuda_arch=90a", when="@:17") + # LLVM bug https://bugs.llvm.org/show_bug.cgi?id=48234 # CMake bug: https://gitlab.kitware.com/cmake/cmake/-/issues/21469 # Fixed in upstream versions of both @@ -486,6 +542,12 @@ class Llvm(CMakePackage, CudaPackage): when="@14:15", ) + # missing <cstdint> include + patch( + "https://github.com/llvm/llvm-project/commit/ff1681ddb303223973653f7f5f3f3435b48a1983.patch?full_index=1", + 
sha256="c6ca6b925f150e8644ce756023797b7f94c9619c62507231f979edab1c09af78", + when="@6:13", + ) # fix building of older versions of llvm with newer versions of glibc for compiler_rt_as in ["project", "runtime"]: with when("compiler-rt={0}".format(compiler_rt_as)): @@ -583,6 +645,10 @@ class Llvm(CMakePackage, CudaPackage): # avoid build failed with Fujitsu compiler patch("llvm13-fujitsu.patch", when="@13 %fj") + # avoid build failed with Fujitsu compiler since llvm17 + patch("llvm17-fujitsu.patch", when="@17: %fj") + patch("llvm17-18-thread.patch", when="@17:18 %fj") + # patch for missing hwloc.h include for libompd # see https://reviews.llvm.org/D123888 patch( @@ -623,6 +689,8 @@ class Llvm(CMakePackage, CudaPackage): patch("add-include-for-libelf-llvm-12-14.patch", when="@12:14") patch("add-include-for-libelf-llvm-15.patch", when="@15") + patch("sanitizer-platform-limits-posix-xdr-macos.patch", when="@10:14 platform=darwin") + @when("@14:17") def patch(self): # https://github.com/llvm/llvm-project/pull/69458 @@ -633,47 +701,43 @@ class Llvm(CMakePackage, CudaPackage): string=True, ) - # The functions and attributes below implement external package - # detection for LLVM. 
See: - # - # https://spack.readthedocs.io/en/latest/packaging_guide.html#making-a-package-discoverable-with-spack-external-find - executables = ["clang", "flang", "ld.lld", "lldb"] + compiler_version_regex = ( + # Normal clang compiler versions are left as-is + r"clang version ([^ )\n]+)-svn[~.\w\d-]*|" + # Don't include hyphenated patch numbers in the version + # (see https://github.com/spack/spack/pull/14365 for details) + r"clang version ([^ )\n]+?)-[~.\w\d-]*|" + r"clang version ([^ )\n]+)|" + # LLDB + r"lldb version ([^ )\n]+)|" + # LLD + r"LLD ([^ )\n]+) \(compatible with GNU linkers\)" + ) + fortran_names = ["flang"] - @classmethod - def filter_detected_exes(cls, prefix, exes_in_prefix): - result = [] - for exe in exes_in_prefix: - # Executables like lldb-vscode-X are daemon listening - # on some port and would hang Spack during detection. - # clang-cl and clang-cpp are dev tools that we don't - # need to test - if any(x in exe for x in ("vscode", "cpp", "-cl", "-gpu")): - continue - result.append(exe) - return result + @property + def supported_languages(self): + languages = [] + if self.spec.satisfies("+clang"): + languages.extend(["c", "cxx"]) + if self.spec.satisfies("+flang"): + languages.append("fortran") + return languages + + @classproperty + def executables(cls): + return super().executables + [r"^ld\.lld(-\d+)?$", r"^lldb(-\d+)?$"] @classmethod def determine_version(cls, exe): - version_regex = re.compile( - # Normal clang compiler versions are left as-is - r"clang version ([^ )\n]+)-svn[~.\w\d-]*|" - # Don't include hyphenated patch numbers in the version - # (see https://github.com/spack/spack/pull/14365 for details) - r"clang version ([^ )\n]+?)-[~.\w\d-]*|" - r"clang version ([^ )\n]+)|" - # LLDB - r"lldb version ([^ )\n]+)|" - # LLD - r"LLD ([^ )\n]+) \(compatible with GNU linkers\)" - ) try: compiler = Executable(exe) - output = compiler("--version", output=str, error=str) + output = compiler(cls.compiler_version_argument, output=str, 
error=str) if "Apple" in output: return None if "AMD" in output: return None - match = version_regex.search(output) + match = re.search(cls.compiler_version_regex, output) if match: return match.group(match.lastindex) except spack.util.executable.ProcessError: @@ -685,23 +749,23 @@ class Llvm(CMakePackage, CudaPackage): @classmethod def determine_variants(cls, exes, version_str): + # Do not need to reuse more general logic from CompilerPackage + # because LLVM has kindly named compilers variants, compilers = ["+clang"], {} lld_found, lldb_found = False, False - for exe in exes: - if "clang++" in exe: - compilers["cxx"] = exe - elif "clang" in exe: - compilers["c"] = exe - elif "flang" in exe: + for exe in sorted(exes, key=len): + name = os.path.basename(exe) + if "clang++" in name: + compilers.setdefault("cxx", exe) + elif "clang" in name: + compilers.setdefault("c", exe) + elif "flang" in name: variants.append("+flang") - compilers["fc"] = exe - compilers["f77"] = exe - elif "ld.lld" in exe: + compilers.setdefault("fortran", exe) + elif "ld.lld" in name: lld_found = True - compilers["ld"] = exe - elif "lldb" in exe: + elif "lldb" in name: lldb_found = True - compilers["lldb"] = exe variants.append("+lld" if lld_found else "~lld") variants.append("+lldb" if lldb_found else "~lldb") @@ -727,7 +791,7 @@ class Llvm(CMakePackage, CudaPackage): if self.spec.external: return self.spec.extra_attributes["compilers"].get("c", None) result = None - if "+clang" in self.spec: + if self.spec.satisfies("+clang"): result = os.path.join(self.spec.prefix.bin, "clang") return result @@ -738,7 +802,7 @@ class Llvm(CMakePackage, CudaPackage): if self.spec.external: return self.spec.extra_attributes["compilers"].get("cxx", None) result = None - if "+clang" in self.spec: + if self.spec.satisfies("+clang"): result = os.path.join(self.spec.prefix.bin, "clang++") return result @@ -749,7 +813,7 @@ class Llvm(CMakePackage, CudaPackage): if self.spec.external: return 
self.spec.extra_attributes["compilers"].get("fc", None) result = None - if "+flang" in self.spec: + if self.spec.satisfies("+flang"): result = os.path.join(self.spec.prefix.bin, "flang") return result @@ -760,7 +824,7 @@ class Llvm(CMakePackage, CudaPackage): if self.spec.external: return self.spec.extra_attributes["compilers"].get("f77", None) result = None - if "+flang" in self.spec: + if self.spec.satisfies("+flang"): result = os.path.join(self.spec.prefix.bin, "flang") return result @@ -794,10 +858,7 @@ class Llvm(CMakePackage, CudaPackage): ) def flag_handler(self, name, flags): - if name == "cxxflags": - flags.append(self.compiler.cxx11_flag) - return (None, flags, None) - elif name == "ldflags" and self.spec.satisfies("%intel"): + if name == "ldflags" and self.spec.satisfies("%intel"): flags.append("-shared-intel") return (None, flags, None) return (flags, None, None) @@ -813,6 +874,14 @@ class Llvm(CMakePackage, CudaPackage): os.symlink(bin, sym) env.prepend_path("PATH", self.stage.path) + def setup_run_environment(self, env): + if self.spec.satisfies("+clang"): + env.set("CC", join_path(self.spec.prefix.bin, "clang")) + env.set("CXX", join_path(self.spec.prefix.bin, "clang++")) + if self.spec.satisfies("+flang"): + env.set("FC", join_path(self.spec.prefix.bin, "flang")) + env.set("F77", join_path(self.spec.prefix.bin, "flang")) + root_cmakelists_dir = "llvm" def cmake_args(self): @@ -820,13 +889,11 @@ class Llvm(CMakePackage, CudaPackage): define = self.define from_variant = self.define_from_variant - python = spec["python"] cmake_args = [ define("LLVM_REQUIRES_RTTI", True), define("LLVM_ENABLE_RTTI", True), define("LLVM_ENABLE_LIBXML2", False), define("CLANG_DEFAULT_OPENMP_RUNTIME", "libomp"), - define("PYTHON_EXECUTABLE", python.command.path), define("LIBOMP_USE_HWLOC", True), define("LIBOMP_HWLOC_INSTALL_DIR", spec["hwloc"].prefix), from_variant("LLVM_ENABLE_ZSTD", "zstd"), @@ -850,15 +917,10 @@ class Llvm(CMakePackage, CudaPackage): if 
shlib_symbol_version is not None and shlib_symbol_version.value != "none": cmake_args.append(define("LLVM_SHLIB_SYMBOL_VERSION", shlib_symbol_version.value)) - if python.version >= Version("3"): - cmake_args.append(define("Python3_EXECUTABLE", python.command.path)) - else: - cmake_args.append(define("Python2_EXECUTABLE", python.command.path)) - projects = [] runtimes = [] - if "+cuda" in spec: + if spec.satisfies("+cuda"): cmake_args.extend( [ define("CUDA_TOOLKIT_ROOT_DIR", spec["cuda"].prefix), @@ -872,7 +934,7 @@ class Llvm(CMakePackage, CudaPackage): ), ] ) - if "openmp=runtime" in spec: + if spec.satisfies("openmp=runtime"): cmake_args.append(define("LIBOMPTARGET_NVPTX_ENABLE_BCLIB", True)) else: # still build libomptarget but disable cuda @@ -887,7 +949,15 @@ class Llvm(CMakePackage, CudaPackage): cmake_args.append(from_variant("LIBOMPTARGET_ENABLE_DEBUG", "libomptarget_debug")) - if "+lldb" in spec: + if spec.satisfies("@14:"): + # The hsa-rocr-dev package may be pulled in through hwloc, which can lead to cmake + # finding libhsa and enabling the AMDGPU plugin. Since we don't support this yet, + # disable explicitly. See commit a05a0c3c2f8eefc80d84b7a87a23a4452d4a3087. 
+ cmake_args.append(define("LIBOMPTARGET_BUILD_AMDGPU_PLUGIN", False)) + if "python" in spec: # lit's Python needs to be set with this variable + cmake_args.append(define("python_executable", spec["python"].command.path)) + + if spec.satisfies("+lldb"): projects.append("lldb") cmake_args.extend( [ @@ -898,59 +968,65 @@ class Llvm(CMakePackage, CudaPackage): define("LLDB_ENABLE_LZMA", True), ] ) - if spec["ncurses"].satisfies("+termlib"): - cmake_args.append(define("LLVM_ENABLE_TERMINFO", True)) + if spec.satisfies("@19:"): + cmake_args.append(define("LLDB_CURSES_LIBS", spec["ncurses"].libs)) else: - cmake_args.append(define("LLVM_ENABLE_TERMINFO", False)) + if spec["ncurses"].satisfies("+termlib"): + cmake_args.append(define("LLVM_ENABLE_TERMINFO", True)) + else: + cmake_args.append(define("LLVM_ENABLE_TERMINFO", False)) if spec.version >= Version("10"): cmake_args.append(from_variant("LLDB_ENABLE_PYTHON", "python")) else: - cmake_args.append(define("LLDB_DISABLE_PYTHON", "~python" in spec)) + cmake_args.append(define("LLDB_DISABLE_PYTHON", spec.satisfies("~python"))) if spec.satisfies("@5.0.0: +python"): cmake_args.append(define("LLDB_USE_SYSTEM_SIX", True)) - else: + elif spec.satisfies("@:19"): cmake_args.append(define("LLVM_ENABLE_TERMINFO", False)) - if "+gold" in spec: + if spec.satisfies("+gold"): cmake_args.append(define("LLVM_BINUTILS_INCDIR", spec["binutils"].prefix.include)) - if "+clang" in spec: + if spec.satisfies("+clang"): projects.append("clang") projects.append("clang-tools-extra") - if "openmp=runtime" in spec: + if spec.satisfies("openmp=runtime"): runtimes.append("openmp") - elif "openmp=project" in spec: + elif spec.satisfies("openmp=project"): projects.append("openmp") - if "+libomptarget" in spec: + if spec.satisfies("+offload"): + runtimes.append("offload") + + if spec.satisfies("+libomptarget"): cmake_args.append(define("OPENMP_ENABLE_LIBOMPTARGET", True)) else: cmake_args.append(define("OPENMP_ENABLE_LIBOMPTARGET", False)) - if "@8" in 
spec: + if spec.satisfies("@8"): cmake_args.append(from_variant("CLANG_ANALYZER_ENABLE_Z3_SOLVER", "z3")) - elif "@9:" in spec: + elif spec.satisfies("@9:"): cmake_args.append(from_variant("LLVM_ENABLE_Z3_SOLVER", "z3")) - if "+flang" in spec: + if spec.satisfies("+flang"): projects.append("flang") - if "+lld" in spec: + if spec.satisfies("+lld"): projects.append("lld") - if "compiler-rt=runtime" in spec: + if spec.satisfies("compiler-rt=runtime"): runtimes.append("compiler-rt") - elif "compiler-rt=project" in spec: + elif spec.satisfies("compiler-rt=project"): projects.append("compiler-rt") - if "libcxx=runtime" in spec: + if spec.satisfies("libcxx=runtime"): runtimes.extend(["libcxx", "libcxxabi"]) - elif "libcxx=project" in spec: + elif spec.satisfies("libcxx=project"): projects.extend(["libcxx", "libcxxabi"]) - if "+mlir" in spec: + if spec.satisfies("+mlir"): projects.append("mlir") - if "libunwind=runtime" in spec: + if spec.satisfies("libunwind=runtime"): runtimes.append("libunwind") - elif "libunwind=project" in spec: + elif spec.satisfies("libunwind=project"): projects.append("libunwind") - if "+polly" in spec: + if spec.satisfies("+polly"): projects.append("polly") cmake_args.append(define("LINK_POLLY_INTO_TOOLS", True)) @@ -964,6 +1040,14 @@ class Llvm(CMakePackage, CudaPackage): # CMAKE_INSTALL_RPATH to it, which fails. Statically link libc++abi.a # into libc++.so, linking with -lc++ or -stdlib=libc++ is enough. 
define("LIBCXX_ENABLE_STATIC_ABI_LIBRARY", True), + # Make sure that CMake does not pick host-installed tools for the build + # Until #45535 is merged, prevent CMake from delivering incompatible + # system tools like python3.12 to older LLVM versions like LLVM-14: + define("CMAKE_FIND_PACKAGE_PREFER_CONFIG", True), + define("CMAKE_FIND_USE_PACKAGE_ROOT_PATH", False), + define("CMAKE_FIND_USE_SYSTEM_PACKAGE_REGISTRY", False), + define("CMAKE_FIND_USE_PACKAGE_REGISTRY", False), + define("CMAKE_FIND_USE_SYSTEM_PATH", False), ] ) @@ -971,14 +1055,16 @@ class Llvm(CMakePackage, CudaPackage): cmake_args.append(from_variant("LIBOMP_TSAN_SUPPORT", "libomp_tsan")) - if self.compiler.name == "gcc": + # From clang 16 onwards we use a more precise --gcc-install-dir flag in post-install + # generated config files. + if self.spec.satisfies("@:15 %gcc"): cmake_args.append(define("GCC_INSTALL_PREFIX", self.compiler.prefix)) if self.spec.satisfies("~code_signing platform=darwin"): cmake_args.append(define("LLDB_USE_SYSTEM_DEBUGSERVER", True)) # LLDB test suite requires libc++ - if "libcxx=none" in spec: + if spec.satisfies("libcxx=none"): cmake_args.append(define("LLDB_INCLUDE_TESTS", False)) # Enable building with CLT [and not require full Xcode] @@ -991,12 +1077,44 @@ class Llvm(CMakePackage, CudaPackage): # Semicolon seperated list of runtimes to enable if runtimes: + # The older versions are not careful enough with the order of the runtimes. 
+ # Instead of applying + # https://github.com/llvm/llvm-project/commit/06400a0142af8297b5d39b8f34a7c59db6f9910c, + # which might be incompatible with the version that we install, + # we sort the runtimes here according to the same order as + # in the aforementioned commit: + if self.spec.satisfies("@:14"): + runtimes_order = [ + "libc", + "libunwind", + "libcxxabi", + "libcxx", + "compiler-rt", + "openmp", + ] + runtimes.sort( + key=lambda x: ( + runtimes_order.index(x) if x in runtimes_order else len(runtimes_order) + ) + ) + + # CMake args passed just to runtimes + runtime_cmake_args = [define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True)] + + # When building runtimes, just-built clang has to know where GCC is. + gcc_install_dir_flag = get_gcc_install_dir_flag(spec, self.compiler) + if gcc_install_dir_flag: + runtime_cmake_args.extend( + [ + define("CMAKE_C_FLAGS", gcc_install_dir_flag), + define("CMAKE_CXX_FLAGS", gcc_install_dir_flag), + ] + ) + cmake_args.extend( [ define("LLVM_ENABLE_RUNTIMES", runtimes), - define( - "RUNTIMES_CMAKE_ARGS", [define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True)] - ), + define("RUNTIMES_CMAKE_ARGS", runtime_cmake_args), ] ) @@ -1008,9 +1126,9 @@ class Llvm(CMakePackage, CudaPackage): define = self.define # unnecessary if we build openmp via LLVM_ENABLE_RUNTIMES - if "+cuda openmp=project" in self.spec: + if self.spec.satisfies("+cuda openmp=project"): ompdir = "build-bootstrapped-omp" - prefix_paths = spack.build_environment.get_cmake_prefix_path(self) + prefix_paths = get_cmake_prefix_path(self) prefix_paths.append(str(spec.prefix)) # rebuild libomptarget to get bytecode runtime library files with working_dir(ompdir, create=True): @@ -1034,15 +1152,31 @@ class Llvm(CMakePackage, CudaPackage): cmake(*cmake_args) ninja() ninja("install") - if "+python" in self.spec: - install_tree("llvm/bindings/python", python_platlib) + if self.spec.satisfies("+python"): + if spec.version < Version("17.0.0"): + # llvm bindings were removed in v17: + 
# https://releases.llvm.org/17.0.1/docs/ReleaseNotes.html#changes-to-the-python-bindings + install_tree("llvm/bindings/python", python_platlib) - if "+clang" in self.spec: + if self.spec.satisfies("+clang"): install_tree("clang/bindings/python", python_platlib) with working_dir(self.build_directory): install_tree("bin", join_path(self.prefix, "libexec", "llvm")) + cfg_files = [] + if spec.satisfies("+clang"): + cfg_files.extend(("clang.cfg", "clang++.cfg")) + if spec.satisfies("@19: +flang"): + # The config file is `flang.cfg` even though the executable is `flang-new`. + # `--gcc-install-dir` / `--gcc-toolchain` support was only added in LLVM 19. + cfg_files.append("flang.cfg") + gcc_install_dir_flag = get_gcc_install_dir_flag(spec, self.compiler) + if gcc_install_dir_flag: + for cfg in cfg_files: + with open(os.path.join(self.prefix.bin, cfg), "w") as f: + print(gcc_install_dir_flag, file=f) + def llvm_config(self, *args, **kwargs): lc = Executable(self.prefix.bin.join("llvm-config")) if not kwargs.get("output"): @@ -1054,6 +1188,18 @@ class Llvm(CMakePackage, CudaPackage): return ret +def get_gcc_install_dir_flag(spec: Spec, compiler) -> Optional[str]: + """Get the --gcc-install-dir=... 
flag, so that clang does not do a system scan for GCC.""" + if not spec.satisfies("@16: %gcc"): + return None + gcc = Executable(compiler.cc) + libgcc_path = gcc("-print-file-name=libgcc.a", output=str, fail_on_error=False).strip() + if not os.path.isabs(libgcc_path): + return None + libgcc_dir = os.path.dirname(libgcc_path) + return f"--gcc-install-dir={libgcc_dir}" if os.path.exists(libgcc_dir) else None + + def get_llvm_targets_to_build(spec): targets = spec.variants["targets"].value @@ -1100,4 +1246,3 @@ def get_llvm_targets_to_build(spec): llvm_targets.add("PowerPC") return list(llvm_targets) - diff --git a/packages/llvm/sanitizer-platform-limits-posix-xdr-macos.patch b/packages/llvm/sanitizer-platform-limits-posix-xdr-macos.patch new file mode 100644 index 00000000..d4477390 --- /dev/null +++ b/packages/llvm/sanitizer-platform-limits-posix-xdr-macos.patch @@ -0,0 +1,11 @@ +--- a/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp ++++ b/compiler-rt/lib/sanitizer_common/sanitizer_platform_limits_posix.cpp +@@ -1250,7 +1250,7 @@ CHECK_SIZE_AND_OFFSET(group, gr_passwd); + CHECK_SIZE_AND_OFFSET(group, gr_gid); + CHECK_SIZE_AND_OFFSET(group, gr_mem); + +-#if HAVE_RPC_XDR_H ++#if HAVE_RPC_XDR_H && !SANITIZER_MAC + CHECK_TYPE_SIZE(XDR); + CHECK_SIZE_AND_OFFSET(XDR, x_op); + CHECK_SIZE_AND_OFFSET(XDR, x_ops); diff --git a/packages/llvm/thread-p9.patch b/packages/llvm/thread-p9.patch deleted file mode 100644 index 140473a8..00000000 --- a/packages/llvm/thread-p9.patch +++ /dev/null @@ -1,16 +0,0 @@ -diff --git a/libcxx/include/thread b/libcxx/include/thread -index 02da703..d1677a1 100644 ---- a/projects/libcxx/include/thread -+++ b/projects/libcxx/include/thread -@@ -368,9 +368,9 @@ sleep_for(const chrono::duration<_Rep, _Period>& __d) - { - #if defined(_LIBCPP_COMPILER_GCC) && (__powerpc__ || __POWERPC__) - // GCC's long double const folding is incomplete for IBM128 long doubles. 
-- _LIBCPP_CONSTEXPR duration<long double> _Max = nanoseconds::max(); --#else - _LIBCPP_CONSTEXPR duration<long double> _Max = duration<long double>(ULLONG_MAX/1000000000ULL) ; -+#else -+ _LIBCPP_CONSTEXPR duration<long double> _Max = nanoseconds::max(); - #endif - nanoseconds __ns; - if (__d < _Max) diff --git a/packages/log4cxx/package.py b/packages/log4cxx/package.py index 0cf8f7a9..eb0c9071 100644 --- a/packages/log4cxx/package.py +++ b/packages/log4cxx/package.py @@ -10,18 +10,42 @@ class Log4cxx(CMakePackage): """A C++ port of Log4j""" homepage = "https://logging.apache.org/log4cxx/latest_stable/" + # begin EBRAINS (modified): fix url url = "https://github.com/apache/logging-log4cxx/archive/refs/tags/rel/v1.2.0.tar.gz" + # end EBRAINS maintainers("nicmcd") - # begin EBRAINS (added): bring upstream (ref. spack@0.21.2) + license("Apache-2.0", checked_by="wdconinc") + + # begin EBRAINS (modified): fix checksums version("1.2.0", sha256="3e0af426011718c634194200cdd79b49ec13c322697bdcddef3d8b2ac9efd7b6") - version("1.1.0", sha256="feb425ce35a391cf0927356bebb7da53f96c8a7aaf634aaf740e011203c732bb") + with default_args(deprecated=True): + # https://nvd.nist.gov/vuln/detail/CVE-2023-31038 + version( + "0.12.1", sha256="567a4200c5b005a816c401e798d98294782950c7750eb3e285e851b970c8beed" + ) + version( + "0.12.0", sha256="31730a17b8ff3f416256755b7aa6d7e95b167c670eb469eb9ff99aa006376e79" + ) # end EBRAINS - version("0.12.1", sha256="567a4200c5b005a816c401e798d98294782950c7750eb3e285e851b970c8beed") - version("0.12.0", sha256="31730a17b8ff3f416256755b7aa6d7e95b167c670eb469eb9ff99aa006376e79") - variant("cxxstd", default="17", description="C++ standard", values=("11", "17"), multi=False) + variant( + "cxxstd", + default="17", + description="C++ standard", + values=("11", "17"), + multi=False, + when="@:1.1", + ) + variant( + "cxxstd", + default="20", + description="C++ standard", + values=("11", "17", "20"), + multi=False, + when="@1.2:", + ) # begin EBRAINS (added) 
variant("events_at_exit", default=False, @@ -34,6 +58,7 @@ class Log4cxx(CMakePackage): depends_on("apr-util") depends_on("apr") depends_on("boost+thread+system", when="cxxstd=11") + depends_on("expat") depends_on("zlib-api") depends_on("zip") diff --git a/packages/nanoflann/package.py b/packages/nanoflann/package.py deleted file mode 100644 index 7dd9d7f3..00000000 --- a/packages/nanoflann/package.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class Nanoflann(CMakePackage): - """a C++ header-only library for Nearest Neighbor (NN) search wih KD-trees.""" - - homepage = "https://github.com/jlblancoc/nanoflann" - url = "https://github.com/jlblancoc/nanoflann/archive/v1.2.3.tar.gz" - - # begin EBRAINS (added): add version - version("1.5.4", sha256="a7f64d0bdff42614c561e52680b16de46c0edac9719f21f935c5e1f8b0654afc") - # end EBRAINS - version("1.4.3", sha256="cbcecf22bec528a8673a113ee9b0e134f91f1f96be57e913fa1f74e98e4449fa") - version("1.2.3", sha256="5ef4dfb23872379fe9eb306aabd19c9df4cae852b72a923af01aea5e8d7a59c3") - - def patch(self): - filter_file("-mtune=native", "", "CMakeLists.txt") - - def cmake_args(self): - args = ["-DBUILD_SHARED_LIBS=ON"] - return args diff --git a/packages/netlib-xblas/package.py b/packages/netlib-xblas/package.py deleted file mode 100644 index cc65577f..00000000 --- a/packages/netlib-xblas/package.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class NetlibXblas(AutotoolsPackage): - """XBLAS is a reference implementation for extra precision BLAS. 
- - XBLAS is a reference implementation for the dense and banded BLAS - routines, along with extended and mixed precision version. Extended - precision is only used internally; input and output arguments remain - the same as in the existing BLAS. Extra precisions is implemented as - double-double (i.e., 128-bit total, 106-bit significand). Mixed - precision permits some input/output arguments of different types - (mixing real and complex) or precisions (mixing single and - double). This implementation is proof of concept, and no attempt was - made to optimize performance; performance should be as good as - straightforward but careful code written by hand.""" - - homepage = "https://www.netlib.org/xblas" - url = "https://www.netlib.org/xblas/xblas.tar.gz" - - version("1.0.248", sha256="b5fe7c71c2da1ed9bcdc5784a12c5fa9fb417577513fe8a38de5de0007f7aaa1") - - variant("fortran", default=True, description="Build Fortran interfaces") - variant("plain_blas", default=True, description="As part of XBLAS, build plain BLAS routines") - - provides("blas", when="+plain_blas") - - # begin EBRAINS (added) - depends_on("m4", type="build") - # end EBRAINS - - @property - def libs(self): - return find_libraries(["libxblas"], root=self.prefix, shared=False, recursive=True) - - def configure_args(self): - args = [] - - if self.spec.satisfies("~fortran"): - args += ["--disable-fortran"] - - if self.spec.satisfies("~plain_blas"): - args += ["--disable-plain-blas"] - - return args - - def install(self, spec, prefix): - mkdirp(prefix.lib) - install("libxblas.a", prefix.lib) - - if self.spec.satisfies("+plain_blas"): - # XBLAS should be a drop-in BLAS replacement - install("libxblas.a", join_path(prefix.lib, "libblas.a")) - - headers = [ - "f2c-bridge.h", - "blas_dense_proto.h", - "blas_enum.h", - "blas_extended.h", - "blas_extended_private.h", - "blas_extended_proto.h", - "blas_fpu.h", - "blas_malloc.h", - ] - mkdirp(prefix.include) - for h in headers: - install(join_path("src", h), 
prefix.include) - - return diff --git a/packages/nglview/package.py b/packages/nglview/package.py deleted file mode 100644 index e86cd491..00000000 --- a/packages/nglview/package.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class Nglview(PythonPackage): - """An IPython/Jupyter widget to interactively view molecular structures and trajectories. - Utilizes the embeddable NGL Viewer for rendering.""" - - # Homepage and download url - homepage = "https://github.com/nglviewer/nglview" - git = 'https://github.com/nglviewer/nglview.git' - url = 'https://github.com/nglviewer/nglview/archive/refs/tags/v3.0.4.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('3.0.4', sha256='78b4413b796965a94045df0d584ec51e256c3dca5f366020439fe7e9744ce61b') - - # Dependencies - depends_on('python@3.8:', type=('build', 'run')) - depends_on('py-setuptools') - depends_on('py-jupyter-packaging') - depends_on('py-versioneer') - depends_on('py-numpy', type=('run')) - depends_on('py-ipywidgets', type=('run')) - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import nglview') diff --git a/packages/open3d/package.py b/packages/open3d/package.py index f3d2f626..c1faaa0b 100644 --- a/packages/open3d/package.py +++ b/packages/open3d/package.py @@ -13,14 +13,24 @@ from spack.package import * class Open3d(CMakePackage, CudaPackage): """Open3D: A Modern Library for 3D Data Processing.""" - homepage = "http://www.open3d.org/" + homepage = "https://www.open3d.org/" url = "https://github.com/isl-org/Open3D/archive/refs/tags/v0.13.0.tar.gz" git = "https://github.com/isl-org/Open3D.git" + license("MIT") + # begin EBRAINS 
(added): add version - version("0.18.0", tag="v0.18.0", submodules=True) + version( + "0.18.0", tag="v0.18.0", commit="0f06a149c4fb9406fd3e432a5cb0c024f38e2f0e", submodules=True + ) # end EBRAINS - version("0.13.0", tag="v0.13.0", submodules=True) + version( + "0.13.0", tag="v0.13.0", commit="c3f9de224e13838a72da0e5565a7ba51038b0f11", submodules=True + ) + + depends_on("c", type="build") # generated + depends_on("cxx", type="build") # generated + depends_on("fortran", type="build") # generated variant("python", default=False, description="Build the Python module") @@ -159,9 +169,6 @@ class Open3d(CMakePackage, CudaPackage): args.append(self.define("DEFINE_GLEW_NO_GLU", True)) # end EBRAINS - if "+python" in self.spec: - args.append(self.define("PYTHON_EXECUTABLE", self.spec["python"].command.path)) - return args def check(self): @@ -189,11 +196,11 @@ class Open3d(CMakePackage, CudaPackage): @run_after("install") @on_package_attributes(run_tests=True) - def test(self): - if "+python" in self.spec: - self.run_test( - self.spec["python"].command.path, - ["-c", "import open3d"], - purpose="checking import of open3d", - work_dir="spack-test", - ) + def test_open3d_import(self): + """Checking import of open3d""" + if "+python" not in self.spec: + return + + with working_dir("spack-test"): + python = which(python.path) + python("-c", "import open3d") diff --git a/packages/openbabel/gcc12-cmake.patch b/packages/openbabel/gcc12-cmake.patch deleted file mode 100644 index c568c899..00000000 --- a/packages/openbabel/gcc12-cmake.patch +++ /dev/null @@ -1,37 +0,0 @@ -From c0570bfeb2d7e0a6a6de1f257cf28e7f3cac8739 Mon Sep 17 00:00:00 2001 -From: Sam James <sam@gentoo.org> -Date: Sun, 12 Jun 2022 11:23:59 +0100 -Subject: [PATCH] CMake: fix time check typo (fixes build failure w/ GCC 12) -MIME-Version: 1.0 -Content-Type: text/plain; charset=UTF-8 -Content-Transfer-Encoding: 8bit - -Without this fixed check, we get a build failure with GCC 12: -``` 
-/var/tmp/portage/sci-chemistry/openbabel-3.1.1_p20210225/work/openbabel-08e23f39b0cc39b4eebd937a5a2ffc1a7bac3e1b/include/openbabel/obutil.h:65:14: error: �~@~Xclock�~@~Y was not declared in this scope; did you mean �~@~Xclock_t�~@~Y? - 65 | start= clock(); - | ^~~~~ - | clock_t -``` - -Bug: https://bugs.gentoo.org/851510 ---- - src/config.h.cmake | 4 ++-- - 1 file changed, 2 insertions(+), 2 deletions(-) - -diff --git a/src/config.h.cmake b/src/config.h.cmake -index 1c59c67699..26e5dde94f 100644 ---- a/src/config.h.cmake -+++ b/src/config.h.cmake -@@ -182,8 +182,8 @@ - #define OB_MODULE_PATH "@OB_MODULE_PATH@" - - #ifndef TIME_WITH_SYS_TIME -- #ifdef HAVE_SYS_TIME -- #ifdef HAVE_TIME -+ #ifdef HAVE_SYS_TIME_H -+ #ifdef HAVE_TIME_H - #define TIME_WITH_SYS_TIME 1 - #else - #define TIME_WITH_SYS_TIME 0 - diff --git a/packages/openbabel/package.py b/packages/openbabel/package.py deleted file mode 100644 index 17641509..00000000 --- a/packages/openbabel/package.py +++ /dev/null @@ -1,97 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class Openbabel(CMakePackage): - """Open Babel is a chemical toolbox designed to speak the many languages - of chemical data. 
It's an open, collaborative project allowing anyone to - search, convert, analyze, or store data from molecular modeling, chemistry, - solid-state materials, biochemistry, or related areas.""" - - homepage = "https://openbabel.org/wiki/Main_Page" - url = "https://github.com/openbabel/openbabel/archive/openbabel-3-0-0.tar.gz" - git = "https://github.com/openbabel/openbabel.git" - - maintainers("RMeli") - - version("master", branch="master") - version("3.1.1", tag="openbabel-3-1-1", commit="cbd4db43f8908b874864280fdc03bf92569eebc1") - version("3.1.0", tag="openbabel-3-1-0", commit="1e593abc1edf47352d5e8a0887654edf69a2f5f3") - version("3.0.0", tag="openbabel-3-0-0", commit="49f9cfb32bd0bc6ea440639d338123eb27accbe2") - version("2.4.1", tag="openbabel-2-4-1", commit="701f6049c483b1349118c2ff736a7f609a84dedd") - version("2.4.0", tag="openbabel-2-4-0", commit="087f33320e6796f39e6a1da04f4de7ec46bec4af") - - variant("python", default=True, description="Build Python bindings") - variant("gui", default=True, description="Build with GUI") - variant("cairo", default=True, description="Build with Cairo (PNG output support)") - variant("openmp", default=False, description="Build with OpenMP") - variant("maeparser", default=False, description="Built with MAE parser") - variant("coordgen", default=False, description="Build with Coordgen") - - extends("python", when="+python") - - depends_on("python", type=("build", "run"), when="+python") - depends_on("cmake@3.1:", type="build") - depends_on("pkgconfig", type="build") - depends_on("swig@2.0:", type="build", when="+python") - - depends_on("boost +filesystem +iostreams +test") - depends_on("cairo", when="+cairo") # required to support PNG depiction - depends_on("pango", when="+cairo") # custom cairo requires custom pango - depends_on("eigen@3.0:") # required if using the language bindings - depends_on("libxml2") # required to read/write CML files, XML formats - depends_on("zlib-api") # required to support reading gzipped files - 
depends_on("rapidjson") # required to support JSON - depends_on("libsm") - depends_on("uuid") - - depends_on("maeparser", when="+maeparser") - depends_on("coordgen", when="+coordgen") - - # Needed for Python 3.6 support - patch("python-3.6-rtld-global.patch", when="@:2.4.1+python") - - # Convert tabs to spaces. Allows unit tests to pass - patch("testpdbformat-tabs-to-spaces.patch", when="@:2.4.1") - - # begin EBRAINS (added) - # CMake: fix time check typo (fixes build failure w/ GCC 12) - patch("gcc12-cmake.patch", when="@:3.1.1") - # end EBRAINS - - def cmake_args(self): - spec = self.spec - args = [] - - if "+python" in spec: - args.extend( - [ - "-DPYTHON_BINDINGS=ON", - "-DPYTHON_EXECUTABLE={0}".format(spec["python"].command.path), - "-DRUN_SWIG=ON", - ] - ) - else: - args.append("-DPYTHON_BINDINGS=OFF") - - args.append(self.define_from_variant("BUILD_GUI", "gui")) - args.append(self.define_from_variant("ENABLE_OPENMP", "openmp")) - args.append(self.define_from_variant("WITH_MAEPARSER", "maeparser")) - args.append(self.define_from_variant("WITH_COORDGEN", "coordgen")) - - return args - - @run_after("install") - @on_package_attributes(run_tests=True) - def check_install(self): - obabel = Executable(join_path(self.prefix.bin, "obabel")) - obabel("-:C1=CC=CC=C1Br", "-omol") - - if "+python" in self.spec: - python("-c", "import openbabel") - if self.spec.version < Version("3.0.0"): - python("-c", "import pybel") diff --git a/packages/openbabel/python-3.6-rtld-global.patch b/packages/openbabel/python-3.6-rtld-global.patch deleted file mode 100644 index 68cd56a1..00000000 --- a/packages/openbabel/python-3.6-rtld-global.patch +++ /dev/null @@ -1,42 +0,0 @@ -The DLFCN module has been removed from python 3.6, as it is not -documented. Same funtionality can be achive with the os module -that makes available the os.RTLD_GLOBAL variable for dlopen() - -See https://github.com/openbabel/openbabel/pull/372 for the -source of this patch. 
The original patch only affects the CMake -file that SWIG uses to generate openbabel.py. This patch also -includes changes to openbabel.py. - -diff -Nuar a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt ---- a/scripts/CMakeLists.txt 2017-05-17 10:02:54.408527942 -0500 -+++ b/scripts/CMakeLists.txt 2017-05-17 10:04:09.701598715 -0500 -@@ -81,11 +81,8 @@ - COMMAND ${SWIG_EXECUTABLE} -python -c++ -small -O -templatereduce -naturalvar -I${openbabel_SOURCE_DIR}/include -I${openbabel_BINARY_DIR}/include -o ${openbabel_SOURCE_DIR}/scripts/python/openbabel-python.cpp ${eigen_define} -outdir ${openbabel_SOURCE_DIR}/scripts/python ${openbabel_SOURCE_DIR}/scripts/openbabel-python.i - COMMAND ${CMAKE_COMMAND} -E echo "import sys" > ob.py - COMMAND ${CMAKE_COMMAND} -E echo "if sys.platform.find('linux'\) != -1:" >> ob.py -- COMMAND ${CMAKE_COMMAND} -E echo " try:" >> ob.py -- COMMAND ${CMAKE_COMMAND} -E echo " import dl" >> ob.py -- COMMAND ${CMAKE_COMMAND} -E echo " except ImportError:" >> ob.py -- COMMAND ${CMAKE_COMMAND} -E echo " import DLFCN as dl" >> ob.py -- COMMAND ${CMAKE_COMMAND} -E echo " sys.setdlopenflags(sys.getdlopenflags() | dl.RTLD_GLOBAL)" >> ob.py -+ COMMAND ${CMAKE_COMMAND} -E echo " import os" >> ob.py -+ COMMAND ${CMAKE_COMMAND} -E echo " sys.setdlopenflags(sys.getdlopenflags() | os.RTLD_GLOBAL)" >> ob.py - COMMAND cat ${openbabel_SOURCE_DIR}/scripts/python/openbabel.py >> ob.py - COMMAND ${CMAKE_COMMAND} -E copy ob.py ${openbabel_SOURCE_DIR}/scripts/python/openbabel.py - COMMAND ${CMAKE_COMMAND} -E remove ob.py -diff -Nuar a/scripts/python/openbabel.py b/scripts/python/openbabel.py ---- a/scripts/python/openbabel.py 2017-05-17 10:02:54.398527534 -0500 -+++ b/scripts/python/openbabel.py 2017-05-17 10:04:26.705292138 -0500 -@@ -1,10 +1,7 @@ - import sys - if sys.platform.find('linux') != -1: -- try: -- import dl -- except ImportError: -- import DLFCN as dl -- sys.setdlopenflags(sys.getdlopenflags() | dl.RTLD_GLOBAL) -+ import os -+ 
sys.setdlopenflags(sys.getdlopenflags() | os.RTLD_GLOBAL) - # This file was automatically generated by SWIG (http://www.swig.org). - # Version 3.0.10 - # diff --git a/packages/openbabel/testpdbformat-tabs-to-spaces.patch b/packages/openbabel/testpdbformat-tabs-to-spaces.patch deleted file mode 100644 index 0a71a72e..00000000 --- a/packages/openbabel/testpdbformat-tabs-to-spaces.patch +++ /dev/null @@ -1,47 +0,0 @@ -From 08cd38485d4cf1df8802da540f3018921dbc735e Mon Sep 17 00:00:00 2001 -From: "Adam J. Stewart" <ajstewart426@gmail.com> -Date: Wed, 17 May 2017 10:56:23 -0500 -Subject: [PATCH] Convert tabs to spaces in testpdbformat.py - -See https://github.com/openbabel/openbabel/pull/1568 - ---- - test/testpdbformat.py | 12 ++++++------ - 1 file changed, 6 insertions(+), 6 deletions(-) - -diff --git a/test/testpdbformat.py b/test/testpdbformat.py -index 40bd316..ceb8496 100644 ---- a/test/testpdbformat.py -+++ b/test/testpdbformat.py -@@ -24,12 +24,12 @@ class TestPDBFormat(BaseTest): - - def testInsertionCodes(self): - """ -- Testing a PDB entry with insertion codes to distinguish residues -- upon conversion to FASTA. -+ Testing a PDB entry with insertion codes to distinguish residues -+ upon conversion to FASTA. 
- """ - self.canFindExecutable("babel") - -- self.entryPDBwithInsertioncodes="""ATOM 406 N VAL L 29 58.041 17.797 48.254 1.00 0.00 N -+ self.entryPDBwithInsertioncodes="""ATOM 406 N VAL L 29 58.041 17.797 48.254 1.00 0.00 N - ATOM 407 CA VAL L 29 57.124 18.088 47.170 1.00 0.00 C - ATOM 408 C VAL L 29 55.739 17.571 47.538 1.00 0.00 C - ATOM 409 O VAL L 29 55.535 16.362 47.550 1.00 0.00 O -@@ -100,9 +100,9 @@ ATOM 473 HE1 TYR L 32 48.512 15.775 42.066 1.00 0.00 H - ATOM 474 HE2 TYR L 32 48.145 19.172 44.648 1.00 0.00 H - ATOM 475 HH TYR L 32 46.462 17.658 44.280 1.00 0.00 H - """ -- output, error = run_exec(self.entryPDBwithInsertioncodes, -- "babel -ipdb -ofasta") -- self.assertEqual(output.rstrip().rsplit("\n",1)[1], "VSSSY") -+ output, error = run_exec(self.entryPDBwithInsertioncodes, -+ "babel -ipdb -ofasta") -+ self.assertEqual(output.rstrip().rsplit("\n",1)[1], "VSSSY") - - if __name__ == "__main__": - testsuite = [] --- -2.9.4 - diff --git a/packages/py-astropy/package.py b/packages/py-astropy/package.py index a58d9ebe..e780f21c 100644 --- a/packages/py-astropy/package.py +++ b/packages/py-astropy/package.py @@ -17,9 +17,9 @@ class PyAstropy(PythonPackage): pypi = "astropy/astropy-4.0.1.post1.tar.gz" git = "https://github.com/astropy/astropy.git" - # begin EBRAINS (added): add version - version("5.2.2", sha256="e6a9e34716bda5945788353c63f0644721ee7e5447d16b1cdcb58c48a96b0d9c") - # end EBRAINS + license("BSD-3-Clause") + + version("6.1.0", sha256="6c3b915f10b1576190730ddce45f6245f9927dda3de6e3f692db45779708950f") version("5.1", sha256="1db1b2c7eddfc773ca66fa33bd07b25d5b9c3b5eee2b934e0ca277fa5b1b7b7e") version( "4.0.1.post1", sha256="5c304a6c1845ca426e7bc319412b0363fccb4928cb4ba59298acd1918eec44b5" @@ -29,15 +29,24 @@ class PyAstropy(PythonPackage): version("1.1.2", sha256="6f0d84cd7dfb304bb437dda666406a1d42208c16204043bc920308ff8ffdfad1") version("1.1.post1", sha256="64427ec132620aeb038e4d8df94d6c30df4cc8b1c42a6d8c5b09907a31566a21") + depends_on("c", 
type="build") # generated + variant("all", default=False, when="@3.2:", description="Enable all functionality") # Required dependencies + depends_on("python@3.10:", when="@6.1.0:", type=("build", "run")) depends_on("python@3.8:", when="@5.1:", type=("build", "run")) depends_on("py-setuptools", type="build") +# TODO: probably fix, unrealistic depends_on("py-cython@0.29.13:", type="build") + depends_on("py-cython@0.29.30", when="@5.1:6.0", type="build") + depends_on("py-cython@3.0.0", when="@6.1.0:", type="build") + # in newer pip versions --install-option does not exist depends_on("py-pip@:23.0", type="build") + depends_on("py-astropy-iers-data", when="@6:", type=("build", "run")) + depends_on("py-numpy@1.23:", when="@6.1:", type=("build", "run")) depends_on("py-numpy@1.18:", when="@5.1:", type=("build", "run")) depends_on("py-numpy@1.16:", when="@4.0:", type=("build", "run")) depends_on("py-numpy@1.13:", when="@3.1:", type=("build", "run")) @@ -45,13 +54,13 @@ class PyAstropy(PythonPackage): depends_on("py-numpy@1.9:", when="@2.0:", type=("build", "run")) depends_on("py-numpy@1.7:", when="@1.2:", type=("build", "run")) depends_on("py-numpy", type=("build", "run")) + # https://github.com/astropy/astropy/issues/16200 + depends_on("py-numpy@:1", when="@:6.0") depends_on("py-packaging@19.0:", when="@5.1:", type=("build", "run")) depends_on("py-pyyaml@3.13:", when="@5.1:", type=("build", "run")) depends_on("py-pyerfa@2.0:", when="@5.1:", type=("build", "run")) + depends_on("py-pyerfa@2.0.1.1:", when="@6.1.0:", type=("build", "run")) depends_on("py-setuptools-scm@6.2:", when="@5.1:", type="build") - # begin EBRAINS (modified) - depends_on("py-cython@0.29.30:", when="@5.1:", type="build") - # end EBRAINS depends_on("py-extension-helpers", when="@5.1:", type="build") depends_on("pkgconfig", type="build") @@ -63,8 +72,10 @@ class PyAstropy(PythonPackage): # Optional dependencies with when("+all"): + depends_on("py-scipy@1.8:", when="@6:", type=("build", "run")) 
depends_on("py-scipy@1.3:", when="@5:", type=("build", "run")) depends_on("py-scipy@0.18:", type=("build", "run")) + depends_on("py-matplotlib@3.3:", when="@6:", type=("build", "run")) depends_on("py-matplotlib@3.1:", when="@5:", type=("build", "run")) depends_on("py-matplotlib@2.1:", when="@4:", type=("build", "run")) depends_on("py-matplotlib@2.0:", type=("build", "run")) @@ -88,6 +99,8 @@ class PyAstropy(PythonPackage): depends_on("py-ipython", type=("build", "run")) depends_on("py-pytest@7:", when="@5.0.2:", type=("build", "run")) depends_on("py-pytest", type=("build", "run")) + depends_on("py-fsspec+http@2023.4:", when="@6.1:", type=("build", "run")) + depends_on("py-s3fs@2023.4:", when="@6.1:", type=("build", "run")) depends_on("py-typing-extensions@3.10.0.1:", when="@5.0.2:", type=("build", "run")) # Historical optional dependencies diff --git a/packages/py-autopep8/package.py b/packages/py-autopep8/package.py index e09fd53f..5e37dec3 100644 --- a/packages/py-autopep8/package.py +++ b/packages/py-autopep8/package.py @@ -13,6 +13,8 @@ class PyAutopep8(PythonPackage): homepage = "https://github.com/hhatto/autopep8" pypi = "autopep8/autopep8-1.2.4.tar.gz" + license("MIT") + # begin EBRAINS (added): add version version("2.0.4", sha256="2913064abd97b3419d1cc83ea71f042cb821f87e45b9c88cad5ad3c4ea87fe0c") # end EBRAINS diff --git a/packages/py-bokeh/package.py b/packages/py-bokeh/package.py index 5c92e166..f6556f83 100644 --- a/packages/py-bokeh/package.py +++ b/packages/py-bokeh/package.py @@ -9,9 +9,12 @@ from spack.package import * class PyBokeh(PythonPackage): """Statistical and novel interactive HTML plots for Python""" - homepage = "https://github.com/bokeh/bokeh" + homepage = "https://bokeh.org/" pypi = "bokeh/bokeh-0.12.2.tar.gz" + license("BSD-3-Clause") + + version("3.3.1", sha256="2a7b3702d7e9f03ef4cd801b02b7380196c70cff2773859bcb84fa565218955c") version("2.4.3", sha256="ef33801161af379665ab7a34684f2209861e3aefd5c803a21fbbb99d94874b03") version("2.4.1", 
sha256="d0410717d743a0ac251e62480e2ea860a7341bdcd1dbe01499a904f233c90512") version("2.4.0", sha256="6fa00ed8baab5cca33f4175792c309fa2536eaae7e90abee884501ba8c90fddb") @@ -20,6 +23,9 @@ class PyBokeh(PythonPackage): version("0.12.2", sha256="0a840f6267b6d342e1bd720deee30b693989538c49644142521d247c0f2e6939") depends_on("py-setuptools", type="build", when="@1.3.4:") + depends_on("py-setuptools@64:", type="build", when="@3:") + depends_on("py-setuptools-git-versioning", type="build", when="@3:") + depends_on("py-colorama", type="build", when="@3:") # begin EBRAINS (added) depends_on("npm", type=("build", "run")) @@ -29,6 +35,8 @@ class PyBokeh(PythonPackage): depends_on("python@2.7:", type=("build", "run"), when="@1.3.4:") depends_on("python@3.6:", type=("build", "run"), when="@2.3.3:") depends_on("python@3.7:", type=("build", "run"), when="@2.4.0:") + depends_on("python@3.8:", type=("build", "run"), when="@3.0.0:") + depends_on("python@3.9:", type=("build", "run"), when="@3.2.0:") depends_on("py-requests@1.2.3:", type=("build", "run"), when="@0.12.2") depends_on("py-six@1.5.2:", type=("build", "run"), when="@:1.3.4") @@ -37,11 +45,18 @@ class PyBokeh(PythonPackage): depends_on("py-jinja2@2.7:", type=("build", "run")) depends_on("py-jinja2@2.9:", type=("build", "run"), when="@2.3.3:") + depends_on("py-contourpy@1:", type=("build", "run"), when="@3:") + depends_on("py-numpy@1.7.1:", type=("build", "run")) depends_on("py-numpy@1.11.3:", type=("build", "run"), when="@2.3.3:") + depends_on("py-numpy@1.16:", type=("build", "run"), when="@3.1:") + # https://github.com/bokeh/bokeh/issues/13835 + depends_on("py-numpy@:1", when="@:3.4.0", type=("build", "run")) depends_on("py-packaging@16.8:", type=("build", "run"), when="@1.3.4:") + depends_on("py-pandas@1.2:", type=("build", "run"), when="@3:") + depends_on("pil@4.0:", type=("build", "run"), when="@1.3.4:") depends_on("pil@7.1.0:", type=("build", "run"), when="@2.3.3:") @@ -50,5 +65,7 @@ class PyBokeh(PythonPackage): 
depends_on("py-tornado@4.3:", type=("build", "run")) depends_on("py-tornado@5.1:", type=("build", "run"), when="@2.3.3:") - depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:") - depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:") \ No newline at end of file + depends_on("py-typing-extensions@3.7.4:", type=("build", "run"), when="@2.3.3:3.0.0") + depends_on("py-typing-extensions@3.10.0:", type=("build", "run"), when="@2.4.0:3.0.0") + + depends_on("py-xyzservices@2021.09.1:", type=("build", "run"), when="@3:") diff --git a/packages/py-chex/package.py b/packages/py-chex/package.py deleted file mode 100644 index 86c2f71a..00000000 --- a/packages/py-chex/package.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyChex(PythonPackage): - """Chex is a library of utilities for helping to write reliable JAX code.""" - - homepage = "https://github.com/deepmind/chex" - pypi = "chex/chex-0.1.0.tar.gz" - - # begin EBRAINS (added): bring upstream - version("0.1.7", sha256="74ed49799ac4d229881456d468136f1b19a9f9839e3de72b058824e2a4f4dedd") - # end EBRAINS - version("0.1.5", sha256="686858320f8f220c82a6c7eeb54dcdcaa4f3d7f66690dacd13a24baa1ee8299e") - version("0.1.0", sha256="9e032058f5fed2fc1d5e9bf8e12ece5910cf6a478c12d402b6d30984695f2161") - - depends_on("python@3.7:", type=("build", "run")) - depends_on("py-setuptools", type="build") - depends_on("py-absl-py@0.9.0:", type=("build", "run")) - # begin EBRAINS (added): bring upstream - depends_on("py-typing-extensions@4.2.0:", when="@0.1.6: ^python@:3.10", type=("build", "run")) - # end EBRAINS - depends_on("py-dm-tree@0.1.5:", type=("build", "run")) - depends_on("py-jax@0.1.55:", type=("build", "run")) - # begin EBRAINS (added): bring upstream - 
depends_on("py-jax@0.4.6:", when="@0.1.7:", type=("build", "run")) - # end EBRAINS - depends_on("py-jaxlib@0.1.37:", type=("build", "run")) - depends_on("py-numpy@1.18.0:", type=("build", "run")) - depends_on("py-toolz@0.9.0:", type=("build", "run")) diff --git a/packages/py-dash/package.py b/packages/py-dash/package.py deleted file mode 100644 index cca2a47d..00000000 --- a/packages/py-dash/package.py +++ /dev/null @@ -1,29 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyDash(PythonPackage): - """Python framework for building ML & data science web apps""" - - homepage = "https://dash.plotly.com/" - pypi = "dash/dash-2.16.1.tar.gz" - - version("2.16.1", sha256="b2871d6b8d4c9dfd0a64f89f22d001c93292910b41d92d9ff2bb424a28283976") - version("2.15.0", sha256="d38891337fc855d5673f75e5346354daa063c4ff45a8a6a21f25e858fcae41c2") - - depends_on("python@3.8:", type=("build", "run")) - depends_on("py-setuptools", type="build") - - depends_on("py-flask@1.0.4:3.0", type=("build", "run")) - depends_on("py-werkzeug@:3.0", type=("build", "run")) - depends_on("py-plotly@5:", type=("build", "run")) - depends_on("py-importlib-metadata", type=("build", "run")) - depends_on("py-typing-extensions@4.1.1:", type=("build", "run")) - depends_on("py-requests", type=("build", "run")) - depends_on("py-retrying", type=("build", "run")) - depends_on("py-nest-asyncio", type=("build", "run")) - diff --git a/packages/py-flit-core/package.py b/packages/py-flit-core/package.py deleted file mode 100644 index 9cd30a97..00000000 --- a/packages/py-flit-core/package.py +++ /dev/null @@ -1,40 +0,0 @@ -# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyFlitCore(PythonPackage): - """Distribution-building parts of Flit.""" - - homepage = "https://github.com/pypa/flit" - pypi = "flit-core/flit_core-3.9.0.tar.gz" - # begin EBRAINS (added): tag as build-tool - tags = ["build-tools"] - # end EBRAINS - maintainers("takluyver") - - version("3.9.0", sha256="72ad266176c4a3fcfab5f2930d76896059851240570ce9a98733b658cb786eba") - version("3.8.0", sha256="b305b30c99526df5e63d6022dd2310a0a941a187bd3884f4c8ef0418df6c39f3") - version("3.7.1", sha256="14955af340c43035dbfa96b5ee47407e377ee337f69e70f73064940d27d0a44f") - version("3.6.0", sha256="5892962ab8b8ea945835b3a288fe9dd69316f1903d5288c3f5cafdcdd04756ad") - version("3.5.1", sha256="3083720351a6cb00e0634a1ec0e26eae7b273174c3c6c03d5b597a14203b282e") - version("3.5.0", sha256="2db800d33ff41e4c6e7c1b594666cb2a11553024106655272c7245933b1d75bd") - version("3.4.0", sha256="29468fa2330969167d1f5c23eb9c0661cb6dacfcd46f361a274609a7f4197530") - version("3.3.0", sha256="b1404accffd6504b5f24eeca9ec5d3c877f828d16825348ba81515fa084bd5f0") - version("3.2.0", sha256="ff87f25c5dbc24ef30ea334074e35030e4885e4c5de3bf4e21f15746f6d99431") - version("3.1.0", sha256="22ff73be39a2b3c9e0692dfbbea3ad4a9d127e5733736a87dbb8ddcbf7309b1e") - version("3.0.0", sha256="a465052057e2d6d957e6850e9915245adedfc4fd0dd5737d0791bf3132417c2d") - version("2.3.0", sha256="a50bcd8bf5785e3a7d95434244f30ba693e794c5204ac1ee908fc07c4acdbf80") - - # pyproject.toml - depends_on("python@3.6:", when="@3.4:", type=("build", "run")) - depends_on("python@3.4:", when="@3:", type=("build", "run")) - depends_on("python@2.7,3.4:", type=("build", "run")) - - # flit_core/build_thyself.py - depends_on("py-tomli", when="@3.4:3.5", type="run") - depends_on("py-toml", when="@3.1:3.3", type="run") - depends_on("py-pytoml", when="@:3.0", type="run") diff --git a/packages/py-ipycanvas/package.py b/packages/py-ipycanvas/package.py index f9cd586a..0883611f 
100644 --- a/packages/py-ipycanvas/package.py +++ b/packages/py-ipycanvas/package.py @@ -12,6 +12,8 @@ class PyIpycanvas(PythonPackage): homepage = "https://github.com/martinRenou/ipycanvas" pypi = "ipycanvas/ipycanvas-0.9.0.tar.gz" + license("BSD-3-Clause") + # begin EBRAINS (added): add version version("0.12.0", sha256="3984339cef0c15674e347dd65ffb0cd1edc62e37869cbb5efea46f3259e976f3") # end EBRAINS diff --git a/packages/py-ipympl/package.py b/packages/py-ipympl/package.py index 2bd82b56..a22c00e6 100644 --- a/packages/py-ipympl/package.py +++ b/packages/py-ipympl/package.py @@ -1,4 +1,4 @@ -# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -13,19 +13,44 @@ class PyIpympl(PythonPackage): pypi = "ipympl/ipympl-0.8.8.tar.gz" maintainers("haralmha") - version("0.8.8", sha256="5bf5d780b07fafe7924922ac6b2f3abd22721f341e5e196b3b82737dfbd0e1c9") - - depends_on("py-setuptools@40.8:", type="build") - depends_on("py-ipython@:8", type=("build", "run")) - depends_on("py-numpy", type=("build", "run")) - depends_on("py-ipython-genutils", type=("build", "run")) - depends_on("pil", type=("build", "run")) - depends_on("py-traitlets@:5", type=("build", "run")) - depends_on("py-ipywidgets@7.6:7", type=("build", "run")) - depends_on("py-matplotlib@2:3", type=("build", "run")) - depends_on("py-jupyter-packaging@0.7", type="build") - depends_on("py-jupyterlab@3", type="build") - depends_on("yarn", type="build") + license("BSD-3-Clause") + + version("0.9.4", sha256="cfb53c5b4fcbcee6d18f095eecfc6c6c474303d5b744e72cc66e7a2804708907") + # Build failures + version( + "0.8.8", + sha256="5bf5d780b07fafe7924922ac6b2f3abd22721f341e5e196b3b82737dfbd0e1c9", + # begin EBRAINS (modified): don't deprecate to keep compatibility with jupyterlab@3 + deprecated=False, + # end 
EBRAINS + ) + + with default_args(type="build"): + with when("@0.9:"): + depends_on("py-hatchling") + depends_on("py-jupyterlab@4") + depends_on("py-hatch-nodejs-version@0.3.2:") + + # Historical dependencies + with when("@:0.8"): + depends_on("py-jupyter-packaging@0.7") + depends_on("py-jupyterlab@3") + depends_on("py-setuptools@40.8:") + depends_on("yarn") + + with default_args(type=("build", "run")): + depends_on("py-ipython@:8") + depends_on("py-ipython-genutils") + depends_on("py-ipywidgets@7.6:8", when="@0.9:") + depends_on("py-ipywidgets@7.6:7", when="@:0.8") + depends_on("py-matplotlib@3.4:3", when="@0.9:") + depends_on("py-matplotlib@2:3", when="@:0.8") + depends_on("py-numpy") + depends_on("pil") + depends_on("py-traitlets@:5") + + # Necessary for jupyter extension env vars + depends_on("py-jupyter-core") # begin EBRAINS (added): use newer typescript # (see https://github.com/DefinitelyTyped/DefinitelyTyped/issues/69932) diff --git a/packages/py-jax/package.py b/packages/py-jax/package.py deleted file mode 100644 index 5d57d7ec..00000000 --- a/packages/py-jax/package.py +++ /dev/null @@ -1,70 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyJax(PythonPackage): - """JAX is Autograd and XLA, brought together for high-performance - machine learning research. With its updated version of Autograd, - JAX can automatically differentiate native Python and NumPy - functions. It can differentiate through loops, branches, - recursion, and closures, and it can take derivatives of - derivatives of derivatives. It supports reverse-mode - differentiation (a.k.a. 
backpropagation) via grad as well as - forward-mode differentiation, and the two can be composed - arbitrarily to any order.""" - - homepage = "https://github.com/google/jax" - pypi = "jax/jax-0.2.25.tar.gz" - - # begin EBRAINS (added): bring upstream - version("0.4.13", sha256="03bfe6749dfe647f16f15f6616638adae6c4a7ca7167c75c21961ecfd3a3baaa") - # end EBRAINS - version("0.4.3", sha256="d43f08f940aa30eb339965cfb3d6bee2296537b0dc2f0c65ccae3009279529ae") - version("0.3.23", sha256="bff436e15552a82c0ebdef32737043b799e1e10124423c57a6ae6118c3a7b6cd") - version("0.2.25", sha256="822e8d1e06257eaa0fdc4c0a0686c4556e9f33647fa2a766755f984786ae7446") - - # begin EBRAINS (modified): bring upstream - depends_on("python@3.7:", type=("build", "run")) - depends_on("python@3.8:", when="@0.4:", type=("build", "run")) - depends_on("python@3.9:", when="@0.4.14:", type=("build", "run")) - depends_on("py-setuptools", type="build") - depends_on("py-numpy@1.22:", when="@0.4.14:", type=("build", "run")) - depends_on("py-numpy@1.21:", when="@0.4.9:", type=("build", "run")) - depends_on("py-numpy@1.20:", when="@0.3:", type=("build", "run")) - depends_on("py-numpy@1.18:", type=("build", "run")) - depends_on("py-opt-einsum", type=("build", "run")) - depends_on("py-scipy@1.2.1:", type=("build", "run")) - depends_on("py-scipy@1.5:", when="@0.3:", type=("build", "run")) - depends_on("py-scipy@1.7:", when="@0.4.7:", type=("build", "run")) - depends_on("py-ml-dtypes@0.2.0:", when="@0.4.14:", type=("build", "run")) - depends_on("py-ml-dtypes@0.1.0:", when="@0.4.9:", type=("build", "run")) - depends_on("py-ml-dtypes@0.0.3:", when="@0.4.7:", type=("build", "run")) - depends_on("py-importlib-metadata@4.6:", when="@0.4.11: ^python@:3.9", type="run") - # end EBRAINS - - # See _minimum_jaxlib_version in jax/version.py - # begin EBRAINS (modified): bring upstream - jax_to_jaxlib = { - "0.4.14": "0.4.14", - "0.4.13": "0.4.13", - "0.4.3": "0.4.2", - "0.3.23": "0.3.15", - "0.2.25": "0.1.69", - } - # end 
EBRAINS - - for jax, jaxlib in jax_to_jaxlib.items(): - # begin EBRAINS (modified): bring upstream - depends_on(f"py-jaxlib@{jaxlib}", when=f"@{jax}", type=("build", "run")) - # end EBRAINS - - # Historical dependencies - depends_on("py-absl-py", when="@:0.3", type=("build", "run")) - depends_on("py-typing-extensions", when="@:0.3", type=("build", "run")) - # begin EBRAINS (deleted): - # depends_on("py-etils+epath", when="@0.3", type=("build", "run")) - # end EBRAINS diff --git a/packages/py-jaxlib/package.py b/packages/py-jaxlib/package.py deleted file mode 100644 index 20b2683c..00000000 --- a/packages/py-jaxlib/package.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -import tempfile - -from spack.package import * - - -class PyJaxlib(PythonPackage, CudaPackage): - """XLA library for Jax""" - - homepage = "https://github.com/google/jax" - url = "https://github.com/google/jax/archive/refs/tags/jaxlib-v0.1.74.tar.gz" - - tmp_path = "" - buildtmp = "" - - # begin EBRAINS (added): bring upstream - version("0.4.13", sha256="45766238b57b992851763c64bc943858aebafe4cad7b3df6cde844690bc34293") - # end EBRAINS - version("0.4.3", sha256="2104735dc22be2b105e5517bd5bc6ae97f40e8e9e54928cac1585c6112a3d910") - version("0.3.22", sha256="680a6f5265ba26d5515617a95ae47244005366f879a5c321782fde60f34e6d0d") - version("0.1.74", sha256="bbc78c7a4927012dcb1b7cd135c7521f782d7dad516a2401b56d3190f81afe35") - - # begin EBRAINS (deleted): Variant with default=False is provided by CudaPackage - # variant("cuda", default=True, description="Build with CUDA") - # end EBRAINS - - # jaxlib/setup.py - # begin EBRAINS (modified): bring upstream - depends_on("python@3.9:", when="@0.4.14:", type=("build", "run")) - depends_on("python@3.8:", when="@0.4:", type=("build", "run")) - depends_on("python@3.7:", 
type=("build", "run")) - depends_on("py-setuptools", type="build") - depends_on("py-numpy@1.22:", when="@0.4.14:", type=("build", "run")) - depends_on("py-numpy@1.21:", when="@0.4.9:", type=("build", "run")) - depends_on("py-numpy@1.20:", when="@0.3:", type=("build", "run")) - depends_on("py-numpy@1.18:", type=("build", "run")) - depends_on("py-scipy@1.5:", type=("build", "run")) - depends_on("py-scipy@1.7:", when="@0.4.7:", type=("build", "run")) - depends_on("py-ml-dtypes@0.2.0:", when="@0.4.14:", type=("build", "run")) - depends_on("py-ml-dtypes@0.1.0:", when="@0.4.9:", type=("build", "run")) - depends_on("py-ml-dtypes@0.0.3:", when="@0.4.7:", type=("build", "run")) - # end EBRAINS - - # .bazelversion - depends_on("bazel@5.1.1:5.9", when="@0.3:", type="build") - # https://github.com/google/jax/issues/8440 - depends_on("bazel@4.1:4", when="@0.1", type="build") - - # README.md - # begin EBRAINS (added): bring upstream - depends_on("cuda@11.8:", when="@0.4.8:+cuda") - # end EBRAINS - depends_on("cuda@11.4:", when="@0.4:+cuda") - depends_on("cuda@11.1:", when="@0.3+cuda") - # https://github.com/google/jax/issues/12614 - depends_on("cuda@11.1:11.7.0", when="@0.1+cuda") - depends_on("cudnn@8.2:", when="@0.4:+cuda") - depends_on("cudnn@8.0.5:", when="+cuda") - - # Historical dependencies - depends_on("py-absl-py", when="@:0.3", type=("build", "run")) - depends_on("py-flatbuffers@1.12:2", when="@0.1", type=("build", "run")) - - conflicts( - "cuda_arch=none", - when="+cuda", - msg="Must specify CUDA compute capabilities of your GPU, see " - "https://developer.nvidia.com/cuda-gpus", - ) - - def patch(self): - self.tmp_path = tempfile.mkdtemp(prefix="spack") - self.buildtmp = tempfile.mkdtemp(prefix="spack") - filter_file( - "build --spawn_strategy=standalone", - f""" -# Limit CPU workers to spack jobs instead of using all HOST_CPUS. 
-build --spawn_strategy=standalone -build --local_cpu_resources={make_jobs} -""".strip(), - ".bazelrc", - string=True, - ) - filter_file( - 'f"--output_path={output_path}",', - 'f"--output_path={output_path}",' - f' "--sources_path={self.tmp_path}",' - ' "--nohome_rc",' - ' "--nosystem_rc",' - f' "--jobs={make_jobs}",', - "build/build.py", - string=True, - ) - filter_file( - "args = parser.parse_args()", - "args, junk = parser.parse_known_args()", - "build/build_wheel.py", - string=True, - ) - - def install(self, spec, prefix): - args = [] - args.append("build/build.py") - if "+cuda" in spec: - args.append("--enable_cuda") - args.append("--cuda_path={0}".format(self.spec["cuda"].prefix)) - args.append("--cudnn_path={0}".format(self.spec["cudnn"].prefix)) - capabilities = ",".join( - "{0:.1f}".format(float(i) / 10.0) for i in spec.variants["cuda_arch"].value - ) - args.append("--cuda_compute_capabilities={0}".format(capabilities)) - args.append( - "--bazel_startup_options=" - "--output_user_root={0}".format(self.wrapped_package_object.buildtmp) - ) - python(*args) - with working_dir(self.wrapped_package_object.tmp_path): - args = std_pip_args + ["--prefix=" + self.prefix, "."] - pip(*args) - remove_linked_tree(self.wrapped_package_object.tmp_path) - remove_linked_tree(self.wrapped_package_object.buildtmp) diff --git a/packages/py-numba/package.py b/packages/py-numba/package.py index 6c0ee7b2..522abe31 100644 --- a/packages/py-numba/package.py +++ b/packages/py-numba/package.py @@ -13,6 +13,11 @@ class PyNumba(PythonPackage): pypi = "numba/numba-0.35.0.tar.gz" git = "https://github.com/numba/numba.git" + skip_modules = ["numba.core.rvsdg_frontend"] + + license("BSD-2-Clause") + + version("0.58.1", sha256="487ded0633efccd9ca3a46364b40006dbdaca0f95e99b8b83e778d1195ebcbaa") version("0.57.0", sha256="2af6d81067a5bdc13960c6d2519dbabbf4d5d597cf75d640c5aeaefd48c6420a") version("0.56.4", sha256="32d9fef412c81483d7efe0ceb6cf4d3310fde8b624a9cecca00f790573ac96ee") 
version("0.56.0", sha256="87a647dd4b8fce389869ff71f117732de9a519fe07663d9a02d75724eb8e244d") @@ -22,34 +27,39 @@ class PyNumba(PythonPackage): version("0.51.1", sha256="1e765b1a41535684bf3b0465c1d0a24dcbbff6af325270c8f4dad924c0940160") version("0.50.1", sha256="89e81b51b880f9b18c82b7095beaccc6856fcf84ba29c4f0ced42e4e5748a3a7") version("0.48.0", sha256="9d21bc77e67006b5723052840c88cc59248e079a907cc68f1a1a264e1eaba017") - version("0.40.1", sha256="52d046c13bcf0de79dbfb936874b7228f141b9b8e3447cc35855e9ad3e12aa33") - depends_on("python@3.8:3.11", when="@0.57", type=("build", "run")) + depends_on("c", type="build") # generated + depends_on("cxx", type="build") # generated + + variant("tbb", default=False, description="Build with Intel Threading Building Blocks") + + depends_on("python@3.8:3.11", when="@0.57:", type=("build", "run")) depends_on("python@3.7:3.10", when="@0.55:0.56", type=("build", "run")) depends_on("python@3.7:3.9", when="@0.54", type=("build", "run")) depends_on("python@3.6:3.9", when="@0.53", type=("build", "run")) depends_on("python@3.6:3.8", when="@0.52", type=("build", "run")) depends_on("python@3.6:3.8", when="@0.48:0.51", type=("build", "run")) - depends_on("python@3.3:3.7", when="@0.40.1:0.47", type=("build", "run")) - depends_on("py-numpy@1.21:1.24", when="@0.57:", type=("build", "run")) + depends_on("py-numpy@1.22:1.26", when="@0.58.1:", type=("build", "run")) + depends_on("py-numpy@1.21:1.25", when="@0.58.0", type=("build", "run")) + depends_on("py-numpy@1.21:1.24", when="@0.57", type=("build", "run")) depends_on("py-numpy@1.18:1.23", when="@0.56.1:0.56.4", type=("build", "run")) depends_on("py-numpy@1.18:1.22", when="@0.55.2:0.56.0", type=("build", "run")) depends_on("py-numpy@1.18:1.21", when="@0.55.0:0.55.1", type=("build", "run")) depends_on("py-numpy@1.17:1.20", when="@0.54", type=("build", "run")) depends_on("py-numpy@1.15:1.20", when="@0.48:0.53", type=("build", "run")) - depends_on("py-numpy@1.10:1.20", when="@:0.47", type=("build", 
"run")) depends_on("py-setuptools", type=("build", "run")) + depends_on("py-llvmlite@0.41", when="@0.58", type=("build", "run")) depends_on("py-llvmlite@0.40", when="@0.57", type=("build", "run")) depends_on("py-llvmlite@0.39", when="@0.56", type=("build", "run")) depends_on("py-llvmlite@0.38", when="@0.55", type=("build", "run")) depends_on("py-llvmlite@0.37", when="@0.54.0", type=("build", "run")) depends_on("py-llvmlite@0.34", when="@0.51.1", type=("build", "run")) depends_on("py-llvmlite@0.33", when="@0.50.1", type=("build", "run")) - depends_on("py-llvmlite@0.31", when="@0.47,0.48", type=("build", "run")) - depends_on("py-llvmlite@0.25", when="@0.40", type=("build", "run")) - depends_on("py-llvmlite@0.20:0.25", when="@0.35.1", type=("build", "run")) + depends_on("py-llvmlite@0.31", when="@0.48", type=("build", "run")) depends_on("py-importlib-metadata", when="@0.56:^python@:3.8", type=("build", "run")) + depends_on("tbb", when="+tbb") + conflicts("~tbb", when="@:0.50") # No way to disable TBB # Version 6.0.0 of llvm had a hidden symbol which breaks numba at runtime. # See https://reviews.llvm.org/D44140 conflicts("^llvm@6.0.0") @@ -57,3 +67,7 @@ class PyNumba(PythonPackage): # begin EBRAINS (added): numba>=0.57 requires at least version 14.0.0 of LLVM conflicts("llvm@:13", when="@0.57.0:") # end EBRAINS + + def setup_build_environment(self, env): + if self.spec.satisfies("~tbb"): + env.set("NUMBA_DISABLE_TBB", "yes") diff --git a/packages/py-optax/package.py b/packages/py-optax/package.py deleted file mode 100644 index c5cf8840..00000000 --- a/packages/py-optax/package.py +++ /dev/null @@ -1,24 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - - -class PyOptax(PythonPackage): - """A gradient processing and optimization library in JAX.""" - - homepage = "https://github.com/deepmind/optax" - pypi = "optax/optax-0.1.4.tar.gz" - - version("0.1.4", sha256="fb7a0550d57a6636164a3de25986a8a19be8ff6431fcdf1225b4e05175810f22") - - depends_on("python@3.8:", type=("build", "run")) - depends_on("py-setuptools", type="build") - - depends_on("py-absl-py@0.7.1:", type=("build", "run")) - depends_on("py-chex@0.1.5:", type=("build", "run")) - depends_on("py-jax@0.1.55:", type=("build", "run")) - depends_on("py-jaxlib@0.1.37:", type=("build", "run")) - depends_on("py-numpy@1.18:", type=("build", "run")) diff --git a/packages/py-pycuda/package.py b/packages/py-pycuda/package.py index 98691d68..3c3d4926 100644 --- a/packages/py-pycuda/package.py +++ b/packages/py-pycuda/package.py @@ -15,6 +15,8 @@ class PyPycuda(PythonPackage): homepage = "https://mathema.tician.de/software/pycuda/" pypi = "pycuda/pycuda-2019.1.2.tar.gz" + license("MIT") + # begin EBRAINS (added): add version version("2023.1", sha256="175ff675f0cf10e38e9adc03ed5df3ed8d8abf7da5134c8dccec752e8a0a3e91") # end EBRAINS @@ -23,6 +25,8 @@ class PyPycuda(PythonPackage): version("2019.1.2", sha256="ada56ce98a41f9f95fe18809f38afbae473a5c62d346cfa126a2d5477f24cc8a") version("2016.1.2", sha256="a7dbdac7e2f0c0d2ad98f5f281d5a9d29d6673b3c20210e261b96e9a2d0b6e37") + depends_on("cxx", type="build") # generated + @run_before("install") def configure(self): pyver = self.spec["python"].version.up_to(2).joined diff --git a/packages/py-pyvista/package.py b/packages/py-pyvista/package.py deleted file mode 100644 index 38054a2f..00000000 --- a/packages/py-pyvista/package.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyPyvista(PythonPackage): - """Easier Pythonic interface to VTK.""" - - homepage = "https://github.com/pyvista/pyvista" - pypi = "pyvista/pyvista-0.32.1.tar.gz" - - # Requires optional trame dependency - skip_modules = ["pyvista.ext", "pyvista.jupyter", "pyvista.trame"] - - maintainers("banesullivan") - - license("MIT") - - # begin EBRAINS (added): add version - version("0.43.0", sha256="0118060d56de7f9b583405723ddd8f77f20d7d8af601ad7c0b0521c6302069cf") - # end EBRAINS - version("0.42.3", sha256="00159cf0dea05c1ecfd1695c8c6ccfcfff71b0744c9997fc0276e661dc052351") - version("0.37.0", sha256="d36a2c6d5f53f473ab6a9241669693acee7a5179394dc97595da14cc1de23141") - version("0.32.1", sha256="585ac79524e351924730aff9b7207d6c5ac4175dbb5d33f7a9a2de22ae53dbf9") - - depends_on("py-setuptools", type="build") - depends_on("py-matplotlib@3.0.1:", when="@0.39:", type=("build", "run")) - depends_on("py-numpy", type=("build", "run")) - depends_on("pil", type=("build", "run")) - depends_on("py-pooch", when="@0.37:", type=("build", "run")) - depends_on("py-scooby@0.5.1:", type=("build", "run")) - depends_on("vtk+python", type=("build", "run")) - depends_on("py-typing-extensions", when="^python@:3.7", type=("build", "run")) - - # Historical dependencies - depends_on("py-appdirs", when="@:0.36", type=("build", "run")) - depends_on("py-imageio", when="@:0.38", type=("build", "run")) - depends_on("py-meshio@4.0.3:4", when="@:0.32", type=("build", "run")) - - # '>=3.7.*' in python_requires: setuptools parser changed in v60 and errors. 
- depends_on("py-setuptools@:59", when="@:0.37", type="build") diff --git a/packages/py-pyviz-comms/package.py b/packages/py-pyviz-comms/package.py index 0e6d6c84..03ed4163 100644 --- a/packages/py-pyviz-comms/package.py +++ b/packages/py-pyviz-comms/package.py @@ -12,6 +12,8 @@ class PyPyvizComms(PythonPackage): homepage = "https://holoviz.org/" pypi = "pyviz_comms/pyviz_comms-2.2.1.tar.gz" + license("BSD-3-Clause") + # begin EBRAINS (added): add version version("2.3.2", sha256="542a10fed8242d3a9d468ed0a14d6a2537e589c3f8a7986c79c374591254d6b4") # end EBRAINS diff --git a/packages/py-ray/package.py b/packages/py-ray/package.py index 6feab9f5..82ba34c4 100644 --- a/packages/py-ray/package.py +++ b/packages/py-ray/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) + from spack.package import * @@ -12,12 +13,17 @@ class PyRay(PythonPackage): homepage = "https://github.com/ray-project/ray" url = "https://github.com/ray-project/ray/archive/ray-0.8.7.tar.gz" + license("Apache-2.0") + # begin EBRAINS (added): ECM new node-js -> new react whatever -> new py-ray version("2.4.0", sha256="b0110a84630b2f6d10cd13e8ac955875c3658373eb6cabcc77cf316de3c28066") # end EBRAINS version("2.0.1", sha256="b8b2f0a99d2ac4c001ff11c78b4521b217e2a02df95fb6270fd621412143f28b") version("0.8.7", sha256="2df328f1bcd3eeb4fa33119142ea0d669396f4ab2a3e78db90178757aa61534b") + depends_on("c", type="build") # generated + depends_on("cxx", type="build") # generated + variant("default", default=False, description="Install default extras", when="@2.0.1") # begin EBRAINS (added): ECM new node-js -> new react whatever -> new py-ray diff --git a/packages/py-sympy/package.py b/packages/py-sympy/package.py index b7f5b90e..ffdfe371 100644 --- a/packages/py-sympy/package.py +++ b/packages/py-sympy/package.py @@ -11,11 +11,15 @@ class PySympy(PythonPackage): pypi = "sympy/sympy-0.7.6.tar.gz" + license("BSD-3-Clause") + + version("1.13.0", 
sha256="3b6af8f4d008b9a1a6a4268b335b984b23835f26d1d60b0526ebc71d48a25f57") + version("1.12", sha256="ebf595c8dac3e0fdc4152c51878b498396ec7f30e7a914d6071e674d49420fb8") version("1.11.1", sha256="e32380dce63cb7c0108ed525570092fd45168bdae2faa17e528221ef72e88658") # begin EBRAINS (added): add versions version("1.10.1", sha256="5939eeffdf9e152172601463626c022a2c27e75cf6278de8d401d50c9d58787b") version("1.9", sha256="c7a880e229df96759f955d4f3970d4cabce79f60f5b18830c08b90ce77cd5fdc") - # end EBRAINS + # end EBRAINS version("1.8", sha256="1ca588a9f6ce6a323c5592f9635159c2093572826668a1022c75c75bdf0297cb") version("1.7.1", sha256="a3de9261e97535b83bb8607b0da2c7d03126650fafea2b2789657b229c246b2e") version("1.7", sha256="9104004669cda847f38cfd8cd16dd174952c537349dbae740fea5331d2b3a51b") @@ -38,4 +42,5 @@ class PySympy(PythonPackage): # pip silently replaces distutils with setuptools depends_on("py-setuptools", type="build") - depends_on("py-mpmath@0.19:", when="@1.0:", type=("build", "run")) + depends_on("py-mpmath@0.19:", when="@1.0:1.12", type=("build", "run")) + depends_on("py-mpmath@1.1.0:1.3", when="@1.13.0:", type=("build", "run")) diff --git a/packages/py-tree-math/package.py b/packages/py-tree-math/package.py deleted file mode 100644 index 69ba02f5..00000000 --- a/packages/py-tree-math/package.py +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - - -class PyTreeMath(PythonPackage): - """ - tree-math makes it easy to implement numerical algorithms that work on - JAX pytrees, such as iterative methods for optimization and equation - solving. It does so by providing a wrapper class tree_math.Vector that - defines array operations such as infix arithmetic and dot-products on - pytrees as if they were vectors. 
- """ - - homepage = "https://github.com/google/tree-math" - pypi = "tree-math/tree-math-0.1.0.tar.gz" - git = "https://github.com/google/tree-math.git" - - version('0.1.0.post', commit='0af9679125c13cc38dab5159bc8413ed79465344') - version('0.1.0', sha256='77eb8d6ba4d6cfdd2d986a6bc3fc2d1b16212f0172863a3ca509720babf75929') - - depends_on('py-setuptools', type='build') - depends_on('py-jax', type=('build', 'run')) diff --git a/packages/sbml/package.py b/packages/sbml/package.py index e1bfdcef..f54fbb6b 100644 --- a/packages/sbml/package.py +++ b/packages/sbml/package.py @@ -14,6 +14,8 @@ class Sbml(CMakePackage): homepage = "https://sbml.org" maintainers("rblake-llnl") + license("LGPL-2.1-or-later") + # begin EBRAINS (added): add version version("5.19.0", sha256="a7f0e18be78ff0e064e4cdb1cd86634d08bc33be5250db4a1878bd81eeb8b547") # end EBRAINS @@ -28,6 +30,9 @@ class Sbml(CMakePackage): version("5.10.2", sha256="83f32a143cf657672b1050f5f79d3591c418fc59570d180fb1f39b103f4e5286") version("5.10.0", sha256="2cd8b37018ce8b1df869c8c182803addbce6d451512ae25a7f527b49981f0966") + depends_on("c", type="build") # generated + depends_on("cxx", type="build") # generated + def url_for_version(self, version): url = "https://downloads.sourceforge.net/project/sbml/libsbml/{0}/stable/libSBML-{1}-core-plus-packages-src.tar.gz".format( version, version diff --git a/packages/simpletraj/package.py b/packages/simpletraj/package.py deleted file mode 100644 index be6fc871..00000000 --- a/packages/simpletraj/package.py +++ /dev/null @@ -1,32 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class Simpletraj(PythonPackage): - """Lightweight coordinate-only trajectory reader based on code from GROMACS, MDAnalysis and VMD.""" - - # Homepage and download url - homepage = "https://github.com/arose/simpletraj" - git = 'https://github.com/arose/simpletraj.git' - url = 'https://github.com/arose/simpletraj/archive/refs/tags/v0.3.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('0.3', sha256='9ee9b5f3e387f8f8eb74b11f5c5d60bab6f601d190b40e38a7b31afddc3574d0') - - # Dependencies - depends_on('python@3.8:', type=('build', 'run')) - depends_on('py-setuptools') - depends_on('py-numpy') - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import simpletraj') diff --git a/packages/sleef/package.py b/packages/sleef/package.py deleted file mode 100644 index 796219df..00000000 --- a/packages/sleef/package.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class Sleef(CMakePackage): - """SIMD Library for Evaluating Elementary Functions, vectorized libm and DFT.""" - - homepage = "https://sleef.org" - url = "https://github.com/shibatch/sleef/archive/3.2.tar.gz" - git = "https://github.com/shibatch/sleef.git" - - version("master", branch="master") - version("3.5.1_2020-12-22", commit="e0a003ee838b75d11763aa9c3ef17bf71a725bff") # py-torch@1.8: - version( - "3.5.1", - sha256="415ee9b1bcc5816989d3d4d92afd0cd3f9ee89cbd5a33eb008e69751e40438ab", - preferred=True, - ) - version( - "3.4.0_2019-07-30", commit="7f523de651585fe25cade462efccca647dcc8d02" - ) # py-torch@1.3:1.7 - version( - "3.4.0_2019-05-13", - commit="9b249c53a80343cc1a394ca961d7d5696ea76409", # py-torch@1.2 - git="https://github.com/zdevito/sleef.git", - ) - version( - "3.3.1_2018-12-09", - commit="191f655caa25526ae226cf88dd2529265176014a", # py-torch@1.1 - git="https://github.com/zdevito/sleef.git", - ) - version( - "3.2_2018-05-09", commit="6ff7a135a1e31979d1e1844a2e7171dfbd34f54f" - ) # py-torch@0.4.1:1.0 - version("3.2", sha256="3130c5966e204e6d6a3ace81e543d12b5b21f60897f1c185bfa587c1bd77bee2") - - # https://github.com/shibatch/sleef/issues/474 - conflicts("%apple-clang@15:") - - generator("ninja") - depends_on("cmake@3.4.3:", type="build") - - # # https://github.com/shibatch/sleef/issues/475 - # depends_on("fftw-api") - # depends_on("mpfr") - # depends_on("openssl") - - # # https://github.com/shibatch/sleef/issues/458 - # conflicts("^mpfr@4.2:") - - # begin EBRAINS (modified): removed "disable shared lib build" to fix py-torch - # this was fixed upstream in 8c061e51. 
- def cmake_args(self): - # https://salsa.debian.org/science-team/sleef/-/blob/master/debian/rules - return [ - self.define("BUILD_DFT", False), - self.define("BUILD_TESTS", False), - ] - # end EBRAINS diff --git a/packages/wf-biobb/package.py b/packages/wf-biobb/package.py index dbcd5b17..28ff7c41 100644 --- a/packages/wf-biobb/package.py +++ b/packages/wf-biobb/package.py @@ -16,5 +16,5 @@ class WfBiobb(BundlePackage): version("0.1") depends_on("py-plotly") - depends_on("nglview") - depends_on("simpletraj") + depends_on("py-nglview") + depends_on("py-simpletraj") -- GitLab From 282c08c57ee8954f685f9ca7d12d822892c74af6 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 19 Feb 2025 12:52:29 +0000 Subject: [PATCH 036/111] fix(arbor): align with spack.target removal --- packages/arbor/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/arbor/package.py b/packages/arbor/package.py index 0dc9d0fd..7a4a2c25 100644 --- a/packages/arbor/package.py +++ b/packages/arbor/package.py @@ -124,8 +124,8 @@ class Arbor(CMakePackage, CudaPackage): # query spack for the architecture-specific compiler flags set by its wrapper args.append("-DARB_ARCH=none") - opt_flags = self.spec.target.optimization_flags( - self.spec.compiler.name, self.spec.compiler.version + opt_flags = spack.build_environment.optimization_flags( + self.compiler, self.spec.target ) args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags) -- GitLab From c2c96d841c76f2f4d281f8f2b6336b0be15c6940 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 18 Feb 2025 16:39:12 +0100 Subject: [PATCH 037/111] fix(BSS2): support new spack's SPACK_STORE_{INCLUDE,LINK}_DIRS --- packages/build-brainscales/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/build-brainscales/package.py b/packages/build-brainscales/package.py index d8349c67..6ea4a370 100644 --- 
a/packages/build-brainscales/package.py +++ b/packages/build-brainscales/package.py @@ -140,10 +140,18 @@ class BuildBrainscales(WafPackage): # spack tries to find headers and libraries by itself (i.e. it's not # relying on the compiler to find it); we explicitly expose the # spack-provided env vars that contain include and library paths + if 'SPACK_STORE_INCLUDE_DIRS' in my_env: + for dir in reversed(get_path(my_env, "SPACK_STORE_INCLUDE_DIRS")): + env.prepend_path("C_INCLUDE_PATH", dir) + env.prepend_path("CPLUS_INCLUDE_PATH", dir) if 'SPACK_INCLUDE_DIRS' in my_env: for dir in reversed(get_path(my_env, "SPACK_INCLUDE_DIRS")): env.prepend_path("C_INCLUDE_PATH", dir) env.prepend_path("CPLUS_INCLUDE_PATH", dir) + if 'SPACK_STORE_LINK_DIRS' in my_env: + for dir in reversed(get_path(my_env, "SPACK_STORE_LINK_DIRS")): + env.prepend_path("LIBRARY_PATH", dir) + env.prepend_path("LD_LIBRARY_PATH", dir) if 'SPACK_LINK_DIRS' in my_env: for dir in reversed(get_path(my_env, "SPACK_LINK_DIRS")): env.prepend_path("LIBRARY_PATH", dir) -- GitLab From 088e000dd3d3bfba15c2fe44ae53bdcd23c92b98 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 21 Feb 2025 01:39:02 +0000 Subject: [PATCH 038/111] feat(biobb): align packages with Spack upstream --- packages/biobb-common/package.py | 38 ---------------- packages/biobb-gromacs/package.py | 37 ---------------- packages/biobb-io/package.py | 42 ------------------ packages/biobb-model/package.py | 43 ------------------- packages/biobb-structure-checking/package.py | 36 ---------------- packages/biobb-structure-utils/package.py | 43 ------------------- .../package.py | 4 +- .../package.py | 4 +- spack.yaml | 16 +++---- 9 files changed, 12 insertions(+), 251 deletions(-) delete mode 100644 packages/biobb-common/package.py delete mode 100644 packages/biobb-gromacs/package.py delete mode 100644 packages/biobb-io/package.py delete mode 100644 packages/biobb-model/package.py delete mode 100644 
packages/biobb-structure-checking/package.py delete mode 100644 packages/biobb-structure-utils/package.py rename packages/{biobb-analysis => py-biobb-analysis}/package.py (95%) rename packages/{biobb-chemistry => py-biobb-chemistry}/package.py (95%) diff --git a/packages/biobb-common/package.py b/packages/biobb-common/package.py deleted file mode 100644 index 44613737..00000000 --- a/packages/biobb-common/package.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class BiobbCommon(PythonPackage): - """Biobb_common is the base package required to use the biobb packages""" - - # Homepage and download url - homepage = "https://github.com/bioexcel/biobb_common" - git = 'https://github.com/bioexcel/biobb_common.git' - url = 'https://github.com/bioexcel/biobb_common/archive/refs/tags/v4.0.0.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('4.0.0', sha256='fff990dce42ded2af3d587567dbf5321b1498f12f24d04d62003f9869d6eb8fe') - - # Dependencies - depends_on('py-setuptools') - depends_on('python@3.8:', type=('build', 'run')) - depends_on('py-pyyaml', type=('build', 'run')) - depends_on('py-requests', type=('build', 'run')) - depends_on('py-biopython@1.78:1.80', type=('build', 'run')) - - # Custom patch to enable python 3.10.8 for this package - def patch(self): - filter_file(" python_requires='>=3.7,<=3.10',", " python_requires='>=3.7,<3.11',", "setup.py") - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import biobb_common') diff --git a/packages/biobb-gromacs/package.py b/packages/biobb-gromacs/package.py deleted file mode 100644 index 1c41dea9..00000000 --- a/packages/biobb-gromacs/package.py 
+++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class BiobbGromacs(PythonPackage): - """Biobb_gromacs is the Biobb module collection to perform molecular - dynamics simulations using Gromacs""" - - # Homepage and download url - homepage = "https://github.com/bioexcel/biobb_gromacs" - git = 'https://github.com/bioexcel/biobb_gromacs.git' - url = 'https://github.com/bioexcel/biobb_gromacs/archive/refs/tags/v4.0.0.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('4.0.0', sha256='87f9079ec8b72ff43cd1b388a06fee5b1f64bb4080ffab110941f3c6d155ef0b') - - # Dependencies - depends_on('python@3.8:', type=('build', 'run')) - depends_on('biobb-common') - depends_on('gromacs') - - # Patching to enable python 3.10 (not official, might not be stable) - def patch(self): - filter_file(" python_requires='>=3.7,<=3.10',", " python_requires='>=3.7,<3.11',", "setup.py") - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import biobb_gromacs') diff --git a/packages/biobb-io/package.py b/packages/biobb-io/package.py deleted file mode 100644 index 807bb205..00000000 --- a/packages/biobb-io/package.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class BiobbIo(PythonPackage): - """Biobb_io is the Biobb module collection to fetch data to be - consumed by the rest of the Biobb building blocks""" - - # Homepage and download url - homepage = "https://github.com/bioexcel/biobb_io" - git = 'https://github.com/bioexcel/biobb_io.git' - url = 'https://github.com/bioexcel/biobb_io/archive/refs/tags/v4.0.0.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('4.0.0', sha256='600a30f14b1a0e21f57775ba1be695e1595f5702237415fe90d7c531b5a0408a') - - # Dependencies - depends_on('biobb-common') - depends_on('python@3.8:', type=('build', 'run')) - - # Patching to enable python 3.10 (not official, might not be stable) - def patch(self): - filter_file(" python_requires='>=3.7,<3.10',", " python_requires='>=3.7,<3.11',", "setup.py") - filter_file( - "'Programming Language :: Python :: 3.9'", - "'Programming Language :: Python :: 3.9',\r\n " - "'Programming Language :: Python :: 3.10'", - "setup.py", - ) - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import biobb_io') diff --git a/packages/biobb-model/package.py b/packages/biobb-model/package.py deleted file mode 100644 index 23bfde29..00000000 --- a/packages/biobb-model/package.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class BiobbModel(PythonPackage): - """Biobb_model is the Biobb module collection to check and model 3d structures, - create mutations or reconstruct missing atoms""" - - # Homepage and download url - homepage = "https://github.com/bioexcel/biobb_model" - git = 'https://github.com/bioexcel/biobb_model.git' - url = 'https://github.com/bioexcel/biobb_model/archive/refs/tags/v4.0.0.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('4.0.0', sha256='92387725b7ccd3f40d18c1d3e30a6169ca4204ba498ac72e25c9e701b1a89d91') - - # Dependencies - depends_on('python@3.8:', type=('build', 'run')) - depends_on('biobb-common') - depends_on('biobb-structure-checking') - - # Patching to enable python 3.10 (not official, might not be stable) - def patch(self): - filter_file(" python_requires='>=3.7,<=3.10',", " python_requires='>=3.7,<3.11',", "setup.py") - filter_file( - "'Programming Language :: Python :: 3.9'", - "'Programming Language :: Python :: 3.9',\r\n " - "'Programming Language :: Python :: 3.10'", - "setup.py", - ) - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import biobb_model') diff --git a/packages/biobb-structure-checking/package.py b/packages/biobb-structure-checking/package.py deleted file mode 100644 index d298c2b8..00000000 --- a/packages/biobb-structure-checking/package.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class BiobbStructureChecking(PythonPackage): - """Biobb_structure_checking performs a checking of the quality of a - 3D structure intended to facilitate the setup of molecular dynamics - simulation of protein or nucleic acids systems""" - - # Homepage and download url - homepage = "https://github.com/bioexcel/biobb_structure_checking" - git = 'https://github.com/bioexcel/biobb_structure_checking.git' - url = 'https://github.com/bioexcel/biobb_structure_checking/archive/refs/tags/v3.12.1.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('3.12.1', sha256='ef3e6fe5f7763e534c91fac00bf873c3d88bcca18be7a63c63608dceb36f3d40') - - # Dependencies - depends_on('py-setuptools') - depends_on('python@3.8:', type=('build', 'run')) - depends_on('py-psutil', type=('build', 'run')) - depends_on('py-numpy', type=('build', 'run')) - depends_on('py-biopython@1.78:1.80', type=('build', 'run')) - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import biobb_structure_checking') diff --git a/packages/biobb-structure-utils/package.py b/packages/biobb-structure-utils/package.py deleted file mode 100644 index 063ff341..00000000 --- a/packages/biobb-structure-utils/package.py +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * - -class BiobbStructureUtils(PythonPackage): - """Biobb_structure_utils is the Biobb module collection to modify - or extract information from a PDB structure file.""" - - # Homepage and download url - homepage = "https://github.com/bioexcel/biobb_structure_utils" - git = 'https://github.com/bioexcel/biobb_structure_utils.git' - url = 'https://github.com/bioexcel/biobb_structure_utils/archive/refs/tags/v4.0.0.tar.gz' - - # Set the gitlab accounts of this package maintainers - maintainers = ['dbeltran'] - - # Versions - version('master', branch='master') - version('4.0.0', sha256='f328eee2166631e1cc514118eb41187620c358c8024431ce2a0b0e4547692c47') - - # Dependencies - depends_on('python@3.8:', type=('build', 'run')) - depends_on('biobb-common') - depends_on('biobb-structure-checking') - - # Patching to enable python 3.10 (not official, might not be stable) - def patch(self): - filter_file(" python_requires='>=3.7,<=3.10',", " python_requires='>=3.7,<3.11',", "setup.py") - filter_file( - "'Programming Language :: Python :: 3.9'", - "'Programming Language :: Python :: 3.9',\r\n " - "'Programming Language :: Python :: 3.10'", - "setup.py", - ) - - # Test - @run_after('install') - @on_package_attributes(run_tests=True) - def check_install (self): - python("-c", 'import biobb_structure_utils') diff --git a/packages/biobb-analysis/package.py b/packages/py-biobb-analysis/package.py similarity index 95% rename from packages/biobb-analysis/package.py rename to packages/py-biobb-analysis/package.py index 56472b1d..799d4c36 100644 --- a/packages/biobb-analysis/package.py +++ b/packages/py-biobb-analysis/package.py @@ -5,7 +5,7 @@ from spack import * -class BiobbAnalysis(PythonPackage): +class PyBiobbAnalysis(PythonPackage): """Biobb_analysis is the Biobb module collection to perform analysis of molecular dynamics simulations""" @@ -23,7 +23,7 @@ class BiobbAnalysis(PythonPackage): # Dependencies 
depends_on('python@3.8:', type=('build', 'run')) - depends_on('biobb-common') + depends_on('py-biobb-common') depends_on('gromacs') depends_on('ambertools') diff --git a/packages/biobb-chemistry/package.py b/packages/py-biobb-chemistry/package.py similarity index 95% rename from packages/biobb-chemistry/package.py rename to packages/py-biobb-chemistry/package.py index 292b6ce1..6ec163b6 100644 --- a/packages/biobb-chemistry/package.py +++ b/packages/py-biobb-chemistry/package.py @@ -5,7 +5,7 @@ from spack import * -class BiobbChemistry(PythonPackage): +class PyBiobbChemistry(PythonPackage): """Biobb_chemistry is the Biobb module collection to perform chemistry over molecular dynamics simulations.""" @@ -23,7 +23,7 @@ class BiobbChemistry(PythonPackage): # Dependencies depends_on('python@3.8:', type=('build', 'run')) - depends_on('biobb-common') + depends_on('py-biobb-common') depends_on('openbabel') depends_on('ambertools') depends_on('acpype') diff --git a/spack.yaml b/spack.yaml index 79fd41e2..e465a5a4 100644 --- a/spack.yaml +++ b/spack.yaml @@ -4,14 +4,14 @@ spack: specs: # EBRAINS tools - arbor@0.9.0 +python +mpi - - biobb-analysis@4.0.1 - - biobb-chemistry@4.0.0 - - biobb-common@4.0.0 - - biobb-gromacs@4.0.0 - - biobb-io@4.0.0 - - biobb-model@4.0.0 - - biobb-structure-checking@3.12.1 - - biobb-structure-utils@4.0.0 + - py-biobb-analysis@4.0.1 + - py-biobb-chemistry@4.0.0 + - py-biobb-common@4.1.0 + - py-biobb-gromacs@4.1.1 + - py-biobb-io@4.1.0 + - py-biobb-model@4.1.0 + - py-biobb-structure-checking@3.13.4 + - py-biobb-structure-utils@4.1.0 - hxtorch@9.0-a8 - nest@3.8 +sonata - neuron@8.2.3 +mpi -- GitLab From 2d3dcdba0a51eb9157e0c1d0849671c597d2587f Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 21 Feb 2025 01:54:46 +0000 Subject: [PATCH 039/111] feat(bluebrain): align packages with Spack upstream --- packages/py-bluepyefe/package.py | 32 ------------------ packages/py-bluepymm/package.py | 5 +-- 
packages/py-bluepyopt/package.py | 49 ---------------------------- packages/py-bluepyopt/pmi_rank.patch | 17 ---------- packages/py-efel/package.py | 28 ---------------- packages/py-libsonata/package.py | 1 + packages/py-morph-tool/package.py | 40 ----------------------- packages/py-morphio/package.py | 35 -------------------- packages/py-neurom/package.py | 37 --------------------- spack.yaml | 10 +++--- 10 files changed, 7 insertions(+), 247 deletions(-) delete mode 100644 packages/py-bluepyefe/package.py delete mode 100644 packages/py-bluepyopt/package.py delete mode 100644 packages/py-bluepyopt/pmi_rank.patch delete mode 100644 packages/py-efel/package.py delete mode 100644 packages/py-morph-tool/package.py delete mode 100644 packages/py-morphio/package.py delete mode 100644 packages/py-neurom/package.py diff --git a/packages/py-bluepyefe/package.py b/packages/py-bluepyefe/package.py deleted file mode 100644 index df41cbc8..00000000 --- a/packages/py-bluepyefe/package.py +++ /dev/null @@ -1,32 +0,0 @@ -# (from https://github.com/BlueBrain/spack) - -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyBluepyefe(PythonPackage): - """Blue Brain Python E-feature extraction""" - - homepage = "https://github.com/BlueBrain/BluePyEfe" - pypi = "bluepyefe/bluepyefe-0.3.13.tar.gz" - git = "https://github.com/BlueBrain/BluePyEfe.git" - - version("2.2.18", sha256="bfb50c6482433ec2ffb4b65b072d2778bd89ae50d92dd6830969222aabb30275") - - depends_on("py-setuptools", type="build") - - depends_on("py-numpy", type=("build", "run")) - depends_on("py-neo", type=("build", "run")) - depends_on("py-matplotlib", type=("build", "run")) - depends_on("py-efel", type=("build", "run")) - depends_on("py-scipy", type=("build", "run")) - depends_on("py-h5py", type=("build", "run")) - depends_on("py-igor", type=("build", "run")) - - def setup_run_environment(self, env): - env.set("NEURON_INIT_MPI", "0") - env.unset("PMI_RANK") diff --git a/packages/py-bluepymm/package.py b/packages/py-bluepymm/package.py index 5fa0d410..7b1a4c2c 100644 --- a/packages/py-bluepymm/package.py +++ b/packages/py-bluepymm/package.py @@ -14,15 +14,12 @@ class PyBluepymm(PythonPackage): homepage = "https://github.com/BlueBrain/BluePyMM" pypi = "bluepymm/bluepymm-0.7.49.tar.gz" + version("0.8.7", sha256="f0e5d4e113b19f71398d0796d5182f322c48c2ab07793ce8d0e4771a251914ab") version("0.7.65", sha256="024b009decd8d967b3b885421196d53670e3c0a6b75aaaa55559f148b0b0d7d4") depends_on("py-setuptools", type="build") depends_on("py-bluepyopt", type="run") depends_on("py-matplotlib", type="run") - # The below dependency should disappear once - # the matplotlib package is fixed - # not needed with EBRAINS python version: - # depends_on("py-backports-functools-lru-cache", type="run", when="^python@:3.3.99") depends_on("py-pandas", type="run") depends_on("py-numpy", type="run") depends_on("py-ipyparallel", type="run") diff --git a/packages/py-bluepyopt/package.py b/packages/py-bluepyopt/package.py deleted file mode 100644 index 16a10d59..00000000 
--- a/packages/py-bluepyopt/package.py +++ /dev/null @@ -1,49 +0,0 @@ -# (from https://github.com/BlueBrain/spack) - -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyBluepyopt(PythonPackage): - """Bluebrain Python Optimisation Library""" - - homepage = "https://github.com/BlueBrain/BluePyOpt" - pypi = "bluepyopt/bluepyopt-1.9.27.tar.gz" - - # NOTE : while adding new release check pmi_rank.patch compatibility - version("1.13.86", sha256="37b4abcc4a53ed5af50fa0b3bc4d0003332b7f2f2b6e4d9f0b9de8638254e753") - version("1.10.38", sha256="fb1411c6a8fbfac52d36b837225bae882fd6524acfb4d0580189312ef3c1cfcc") - version("1.9.37", sha256="4399af71de48b288832e92f0de73c431bf88d6e76e2c4ea250c3b38fb38a45a8") - version("1.9.27", sha256="4cce15b92b32311c808cae5e005b664deb6e8dc5df4ca13ea7b59252ae346522") - version("1.8.68", sha256="b9d432840aab89d4863c935d3dc604816441eba02d731422b92056cee751ca9c") - version("1.6.56", sha256="1c57c91465ca4b947fe157692e7004a3e6df02e4151e3dc77a8831382a8f1ab9") - version("1.8.68", sha256="b9d432840aab89d4863c935d3dc604816441eba02d731422b92056cee751ca9c") - version("1.9.12", sha256="7b623ab9168f460a85d952719ca5249248fc95e6f7a02658b0673b2baa0a8fc6") - - # patch required to avoid hpe-mpi linked mechanism library - patch("pmi_rank.patch", when="@1.9.27:") - - variant("neuron", default=True, description="Use BluePyOpt together with NEURON") - - depends_on("py-setuptools", type="build") - depends_on("py-numpy", type=("build", "run")) - depends_on("py-pandas", type=("build", "run")) - depends_on("py-deap", type=("build", "run")) - depends_on("py-deap@1.3.3:", type=("build","run"), when="@1.13.86:") - depends_on("py-efel", type=("build", "run")) - depends_on("py-ipyparallel", type=("build", "run")) - depends_on("py-pickleshare", type=("build", "run")) - 
depends_on("py-jinja2", type=("build", "run")) - depends_on("py-future", type=("build", "run")) - depends_on("py-pebble@4.3.10:", type=("build", "run")) - depends_on("py-scoop@0.7:", type=("build", "run"), when="@:1.9.37") - depends_on("neuron", type=("build", "run"), when="+neuron") - - def setup_run_environment(self, env): - env.unset("PMI_RANK") - env.set("NEURON_INIT_MPI", "0") - env.prepend_path("PATH", self.spec["py-ipyparallel"].prefix.bin) diff --git a/packages/py-bluepyopt/pmi_rank.patch b/packages/py-bluepyopt/pmi_rank.patch deleted file mode 100644 index 21a73849..00000000 --- a/packages/py-bluepyopt/pmi_rank.patch +++ /dev/null @@ -1,17 +0,0 @@ -diff --git a/bluepyopt/ephys/simulators.py b/bluepyopt/ephys/simulators.py -index e71ad8b..3c93237 100644 ---- a/bluepyopt/ephys/simulators.py -+++ b/bluepyopt/ephys/simulators.py -@@ -89,6 +89,12 @@ class NrnSimulator(object): - NrnSimulator._nrn_disable_banner() - self.banner_disabled = True - -+ # certain mpi libraries (hpe-mpt) use PMI_RANK env variable to initialize -+ # MPI before calling MPI_Init (which is undesirable). Unset this variable -+ # if exist to avoid issue with loading neuron and mechanism library. -+ if 'PMI_RANK' in os.environ: -+ os.environ.pop("PMI_RANK") -+ - import neuron # NOQA - - return neuron diff --git a/packages/py-efel/package.py b/packages/py-efel/package.py deleted file mode 100644 index c7b5d571..00000000 --- a/packages/py-efel/package.py +++ /dev/null @@ -1,28 +0,0 @@ -# (from https://github.com/BlueBrain/spack) - -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyEfel(PythonPackage): - """The Electrophys Feature Extract Library (eFEL) allows - neuroscientists to automatically extract features from time series data - recorded from neurons (both in vitro and in silico). 
- Examples are the action potential width and amplitude in - voltage traces recorded during whole-cell patch clamp experiments. - The user of the library provides a set of traces and selects the - features to be calculated. The library will then extract the requested - features and return the values to the user.""" - - homepage = "https://github.com/BlueBrain/eFEL" - pypi = "efel/efel-3.0.80.tar.gz" - - version("4.0.4", sha256="258c506776df609edc799338fd773e78f0f0315fd6f3e2f969478bda401a8894") - - depends_on("py-setuptools", type="build") - depends_on("py-numpy", type="run") - depends_on("py-six", type="run") diff --git a/packages/py-libsonata/package.py b/packages/py-libsonata/package.py index b8b9695a..80fe984b 100644 --- a/packages/py-libsonata/package.py +++ b/packages/py-libsonata/package.py @@ -39,3 +39,4 @@ class PyLibsonata(PythonPackage): depends_on("py-numpy@1.17:", type=("build", "run")) depends_on("py-setuptools", type="build", when="@0.1:") depends_on("py-setuptools-scm", type="build", when="@0.1:") + diff --git a/packages/py-morph-tool/package.py b/packages/py-morph-tool/package.py deleted file mode 100644 index 3c483b74..00000000 --- a/packages/py-morph-tool/package.py +++ /dev/null @@ -1,40 +0,0 @@ -# (from https://github.com/BlueBrain/spack) - -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyMorphTool(PythonPackage): - """Python morphology manipulation toolkit""" - - homepage = "https://github.com/BlueBrain/morph-tool" - git = "https://github.com/BlueBrain/morph-tool.git" - pypi = "morph-tool/morph-tool-2.9.1.tar.gz" - - version("develop", branch="master") - version("2.9.1", sha256="305e9456c8047726588b23dfa070eb95ccbe5573e9fea3e0a83dc93eacdf61dc") - version("2.9.0", sha256="c60d4010e17ddcc3f53c864c374fffee05713c8f8fd2ba4eed7706041ce1fa47") - - variant("neuron", default=False, description="Enable additional neuron support") - - depends_on("py-setuptools", type=("build", "run")) - - depends_on("py-click@6.7:", type=("build", "run")) - depends_on("py-deprecation@2.1.0:", type=("build", "run")) - depends_on("py-more-itertools@8.6.0:", type=("build", "run")) - depends_on("py-numpy@1.14:", type=("build", "run")) - depends_on("py-pandas@1.0.3:", type=("build", "run")) - depends_on("py-xmltodict@0.12:", type=("build", "run")) - - depends_on("py-plotly@4.1:", type=("build", "run")) - depends_on("py-dask@2.19:", type=("build", "run")) - depends_on("neuron+python@7.8:", type=("build", "run")) - depends_on("py-bluepyopt@1.9.37:", type=("build", "run"), when="+neuron") - - depends_on("py-neurom@3.0:3.999,develop", type=("build", "run")) - depends_on("py-morphio@3.3.6:3,develop", type=("build", "run"), when="@2.9.1:") - depends_on("py-morphio@3.0:3,develop", type=("build", "run"), when="@2.9.0") diff --git a/packages/py-morphio/package.py b/packages/py-morphio/package.py deleted file mode 100644 index a2957888..00000000 --- a/packages/py-morphio/package.py +++ /dev/null @@ -1,35 +0,0 @@ -# (from https://github.com/BlueBrain/spack) - -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyMorphio(PythonPackage): - """Python library for reading / writing morphology files""" - - homepage = "https://github.com/BlueBrain/MorphIO" - git = "https://github.com/BlueBrain/MorphIO.git" - pypi = "morphio/MorphIO-3.3.2.tar.gz" - - version("develop", branch="master", submodules=True) - version("unifurcation", branch="unifurcation", submodules=True) - - version("3.3.6", sha256="0f2e55470d92a3d89f2141ae905ee104fd16257b93dafb90682d90171de2f4e6") - version("3.3.5", sha256="9e6cfebaea32080131b2b08a4a32dfbe92b18427a3e557861e27c4131f7542ac") - version("3.3.4", sha256="b70c6884e9b835560501f798c75c9cc7eaf3162cba1d930b5a9b854bb9ea60dc") - version("3.3.3", sha256="f6d91970cfd734b2e5fb8f9239a0bfa00519fe082dd8e403e4cc204dbdf0a9fa") - version("3.3.2", sha256="fc961defbfbfb3f11360954fb3ec51373eaff25b154fa31d6b31decca6937780") - version("3.1.1", sha256="ad9f0e363f09f03c6eda54f5f3b006d204236677d2f2c9675421e0441033a503") - version("2.7.1", sha256="3f3e2229da85e874527775fce080f712b6dc287edc44b90b6de35d17b34badff") - - depends_on("py-setuptools", type="build") - depends_on("py-setuptools-scm", type="build") - - depends_on("ninja", type="build") - depends_on("cmake@3.2:", type="build") - depends_on("py-numpy@1.14.1:", type="run") - depends_on("hdf5") diff --git a/packages/py-neurom/package.py b/packages/py-neurom/package.py deleted file mode 100644 index b902984e..00000000 --- a/packages/py-neurom/package.py +++ /dev/null @@ -1,37 +0,0 @@ -# (from https://github.com/BlueBrain/spack) - -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. 
-# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyNeurom(PythonPackage): - """Python library neuron morphology analysis""" - - homepage = "https://github.com/BlueBrain/NeuroM" - git = "https://github.com/BlueBrain/NeuroM.git" - pypi = "neurom/neurom-2.2.1.tar.gz" - - version("develop", branch="master") - version("3.2.2", sha256="bc442cf5193289b893a66d5e541868f84bb120b03395b03ce2423c19729b92de") - - variant("plotly", default=False, description="Enable plotly support") - - depends_on("py-setuptools@0.42:", type=("build", "run")) - depends_on("py-setuptools-scm", type="build") - depends_on("py-wheel", type="build") - - depends_on("py-click@7.0:", type=("build", "run")) - depends_on("py-numpy@1.8.0:", type=("build", "run")) - depends_on("py-pyyaml@3.10:", type=("build", "run")) - depends_on("py-tqdm@4.8.4:", type=("build", "run")) - depends_on("py-matplotlib@3.2.1:", type=("build", "run")) - depends_on("py-scipy@1.2.0:", type=("build", "run")) - depends_on("py-plotly@3.6.0:", type=("build", "run"), when="+plotly") - depends_on("py-psutil@5.5.1:", type=("build", "run"), when="+plotly") - - depends_on("py-morphio@3.1.1:", type=("build", "run")) - depends_on("py-pandas@1.0.5:", type=("build", "run")) diff --git a/spack.yaml b/spack.yaml index e465a5a4..99226cda 100644 --- a/spack.yaml +++ b/spack.yaml @@ -16,14 +16,14 @@ spack: - nest@3.8 +sonata - neuron@8.2.3 +mpi - jaxsnn@9.0-a8 - - py-bluepyefe@2.2.18 - - py-bluepymm@0.7.65 - - py-bluepyopt@1.13.86 + - py-bluepyefe@2.3.6 + - py-bluepymm@0.8.7 + - py-bluepyopt@1.14.11 - py-bsb@4.0.0a57 - py-ebrains-drive@0.6.0 - py-ebrains-kg-core@0.9.15 - py-ebrains-validation-client@0.9.1 - - py-efel@4.0.4 + - py-efel@5.2.0 - py-elephant@1.1.0 - py-fairgraph@0.12.1 - py-frites@0.4.4 @@ -38,7 +38,7 @@ spack: - py-neo@0.13.3 - py-nestml@8.0.0 - py-netpyne@1.0.6 - - py-neurom@3.2.2 + - py-neurom@3.2.4 - py-neuror@1.6.4 - py-pynn@0.12.3 +mpi - py-pyunicore@1.1.1 -- GitLab From 
1b81fd9363c32ee85ba5ba357af8f728aadd2044 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 21 Feb 2025 02:09:48 +0000 Subject: [PATCH 040/111] fix(py-spynnaker): add missing dependency --- packages/py-spynnaker/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/py-spynnaker/package.py b/packages/py-spynnaker/package.py index 0e200cb3..836e884e 100644 --- a/packages/py-spynnaker/package.py +++ b/packages/py-spynnaker/package.py @@ -34,3 +34,4 @@ class PySpynnaker(PythonPackage): depends_on("py-lazyarray", type=("build", "run")) depends_on("py-scipy", type=("build", "run")) depends_on("py-csa", type=("build", "run")) + depends_on("py-typing-extensions", type=("build", "run")) -- GitLab From 55097008784dcc4bdc21a8cdeb930b63cc4c80d1 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 21 Feb 2025 02:23:11 +0000 Subject: [PATCH 041/111] fix(py-pytest-benchmark): add missing dependency --- packages/py-pytest-benchmark/package.py | 26 +++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 packages/py-pytest-benchmark/package.py diff --git a/packages/py-pytest-benchmark/package.py b/packages/py-pytest-benchmark/package.py new file mode 100644 index 00000000..ab6f0096 --- /dev/null +++ b/packages/py-pytest-benchmark/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyPytestBenchmark(PythonPackage): + """A pytest fixture for benchmarking code.""" + + homepage = "https://github.com/ionelmc/pytest-benchmark" + pypi = "pytest-benchmark/pytest-benchmark-3.2.3.tar.gz" + + license("BSD-2-Clause") + + version("3.2.3", sha256="ad4314d093a3089701b24c80a05121994c7765ce373478c8f4ba8d23c9ba9528") + + depends_on("python@2.7:2.8,3.5:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-pytest@3.8:", type=("build", "run")) + depends_on("py-py-cpuinfo", type=("build", "run")) + # begin EBRAINS (added) + depends_on("py-py@1.8.2:", type=("build", "run")) + # end EBRAINS -- GitLab From 79eb0bd3087e1c3ad029221934d92c9673ee3c74 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Sat, 22 Feb 2025 19:20:56 +0000 Subject: [PATCH 042/111] fix(py-ipympl): add missing dependency --- packages/py-ipympl/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/py-ipympl/package.py b/packages/py-ipympl/package.py index a22c00e6..285ca93e 100644 --- a/packages/py-ipympl/package.py +++ b/packages/py-ipympl/package.py @@ -48,6 +48,9 @@ class PyIpympl(PythonPackage): depends_on("py-numpy") depends_on("pil") depends_on("py-traitlets@:5") + # begin EBRAINS (added): add missing dependency + depends_on("py-tomli") + # end EBRAINS # Necessary for jupyter extension env vars depends_on("py-jupyter-core") -- GitLab From 0347005182782772981b63b80fbdfebbb47cdf2a Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 21 Feb 2025 02:15:18 +0000 Subject: [PATCH 043/111] fix(py-hbp-validation-client): add missing dependency --- packages/py-hbp-validation-client/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/py-hbp-validation-client/package.py b/packages/py-hbp-validation-client/package.py index fa315a01..59272cac 100644 --- 
a/packages/py-hbp-validation-client/package.py +++ b/packages/py-hbp-validation-client/package.py @@ -33,4 +33,5 @@ class PyHbpValidationClient(PythonPackage): depends_on('py-nameparser@1.1.1:', type=('build', 'run'), when='@:0.8.4') depends_on('py-ebrains-drive@0.4.0:', type=('build', 'run'), when='@:0.8.4') depends_on('py-simplejson@3.17.2:', type=('build', 'run'), when='@:0.8.4') + depends_on('py-sciunit', type=('build', 'run')) depends_on('py-ebrains-validation-client@0.9.1:', type=('build', 'run'), when='@0.9.0:') -- GitLab From 2661d9425666e266e74065c8cfe9a058e7d0f6f9 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Sat, 22 Feb 2025 23:30:36 +0000 Subject: [PATCH 044/111] fix(py-sciunit): add version 0.2.8 for compatibility with Python 3.11 --- packages/py-sciunit/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/py-sciunit/package.py b/packages/py-sciunit/package.py index 8e6479a6..5684040b 100644 --- a/packages/py-sciunit/package.py +++ b/packages/py-sciunit/package.py @@ -14,6 +14,7 @@ class PySciunit(PythonPackage): pypi = "sciunit/sciunit-0.2.5.1.tar.gz" git = "https://github.com/scidash/sciunit.git" + version('0.2.8', sha256='85b7288200e55a3270d2346cc357c19d2c812140a9398eda52152a6cb5a281f5') version('0.2.5.1', sha256='6148704f92a29c9d6de65ca9455b03ebe1f05101dae5e706aee2186e5a09fab3') depends_on('python@3.6.9:') -- GitLab From 29ae30e788a6639e2978117e4e4021915fdc6d5b Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Sat, 22 Feb 2025 23:32:04 +0000 Subject: [PATCH 045/111] fix(py-cherrypy): add version 18.10.0 for compatibility with Python 3.11 --- packages/py-cherrypy/package.py | 29 ++++++++++++++++++++++ packages/py-jaraco-collections/package.py | 26 ++++++++++++++++++++ packages/py-jaraco-context/package.py | 24 ++++++++++++++++++ packages/py-jaraco-text/package.py | 30 +++++++++++++++++++++++ 4 files changed, 109 insertions(+) create mode 100644 
packages/py-cherrypy/package.py create mode 100644 packages/py-jaraco-collections/package.py create mode 100644 packages/py-jaraco-context/package.py create mode 100644 packages/py-jaraco-text/package.py diff --git a/packages/py-cherrypy/package.py b/packages/py-cherrypy/package.py new file mode 100644 index 00000000..635e1da7 --- /dev/null +++ b/packages/py-cherrypy/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyCherrypy(PythonPackage): + """CherryPy is a pythonic, object-oriented HTTP framework.""" + + homepage = "https://cherrypy.readthedocs.io/en/latest/" + pypi = "CherryPy/cherrypy-18.10.0.tar.gz" + + license("BSD-3-Clause") + + # begin EBRAINS(added): new version + version("18.10.0", sha256="6c70e78ee11300e8b21c0767c542ae6b102a49cac5cfd4e3e313d7bb907c5891") + # end EBRAINS + version("18.1.1", sha256="6585c19b5e4faffa3613b5bf02c6a27dcc4c69a30d302aba819639a2af6fa48b") + + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-more-itertools", type=("build", "run")) + depends_on("py-zc-lockfile", type=("build", "run")) + depends_on("py-cheroot@6.2.4:", type=("build", "run")) + depends_on("py-portend@2.1.1:", type=("build", "run")) + depends_on("py-jaraco-collections", type=("build", "run")) + depends_on("python@3.5:", when="@18.0.0:", type=("build", "run")) diff --git a/packages/py-jaraco-collections/package.py b/packages/py-jaraco-collections/package.py new file mode 100644 index 00000000..df6f5542 --- /dev/null +++ b/packages/py-jaraco-collections/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyJaracoCollections(PythonPackage): + """Collection objects similar to those in stdlib by jaraco""" + + homepage = "https://github.com/jaraco/jaraco.collections" + pypi = "jaraco.collections/jaraco.collections-5.1.0.tar.gz" + + license("MIT") + + version("5.0.1", sha256="808631b174b84a4e2a592490d62f62dfc15d8047a0f715726098dc43b81a6cfa") + version("5.0.0", sha256="1680e8d09f295f625c7ba926880175a26fdbe7092b4c76d198e30476b21cfe68") + version("4.3.0", sha256="74ffc23fccfee4de0a2ebf556a33675b6a3c003d6335947d3122a0bc8822c8e4") + + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-setuptools@56:", type="build") + depends_on("py-setuptools-scm@3.4.1: +toml", type="build") + depends_on("py-jaraco-text", type=("build", "run")) + diff --git a/packages/py-jaraco-context/package.py b/packages/py-jaraco-context/package.py new file mode 100644 index 00000000..97655be7 --- /dev/null +++ b/packages/py-jaraco-context/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyJaracoContext(PythonPackage): + """Useful decorators and context managers""" + + homepage = "https://github.com/jaraco/jaraco.context" + pypi = "jaraco.context/jaraco.context-5.1.0.tar.gz" + + license("MIT") + + version("5.1.0", sha256="24ec1f739aec2c5766c68027ccc70d91d7b0cb931699442f5c7ed93515b955e7") + version("5.0.0", sha256="e0e3a7e5ce2dc17daf5f7a0e9387eebb8f352514fd43418ced34bddc6063c34f") + version("4.3.0", sha256="4dad2404540b936a20acedec53355bdaea223acb88fd329fa6de9261c941566e") + + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-setuptools@56:", type="build") + depends_on("py-setuptools-scm@3.4.1: +toml", type="build") diff --git a/packages/py-jaraco-text/package.py b/packages/py-jaraco-text/package.py new file mode 100644 index 00000000..0789df05 --- /dev/null +++ b/packages/py-jaraco-text/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyJaracoText(PythonPackage): + """Module for text manipulation""" + + homepage = "https://github.com/jaraco/jaraco.text" + pypi = "jaraco.text/jaraco.text-3.12.0.tar.gz" + + license("MIT") + + version("3.12.0", sha256="389e25c8d4b32e9715bf530596fab0f5cd3aa47296e43969392e18a541af592c") + version("3.11.1", sha256="333a5df2148f7139718607cdf352fe1d95162971a7299c380dcc24dab0168980") + version("3.11.0", sha256="0ddb589595fe176ea8179c801ca4ece2be0aa71f377b91f3ca65b4d741948351") + + depends_on("python@3.8:", type=("build", "run")) + + depends_on("py-setuptools@56:", type="build") + depends_on("py-setuptools-scm@3.4.1: +toml", type="build") + + depends_on("py-jaraco-functools", type=("build", "run")) + depends_on("py-jaraco-context@4.1:", type=("build", "run")) + depends_on("py-importlib-resources@1.3:", when="^python@:3.9", type=("build", "run")) + depends_on("py-inflect", type=("build", "run")) + depends_on("py-more-itertools", type=("build", "run")) -- GitLab From fd473887964d52ecdddb0f51a2eb5ef9de6b2949 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 19 Feb 2025 13:01:09 +0000 Subject: [PATCH 046/111] fix: add type=test to all run-only dependencies of top-level packages --- packages/neuron/package.py | 2 +- packages/py-bluepymm/package.py | 20 ++++++++++---------- packages/py-tvb-framework/package.py | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/neuron/package.py b/packages/neuron/package.py index a87d6d75..a429e035 100644 --- a/packages/neuron/package.py +++ b/packages/neuron/package.py @@ -51,7 +51,7 @@ class Neuron(CMakePackage): depends_on("py-mpi4py", when="+mpi+python+tests") depends_on("readline") depends_on("caliper", when="+caliper") - depends_on("py-numpy", type="run") + depends_on("py-numpy", type=("run","test")) extends('python', when='+python') diff --git a/packages/py-bluepymm/package.py 
b/packages/py-bluepymm/package.py index 7b1a4c2c..74ed5759 100644 --- a/packages/py-bluepymm/package.py +++ b/packages/py-bluepymm/package.py @@ -18,16 +18,16 @@ class PyBluepymm(PythonPackage): version("0.7.65", sha256="024b009decd8d967b3b885421196d53670e3c0a6b75aaaa55559f148b0b0d7d4") depends_on("py-setuptools", type="build") - depends_on("py-bluepyopt", type="run") - depends_on("py-matplotlib", type="run") - depends_on("py-pandas", type="run") - depends_on("py-numpy", type="run") - depends_on("py-ipyparallel", type="run") - depends_on("py-lxml", type="run") - depends_on("py-sh", type="run") - depends_on("neuron", type="run") - depends_on("py-h5py", type="run") - depends_on("py-pyyaml", type="run") + depends_on("py-bluepyopt", type=("run","test")) + depends_on("py-matplotlib", type=("run","test")) + depends_on("py-pandas", type=("run","test")) + depends_on("py-numpy", type=("run","test")) + depends_on("py-ipyparallel", type=("run","test")) + depends_on("py-lxml", type=("run","test")) + depends_on("py-sh", type=("run","test")) + depends_on("neuron", type=("run","test")) + depends_on("py-h5py", type=("run","test")) + depends_on("py-pyyaml", type=("run","test")) def setup_run_environment(self, env): env.unset("PMI_RANK") diff --git a/packages/py-tvb-framework/package.py b/packages/py-tvb-framework/package.py index 764ba19a..fb1ebc2f 100644 --- a/packages/py-tvb-framework/package.py +++ b/packages/py-tvb-framework/package.py @@ -59,7 +59,7 @@ class PyTvbFramework(PythonPackage): depends_on('py-six', type=('build', 'run')) depends_on('py-sqlalchemy', type=('build', 'run')) depends_on('py-tvb-data', type=('run', 'test')) - depends_on('py-tvb-gdist', type='run') + depends_on('py-tvb-gdist', type=('run', 'test')) depends_on('py-tvb-library', type=('build', 'run')) depends_on('py-tvb-storage', type=('build', 'run')) depends_on('py-werkzeug', type=('build', 'run')) -- GitLab From 63c4f74e103ec2dea78a32892ac0d963f09750cd Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki 
<emathioulaki@athenarc.gr> Date: Mon, 17 Feb 2025 18:59:10 +0200 Subject: [PATCH 047/111] feat(ebrainslab): update gcc, python and R version --- site-config/ebrainslab/packages.yaml | 6 +++--- site-config/ebrainslab/spack.yaml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/site-config/ebrainslab/packages.yaml b/site-config/ebrainslab/packages.yaml index 3759479f..44e6777d 100644 --- a/site-config/ebrainslab/packages.yaml +++ b/site-config/ebrainslab/packages.yaml @@ -2,13 +2,13 @@ packages: all: # collab-specific settings target: [x86_64] - compiler: [gcc@10.3.0] + compiler: [gcc@13] python: # collab-specific settings - require: "@3.8.11" + require: "@3.11" r: # EM: +X fixes build for collab - require: "@4.3.0+X" + require: "@4.3.3+X" py-torch: # ECM: No cuda nor rocm in collab VMs. # I did not try to minimize the specification any further. diff --git a/site-config/ebrainslab/spack.yaml b/site-config/ebrainslab/spack.yaml index ce36e8bb..5e2f9363 100644 --- a/site-config/ebrainslab/spack.yaml +++ b/site-config/ebrainslab/spack.yaml @@ -7,7 +7,7 @@ spack: - py-notebook - r-irkernel # "collab"-specific constraint to match ("jupyterlab_widgets") in the base image - - py-ipywidgets@:7.7 + - py-ipywidgets # Collab utils - clb-nb-utils@0.1.0 -- GitLab From 67e211f3b9690d31d57392b3847af7bfccbe2f12 Mon Sep 17 00:00:00 2001 From: Moritz Kern <212-moritzkern@users.noreply.gitlab.ebrains.eu> Date: Sun, 23 Feb 2025 04:29:51 +0100 Subject: [PATCH 048/111] feat(py-elephant): add version 1.1.1 --- packages/py-elephant/package.py | 11 +++++++---- spack.yaml | 2 +- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/packages/py-elephant/package.py b/packages/py-elephant/package.py index 1ceebcfb..5b3a984d 100644 --- a/packages/py-elephant/package.py +++ b/packages/py-elephant/package.py @@ -16,6 +16,8 @@ class PyElephant(PythonPackage, CudaPackage): maintainers = ["moritzkern"] version('develop', branch='master') + + version("1.1.1", 
sha256="c08b89358b52e826bd081ee5a530728fe487c45f5d0539ec97ebdaed9c106e89") version("1.1.0", sha256="4085a8fcac3ab855f2585d017d17a1589c74adfbd930f7a1e012f2f5bd994e71") version("1.0.0", sha256="b1471228821a5b8f3a3137f9facc1a7f2dc355b8e3300490bdc05f0466b80b27") version("0.14.0", sha256="02ce3b2a8d08dc19828f95384551339ea0946bc405c1db9aace54135417c2b0f") @@ -61,18 +63,20 @@ class PyElephant(PythonPackage, CudaPackage): depends_on("py-statsmodels@0.12.1:", type=("build", "run"), when="+extras") depends_on("py-jinja2@2.11.2:", type=("build", "run"), when="+extras") depends_on("py-neo@0.10.0:", type=("build", "run"), when="@0.11.0:1.0.0") + depends_on("py-neo@0.13.1:", type=("build", "run"), when="@1.1.1:") depends_on("py-neo@0.13.0:", type=("build", "run"), when="@0.11.0:1.1.0") depends_on("py-neo@0.9.0", type=("build", "run"), when="@0.9.0:0.10.0") depends_on("py-neo@0.8.0", type=("build", "run"), when="@0.6.4:0.8.0") depends_on("py-numpy@1.18.1:1.23.5", type=("build", "run"), when="@0.6.4:0.11.2") - depends_on("py-numpy@1.18.1:", type=("build", "run"), when="@0.12.0:") + depends_on("py-numpy@1.18.1:1.26.4", type=("build", "run"), when="@0.12.0:") depends_on("py-numpy@1.18.1:", type=("build", "run"), when="@develop") depends_on("py-quantities@0.12.1:0.13.0", type=("build", "run"), when="@0.6.4:0.11.2") depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@develop") depends_on("py-quantities@0.14.1:", type=("build", "run"), when="@0.12.0:") depends_on("py-scikit-learn", type=("build", "run"), when="@0.3:") depends_on("py-scipy@1.5.4:", type=("build", "run"), when="@0.6.4:1.0.0") - depends_on("py-scipy@1.10.0:", type=("build", "run"), when="@1.1.0:") + depends_on("py-scipy@1.10.0:", type=("build", "run"), when="@1.1.0") + depends_on("py-scipy@1.14.0:", type=("build", "run"), when="@1.1.1:") depends_on("py-six@1.10.0:", type=("build", "run"), when="@0.6.4:") depends_on("py-tqdm", type=("build", "run"), when="@0.6.4:") depends_on("py-pycuda", type=("build", 
"run"), when="@0.10.0:+cuda") @@ -86,7 +90,6 @@ class PyElephant(PythonPackage, CudaPackage): # skip some tests that seem to fail on HPC deployments, # see https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds/-/issues/38 # test_WPLI_ground_truth_consistency_real_LFP_dataset, test_multitaper_cohere_perfect_cohere - # skip the following due to issue with neo > 0.13.0 https://github.com/NeuralEnsemble/elephant/pull/634 # ECM (2025-02-05): also disable "test_parallel" test due to some test hang, cf. ESD issue 86 # https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds/-/issues/86 - pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_neo_tools and not test_statistics and not test_trials and not test_parallel') + pytest('-k', 'not test_WPLI_ground_truth_consistency_real_LFP_dataset and not test_multitaper_cohere_perfect_cohere and not test_parallel') diff --git a/spack.yaml b/spack.yaml index 99226cda..0c13eca8 100644 --- a/spack.yaml +++ b/spack.yaml @@ -24,7 +24,7 @@ spack: - py-ebrains-kg-core@0.9.15 - py-ebrains-validation-client@0.9.1 - py-efel@5.2.0 - - py-elephant@1.1.0 + - py-elephant@1.1.1 - py-fairgraph@0.12.1 - py-frites@0.4.4 - py-hbp-archive@1.1.1 -- GitLab From 435766da6f1a26b7664c6a9963bf9f676da481d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 3 Sep 2024 11:05:32 +0200 Subject: [PATCH 049/111] feat: adds arbor@0.10.0 (and incorporate upstream changes) --- packages/arbor/package.py | 46 +++++++++++++++++++++++++++++---------- 1 file changed, 35 insertions(+), 11 deletions(-) diff --git a/packages/arbor/package.py b/packages/arbor/package.py index 7a4a2c25..c45fa47d 100644 --- a/packages/arbor/package.py +++ b/packages/arbor/package.py @@ -15,8 +15,13 @@ class Arbor(CMakePackage, CudaPackage): url = "https://github.com/arbor-sim/arbor/releases/download/v0.9.0/arbor-v0.9.0-full.tar.gz" 
maintainers = ["thorstenhater", "brenthuisman", "haampie"] - version("master", branch="master") - version("develop") + version("master", branch="master", submodules=True) + version("develop", branch="master", submodules=True) + version( + "0.10.0", + sha256="72966b7a2f45ce259b8ba167ca3e4f5ab9f212136a300267aaac0c04ed3fe3fc", + url="https://github.com/arbor-sim/arbor/releases/download/v0.10.1/arbor-v0.10.0-full.tar.gz", + ) version( "0.9.0", sha256="5f9740955c821aca81e23298c17ad64f33f635756ad9b4a0c1444710f564306a", @@ -75,36 +80,50 @@ class Arbor(CMakePackage, CudaPackage): conflicts("%cce@:9.1") conflicts("%intel") + # begin EBRAINS (modified: added run dep) depends_on("cmake@3.19:", type=("build", "run")) + # end EBRAINS # misc dependencies depends_on("fmt@7.1:", when="@0.5.3:") # required by the modcc compiler depends_on("fmt@9.1:", when="@0.7.1:") - depends_on("googletest@1.12.1", when="@0.7.1:") + # begin EBRAINS (modified: relaxed (upstream gave no info about update)) + # upstream adds: depends_on("fmt@10.1:", when="@0.9.1:") + depends_on("googletest@1.12.1:", when="@0.7.1:") depends_on("pugixml@1.11:", when="@0.7.1:") - depends_on("nlohmann-json@3.11.2") + # upstream adds: depends_on("pugixml@1.13:", when="@0.9.1:") + depends_on("nlohmann-json@3.11.2:") depends_on("random123") + #upstream adds: depends_on("random123@1.14.0:", when="@0.10:") + # end EBRAINS (modified) with when("+cuda"): depends_on("cuda@10:") depends_on("cuda@11:", when="@0.7.1:") + depends_on("cuda@12:", when="@0.9.1:") # mpi + # begin EBRAINS (modified: added run dep) depends_on("mpi", when="+mpi", type=("build", "run")) + # end EBRAINS (modified) depends_on("py-mpi4py", when="+mpi+python", type=("build", "run")) # python (bindings) - extends("python", when="+python") - depends_on("python@3.7:", when="+python", type=("build", "run")) - depends_on("py-numpy", when="+python", type=("build", "run")) with when("+python"): + extends("python") + depends_on("python@3.7:", type=("build", "run")) + 
depends_on("python@3.9:", when="@0.9.1:", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) depends_on("py-pybind11@2.6:", type="build") depends_on("py-pybind11@2.8.1:", when="@0.5.3:", type="build") depends_on("py-pybind11@2.10.1:", when="@0.7.1:", type="build") + depends_on("py-pandas", type="test") + depends_on("py-seaborn", type="test") # sphinx based documentation - depends_on("python@3.7:", when="+doc", type="build") - depends_on("py-sphinx", when="+doc", type="build") - depends_on("py-svgwrite", when="+doc", type="build") + with when("+doc"): + depends_on("python@3.10:", type="build") + depends_on("py-sphinx", type="build") + depends_on("py-svgwrite", type="build") @property def build_targets(self): @@ -127,7 +146,11 @@ class Arbor(CMakePackage, CudaPackage): opt_flags = spack.build_environment.optimization_flags( self.compiler, self.spec.target ) - args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags) + # Might return nothing + if opt_flags: + args.append("-DARB_CXX_FLAGS_TARGET=" + opt_flags) + # Needed, spack has no units package + args.append("-DARB_USE_BUNDLED_UNITS=ON") return args @@ -135,3 +158,4 @@ class Arbor(CMakePackage, CudaPackage): @on_package_attributes(run_tests=True) def install_test(self): python("-c", "import arbor") + python("python/example/single_cell_model.py") -- GitLab From 22896f58a404a2745c0721bf0c64914b2c149dde Mon Sep 17 00:00:00 2001 From: Han Lu <ha.lu@fz-juelich.de> Date: Fri, 27 Sep 2024 13:55:32 +0200 Subject: [PATCH 050/111] chore(arbor): update maintainers list --- packages/arbor/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/arbor/package.py b/packages/arbor/package.py index c45fa47d..8cf2887b 100644 --- a/packages/arbor/package.py +++ b/packages/arbor/package.py @@ -13,7 +13,7 @@ class Arbor(CMakePackage, CudaPackage): homepage = "https://arbor-sim.org" git = "https://github.com/arbor-sim/arbor.git" url = 
"https://github.com/arbor-sim/arbor/releases/download/v0.9.0/arbor-v0.9.0-full.tar.gz" - maintainers = ["thorstenhater", "brenthuisman", "haampie"] + maintainers = ("thorstenhater", "ErbB4", "haampie") version("master", branch="master", submodules=True) version("develop", branch="master", submodules=True) -- GitLab From c09daae9697f9022e4f1d0f41f20c96075ec2af0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 3 Sep 2024 11:06:48 +0200 Subject: [PATCH 051/111] feat(ESD): update to arbor@0.10.0 --- spack.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spack.yaml b/spack.yaml index 0c13eca8..055df087 100644 --- a/spack.yaml +++ b/spack.yaml @@ -3,7 +3,7 @@ spack: - site-config/$SYSTEMNAME specs: # EBRAINS tools - - arbor@0.9.0 +python +mpi + - arbor@0.10.0 +python +mpi - py-biobb-analysis@4.0.1 - py-biobb-chemistry@4.0.0 - py-biobb-common@4.1.0 -- GitLab From 885811dc5558fc1e5ba4f80a1ab90bb4beef8cae Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 03:57:47 +0100 Subject: [PATCH 052/111] chore(CI): use spack config to set upstream --- install_spack_env.sh | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index faf7ef8b..e5151a15 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -46,19 +46,14 @@ if [ ! 
-d ${CI_SPACK_ROOT} ]; then SPACK_ROOT_EXISTED=0 fi -if [[ $UPSTREAM_INSTANCE ]] -then - UPSTREAM_PREFIX=$(find $UPSTREAM_INSTANCE/spack/opt/spack/ -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {}) - cat <<EOF > ${CI_SPACK_ROOT}/etc/spack/defaults/upstreams.yaml -upstreams: - upstream-spack-instance: - install_tree: $UPSTREAM_PREFIX -EOF -fi - # activate Spack source ${CI_SPACK_ROOT}/share/spack/setup-env.sh +if [[ $UPSTREAM_INSTANCE ]]; then + UPSTREAM_PREFIX=$(find $UPSTREAM_INSTANCE/spack/opt/spack/ -type d -name ".spack-db" 2>/dev/null | xargs -I {} dirname {}) + spack config add upstreams:upstream-spack-instance:install_tree:$UPSTREAM_PREFIX +fi + if [ "${SPACK_ROOT_EXISTED}" -eq 0 ]; then # for caching purposes it's nice if we can relocate into long paths, but we # can't do that for existing installations -> else path -- GitLab From 011f0a17fa694082bf406e5a0fdb4dafc238a7bd Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 04:06:13 +0100 Subject: [PATCH 053/111] feat(CI): disable local configuration and cache directories --- install_spack_env.sh | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index e5151a15..8de4f7df 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -21,10 +21,9 @@ export OCI_CACHE_PREFIX=$7 # make sure spack uses the symlinked folder as path export CI_SPACK_ROOT=${INSTALLATION_ROOT}/spack -# specify location of .spack dir (by default in ~) -# this is where cache and configuration settings are stored -export SPACK_USER_CACHE_PATH=${CI_SPACK_ROOT}/.spack -export SPACK_USER_CONFIG_PATH=${CI_SPACK_ROOT}/.spack +# disable local configuration and cache directories +export SPACK_DISABLE_LOCAL_CONFIG=true +export SPACK_USER_CACHE_PATH=/tmp/spack # define SYSTEMNAME variable in sites where it's not already defined export SYSTEMNAME=${SYSTEMNAME:-${HPC_SYSTEM:-$BSC_MACHINE}} -- GitLab From 
aa3fa88d439c43e245eef75251c01a3d2ea14cd0 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 03:04:47 +0200 Subject: [PATCH 054/111] fix(CI): don't use hardcoded prefix path for log collection --- .gitlab-ci.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d76a5319..858211ed 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -197,9 +197,9 @@ build-spack-env-on-runner: after_script: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job - - shopt -s globstar - - PKG_DIR=$CI_PROJECT_DIR/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0 - - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi + - . $CI_PROJECT_DIR/spack/share/spack/setup-env.sh + - cd $(spack-python -c "print(spack.store.parse_install_tree(spack.config.get('config'))[0])") + - find . -mindepth 4 -maxdepth 4 \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \; # for not succesfully installed packages: also keep the spack logs for any packages that failed - if cd /tmp/$(whoami)/spack-stage/; then find . 
-maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi # - if [ -d /tmp/spack_tests ]; then mv /tmp/spack_tests $CI_PROJECT_DIR; fi @@ -250,7 +250,7 @@ sync-esd-image: after_script: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job - - PKG_DIR=${SANDBOX_ROOT}/${INSTALLATION_ROOT}/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0 + - PKG_DIR=${SANDBOX_ROOT}/${INSTALLATION_ROOT}/spack/opt/spack/**/linux-*/gcc-13.3.0 - if cd $PKG_DIR; then find . \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi # for not succesfully installed packages: also keep the spack logs for any packages that failed - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi @@ -296,9 +296,9 @@ sync-gitlab-spack-instance: - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed # for succesfully installed packages: keep the spack logs for any package modified during this CI job # (we use repo.yaml, that is modified at each start of the pipeline, as a reference file) - - shopt -s globstar - - PKG_DIR=$SPACK_PATH_GITLAB/spack/opt/spack/**/linux-ubuntu20.04-x86_64/gcc-10.3.0 - - if cd $PKG_DIR; then find . -newer $SPACK_REPO_PATH/repo.yaml \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \;; fi + - . $SPACK_PATH_GITLAB/spack/share/spack/setup-env.sh + - cd $(spack-python -c "print(spack.store.parse_install_tree(spack.config.get('config'))[0])") + - find . 
-mindepth 4 -maxdepth 4 -newer $SPACK_REPO_PATH/repo.yaml \( -name ".spack" -o -name ".build" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/installed \; # for not succesfully installed packages: also keep the spack logs for any packages that failed - if cd /tmp/$(whoami)/spack-stage/; then find . -maxdepth 2 \( -name "*.txt" -o -name ".install_time_tests" \) -exec cp -r --parents "{}" $CI_PROJECT_DIR/spack_logs/not_installed \;; fi artifacts: -- GitLab From cded6c4b5022f507f491a13def2ec1a8c11e6fab Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 03:08:04 +0200 Subject: [PATCH 055/111] fix(py-viziphant): relax python dependency version range --- packages/py-viziphant/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/py-viziphant/package.py b/packages/py-viziphant/package.py index 439d9051..22281cb4 100644 --- a/packages/py-viziphant/package.py +++ b/packages/py-viziphant/package.py @@ -20,7 +20,7 @@ class PyViziphant(PythonPackage): version('0.1.0', sha256='8fd56ec8633f799396dc33fbace95d2553bedb17f680a8c0e97f43b3a629bf6c') depends_on('py-setuptools', type='build') - depends_on('python@3.7:3.10', type=('build', 'run')) + depends_on('python@3.7:3.11', type=('build', 'run')) depends_on('py-neo@0.9.0:', type=('build', 'run')) depends_on('py-elephant@0.9.0:', type=('build', 'run')) depends_on('py-numpy@1.18.1:', type=('build', 'run')) -- GitLab From 5a2751d09eab350e4d9ff2173b376c8907b7811f Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 26 Feb 2025 09:31:11 +0000 Subject: [PATCH 056/111] feat(py-llvmlite, py-numba): add new versions from spack upstream develop (https://github.com/spack/spack/commit/8677bb4d43e84591b00c3293381a169ac3ecf299) --- packages/py-llvmlite/package.py | 86 +++++++++++++++++++++++++++++++++ packages/py-numba/package.py | 43 ++++++++++++----- 2 files changed, 118 insertions(+), 11 deletions(-) create mode 100644 
packages/py-llvmlite/package.py diff --git a/packages/py-llvmlite/package.py b/packages/py-llvmlite/package.py new file mode 100644 index 00000000..a4389d9c --- /dev/null +++ b/packages/py-llvmlite/package.py @@ -0,0 +1,86 @@ +# Copyright Spack Project Developers. See COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyLlvmlite(PythonPackage): + """A lightweight LLVM python binding for writing JIT compilers""" + + homepage = "https://llvmlite.readthedocs.io/en/latest/index.html" + pypi = "llvmlite/llvmlite-0.23.0.tar.gz" + git = "https://github.com/numba/llvmlite.git" + + license("BSD-2-Clause") + + version("0.44.0", sha256="07667d66a5d150abed9157ab6c0b9393c9356f229784a4385c02f99e94fc94d4") + version("0.43.0", sha256="ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5") + version("0.42.0", sha256="f92b09243c0cc3f457da8b983f67bd8e1295d0f5b3746c7a1861d7a99403854a") + version("0.41.1", sha256="f19f767a018e6ec89608e1f6b13348fa2fcde657151137cb64e56d48598a92db") + version("0.41.0", sha256="7d41db345d76d2dfa31871178ce0d8e9fd8aa015aa1b7d4dab84b5cb393901e0") + version("0.40.1", sha256="5cdb0d45df602099d833d50bd9e81353a5e036242d3c003c5b294fc61d1986b4") + version("0.40.0", sha256="c910b8fbfd67b8e9d0b10ebc012b23cd67cbecef1b96f00d391ddd298d71671c") + version("0.39.1", sha256="b43abd7c82e805261c425d50335be9a6c4f84264e34d6d6e475207300005d572") + version("0.39.0", sha256="01098be54f1aa25e391cebba8ea71cd1533f8cd1f50e34c7dd7540c2560a93af") + version("0.38.1", sha256="0622a86301fcf81cc50d7ed5b4bebe992c030580d413a8443b328ed4f4d82561") + version("0.38.0", sha256="a99d166ccf3b116f3b9ed23b9b70ba2415640a9c978f3aaa13fad49c58f4965c") + version("0.37.0", sha256="6392b870cd018ec0c645d6bbb918d6aa0eeca8c62674baaee30862d6b6865b15") + version( + "0.34.0", + sha256="f03ee0d19bca8f2fe922bb424a909d05c28411983b0c2bc58b020032a0d11f63", + deprecated=True, + ) + version( + "0.33.0", + 
sha256="9c8aae96f7fba10d9ac864b443d1e8c7ee4765c31569a2b201b3d0b67d8fc596", + deprecated=True, + ) + version( + "0.31.0", + sha256="22ab2b9d7ec79fab66ac8b3d2133347de86addc2e2df1b3793e523ac84baa3c8", + deprecated=True, + ) + + depends_on("cxx", type="build") # generated + + depends_on("py-setuptools", type="build") + depends_on("python@3.9:3.12", when="@0.42:", type=("build", "run")) + depends_on("python@3.8:3.11", when="@0.40:0.41", type=("build", "run")) + depends_on("python@:3.10", when="@0.38:0.39", type=("build", "run")) + depends_on("python@:3.9", when="@0.36:0.37", type=("build", "run")) + depends_on("python@:3.8", when="@0.31:0.35", type=("build", "run")) + + # https://github.com/numba/llvmlite#compatibility + depends_on("llvm@15", when="@0.44:") + depends_on("llvm@14", when="@0.41:0.43") + depends_on("llvm@11:14", when="@0.40") + depends_on("llvm@11", when="@0.37:0.39") + for t in [ + "arm:", + "ppc:", + "ppc64:", + "ppc64le:", + "ppcle:", + "sparc:", + "sparc64:", + "x86:", + "x86_64:", + ]: + depends_on("llvm@10.0", when=f"@0.34:0.36 target={t}") + + depends_on("llvm@9.0", when="@0.34:0.36 target=aarch64:") + depends_on("llvm@9.0", when="@0.33") + depends_on("llvm@7.0:7.1,8.0", when="@0.29:0.32") + depends_on("binutils", type="build") + + # TODO: investigate + conflicts("%apple-clang@15:") + + def setup_build_environment(self, env): + if self.spec.satisfies("%fj"): + env.set("CXX_FLTO_FLAGS", "{0}".format(self.compiler.cxx_pic_flag)) + env.set("LD_FLTO_FLAGS", "-Wl,--exclude-libs=ALL") + else: + # Need to set PIC flag since this is linking statically with LLVM + env.set("CXX_FLTO_FLAGS", "-flto {0}".format(self.compiler.cxx_pic_flag)) diff --git a/packages/py-numba/package.py b/packages/py-numba/package.py index 522abe31..98b99ab1 100644 --- a/packages/py-numba/package.py +++ b/packages/py-numba/package.py @@ -1,5 +1,4 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# Copyright Spack Project Developers. See COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) @@ -17,6 +16,9 @@ class PyNumba(PythonPackage): license("BSD-2-Clause") + version("0.61.0", sha256="888d2e89b8160899e19591467e8fdd4970e07606e1fbc248f239c89818d5f925") + version("0.60.0", sha256="5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16") + version("0.59.1", sha256="76f69132b96028d2774ed20415e8c528a34e3299a40581bae178f0994a2f370b") version("0.58.1", sha256="487ded0633efccd9ca3a46364b40006dbdaca0f95e99b8b83e778d1195ebcbaa") version("0.57.0", sha256="2af6d81067a5bdc13960c6d2519dbabbf4d5d597cf75d640c5aeaefd48c6420a") version("0.56.4", sha256="32d9fef412c81483d7efe0ceb6cf4d3310fde8b624a9cecca00f790573ac96ee") @@ -24,22 +26,42 @@ class PyNumba(PythonPackage): version("0.55.2", sha256="e428d9e11d9ba592849ccc9f7a009003eb7d30612007e365afe743ce7118c6f4") version("0.55.1", sha256="03e9069a2666d1c84f93b00dbd716fb8fedde8bb2c6efafa2f04842a46442ea3") version("0.54.0", sha256="bad6bd98ab2e41c34aa9c80b8d9737e07d92a53df4f74d3ada1458b0b516ccff") - version("0.51.1", sha256="1e765b1a41535684bf3b0465c1d0a24dcbbff6af325270c8f4dad924c0940160") - version("0.50.1", sha256="89e81b51b880f9b18c82b7095beaccc6856fcf84ba29c4f0ced42e4e5748a3a7") - version("0.48.0", sha256="9d21bc77e67006b5723052840c88cc59248e079a907cc68f1a1a264e1eaba017") + version( + "0.51.1", + sha256="1e765b1a41535684bf3b0465c1d0a24dcbbff6af325270c8f4dad924c0940160", + deprecated=True, + ) + version( + "0.50.1", + sha256="89e81b51b880f9b18c82b7095beaccc6856fcf84ba29c4f0ced42e4e5748a3a7", + deprecated=True, + ) + version( + "0.48.0", + sha256="9d21bc77e67006b5723052840c88cc59248e079a907cc68f1a1a264e1eaba017", + deprecated=True, + ) depends_on("c", type="build") # generated depends_on("cxx", type="build") # generated variant("tbb", default=False, description="Build with Intel Threading Building Blocks") - depends_on("python@3.8:3.11", 
when="@0.57:", type=("build", "run")) + # Be careful that the bounds given in setup.py are exclusive on the upper bound + # i.e., [min, max) + depends_on("python@3.10:3.13", when="@0.61:", type=("build", "run")) + depends_on("python@3.9:3.12", when="@0.59:", type=("build", "run")) + depends_on("python@3.8:3.11", when="@0.57:0.58", type=("build", "run")) depends_on("python@3.7:3.10", when="@0.55:0.56", type=("build", "run")) depends_on("python@3.7:3.9", when="@0.54", type=("build", "run")) depends_on("python@3.6:3.9", when="@0.53", type=("build", "run")) depends_on("python@3.6:3.8", when="@0.52", type=("build", "run")) depends_on("python@3.6:3.8", when="@0.48:0.51", type=("build", "run")) - depends_on("py-numpy@1.22:1.26", when="@0.58.1:", type=("build", "run")) + # begin EBRAINS (added): fix numpy dependency version range + depends_on("py-numpy@1.24:1.26,2.0:2.1", when="@0.61:", type=("build", "run")) + depends_on("py-numpy@1.22:1.26,2.0", when="@0.60", type=("build", "run")) + # end EBRAINS + depends_on("py-numpy@1.22:1.26", when="@0.58.1:0.59", type=("build", "run")) depends_on("py-numpy@1.21:1.25", when="@0.58.0", type=("build", "run")) depends_on("py-numpy@1.21:1.24", when="@0.57", type=("build", "run")) depends_on("py-numpy@1.18:1.23", when="@0.56.1:0.56.4", type=("build", "run")) @@ -48,6 +70,9 @@ class PyNumba(PythonPackage): depends_on("py-numpy@1.17:1.20", when="@0.54", type=("build", "run")) depends_on("py-numpy@1.15:1.20", when="@0.48:0.53", type=("build", "run")) depends_on("py-setuptools", type=("build", "run")) + depends_on("py-llvmlite@0.44", when="@0.61", type=("build", "run")) + depends_on("py-llvmlite@0.43", when="@0.60", type=("build", "run")) + depends_on("py-llvmlite@0.42", when="@0.59", type=("build", "run")) depends_on("py-llvmlite@0.41", when="@0.58", type=("build", "run")) depends_on("py-llvmlite@0.40", when="@0.57", type=("build", "run")) depends_on("py-llvmlite@0.39", when="@0.56", type=("build", "run")) @@ -64,10 +89,6 @@ class 
PyNumba(PythonPackage): # See https://reviews.llvm.org/D44140 conflicts("^llvm@6.0.0") - # begin EBRAINS (added): numba>=0.57 requires at least version 14.0.0 of LLVM - conflicts("llvm@:13", when="@0.57.0:") - # end EBRAINS - def setup_build_environment(self, env): if self.spec.satisfies("~tbb"): env.set("NUMBA_DISABLE_TBB", "yes") -- GitLab From 8df4dc1f4f95ed94ab769a263bd470c9367206a8 Mon Sep 17 00:00:00 2001 From: Abolfazl Ziaeemehr <a.ziaeemehr@gmail.com> Date: Thu, 20 Feb 2025 14:17:31 +0200 Subject: [PATCH 057/111] feat(py-vbi): add vbi Co-Authored-By: Eleni Mathioulaki <emathioulaki@athenarc.gr> --- packages/py-sbi/package.py | 46 ++++++++++++++++++++++++++++++++++++++ packages/py-vbi/package.py | 43 +++++++++++++++++++++++++++++++++++ spack.yaml | 1 + 3 files changed, 90 insertions(+) create mode 100644 packages/py-sbi/package.py create mode 100644 packages/py-vbi/package.py diff --git a/packages/py-sbi/package.py b/packages/py-sbi/package.py new file mode 100644 index 00000000..a560cd93 --- /dev/null +++ b/packages/py-sbi/package.py @@ -0,0 +1,46 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PySbi(PythonPackage): + """Python package for simulating biological systems.""" + + homepage = "https://sbi-dev.github.io/sbi/v0.23.3/" + url = "https://github.com/sbi-dev/sbi/archive/refs/tags/v0.23.3.tar.gz" + + version("0.23.3", "b1ef102e47c90088f2adfff5ea88b18421e84c4641ff4dd4f68c1116c296ba81") + + depends_on("python@3.9:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-wheel", type="build") + + depends_on("py-arviz", type=("build", "run")) + depends_on("py-joblib@1.3.0:", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-notebook@:6.4.12", type=("build", "run")) + depends_on("py-numpy@:1", type=("build", "run")) + depends_on("py-pillow", type=("build", "run")) + depends_on("py-pyknos@0.16.0:", type=("build", "run")) + depends_on("py-pyro-ppl@1.3.1:", type=("build", "run")) + depends_on("py-scikit-learn", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-tensorboard", type=("build", "run")) + depends_on("py-torch@1.13.0:", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) + depends_on("py-pymc@5.0.0:", type=("build", "run")) + depends_on("py-zuko@1.2.0:", type=("build", "run")) + + depends_on("py-pytest", type="test") + depends_on("py-torchtestcase", type="test") + + skip_modules = ["sbi.inference.snle", "sbi.inference.snpe", "sbi.inference.snre", "sbi.samplers.score", "sbi.samplers.vi"] + + @run_after("install") + @on_package_attributes(run_tests=True) + def install_test(self): + pytest = which("pytest") + pytest("-m", "not slow and not gpu") diff --git a/packages/py-vbi/package.py b/packages/py-vbi/package.py new file mode 100644 index 00000000..ba2cbebb --- /dev/null +++ b/packages/py-vbi/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. 
See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyVbi(PythonPackage): + + homepage = "https://vbi.readthedocs.io/latest/" + git = "https://github.com/ins-amu/vbi" + url = "https://github.com/ins-amu/vbi/archive/refs/tags/v0.1.3.tar.gz" + + version("0.1.3", "54fa2062f44c9ec8219fae3c13c52a4bd17141b5467b982987673de0662c5255") + version("0.1.2", "6ccfeeec718be62a480002a8370130a3e3344955186f99ecbb15b646b68210d6") + + depends_on("python@3.8:", type=("build","run")) + depends_on("py-setuptools", type="build") + depends_on("py-setuptools-scm", type="build") + depends_on("py-wheel", type="build") + depends_on("swig@4:", type="build") + depends_on("py-numpy", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + depends_on("py-numba", type=("build", "run")) + depends_on("py-h5py", type=("build", "run")) + depends_on("py-pandas", type=("build", "run")) + depends_on("py-networkx", type=("build", "run")) + depends_on("py-nbconvert", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) + depends_on("py-sbi", type=("build", "run")) + depends_on("py-torch", type=("build", "run")) + depends_on("py-parameterized", type=("build", "run")) + depends_on("py-scikit-learn", type=("build", "run")) + depends_on("py-pycatch22", type=("build", "run")) + depends_on("py-pytest", type="test") + + @run_after("install") + @on_package_attributes(run_tests=True) + def install_test(self): + pytest = which("pytest") + pytest() diff --git a/spack.yaml b/spack.yaml index 055df087..b4322538 100644 --- a/spack.yaml +++ b/spack.yaml @@ -56,6 +56,7 @@ spack: - py-tvb-ext-unicore - py-tvb-ext-xircuits@1.1.0 - py-viziphant@0.4.0 + - py-vbi - pynn-brainscales@9.0-a8 - r-rgsl@0.1.1 - r-sbtabvfgen@0.1 -- GitLab From 243bd1438eb48ec25695cf45dfb88420280df311 Mon Sep 17 00:00:00 2001 From: teodoramisan <teodora.misan@codemart.ro> Date: Fri, 21 
Feb 2025 18:09:01 +0200 Subject: [PATCH 058/111] feat(py-sbi): update dependencies and add missing packages Co-Authored-By: Eleni Mathioulaki <emathioulaki@athenarc.gr> --- packages/py-cons/package.py | 19 +++++++++++++ packages/py-etuples/package.py | 21 +++++++++++++++ packages/py-logical-unification/package.py | 20 ++++++++++++++ packages/py-mini-kanren/package.py | 24 +++++++++++++++++ packages/py-nflows/package.py | 22 +++++++++++++++ packages/py-pycatch22/package.py | 16 +++++++++++ packages/py-pyknos/package.py | 25 +++++++++++++++++ packages/py-pymc/package.py | 31 ++++++++++++++++++++++ packages/py-pytensor/package.py | 27 +++++++++++++++++++ packages/py-torchtestcase/package.py | 20 ++++++++++++++ packages/py-umnn/package.py | 20 ++++++++++++++ packages/py-xarray-einstats/package.py | 23 ++++++++++++++++ packages/py-zuko/package.py | 20 ++++++++++++++ 13 files changed, 288 insertions(+) create mode 100644 packages/py-cons/package.py create mode 100644 packages/py-etuples/package.py create mode 100644 packages/py-logical-unification/package.py create mode 100644 packages/py-mini-kanren/package.py create mode 100644 packages/py-nflows/package.py create mode 100644 packages/py-pycatch22/package.py create mode 100644 packages/py-pyknos/package.py create mode 100644 packages/py-pymc/package.py create mode 100644 packages/py-pytensor/package.py create mode 100644 packages/py-torchtestcase/package.py create mode 100644 packages/py-umnn/package.py create mode 100644 packages/py-xarray-einstats/package.py create mode 100644 packages/py-zuko/package.py diff --git a/packages/py-cons/package.py b/packages/py-cons/package.py new file mode 100644 index 00000000..77b2e1ab --- /dev/null +++ b/packages/py-cons/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyCons(PythonPackage): + """An implementation of cons in Python.""" + + homepage = "https://github.com/pythological/python-cons" + pypi = "cons/cons-0.4.6.tar.gz" + + version("0.4.6", "669fe9d5ee916d5e42b9cac6acc911df803d04f2e945c1604982a04d27a29b47") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-logical-unification@0.4.0:", type=("build", "run")) diff --git a/packages/py-etuples/package.py b/packages/py-etuples/package.py new file mode 100644 index 00000000..d3462437 --- /dev/null +++ b/packages/py-etuples/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyEtuples(PythonPackage): + """Python library that allows one to define, optimize, and efficiently evaluate mathematical expressions involving + multi-dimensional arrays. It provides the computational backend for PyMC.""" + + homepage = "http://github.com/pythological/etuples" + pypi = "etuples/etuples-0.3.9.tar.gz" + + version("0.3.9", "a474e586683d8ba8d842ba29305005ceed1c08371a4b4b0e0e232527137e5ea3") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-cons", type=("build", "run")) + depends_on("py-multipledispatch", type=("build", "run")) diff --git a/packages/py-logical-unification/package.py b/packages/py-logical-unification/package.py new file mode 100644 index 00000000..7ba1620e --- /dev/null +++ b/packages/py-logical-unification/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyLogicalUnification(PythonPackage): + """Logical unification in Python, extensible via dispatch.""" + + homepage = "http://github.com/pythological/unification/" + pypi = "logical-unification/logical-unification-0.4.6.tar.gz" + + version("0.4.6", "908435123f8a106fa4dcf9bf1b75c7beb309fa2bbecf277868af8f1c212650a0") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-toolz", type=("build", "run")) + depends_on("py-multipledispatch", type=("build", "run")) diff --git a/packages/py-mini-kanren/package.py b/packages/py-mini-kanren/package.py new file mode 100644 index 00000000..2d24eb02 --- /dev/null +++ b/packages/py-mini-kanren/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyMiniKanren(PythonPackage): + """Logic/relational programming in Python with miniKanren.""" + + homepage = "http://github.com/pythological/kanren" + pypi = "miniKanren/miniKanren-1.0.3.tar.gz" + + version("1.0.3", "1ec8bdb01144ad5e8752c7c297fb8a122db920f859276d25a72d164e998d7f6e") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-toolz", type=("build", "run")) + depends_on("py-cons@0.4.0:", type=("build", "run")) + depends_on("py-multipledispatch", type=("build", "run")) + depends_on("py-etuples@0.3.1:", type=("build", "run")) + depends_on("py-logical-unification@0.4.1:", type=("build", "run")) + depends_on("py-typing-extensions", type=("build", "run")) diff --git a/packages/py-nflows/package.py b/packages/py-nflows/package.py new file mode 100644 index 00000000..72f82caf --- /dev/null +++ b/packages/py-nflows/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2024 Lawrence Livermore 
National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + +class PyNflows(PythonPackage): + """It is a comprehensive collection of normalizing flows using PyTorch.""" + + homepage = "https://github.com/bayesiains/nflows" + pypi = "nflows/nflows-0.14.tar.gz" + + version("0.14", "6299844a62f9999fcdf2d95cb2d01c091a50136bd17826e303aba646b2d11b55") + + depends_on("py-setuptools", type="build") + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-tensorboard", type=("build", "run")) + depends_on("py-torch", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) + depends_on("py-umnn", type=("build", "run")) diff --git a/packages/py-pycatch22/package.py b/packages/py-pycatch22/package.py new file mode 100644 index 00000000..68c4be09 --- /dev/null +++ b/packages/py-pycatch22/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPycatch22(PythonPackage): + + homepage = "https://github.com/DynamicsAndNeuralSystems/pycatch22" + pypi="pycatch22/pycatch22-0.4.5.tar.gz" + + version("0.4.5", sha256="7ec844c659f22bedc66847ac866ef2bd86ffbbd4d8114b5e97f699f20a6f9f81") + + depends_on("py-setuptools", type="build") diff --git a/packages/py-pyknos/package.py b/packages/py-pyknos/package.py new file mode 100644 index 00000000..e9449a6d --- /dev/null +++ b/packages/py-pyknos/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPyknos(PythonPackage): + """Python package for conditional density estimation. It either wraps or implements diverse conditional density + estimators.""" + + homepage = "https://github.com/sbi-dev/pyknos" + pypi = "pyknos/pyknos-0.16.0.tar.gz" + + version("0.16.0", "4e1db834d8a5fd847882a081937732fea6798668b72293ae052765e7bfc371c3") + + depends_on("python@3.8:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-nflows@0.14", type=("build", "run")) + depends_on("py-numpy", type=("build", "run")) + depends_on("py-tensorboard", type=("build", "run")) + depends_on("py-torch", type=("build", "run")) + depends_on("py-tqdm", type=("build", "run")) diff --git a/packages/py-pymc/package.py b/packages/py-pymc/package.py new file mode 100644 index 00000000..b7dc999f --- /dev/null +++ b/packages/py-pymc/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyPymc(PythonPackage): + """PyMC (formerly PyMC3) is a Python package for Bayesian statistical modeling focusing on advanced Markov chain Monte + Carlo (MCMC) and variational inference (VI) algorithms.""" + + homepage = "https://github.com/pymc-devs/pymc" + pypi = "pymc/pymc-5.20.1.tar.gz" + + version("5.20.1", "fb5f20d196a1b34eb193a855c611887b2e7b98d3af37d8573a33d112e2278eac") + + depends_on("python@3.10:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-versioneer", type="build") + + depends_on("py-arviz@0.13:", type=("build", "run")) + depends_on("py-cachetools@4.2.1:", type=("build", "run")) + depends_on("py-cloudpickle", type=("build", "run")) + depends_on("py-numpy@1.25.0:", type=("build", "run")) + depends_on("py-pandas@0.24.0:", type=("build", "run")) + depends_on("py-pytensor@2.26.1:2.27.999", type=("build", "run")) + depends_on("py-rich@13.7.1:", type=("build", "run")) + depends_on("py-scipy@1.4.1:", type=("build", "run")) + depends_on("py-threadpoolctl@3.1.0:3.99", type=("build", "run")) + depends_on("py-typing-extensions@3.7.4:", type=("build", "run")) diff --git a/packages/py-pytensor/package.py b/packages/py-pytensor/package.py new file mode 100644 index 00000000..4c045393 --- /dev/null +++ b/packages/py-pytensor/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + +class PyPytensor(PythonPackage): + """Python library that allows one to define, optimize, and efficiently evaluate mathematical expressions involving + multi-dimensional arrays. 
It provides the computational backend for PyMC.""" + + homepage = "https://github.com/pymc-devs/pytensor" + pypi = "pytensor/pytensor-2.27.1.tar.gz" + + version("2.27.1", "ed5075e1504e0e4c2322340111289820c5e1718b70187922777d560a8ef26f75") + + depends_on("python@3.10:3.13", type=("build", "run")) + depends_on("py-setuptools@59.0.0:", type="build") + depends_on("py-cython", type="build") + depends_on("py-versioneer+toml", type="build") + depends_on("py-scipy@1.0:1", type=("build", "run")) + depends_on("py-numpy@1.17.0:", type=("build", "run")) + depends_on("py-filelock", type=("build", "run")) # TODO: it needs filelock>=3.15, but on pypi the latest one is 3.12.4 + depends_on("py-etuples", type=("build", "run")) + depends_on("py-logical-unification", type=("build", "run")) + depends_on("py-mini-kanren", type=("build", "run")) + depends_on("py-cons", type=("build", "run")) diff --git a/packages/py-torchtestcase/package.py b/packages/py-torchtestcase/package.py new file mode 100644 index 00000000..29db01bd --- /dev/null +++ b/packages/py-torchtestcase/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyTorchtestcase(PythonPackage): + """Extends unittest.TestCase such that assertions support PyTorch tensors and parameters.""" + + homepage = "https://github.com/phohenecker/torch-test-case" + pypi = "torchtestcase/torchtestcase-2018.2.tar.gz" + + version("2018.2", sha256="0061cde2eb79f09c9501fae675c52c799371606d52afcff8753c44e1a6254a00") + version("2018.1", sha256="691b053b0466aed40201e1b41f5a903b4df889a64272a18bcab4b1c8e9091cb4") + version("2017.1", sha256="f8bb0c4e3216087130f80c4237bb5e4c1e6de629d553f25fd7b85f6e33bf9b34") + + depends_on("py-numpy@1.13.1:", type=("build", "run")) + depends_on("py-torch@0.4.0:", type=("build", "run")) diff --git a/packages/py-umnn/package.py b/packages/py-umnn/package.py new file mode 100644 index 00000000..545b1ec0 --- /dev/null +++ b/packages/py-umnn/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyUmnn(PythonPackage): + """Official implementation of Unconstrained Monotonic Neural Networks (UMNN).""" + + homepage = "https://github.com/AWehenkel/UMNN" + pypi = "umnn/umnn-1.71.tar.gz" + + version("1.71", "bdd41d941a5d904e2217a960a9584922afad8068304976dc6fb0245e4f834996") + + depends_on("python@3.6:", type=("build", "run")) + depends_on("py-hatchling", type="build") + depends_on("py-numpy", type=("build", "run")) + depends_on("py-torch@1.1:", type=("build", "run")) diff --git a/packages/py-xarray-einstats/package.py b/packages/py-xarray-einstats/package.py new file mode 100644 index 00000000..520eb406 --- /dev/null +++ b/packages/py-xarray-einstats/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyXarrayEinstats(PythonPackage): + """Stats, linear algebra and einops for xarray""" + + homepage = "https://github.com/arviz-devs/xarray-einstats" + pypi = "xarray_einstats/xarray_einstats-0.8.0.tar.gz" + + version("0.8.0", sha256="7f1573f9bd4d60d6e7ed9fd27c4db39da51ec49bf8ba654d4602a139a6309d7f") + version("0.7.0", sha256="2d7b571b3bbad3cf2fd10c6c75fd949d247d14c29574184c8489d9d607278d38") + version("0.6.0", sha256="ace90601505cfbe2d374762e674557ed14e1725b024823372f7ef9fd237effad") + + depends_on("python@3.10:", type=("build", "run")) + depends_on("py-flit-core@3.4:4", type="build") + depends_on("py-numpy@1.23:", type=("build", "run")) + depends_on("py-scipy@1.9:", type=("build", "run")) + depends_on("py-xarray@2022.09:", type=("build", "run")) diff --git a/packages/py-zuko/package.py b/packages/py-zuko/package.py new file mode 100644 index 00000000..9c222273 --- /dev/null +++ b/packages/py-zuko/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyZuko(PythonPackage): + """Python package that implements normalizing flows in PyTorch.""" + + homepage = "https://github.com/probabilists/zuko" + pypi = "zuko/zuko-1.3.1.tar.gz" + + version("1.3.1", "00f246802d3f486183185529ba22e0b2bf691397e03b28150a5cf713fa0da758") + + depends_on("python@3.9:", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-numpy@1.20.0:", type=("build", "run")) + depends_on("py-torch@1.12.0:", type=("build", "run")) -- GitLab From 176c2a5a618785e017d328b0d36b140bbf32277d Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 01:47:16 +0000 Subject: [PATCH 059/111] feat(py-arviz): copy package from upstream --- packages/py-arviz/package.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 packages/py-arviz/package.py diff --git a/packages/py-arviz/package.py b/packages/py-arviz/package.py new file mode 100644 index 00000000..d16dc45e --- /dev/null +++ b/packages/py-arviz/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyArviz(PythonPackage): + """ArviZ (pronounced "AR-vees") is a Python package for exploratory + analysis of Bayesian models. 
Includes functions for posterior analysis, + model checking, comparison and diagnostics.""" + + homepage = "https://github.com/arviz-devs/arviz" + pypi = "arviz/arviz-0.6.1.tar.gz" + + license("Apache-2.0") + + version("0.6.1", sha256="435edf8db49c41a8fa198f959e7581063006c49a4efdef4755bb778db6fd4f72") + + depends_on("py-setuptools", type="build") + depends_on("py-matplotlib@3.0:", type=("build", "run")) + depends_on("py-numpy@1.12:", type=("build", "run")) + depends_on("py-scipy@0.19:", type=("build", "run")) + depends_on("py-packaging", type=("build", "run")) + depends_on("py-pandas@0.23:", type=("build", "run")) + depends_on("py-xarray@0.11:", type=("build", "run")) + depends_on("py-netcdf4", type=("build", "run")) -- GitLab From bed743be961d0d17f738e94b7269af925c6a0155 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 02:12:48 +0000 Subject: [PATCH 060/111] feat(py-arviz): add version 0.20.0 --- packages/py-arviz/package.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/py-arviz/package.py b/packages/py-arviz/package.py index d16dc45e..c127d5f4 100644 --- a/packages/py-arviz/package.py +++ b/packages/py-arviz/package.py @@ -16,6 +16,9 @@ class PyArviz(PythonPackage): license("Apache-2.0") + # begin EBRAINS (added): added version + version("0.20.0", sha256="a2704e0c141410fcaea1973a90cabf280f5aed5c1e10f44381ebd6c144c10a9c") + # end EBRAINS version("0.6.1", sha256="435edf8db49c41a8fa198f959e7581063006c49a4efdef4755bb778db6fd4f72") depends_on("py-setuptools", type="build") @@ -25,4 +28,9 @@ class PyArviz(PythonPackage): depends_on("py-packaging", type=("build", "run")) depends_on("py-pandas@0.23:", type=("build", "run")) depends_on("py-xarray@0.11:", type=("build", "run")) - depends_on("py-netcdf4", type=("build", "run")) + # begin EBRAINS (modified): update dependencies + depends_on("py-netcdf4", when="@:0.13", type=("build", "run")) + depends_on("py-h5netcdf", when="@0.15:", 
type=("build", "run")) + depends_on("py-typing-extensions", when="@0.11:", type=("build", "run")) + depends_on("py-xarray-einstats", type=("build", "run")) + # end EBRAINS -- GitLab From 4b4bf14f45f1236dba63f36ec4d47df32a1262d3 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 01:47:58 +0000 Subject: [PATCH 061/111] feat(py-parameterized): copy package from upstream --- packages/py-parameterized/package.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 packages/py-parameterized/package.py diff --git a/packages/py-parameterized/package.py b/packages/py-parameterized/package.py new file mode 100644 index 00000000..b5c68b42 --- /dev/null +++ b/packages/py-parameterized/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyParameterized(PythonPackage): + """Parameterized testing with any Python test framework.""" + + homepage = "https://github.com/wolever/parameterized" + pypi = "parameterized/parameterized-0.7.1.tar.gz" + + version("0.7.1", sha256="6a94dbea30c6abde99fd4c2f2042c1bf7f980e48908bf92ead62394f93cf57ed") + + depends_on("py-setuptools", type="build") -- GitLab From 51add17e2da351bd8ed9c847ee0962ebfffbe444 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 02:13:25 +0000 Subject: [PATCH 062/111] feat(py-parameterized): add version 0.9.0 --- packages/py-parameterized/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/py-parameterized/package.py b/packages/py-parameterized/package.py index b5c68b42..1ab00622 100644 --- a/packages/py-parameterized/package.py +++ b/packages/py-parameterized/package.py @@ -12,6 +12,9 @@ class PyParameterized(PythonPackage): homepage = 
"https://github.com/wolever/parameterized" pypi = "parameterized/parameterized-0.7.1.tar.gz" + # begin EBRAINS (added): new version + version("0.9.0", sha256="7fc905272cefa4f364c1a3429cbbe9c0f98b793988efb5bf90aac80f08db09b1") + # end EBRAINS version("0.7.1", sha256="6a94dbea30c6abde99fd4c2f2042c1bf7f980e48908bf92ead62394f93cf57ed") depends_on("py-setuptools", type="build") -- GitLab From 60ad098442d5258c2c0253e568ffbf04db3f477a Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 01:31:59 +0000 Subject: [PATCH 063/111] feat(py-joblib): copy package from upstream --- packages/py-joblib/package.py | 41 +++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 packages/py-joblib/package.py diff --git a/packages/py-joblib/package.py b/packages/py-joblib/package.py new file mode 100644 index 00000000..4e85d003 --- /dev/null +++ b/packages/py-joblib/package.py @@ -0,0 +1,41 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyJoblib(PythonPackage): + """Lightweight pipelining with Python functions.""" + + homepage = "https://joblib.readthedocs.io/" + pypi = "joblib/joblib-0.14.0.tar.gz" + git = "https://github.com/joblib/joblib" + + # 'joblib.test' requires 'pytest'. Leave out of 'import_modules' to avoid + # unnecessary dependencies. 
+ skip_modules = ["joblib.test"] + + license("BSD-3-Clause") + + version("1.2.0", sha256="e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018") + version("1.1.0", sha256="4158fcecd13733f8be669be0683b96ebdbbd38d23559f54dca7205aea1bf1e35") + version("1.0.1", sha256="9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7") + version("1.0.0", sha256="7ad866067ac1fdec27d51c8678ea760601b70e32ff1881d4dc8e1171f2b64b24") + version("0.17.0", sha256="9e284edd6be6b71883a63c9b7f124738a3c16195513ad940eae7e3438de885d5") + version("0.16.0", sha256="8f52bf24c64b608bf0b2563e0e47d6fcf516abc8cfafe10cfd98ad66d94f92d6") + version("0.15.1", sha256="61e49189c84b3c5d99a969d314853f4d1d263316cc694bec17548ebaa9c47b6e") + version("0.15.0", sha256="f8f84dcef519233be4ede1c64fd1f2d48b1e8bbb632d1013ebca75f8b678ee72") + version("0.14.1", sha256="0630eea4f5664c463f23fbf5dcfc54a2bc6168902719fa8e19daf033022786c8") + version("0.14.0", sha256="6fcc57aacb4e89451fd449e9412687c51817c3f48662c3d8f38ba3f8a0a193ff") + version("0.13.2", sha256="315d6b19643ec4afd4c41c671f9f2d65ea9d787da093487a81ead7b0bac94524") + version("0.11", sha256="7b8fd56df36d9731a83729395ccb85a3b401f62a96255deb1a77220c00ed4085") + version("0.10.3", sha256="29b2965a9efbc90a5fe66a389ae35ac5b5b0c1feabfc7cab7fd5d19f429a071d") + version("0.10.2", sha256="3123553bdad83b143428033537c9e1939caf4a4d8813dade6a2246948c94494b") + version("0.10.0", sha256="49b3a0ba956eaa2f077e1ebd230b3c8d7b98afc67520207ada20a4d8b8efd071") + + depends_on("python@3.7:", when="@1.2:", type=("build", "run")) + depends_on("python@3.6:", when="@0.15:", type=("build", "run")) + depends_on("python@2.7:2.8,3.4:", type=("build", "run")) + depends_on("py-setuptools", type=("build", "run")) -- GitLab From e1ac13342d7746737c1741052f7395aad501fa28 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 01:36:53 +0000 Subject: [PATCH 064/111] feat(py-joblib): add version 1.4.2 --- packages/py-joblib/package.py | 3 +++ 
1 file changed, 3 insertions(+) diff --git a/packages/py-joblib/package.py b/packages/py-joblib/package.py index 4e85d003..41d5602a 100644 --- a/packages/py-joblib/package.py +++ b/packages/py-joblib/package.py @@ -19,6 +19,9 @@ class PyJoblib(PythonPackage): license("BSD-3-Clause") + # begin EBRAINS (added): add version + version("1.4.2", sha256="2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e") + # end EBRAINS version("1.2.0", sha256="e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018") version("1.1.0", sha256="4158fcecd13733f8be669be0683b96ebdbbd38d23559f54dca7205aea1bf1e35") version("1.0.1", sha256="9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7") -- GitLab From dfc46c88d2a8da0e253adff7aed3d7fefea3490f Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 01:25:36 +0000 Subject: [PATCH 065/111] feat(py-torch): copy package from upstream --- .../detect_omp_of_fujitsu_compiler.patch | 20 + packages/py-torch/fj-ssl2_1.10.patch | 76 ++ packages/py-torch/fj-ssl2_1.11.patch | 76 ++ packages/py-torch/fj-ssl2_1.3-1.5.patch | 76 ++ packages/py-torch/fj-ssl2_1.6-1.7.patch | 76 ++ packages/py-torch/fj-ssl2_1.8.patch | 76 ++ packages/py-torch/fj-ssl2_1.9.patch | 76 ++ packages/py-torch/package.py | 704 ++++++++++++++++++ packages/py-torch/rocm.patch | 98 +++ packages/py-torch/xnnpack.patch | 47 ++ 10 files changed, 1325 insertions(+) create mode 100644 packages/py-torch/detect_omp_of_fujitsu_compiler.patch create mode 100644 packages/py-torch/fj-ssl2_1.10.patch create mode 100644 packages/py-torch/fj-ssl2_1.11.patch create mode 100644 packages/py-torch/fj-ssl2_1.3-1.5.patch create mode 100644 packages/py-torch/fj-ssl2_1.6-1.7.patch create mode 100644 packages/py-torch/fj-ssl2_1.8.patch create mode 100644 packages/py-torch/fj-ssl2_1.9.patch create mode 100644 packages/py-torch/package.py create mode 100644 packages/py-torch/rocm.patch create mode 100644 
packages/py-torch/xnnpack.patch diff --git a/packages/py-torch/detect_omp_of_fujitsu_compiler.patch b/packages/py-torch/detect_omp_of_fujitsu_compiler.patch new file mode 100644 index 00000000..519d6686 --- /dev/null +++ b/packages/py-torch/detect_omp_of_fujitsu_compiler.patch @@ -0,0 +1,20 @@ +--- pytorch/cmake/Modules/FindOpenMP.cmake.org 2020-05-26 17:43:53.000000000 +0900 ++++ pytorch/cmake/Modules/FindOpenMP.cmake 2020-05-26 17:46:37.000000000 +0900 +@@ -84,7 +84,7 @@ + unset(OpenMP_FLAG_CANDIDATES) + + set(OMP_FLAG_GNU "-fopenmp") +- set(OMP_FLAG_Clang "-fopenmp=libomp" "-fopenmp=libiomp5" "-fopenmp") ++ set(OMP_FLAG_Clang "-fopenmp" "-fopenmp=libomp" "-fopenmp=libiomp5") + + # AppleClang may need a header file, search for omp.h with hints to brew + # default include dir +@@ -245,7 +245,7 @@ + set(OpenMP_libomp_LIBRARY "${MKL_OPENMP_LIBRARY}" CACHE STRING "libomp location for OpenMP") + else() + find_library(OpenMP_libomp_LIBRARY +- NAMES omp gomp iomp5 ++ NAMES fjomp omp gomp iomp5 + HINTS ${CMAKE_${LANG}_IMPLICIT_LINK_DIRECTORIES} + DOC "libomp location for OpenMP" + ) diff --git a/packages/py-torch/fj-ssl2_1.10.patch b/packages/py-torch/fj-ssl2_1.10.patch new file mode 100644 index 00000000..bcd2c378 --- /dev/null +++ b/packages/py-torch/fj-ssl2_1.10.patch @@ -0,0 +1,76 @@ +diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake +index ca560288a4..f5a29ecf43 100644 +--- a/cmake/Dependencies.cmake ++++ b/cmake/Dependencies.cmake +@@ -130,7 +130,7 @@ else() + set(AT_MKLDNN_ENABLED 0) + set(AT_MKL_ENABLED 0) + endif() +-set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;vecLib") ++set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;SSL2;vecLib") + message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS}) + + if(BLAS STREQUAL "Eigen") +@@ -185,6 +185,20 @@ elseif(BLAS STREQUAL "vecLib") + set(BLAS_INFO "veclib") + set(BLAS_FOUND 1) + set(BLAS_LIBRARIES 
${vecLib_LINKER_LIBS}) ++elseif(BLAS STREQUAL "SSL2") ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ message(STATUS "SSL2 Selected BLAS library") ++ list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so") ++ set(SSL2_FOUND ON) ++ message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ set(WITH_BLAS "ssl2") ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() + elseif(BLAS STREQUAL "Generic") + # On Debian family, the CBLAS ABIs have been merged into libblas.so + find_library(BLAS_LIBRARIES blas) +@@ -201,7 +215,7 @@ if(NOT INTERN_BUILD_MOBILE) + set(AT_MKL_ENABLED 0) + set(AT_MKL_MT 0) + set(USE_BLAS 1) +- if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR VECLIB_FOUND)) ++ if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR SSL2_FOUND OR VECLIB_FOUND)) + message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library") + find_package(BLAS) + if(NOT BLAS_FOUND) +diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake +index 47c80b45f6..efd4a87d06 100644 +--- a/cmake/Modules/FindBLAS.cmake ++++ b/cmake/Modules/FindBLAS.cmake +@@ -276,6 +276,28 @@ if((NOT BLAS_LIBRARIES) + endif() + endif() + ++# BLAS in SSL2 library? 
++if((NOT BLAS_LIBRARIES) ++ AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2"))) ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ check_fortran_libraries( ++ BLAS_LIBRARIES ++ BLAS ++ sgemm ++ "-SSL2;--linkfortran" ++ "fjlapackexsve") ++ if (BLAS_LIBRARIES) ++ set(BLAS_INFO "ssl2") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ endif (BLAS_LIBRARIES) ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() ++endif() ++ + # Generic BLAS library? + if((NOT BLAS_LIBRARIES) + AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic"))) diff --git a/packages/py-torch/fj-ssl2_1.11.patch b/packages/py-torch/fj-ssl2_1.11.patch new file mode 100644 index 00000000..af41e5bb --- /dev/null +++ b/packages/py-torch/fj-ssl2_1.11.patch @@ -0,0 +1,76 @@ +diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake +index 557ab649a4..56d1699736 100644 +--- a/cmake/Dependencies.cmake ++++ b/cmake/Dependencies.cmake +@@ -174,7 +174,7 @@ else() + set(AT_MKLDNN_ENABLED 0) + set(AT_MKL_ENABLED 0) + endif() +-set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;vecLib") ++set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;SSL2;vecLib") + message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS}) + + if(BLAS STREQUAL "Eigen") +@@ -229,6 +229,20 @@ elseif(BLAS STREQUAL "vecLib") + set(BLAS_INFO "veclib") + set(BLAS_FOUND 1) + set(BLAS_LIBRARIES ${vecLib_LINKER_LIBS}) ++elseif(BLAS STREQUAL "SSL2") ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ message(STATUS "SSL2 Selected BLAS library") ++ list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so") ++ set(SSL2_FOUND ON) ++ message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran") ++ 
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ set(WITH_BLAS "ssl2") ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() + elseif(BLAS STREQUAL "FlexiBLAS") + find_package(FlexiBLAS REQUIRED) + include_directories(SYSTEM ${FlexiBLAS_INCLUDE_DIR}) +@@ -250,7 +264,7 @@ if(NOT INTERN_BUILD_MOBILE) + set(AT_MKL_SEQUENTIAL 0) + set(AT_MKL_MT 0) + set(USE_BLAS 1) +- if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR VECLIB_FOUND OR FlexiBLAS_FOUND)) ++ if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR SSL2_FOUND OR VECLIB_FOUND OR FlexiBLAS_FOUND)) + message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library") + find_package(BLAS) + if(NOT BLAS_FOUND) +diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake +index 94942d520f..ae5b8db963 100644 +--- a/cmake/Modules/FindBLAS.cmake ++++ b/cmake/Modules/FindBLAS.cmake +@@ -289,6 +289,28 @@ if((NOT BLAS_LIBRARIES) + endif() + endif() + ++# BLAS in SSL2 library? ++if((NOT BLAS_LIBRARIES) ++ AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2"))) ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ check_fortran_libraries( ++ BLAS_LIBRARIES ++ BLAS ++ sgemm ++ "-SSL2;--linkfortran" ++ "fjlapackexsve") ++ if (BLAS_LIBRARIES) ++ set(BLAS_INFO "ssl2") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ endif (BLAS_LIBRARIES) ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() ++endif() ++ + # Generic BLAS library? 
+ if((NOT BLAS_LIBRARIES) + AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic"))) diff --git a/packages/py-torch/fj-ssl2_1.3-1.5.patch b/packages/py-torch/fj-ssl2_1.3-1.5.patch new file mode 100644 index 00000000..0ea87500 --- /dev/null +++ b/packages/py-torch/fj-ssl2_1.3-1.5.patch @@ -0,0 +1,76 @@ +diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake +index a8e9769536..f0f91304c2 100644 +--- a/cmake/Dependencies.cmake ++++ b/cmake/Dependencies.cmake +@@ -107,7 +107,7 @@ else() + set(AT_MKLDNN_ENABLED 0) + set(AT_MKL_ENABLED 0) + endif() +-set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;FLAME") ++set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;SSL2;FLAME") + message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS}) + + if(BLAS STREQUAL "Eigen") +@@ -147,6 +147,20 @@ elseif(BLAS STREQUAL "vecLib") + find_package(vecLib REQUIRED) + include_directories(SYSTEM ${vecLib_INCLUDE_DIR}) + list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS}) ++elseif(BLAS STREQUAL "SSL2") ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ message(STATUS "SSL2 Selected BLAS library") ++ list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so") ++ set(SSL2_FOUND ON) ++ message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ set(WITH_BLAS "ssl2") ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() + else() + message(FATAL_ERROR "Unrecognized BLAS option: " ${BLAS}) + endif() +@@ -156,7 +170,7 @@ if (NOT INTERN_BUILD_MOBILE) + set(AT_MKL_ENABLED 0) + set(AT_MKL_MT 0) + set(USE_BLAS 1) +- if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND)) ++ if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR 
VECLIB_FOUND OR SSL2_FOUND)) + message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library") + find_package(BLAS) + if (NOT BLAS_FOUND) +diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake +index e93e98a609..d43a6c40bd 100644 +--- a/cmake/Modules/FindBLAS.cmake ++++ b/cmake/Modules/FindBLAS.cmake +@@ -239,6 +239,28 @@ if((NOT BLAS_LIBRARIES) + endif (BLAS_LIBRARIES) + endif() + ++# BLAS in SSL2 library? ++if((NOT BLAS_LIBRARIES) ++ AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2"))) ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ check_fortran_libraries( ++ BLAS_LIBRARIES ++ BLAS ++ sgemm ++ "-SSL2;--linkfortran" ++ "fjlapackexsve") ++ if (BLAS_LIBRARIES) ++ set(BLAS_INFO "ssl2") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ endif (BLAS_LIBRARIES) ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() ++endif() ++ + # Generic BLAS library? 
+ if((NOT BLAS_LIBRARIES) + AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic"))) diff --git a/packages/py-torch/fj-ssl2_1.6-1.7.patch b/packages/py-torch/fj-ssl2_1.6-1.7.patch new file mode 100644 index 00000000..423af3f2 --- /dev/null +++ b/packages/py-torch/fj-ssl2_1.6-1.7.patch @@ -0,0 +1,76 @@ +diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake +index 36e1ab7682..0f02f51c47 100644 +--- a/cmake/Dependencies.cmake ++++ b/cmake/Dependencies.cmake +@@ -114,7 +114,7 @@ else() + set(AT_MKLDNN_ENABLED 0) + set(AT_MKL_ENABLED 0) + endif() +-set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;FLAME;Generic") ++set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;SSL2;FLAME;Generic") + message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS}) + + if(BLAS STREQUAL "Eigen") +@@ -154,6 +154,20 @@ elseif(BLAS STREQUAL "vecLib") + find_package(vecLib REQUIRED) + include_directories(SYSTEM ${vecLib_INCLUDE_DIR}) + list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS}) ++elseif(BLAS STREQUAL "SSL2") ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ message(STATUS "SSL2 Selected BLAS library") ++ list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so") ++ set(SSL2_FOUND ON) ++ message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ set(WITH_BLAS "ssl2") ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() + elseif(BLAS STREQUAL "Generic") + # On Debian family, the CBLAS ABIs have been merged into libblas.so + find_library(BLAS_LIBRARIES blas) +@@ -168,7 +182,7 @@ if(NOT INTERN_BUILD_MOBILE) + set(AT_MKL_ENABLED 0) + set(AT_MKL_MT 0) + set(USE_BLAS 1) +- if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR 
VECLIB_FOUND OR GENERIC_BLAS_FOUND)) ++ if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR SSL2_FOUND OR GENERIC_BLAS_FOUND)) + message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library") + find_package(BLAS) + if(NOT BLAS_FOUND) +diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake +index e93e98a609..d43a6c40bd 100644 +--- a/cmake/Modules/FindBLAS.cmake ++++ b/cmake/Modules/FindBLAS.cmake +@@ -239,6 +239,28 @@ if((NOT BLAS_LIBRARIES) + endif (BLAS_LIBRARIES) + endif() + ++# BLAS in SSL2 library? ++if((NOT BLAS_LIBRARIES) ++ AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2"))) ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ check_fortran_libraries( ++ BLAS_LIBRARIES ++ BLAS ++ sgemm ++ "-SSL2;--linkfortran" ++ "fjlapackexsve") ++ if (BLAS_LIBRARIES) ++ set(BLAS_INFO "ssl2") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ endif (BLAS_LIBRARIES) ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() ++endif() ++ + # Generic BLAS library? 
+ if((NOT BLAS_LIBRARIES) + AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic"))) diff --git a/packages/py-torch/fj-ssl2_1.8.patch b/packages/py-torch/fj-ssl2_1.8.patch new file mode 100644 index 00000000..461c1a29 --- /dev/null +++ b/packages/py-torch/fj-ssl2_1.8.patch @@ -0,0 +1,76 @@ +diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake +index 06464e799a..7f50bd8fa0 100644 +--- a/cmake/Dependencies.cmake ++++ b/cmake/Dependencies.cmake +@@ -118,7 +118,7 @@ else() + set(AT_MKLDNN_ENABLED 0) + set(AT_MKL_ENABLED 0) + endif() +-set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;FLAME;Generic") ++set_property(CACHE BLAS PROPERTY STRINGS "Eigen;ATLAS;OpenBLAS;MKL;vecLib;SSL2;FLAME;Generic") + message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS}) + + if(BLAS STREQUAL "Eigen") +@@ -157,6 +157,20 @@ elseif(BLAS STREQUAL "vecLib") + find_package(vecLib REQUIRED) + include_directories(SYSTEM ${vecLib_INCLUDE_DIR}) + list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS}) ++elseif(BLAS STREQUAL "SSL2") ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ message(STATUS "SSL2 Selected BLAS library") ++ list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so") ++ set(SSL2_FOUND ON) ++ message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ set(WITH_BLAS "ssl2") ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() + elseif(BLAS STREQUAL "Generic") + # On Debian family, the CBLAS ABIs have been merged into libblas.so + find_library(BLAS_LIBRARIES blas) +@@ -171,7 +185,7 @@ if(NOT INTERN_BUILD_MOBILE) + set(AT_MKL_ENABLED 0) + set(AT_MKL_MT 0) + set(USE_BLAS 1) +- if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR 
GENERIC_BLAS_FOUND)) ++ if(NOT (ATLAS_FOUND OR OpenBLAS_FOUND OR MKL_FOUND OR VECLIB_FOUND OR SSL2_FOUND OR GENERIC_BLAS_FOUND)) + message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library") + find_package(BLAS) + if(NOT BLAS_FOUND) +diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake +index e8f5d7c950..29219e057f 100644 +--- a/cmake/Modules/FindBLAS.cmake ++++ b/cmake/Modules/FindBLAS.cmake +@@ -257,6 +257,28 @@ if((NOT BLAS_LIBRARIES) + endif() + endif() + ++# BLAS in SSL2 library? ++if((NOT BLAS_LIBRARIES) ++ AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2"))) ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ check_fortran_libraries( ++ BLAS_LIBRARIES ++ BLAS ++ sgemm ++ "-SSL2;--linkfortran" ++ "fjlapackexsve") ++ if (BLAS_LIBRARIES) ++ set(BLAS_INFO "ssl2") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ endif (BLAS_LIBRARIES) ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() ++endif() ++ + # Generic BLAS library? 
+ if((NOT BLAS_LIBRARIES) + AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic"))) diff --git a/packages/py-torch/fj-ssl2_1.9.patch b/packages/py-torch/fj-ssl2_1.9.patch new file mode 100644 index 00000000..0febb575 --- /dev/null +++ b/packages/py-torch/fj-ssl2_1.9.patch @@ -0,0 +1,76 @@ +diff --git a/cmake/Dependencies.cmake b/cmake/Dependencies.cmake +index 5d57b9ca78..a74fe73b9f 100644 +--- a/cmake/Dependencies.cmake ++++ b/cmake/Dependencies.cmake +@@ -118,7 +118,7 @@ else() + set(AT_MKLDNN_ENABLED 0) + set(AT_MKL_ENABLED 0) + endif() +-set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;vecLib") ++set_property(CACHE BLAS PROPERTY STRINGS "ATLAS;BLIS;Eigen;FLAME;Generic;MKL;OpenBLAS;SSL2;vecLib") + message(STATUS "Trying to find preferred BLAS backend of choice: " ${BLAS}) + + if(BLAS STREQUAL "Eigen") +@@ -161,6 +161,20 @@ elseif(BLAS STREQUAL "vecLib") + find_package(vecLib REQUIRED) + include_directories(SYSTEM ${vecLib_INCLUDE_DIR}) + list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS ${vecLib_LINKER_LIBS}) ++elseif(BLAS STREQUAL "SSL2") ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ message(STATUS "SSL2 Selected BLAS library") ++ list(APPEND Caffe2_PUBLIC_DEPENDENCY_LIBS "fjlapackexsve.so") ++ set(SSL2_FOUND ON) ++ message(STATUS "set CMAKE_SHARED_LINKER_FLAGS: -SSL2 --linkfortran") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ set(WITH_BLAS "ssl2") ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() + elseif(BLAS STREQUAL "Generic") + # On Debian family, the CBLAS ABIs have been merged into libblas.so + find_library(BLAS_LIBRARIES blas) +@@ -175,7 +189,7 @@ if(NOT INTERN_BUILD_MOBILE) + set(AT_MKL_ENABLED 0) + set(AT_MKL_MT 0) + set(USE_BLAS 1) +- if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR 
MKL_FOUND OR OpenBLAS_FOUND OR VECLIB_FOUND)) ++ if(NOT (ATLAS_FOUND OR BLIS_FOUND OR GENERIC_BLAS_FOUND OR MKL_FOUND OR OpenBLAS_FOUND OR SSL2_FOUND OR VECLIB_FOUND)) + message(WARNING "Preferred BLAS (" ${BLAS} ") cannot be found, now searching for a general BLAS library") + find_package(BLAS) + if(NOT BLAS_FOUND) +diff --git a/cmake/Modules/FindBLAS.cmake b/cmake/Modules/FindBLAS.cmake +index eefd6d475a..92ad75d32e 100644 +--- a/cmake/Modules/FindBLAS.cmake ++++ b/cmake/Modules/FindBLAS.cmake +@@ -276,6 +276,28 @@ if((NOT BLAS_LIBRARIES) + endif() + endif() + ++# BLAS in SSL2 library? ++if((NOT BLAS_LIBRARIES) ++ AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "ssl2"))) ++ if(CMAKE_CXX_COMPILER MATCHES ".*/FCC$" ++ AND CMAKE_C_COMPILER MATCHES ".*/fcc$") ++ check_fortran_libraries( ++ BLAS_LIBRARIES ++ BLAS ++ sgemm ++ "-SSL2;--linkfortran" ++ "fjlapackexsve") ++ if (BLAS_LIBRARIES) ++ set(BLAS_INFO "ssl2") ++ set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -SSL2 --linkfortran") ++ endif (BLAS_LIBRARIES) ++ else() ++ message(STATUS "Not built using fcc and FCC.") ++ message(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}") ++ message(STATUS "CMAKE_CXX_COMPILER: ${CMAKE_CXX_COMPILER}") ++ endif() ++endif() ++ + # Generic BLAS library? + if((NOT BLAS_LIBRARIES) + AND ((NOT WITH_BLAS) OR (WITH_BLAS STREQUAL "generic"))) diff --git a/packages/py-torch/package.py b/packages/py-torch/package.py new file mode 100644 index 00000000..e2bc15b6 --- /dev/null +++ b/packages/py-torch/package.py @@ -0,0 +1,704 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os +import sys + +from spack.operating_systems.mac_os import macos_version +from spack.package import * + + +class PyTorch(PythonPackage, CudaPackage, ROCmPackage): + """Tensors and Dynamic neural networks in Python with strong GPU acceleration.""" + + homepage = "https://pytorch.org/" + git = "https://github.com/pytorch/pytorch.git" + submodules = True + + # Exact set of modules is version- and variant-specific, just attempt to import the + # core libraries to ensure that the package was successfully installed. + import_modules = ["torch", "torch.autograd", "torch.nn", "torch.utils"] + + license("BSD-3-Clause") + maintainers("adamjstewart") + + version("main", branch="main") + version("2.5.1", tag="v2.5.1", commit="a8d6afb511a69687bbb2b7e88a3cf67917e1697e") + version("2.5.0", tag="v2.5.0", commit="32f585d9346e316e554c8d9bf7548af9f62141fc") + version("2.4.1", tag="v2.4.1", commit="ee1b6804381c57161c477caa380a840a84167676") + version("2.4.0", tag="v2.4.0", commit="d990dada86a8ad94882b5c23e859b88c0c255bda") + version("2.3.1", tag="v2.3.1", commit="63d5e9221bedd1546b7d364b5ce4171547db12a9") + version("2.3.0", tag="v2.3.0", commit="97ff6cfd9c86c5c09d7ce775ab64ec5c99230f5d") + version("2.2.2", tag="v2.2.2", commit="39901f229520a5256505ec24782f716ee7ddc843") + version("2.2.1", tag="v2.2.1", commit="6c8c5ad5eaf47a62fafbb4a2747198cbffbf1ff0") + version("2.2.0", tag="v2.2.0", commit="8ac9b20d4b090c213799e81acf48a55ea8d437d6") + version("2.1.2", tag="v2.1.2", commit="a8e7c98cb95ff97bb30a728c6b2a1ce6bff946eb") + version("2.1.1", tag="v2.1.1", commit="4c55dc50355d5e923642c59ad2a23d6ad54711e7") + version("2.1.0", tag="v2.1.0", commit="7bcf7da3a268b435777fe87c7794c382f444e86d") + version("2.0.1", tag="v2.0.1", commit="e9ebda29d87ce0916ab08c06ab26fd3766a870e5") + version("2.0.0", tag="v2.0.0", commit="c263bd43e8e8502d4726643bc6fd046f0130ac0e") + version("1.13.1", tag="v1.13.1", 
commit="49444c3e546bf240bed24a101e747422d1f8a0ee") + version("1.13.0", tag="v1.13.0", commit="7c98e70d44abc7a1aead68b6ea6c8adc8c554db5") + version("1.12.1", tag="v1.12.1", commit="664058fa83f1d8eede5d66418abff6e20bd76ca8") + version("1.12.0", tag="v1.12.0", commit="67ece03c8cd632cce9523cd96efde6f2d1cc8121") + version("1.11.0", tag="v1.11.0", commit="bc2c6edaf163b1a1330e37a6e34caf8c553e4755") + version("1.10.2", tag="v1.10.2", commit="71f889c7d265b9636b93ede9d651c0a9c4bee191") + version("1.10.1", tag="v1.10.1", commit="302ee7bfb604ebef384602c56e3853efed262030") + version("1.10.0", tag="v1.10.0", commit="36449ea93134574c2a22b87baad3de0bf8d64d42") + version("1.9.1", tag="v1.9.1", commit="dfbd030854359207cb3040b864614affeace11ce") + version("1.9.0", tag="v1.9.0", commit="d69c22dd61a2f006dcfe1e3ea8468a3ecaf931aa") + version("1.8.2", tag="v1.8.2", commit="e0495a7aa104471d95dc85a1b8f6473fbcc427a8") + version("1.8.1", tag="v1.8.1", commit="56b43f4fec1f76953f15a627694d4bba34588969") + version("1.8.0", tag="v1.8.0", commit="37c1f4a7fef115d719104e871d0cf39434aa9d56") + version("1.7.1", tag="v1.7.1", commit="57bffc3a8e4fee0cce31e1ff1f662ccf7b16db57") + version("1.7.0", tag="v1.7.0", commit="e85d494707b835c12165976b8442af54b9afcb26") + version("1.6.0", tag="v1.6.0", commit="b31f58de6fa8bbda5353b3c77d9be4914399724d") + version("1.5.1", tag="v1.5.1", commit="3c31d73c875d9a4a6ea8a843b9a0d1b19fbe36f3") + version("1.5.0", tag="v1.5.0", commit="4ff3872a2099993bf7e8c588f7182f3df777205b") + version("1.4.1", tag="v1.4.1", commit="74044638f755cd8667bedc73da4dbda4aa64c948") + + depends_on("c", type="build") + depends_on("cxx", type="build") + + is_darwin = sys.platform == "darwin" + + # All options are defined in CMakeLists.txt. + # Some are listed in setup.py, but not all. 
+ variant("debug", default=False, description="Build with debugging support") + variant("caffe2", default=False, description="Build Caffe2", when="@1.7:") + variant("test", default=False, description="Build C++ test binaries") + variant("cuda", default=not is_darwin, description="Use CUDA") + variant("rocm", default=False, description="Use ROCm") + variant("cudnn", default=not is_darwin, description="Use cuDNN", when="+cuda") + variant("fbgemm", default=True, description="Use FBGEMM (quantized 8-bit server operators)") + variant("kineto", default=True, description="Use Kineto profiling library", when="@1.8:") + variant("magma", default=not is_darwin, description="Use MAGMA", when="+cuda") + variant("metal", default=is_darwin, description="Use Metal for Caffe2 iOS build") + variant( + "mps", + default=is_darwin and macos_version() >= Version("12.3"), + description="Use MPS for macOS build (requires full Xcode suite)", + when="@1.12: platform=darwin", + ) + variant("nccl", default=True, description="Use NCCL", when="+cuda platform=linux") + variant("nccl", default=True, description="Use NCCL", when="+rocm platform=linux") + # Requires AVX2: https://discuss.pytorch.org/t/107518 + variant("nnpack", default=True, description="Use NNPACK", when="target=x86_64_v3:") + variant("numa", default=True, description="Use NUMA", when="platform=linux") + variant("numpy", default=True, description="Use NumPy") + variant("openmp", default=True, description="Use OpenMP for parallel code") + variant("qnnpack", default=True, description="Use QNNPACK (quantized 8-bit operators)") + variant("valgrind", default=True, description="Use Valgrind", when="@1.8: platform=linux") + variant("xnnpack", default=True, description="Use XNNPACK", when="@1.5:") + variant("mkldnn", default=True, description="Use MKLDNN") + variant("distributed", default=True, description="Use distributed") + variant("mpi", default=True, description="Use MPI for Caffe2", when="+distributed") + variant("ucc", 
default=False, description="Use UCC", when="@1.13: +distributed") + variant("gloo", default=True, description="Use Gloo", when="+distributed") + variant("tensorpipe", default=True, description="Use TensorPipe", when="@1.6: +distributed") + variant("onnx_ml", default=True, description="Enable traditional ONNX ML API", when="@1.5:") + variant( + "breakpad", + default=True, + description="Enable breakpad crash dump library", + when="@1.10:1.11", + ) + # py-torch has strict dependencies on old protobuf/py-protobuf versions that + # cause problems with other packages that require newer versions of protobuf + # and py-protobuf --> provide an option to use the internal/vendored protobuf. + variant("custom-protobuf", default=False, description="Use vendored protobuf") + + conflicts("+cuda+rocm") + conflicts("+tensorpipe", when="+rocm ^hip@:5.1", msg="TensorPipe not supported until ROCm 5.2") + conflicts("+breakpad", when="target=ppc64:") + conflicts("+breakpad", when="target=ppc64le:") + + # https://github.com/pytorch/pytorch/issues/77811 + conflicts("+qnnpack", when="platform=darwin target=aarch64:") + + # https://github.com/pytorch/pytorch/issues/97397 + conflicts( + "~tensorpipe", + when="@1.8: +distributed", + msg="TensorPipe must be enabled with +distributed", + ) + + # https://github.com/pytorch/pytorch/issues/100991 + conflicts("%apple-clang@14:", when="@:1") + + conflicts( + "cuda_arch=none", + when="+cuda", + msg="Must specify CUDA compute capabilities of your GPU, see " + "https://developer.nvidia.com/cuda-gpus", + ) + + # Required dependencies + # Based on PyPI wheel availability + with default_args(type=("build", "link", "run")): + depends_on("python@3.9:3.13", when="@2.5:") + depends_on("python@3.8:3.12", when="@2.2:2.4") + depends_on("python@3.8:3.11", when="@2.0:2.1") + depends_on("python@:3.10", when="@1.11:1") + depends_on("python@:3.9", when="@1.7.1:1.10") + depends_on("python@:3.8", when="@1.4:1.7.0") + + # CMakelists.txt + with 
default_args(type="build"): + depends_on("cmake@3.18:", when="@2:") + depends_on("cmake@3.13:", when="@1.11:") + depends_on("cmake@3.10:", when="@1.10:") + depends_on("cmake@3.5:") + depends_on("ninja@1.5:") + + with default_args(type=("build", "run")): + # setup.py + depends_on("py-filelock", when="@2:") + depends_on("py-typing-extensions@4.8:", when="@2.2:") + depends_on("py-typing-extensions@3.6.2.1:", when="@1.7:") + depends_on("py-sympy", when="@2:") + depends_on("py-networkx", when="@2:") + depends_on("py-jinja2", when="@2:") + depends_on("py-fsspec", when="@2.1:") + + # pyproject.toml + depends_on("py-setuptools") + depends_on("py-astunparse", when="@1.13:") + depends_on("py-numpy@1.16.6:") + # https://github.com/pytorch/pytorch/issues/107302 + depends_on("py-numpy@:1", when="@:2.2") + depends_on("py-pyyaml") + depends_on("py-requests", when="@1.13:") + + # Undocumented dependencies + depends_on("py-tqdm", type="run") + depends_on("blas") + depends_on("lapack") + + # Third party dependencies + depends_on("fp16@2020-05-14", when="@1.6:") + depends_on("fxdiv@2020-04-17", when="@1.6:") + # https://github.com/pytorch/pytorch/issues/60332 + # depends_on("xnnpack@2024-02-29", when="@2.3:+xnnpack") + # depends_on("xnnpack@2022-12-21", when="@2.0:2.2+xnnpack") + # depends_on("xnnpack@2022-02-16", when="@1.12:1+xnnpack") + # depends_on("xnnpack@2021-06-21", when="@1.10:1.11+xnnpack") + # depends_on("xnnpack@2021-02-22", when="@1.8:1.9+xnnpack") + # depends_on("xnnpack@2020-03-23", when="@1.6:1.7+xnnpack") + depends_on("benchmark", when="@1.6:+test") + depends_on("cpuinfo@2024-09-06", when="@2.5.1:") + depends_on("cpuinfo@2024-08-30", when="@2.5.0") + depends_on("cpuinfo@2023-11-04", when="@2.3:2.4") + depends_on("cpuinfo@2023-01-13", when="@2.1:2.2") + depends_on("cpuinfo@2022-08-19", when="@1.13:2.0") + depends_on("cpuinfo@2020-12-17", when="@1.8:1.12") + depends_on("cpuinfo@2020-06-11", when="@1.6:1.7") + depends_on("eigen") + depends_on("gloo@2023-12-03", 
when="@2.3:+gloo") + depends_on("gloo@2023-05-19", when="@2.1:2.2+gloo") + depends_on("gloo@2023-01-17", when="@2.0+gloo") + depends_on("gloo@2022-05-18", when="@1.13:1+gloo") + depends_on("gloo@2021-05-21", when="@1.10:1.12+gloo") + depends_on("gloo@2021-05-04", when="@1.9+gloo") + depends_on("gloo@2020-09-18", when="@1.7:1.8+gloo") + depends_on("gloo@2020-03-17", when="@1.6+gloo") + depends_on("gloo+cuda", when="@1.6:+gloo+cuda") + depends_on("gloo+libuv", when="@1.6: platform=darwin") + depends_on("nccl", when="+nccl+cuda") + # https://github.com/pytorch/pytorch/issues/60331 + # depends_on("onnx@1.16.0", when="@2.3:+onnx_ml") + # depends_on("onnx@1.15.0", when="@2.2+onnx_ml") + # depends_on("onnx@1.14.1", when="@2.1+onnx_ml") + # depends_on("onnx@1.13.1", when="@2.0+onnx_ml") + # depends_on("onnx@1.12.0", when="@1.13:1+onnx_ml") + # depends_on("onnx@1.11.0", when="@1.12+onnx_ml") + # depends_on("onnx@1.10.1_2021-10-08", when="@1.11+onnx_ml") + # depends_on("onnx@1.10.1", when="@1.10+onnx_ml") + # depends_on("onnx@1.8.0_2020-11-03", when="@1.8:1.9+onnx_ml") + # depends_on("onnx@1.7.0_2020-05-31", when="@1.6:1.7+onnx_ml") + with when("~custom-protobuf"): + depends_on("protobuf@3.13.0", when="@1.10:") + depends_on("protobuf@3.11.4", when="@1.6:1.9") + depends_on("protobuf@3.6.1", when="@1.1:1.5") + depends_on("protobuf@3.5.0", when="@1.0") + with default_args(type=("build", "run")): + depends_on("py-protobuf@3.13", when="@1.10:") + depends_on("py-protobuf@3.11", when="@1.6:1.9") + depends_on("py-protobuf@3.6", when="@1.1:1.5") + depends_on("py-protobuf@3.5", when="@1.0") + depends_on("psimd@2020-05-17", when="@1.6:") + depends_on("pthreadpool@2023-08-29", when="@2.2:") + depends_on("pthreadpool@2021-04-13", when="@1.9:2.1") + depends_on("pthreadpool@2020-10-05", when="@1.8") + depends_on("pthreadpool@2020-06-15", when="@1.6:1.7") + with default_args(type=("build", "link", "run")): + depends_on("py-pybind11@2.13.5:", when="@2.5:") + depends_on("py-pybind11@2.12.0:", 
when="@2.3:2.4") + depends_on("py-pybind11@2.11.0:", when="@2.1:2.2") + depends_on("py-pybind11@2.10.1:", when="@2.0") + depends_on("py-pybind11@2.10.0:", when="@1.13:1") + depends_on("py-pybind11@2.6.2:", when="@1.8:1.12") + depends_on("py-pybind11@2.3.0:", when="@:1.7") + depends_on("sleef@3.6.0_2024-03-20", when="@2.4:") + depends_on("sleef@3.5.1_2020-12-22", when="@1.8:2.3") + depends_on("sleef@3.4.0_2019-07-30", when="@1.6:1.7") + + # Optional dependencies + with default_args(type=("build", "link", "run")): + # cmake/public/cuda.cmake + depends_on("cuda@11:", when="@2.4:+cuda") + # https://github.com/pytorch/pytorch/issues/122169 + depends_on("cuda@11:12.3", when="@2.0:2.3+cuda") + depends_on("cuda@10.2:12.3", when="@1.11:1+cuda") + # https://discuss.pytorch.org/t/compiling-1-10-1-from-source-with-gcc-11-and-cuda-11-5/140971 + depends_on("cuda@10.2:11.4", when="@1.10+cuda") + depends_on("cuda@9.2:11.4", when="@1.6:1.9+cuda") + depends_on("cuda@9:11.4", when="@:1.5+cuda") + # https://github.com/pytorch/pytorch#prerequisites + # https://github.com/pytorch/pytorch/issues/119400 + depends_on("cudnn@8.5:9.0", when="@2.3:+cudnn") + depends_on("cudnn@7:8", when="@1.6:2.2+cudnn") + depends_on("cudnn@7", when="@:1.5+cudnn") + depends_on("magma+cuda", when="+magma+cuda") + depends_on("magma+rocm", when="+magma+rocm") + depends_on("numactl", when="+numa") + depends_on("llvm-openmp", when="%apple-clang +openmp") + depends_on("valgrind", when="+valgrind") + with when("+rocm"): + depends_on("hsa-rocr-dev") + depends_on("hip") + depends_on("rccl", when="+nccl") + depends_on("rocprim") + depends_on("hipcub") + depends_on("rocthrust") + depends_on("roctracer-dev") + depends_on("rocrand") + depends_on("hipsparse") + depends_on("hipfft") + depends_on("rocfft") + depends_on("rocblas") + depends_on("miopen-hip") + depends_on("rocminfo") + depends_on("mpi", when="+mpi") + depends_on("ucc", when="+ucc") + depends_on("ucx", when="+ucc") + depends_on("mkl", when="+mkldnn") + + # Test 
dependencies + with default_args(type="test"): + depends_on("py-hypothesis") + depends_on("py-six") + depends_on("py-psutil") + + # Historical dependencies + with default_args(type=("build", "run")): + depends_on("mkl@2021.1.1:2021.4.0", when="@2.3 platform=windows") + depends_on("py-cffi", when="@:1") + depends_on("py-future", when="@1.5:1") + depends_on("py-six", when="@1.13:1") + + conflicts("%gcc@:9.3", when="@2.2:", msg="C++17 support required") + + # https://github.com/pytorch/pytorch/issues/90448 + patch( + "https://github.com/pytorch/pytorch/pull/97270.patch?full_index=1", + sha256="beb3fb57746cf8443f5caa6e08b2f8f4d4822c1e11e0c912134bd166c6a0ade7", + when="@1.10:2.0", + ) + + # Fix BLAS being overridden by MKL + # https://github.com/pytorch/pytorch/issues/60328 + patch( + "https://github.com/pytorch/pytorch/pull/59220.patch?full_index=1", + sha256="6d5717267f901e8ee493dfacd08734d9bcc48ad29a76ca9ef702368e96bee675", + when="@:1.11", + ) + + # Fixes build on older systems with glibc <2.12 + patch( + "https://github.com/pytorch/pytorch/pull/55063.patch?full_index=1", + sha256="2229bcbf20fbe88aa9f7318f89c126ec7f527875ffe689a763c78abfa127a65c", + when="@:1.8.1", + ) + + # https://github.com/pytorch/pytorch/issues/70297 + patch( + "https://github.com/google/breakpad/commit/605c51ed96ad44b34c457bbca320e74e194c317e.patch?full_index=1", + sha256="694d83db3a2147d543357f22ba5c8d5683d0ed43e693d42bca8f24ec50080f98", + when="+breakpad", + working_dir="third_party/breakpad", + ) + + # Fixes CMake configuration error when XNNPACK is disabled + # https://github.com/pytorch/pytorch/pull/35607 + # https://github.com/pytorch/pytorch/pull/37865 + patch("xnnpack.patch", when="@1.5") + + # Fixes build error when ROCm is enabled for pytorch-1.5 release + patch("rocm.patch", when="@1.5+rocm") + + # Fixes compilation with Clang 9.0.0 and Apple Clang 11.0.3 + # https://github.com/pytorch/pytorch/pull/37086 + patch( + 
"https://github.com/pytorch/pytorch/commit/e921cd222a8fbeabf5a3e74e83e0d8dfb01aa8b5.patch?full_index=1", + sha256="0f3ad037a95af9d34b1d085050c1e7771fd00f0b89e5b3a276097b7c9f4fabf8", + when="@:1.5", + ) + + # Fixes 'FindOpenMP.cmake' + # to detect openmp settings used by Fujitsu compiler. + patch("detect_omp_of_fujitsu_compiler.patch", when="%fj") + + # Fixes to build with fujitsu-ssl2 + patch("fj-ssl2_1.11.patch", when="@1.11:^fujitsu-ssl2") + patch("fj-ssl2_1.10.patch", when="@1.10^fujitsu-ssl2") + patch("fj-ssl2_1.9.patch", when="@1.9^fujitsu-ssl2") + patch("fj-ssl2_1.8.patch", when="@1.8^fujitsu-ssl2") + patch("fj-ssl2_1.6-1.7.patch", when="@1.6:1.7^fujitsu-ssl2") + patch("fj-ssl2_1.3-1.5.patch", when="@:1.5^fujitsu-ssl2") + + # Fix compilation of +distributed~tensorpipe + # https://github.com/pytorch/pytorch/issues/68002 + patch( + "https://github.com/pytorch/pytorch/commit/c075f0f633fa0136e68f0a455b5b74d7b500865c.patch?full_index=1", + sha256="41271e494a3a60a65a8dd45ac053d1a6e4e4d5b42c2dac589ac67524f61ac41e", + when="@1.10.0+distributed~tensorpipe", + ) + + # Use patches from IBM's Open CE to enable building on Power systems + # 01xx patches are specific to open-ce, we only include 03xx patches used in meta.yaml + # https://github.com/open-ce/pytorch-feedstock + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0302-cpp-extension.patch", + sha256="ecb3973fa7d0f4c8f8ae40433f3ca5622d730a7b16f6cb63325d1e95baff8aa2", + when="@1.10:1.11 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0311-PR66085-Remove-unused-dump-method-from-VSX-vec256-methods.patch", + sha256="f05db59f3def4c4215db7142d81029c73fe330c660492159b66d65ca5001f4d1", + when="@1.10 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0312-PR67331-Dummpy-VSX-bfloat16-implementation.patch", + 
sha256="860b64afa85f5e6647ebc3c91d5a0bb258784770900c9302c3599c98d5cff1ee", + when="@1.10 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0313-add-missing-vsx-dispatch.patch", + sha256="7393c2bc0b6d41ecc813c829a1e517bee864686652e91f174cb7bcdfb10ba451", + when="@1.10 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.10/recipe/0314-fix-nullpointer-error.patch", + sha256="b9cff8966f316f58514c66a403b7a6786be3cdb252f1380a6b91c722686a4097", + when="@1.10 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.7.4/pytorch-1.12/recipe/0302-cpp-extension.patch", + sha256="2fac519cca8997f074c263505657ff867e7ba2d6637fc8bda99c70a99be0442a", + when="@1.12 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.8.0/pytorch-1.13/recipe/0302-cpp-extension.patch", + sha256="a54db63640b90e5833cc1099c0935572f5297d2d8625f62f01ac1fda79ed4569", + when="@1.13 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0309-fallback-to-cpu_kernel-with-VSX.patch", + sha256="27f41c8d6cb61e69e761be62f03dc1ce023cbca34926e3ba559996821a7ce726", + when="@2.0 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.9.0/pytorch-2.0/recipe/0310-PR100149.patch", + sha256="1adbd38a9cc1611f1caaa325614695f4349d9ffd236332e0d8f0de5a3880f4dd", + when="@2.0 arch=ppc64le:", + ) + patch( + "https://github.com/open-ce/pytorch-feedstock/raw/open-ce-v1.10.0/pytorch-2.0/recipe/0311-PR104956.patch", + sha256="be27c906924a21be198a3ea6c459739a1daa8b8b89045af339dafa4cd6f90d6c", + when="@2.0 arch=ppc64le:", + ) + conflicts("arch=ppc64le:", when="@:1.9") + + # Cherry-pick a patch to allow earlier versions of PyTorch to work with CUDA 11.4 + patch( + 
"https://github.com/pytorch/pytorch/commit/c74c0c571880df886474be297c556562e95c00e0.patch?full_index=1", + sha256="8ff7d285e52e4718bad1ca01ceb3bb6471d7828329036bb94222717fcaa237da", + when="@:1.9.1 ^cuda@11.4.100:", + ) + + # PyTorch does not build with GCC 12 (fixed in 2.0) + # See: https://github.com/pytorch/pytorch/issues/77614 + patch( + "https://github.com/facebookincubator/gloo/commit/4a5e339b764261d20fc409071dc7a8b8989aa195.patch?full_index=1", + sha256="dc8b3a9bea4693f32d6850ea2ce6ce75e1778538bfba464b50efca92bac425e3", + when="@:1 %gcc@12:", + working_dir="third_party/gloo", + ) + + # PyTorch does not build on Linux >=6.0.3 (fixed in master) + # See: https://github.com/facebookincubator/gloo/issues/345 + patch( + "https://github.com/facebookincubator/gloo/commit/10909297fedab0a680799211a299203e53515032.patch?full_index=1", + sha256="8e6e9a44e0533ba4303a95a651b1934e5d73632cab08cc7d5a9435e1e64aa424", + when="@:1", + working_dir="third_party/gloo", + ) + + # Some missing includes + # See: https://github.com/pytorch/pytorch/pull/100036 + patch( + "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/100036.patch?full_index=1", + sha256="65060b54c31196b26dcff29bbb178fd17d5677e8481a2a06002c0ca4dd37b3d0", + when="@2.0.0:2.0.1", + ) + # See: https://github.com/pytorch/pytorch/pull/100049 + patch( + "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/100049.patch?full_index=1", + sha256="673056141c0ea6ff4411f65a26f1a9d7a7c49ad8fe034a01ef0d56ba8a7a9386", + when="@2.0.0:2.0.1", + ) + + # Use correct OpenBLAS include path under prefix + patch( + "https://patch-diff.githubusercontent.com/raw/pytorch/pytorch/pull/110063.patch?full_index=1", + sha256="23fb4009f7337051fc5303927ff977186a5af960245e7212895406477d8b2f66", + when="@:2.1", + ) + + patch( + "https://github.com/pytorch/FBGEMM/commit/da01a59556fec9776733bf20aea8fe8fb29cdd3d.patch?full_index=1", + sha256="97d8bd43f8cd8bb203dab3480d609c08499224acaca9915f2bdeb23c62350fb1", + when="@2.0.1 
+fbgemm", + working_dir="third_party/fbgemm", + ) + + @when("@1.5.0:") + def patch(self): + # https://github.com/pytorch/pytorch/issues/52208 + filter_file( + "torch_global_deps PROPERTIES LINKER_LANGUAGE C", + "torch_global_deps PROPERTIES LINKER_LANGUAGE CXX", + "caffe2/CMakeLists.txt", + ) + + def torch_cuda_arch_list(self, env): + if "+cuda" in self.spec: + torch_cuda_arch = CudaPackage.compute_capabilities( + self.spec.variants["cuda_arch"].value + ) + env.set("TORCH_CUDA_ARCH_LIST", ";".join(torch_cuda_arch)) + + def setup_build_environment(self, env): + """Set environment variables used to control the build. + + PyTorch's ``setup.py`` is a thin wrapper around ``cmake``. + In ``tools/setup_helpers/cmake.py``, you can see that all + environment variables that start with ``BUILD_``, ``USE_``, + or ``CMAKE_``, plus a few more explicitly specified variable + names, are passed directly to the ``cmake`` call. Therefore, + most flags defined in ``CMakeLists.txt`` can be specified as + environment variables. + """ + + def enable_or_disable(variant, keyword="USE", var=None): + """Set environment variable to enable or disable support for a + particular variant. + + Parameters: + variant (str): the variant to check + keyword (str): the prefix to use for enabling/disabling + var (str): CMake variable to set. 
Defaults to variant.upper() + """ + if var is None: + var = variant.upper() + + if "+" + variant in self.spec: + env.set(keyword + "_" + var, "ON") + elif "~" + variant in self.spec: + env.set(keyword + "_" + var, "OFF") + + # Build in parallel to speed up build times + env.set("MAX_JOBS", make_jobs) + + # Spack logs have trouble handling colored output + env.set("COLORIZE_OUTPUT", "OFF") + + enable_or_disable("test", keyword="BUILD") + enable_or_disable("caffe2", keyword="BUILD") + + enable_or_disable("cuda") + if "+cuda" in self.spec: + env.set("CUDA_TOOLKIT_ROOT_DIR", self.spec["cuda"].prefix) # Linux/macOS + env.set("CUDA_HOME", self.spec["cuda"].prefix) # Linux/macOS + env.set("CUDA_PATH", self.spec["cuda"].prefix) # Windows + self.torch_cuda_arch_list(env) + + if self.spec.satisfies("%clang"): + for flag in self.spec.compiler_flags["cxxflags"]: + if "gcc-toolchain" in flag: + env.set("CMAKE_CUDA_FLAGS", "=-Xcompiler={0}".format(flag)) + + enable_or_disable("rocm") + if "+rocm" in self.spec: + env.set("PYTORCH_ROCM_ARCH", ";".join(self.spec.variants["amdgpu_target"].value)) + env.set("HSA_PATH", self.spec["hsa-rocr-dev"].prefix) + env.set("ROCBLAS_PATH", self.spec["rocblas"].prefix) + env.set("ROCFFT_PATH", self.spec["rocfft"].prefix) + env.set("HIPFFT_PATH", self.spec["hipfft"].prefix) + env.set("HIPSPARSE_PATH", self.spec["hipsparse"].prefix) + env.set("HIP_PATH", self.spec["hip"].prefix) + env.set("HIPRAND_PATH", self.spec["rocrand"].prefix) + env.set("ROCRAND_PATH", self.spec["rocrand"].prefix) + env.set("MIOPEN_PATH", self.spec["miopen-hip"].prefix) + if "+nccl" in self.spec: + env.set("RCCL_PATH", self.spec["rccl"].prefix) + env.set("ROCPRIM_PATH", self.spec["rocprim"].prefix) + env.set("HIPCUB_PATH", self.spec["hipcub"].prefix) + env.set("ROCTHRUST_PATH", self.spec["rocthrust"].prefix) + env.set("ROCTRACER_PATH", self.spec["roctracer-dev"].prefix) + if self.spec.satisfies("^hip@5.2.0:"): + env.set("CMAKE_MODULE_PATH", 
self.spec["hip"].prefix.lib.cmake.hip) + + enable_or_disable("cudnn") + if "+cudnn" in self.spec: + # cmake/Modules_CUDA_fix/FindCUDNN.cmake + env.set("CUDNN_INCLUDE_DIR", self.spec["cudnn"].prefix.include) + env.set("CUDNN_LIBRARY", self.spec["cudnn"].libs[0]) + + # Flash attention has very high memory requirements that may cause the build to fail + # https://github.com/pytorch/pytorch/issues/111526 + # https://github.com/pytorch/pytorch/issues/124018 + env.set("USE_FLASH_ATTENTION", "OFF") + + enable_or_disable("fbgemm") + enable_or_disable("kineto") + enable_or_disable("magma") + enable_or_disable("metal") + enable_or_disable("mps") + enable_or_disable("breakpad") + + enable_or_disable("nccl") + if "+cuda+nccl" in self.spec: + env.set("NCCL_LIB_DIR", self.spec["nccl"].libs.directories[0]) + env.set("NCCL_INCLUDE_DIR", self.spec["nccl"].prefix.include) + + # cmake/External/nnpack.cmake + enable_or_disable("nnpack") + + enable_or_disable("numa") + if "+numa" in self.spec: + # cmake/Modules/FindNuma.cmake + env.set("NUMA_ROOT_DIR", self.spec["numactl"].prefix) + + # cmake/Modules/FindNumPy.cmake + enable_or_disable("numpy") + # cmake/Modules/FindOpenMP.cmake + enable_or_disable("openmp") + enable_or_disable("qnnpack") + enable_or_disable("qnnpack", var="PYTORCH_QNNPACK") + enable_or_disable("valgrind") + enable_or_disable("xnnpack") + enable_or_disable("mkldnn") + enable_or_disable("distributed") + enable_or_disable("mpi") + enable_or_disable("ucc") + # cmake/Modules/FindGloo.cmake + enable_or_disable("gloo") + enable_or_disable("tensorpipe") + + if "+debug" in self.spec: + env.set("DEBUG", "ON") + else: + env.set("DEBUG", "OFF") + + if "+onnx_ml" in self.spec: + env.set("ONNX_ML", "ON") + elif "~onnx_ml" in self.spec: + env.set("ONNX_ML", "OFF") + + if not self.spec.satisfies("@main"): + env.set("PYTORCH_BUILD_VERSION", self.version) + env.set("PYTORCH_BUILD_NUMBER", 0) + + # BLAS to be used by Caffe2 + # Options defined in cmake/Dependencies.cmake and 
cmake/Modules/FindBLAS.cmake + if self.spec["blas"].name == "atlas": + env.set("BLAS", "ATLAS") + env.set("WITH_BLAS", "atlas") + env.set("Atlas_ROOT_DIR", self.spec["atlas"].prefix) + elif self.spec["blas"].name in ["blis", "amdblis"]: + env.set("BLAS", "BLIS") + env.set("WITH_BLAS", "blis") + env.set("BLIS_HOME", self.spec["blas"].prefix) + elif self.spec["blas"].name == "eigen": + env.set("BLAS", "Eigen") + elif self.spec["lapack"].name in ["libflame", "amdlibflame"]: + env.set("BLAS", "FLAME") + env.set("WITH_BLAS", "FLAME") + elif self.spec["blas"].name in ["intel-mkl", "intel-parallel-studio", "intel-oneapi-mkl"]: + env.set("BLAS", "MKL") + env.set("WITH_BLAS", "mkl") + # help find MKL + if self.spec["mkl"].name == "intel-oneapi-mkl": + env.set("INTEL_MKL_DIR", self.spec["mkl"].prefix.mkl.latest) + else: + env.set("INTEL_MKL_DIR", self.spec["mkl"].prefix.mkl) + elif self.spec["blas"].name == "openblas": + env.set("BLAS", "OpenBLAS") + env.set("WITH_BLAS", "open") + env.set("OpenBLAS_HOME", self.spec["openblas"].prefix) + elif self.spec["blas"].name == "veclibfort": + env.set("BLAS", "vecLib") + env.set("WITH_BLAS", "veclib") + elif self.spec["blas"].name == "fujitsu-ssl2": + env.set("BLAS", "SSL2") + env.set("WITH_BLAS", "ssl2") + else: + env.set("BLAS", "Generic") + env.set("WITH_BLAS", "generic") + + # Don't use vendored third-party libraries when possible + # env.set("USE_SYSTEM_LIBS", "ON") + env.set("USE_SYSTEM_BENCHMARK", "ON") + env.set("USE_SYSTEM_CPUINFO", "ON") + env.set("USE_SYSTEM_EIGEN_INSTALL", "ON") + env.set("USE_SYSTEM_FP16", "ON") + env.set("USE_SYSTEM_FXDIV", "ON") + env.set("USE_SYSTEM_GLOO", "ON") + env.set("USE_SYSTEM_NCCL", "ON") + # https://github.com/pytorch/pytorch/issues/60331 + # env.set("USE_SYSTEM_ONNX", "ON") + env.set("USE_SYSTEM_PSIMD", "ON") + env.set("USE_SYSTEM_PTHREADPOOL", "ON") + env.set("USE_SYSTEM_PYBIND11", "ON") + env.set("USE_SYSTEM_SLEEF", "ON") + env.set("USE_SYSTEM_UCC", "ON") + # 
https://github.com/pytorch/pytorch/issues/60332 + # env.set("USE_SYSTEM_XNNPACK", "ON") + + if self.spec.satisfies("+custom-protobuf"): + env.set("BUILD_CUSTOM_PROTOBUF", "ON") + else: + env.set("BUILD_CUSTOM_PROTOBUF", "OFF") + + def setup_run_environment(self, env): + self.torch_cuda_arch_list(env) + + @run_before("install") + def build_amd(self): + if "+rocm" in self.spec: + python(os.path.join("tools", "amd_build", "build_amd.py")) + + @run_after("install") + @on_package_attributes(run_tests=True) + def install_test(self): + with working_dir("test"): + python("run_test.py") + + @property + def cmake_prefix_paths(self): + cmake_prefix_paths = [join_path(python_platlib, "torch", "share", "cmake")] + return cmake_prefix_paths diff --git a/packages/py-torch/rocm.patch b/packages/py-torch/rocm.patch new file mode 100644 index 00000000..b50cc7e1 --- /dev/null +++ b/packages/py-torch/rocm.patch @@ -0,0 +1,98 @@ +diff --git a/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h b/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h +index 9cd678dfb4cc7..4630465115c7c 100644 +--- a/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h ++++ b/aten/src/ATen/cuda/nvrtc_stub/ATenNVRTC.h +@@ -67,6 +67,14 @@ namespace at { namespace cuda { + // + // HIP doesn't have + // cuGetErrorString (maps to non-functional hipGetErrorString___) ++// ++// HIP from ROCm 3.5 on renamed hipOccupancyMaxActiveBlocksPerMultiprocessor ++// to hipModuleOccupancyMaxActiveBlocksPerMultiprocessor. 
++#if HIP_VERSION < 305 ++#define HIPOCCUPANCYMAXACTIVEBLOCKSPERMULTIPROCESSOR hipOccupancyMaxActiveBlocksPerMultiprocessor ++#else ++#define HIPOCCUPANCYMAXACTIVEBLOCKSPERMULTIPROCESSOR cuOccupancyMaxActiveBlocksPerMultiprocessor ++#endif + + #define AT_FORALL_NVRTC(_) \ + _(nvrtcVersion) \ +@@ -76,7 +84,7 @@ namespace at { namespace cuda { + _(nvrtcGetPTX) \ + _(cuModuleLoadData) \ + _(cuModuleGetFunction) \ +- _(cuOccupancyMaxActiveBlocksPerMultiprocessor) \ ++ _(HIPOCCUPANCYMAXACTIVEBLOCKSPERMULTIPROCESSOR)\ + _(nvrtcGetErrorString) \ + _(nvrtcGetProgramLogSize) \ + _(nvrtcGetProgramLog) \ +diff --git a/aten/src/ATen/native/cuda/SoftMax.cu b/aten/src/ATen/native/cuda/SoftMax.cu +index da1995123ecfc..f935eb4ef3d0e 100644 +--- a/aten/src/ATen/native/cuda/SoftMax.cu ++++ b/aten/src/ATen/native/cuda/SoftMax.cu +@@ -127,8 +127,8 @@ void SpatialSoftMax_getLaunchSizes( + uint32_t block_threads = block.x * block.y; + smem_size = block.x == 1 ? 0 : block_threads * sizeof(accscalar_t); + int max_active_blocks; +-#ifdef __HIP_PLATFORM_HCC__ +- // XXX HIP function signature is not compatible yet. ++#if defined(__HIP_PLATFORM_HCC__) && HIP_VERSION < 305 ++ // HIP function signature is not compatible yet. 
+ uint32_t max_blocks; + cudaOccupancyMaxActiveBlocksPerMultiprocessor(&max_blocks, + k, block_threads, smem_size); +diff --git a/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp b/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp +index 5586e49919727..27315ee475277 100644 +--- a/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp ++++ b/torch/csrc/jit/codegen/fuser/cuda/fused_kernel.cpp +@@ -140,10 +140,10 @@ FusedKernelCUDA::FusedKernelCUDA( + nvrtc().cuModuleGetFunction(&function_, module_, name_.c_str())); + + // Computes max blocks +-#ifdef __HIP_PLATFORM_HCC__ +- // XXX HIP function signature is not compatible yet ++#if defined(__HIP_PLATFORM_HCC__) && HIP_VERSION < 305 ++ // HIP function signature is not compatible yet + uint32_t max_blocks; +- AT_CUDA_DRIVER_CHECK(nvrtc().cuOccupancyMaxActiveBlocksPerMultiprocessor( ++ AT_CUDA_DRIVER_CHECK(nvrtc().hipOccupancyMaxActiveBlocksPerMultiprocessor( + &max_blocks, function_, 128, 0)); + maxBlocks_ = max_blocks; + #else +diff --git a/torch/utils/hipify/cuda_to_hip_mappings.py b/torch/utils/hipify/cuda_to_hip_mappings.py +index 7e21363cbe6af..26f269d92ae38 100644 +--- a/torch/utils/hipify/cuda_to_hip_mappings.py ++++ b/torch/utils/hipify/cuda_to_hip_mappings.py +@@ -2890,7 +2890,7 @@ + ( + "cuOccupancyMaxActiveBlocksPerMultiprocessor", + ( +- "hipOccupancyMaxActiveBlocksPerMultiprocessor", ++ "hipModuleOccupancyMaxActiveBlocksPerMultiprocessor", + CONV_OCCUPANCY, + API_DRIVER, + ), +@@ -2898,7 +2898,7 @@ + ( + "cuOccupancyMaxActiveBlocksPerMultiprocessorWithFlags", + ( +- "hipOccupancyMaxActiveBlocksPerMultiprocessorWithFlags", ++ "hipModuleOccupancyMaxActiveBlocksPerMultiprocessorWithFlags", + CONV_OCCUPANCY, + API_DRIVER, + HIP_UNSUPPORTED, +@@ -2906,12 +2906,12 @@ + ), + ( + "cuOccupancyMaxPotentialBlockSize", +- ("hipOccupancyMaxPotentialBlockSize", CONV_OCCUPANCY, API_DRIVER), ++ ("hipModuleOccupancyMaxPotentialBlockSize", CONV_OCCUPANCY, API_DRIVER), + ), + ( + "cuOccupancyMaxPotentialBlockSizeWithFlags", + 
( +- "hipOccupancyMaxPotentialBlockSizeWithFlags", ++ "hipModuleOccupancyMaxPotentialBlockSizeWithFlags", + CONV_OCCUPANCY, + API_DRIVER, + HIP_UNSUPPORTED, diff --git a/packages/py-torch/xnnpack.patch b/packages/py-torch/xnnpack.patch new file mode 100644 index 00000000..15403308 --- /dev/null +++ b/packages/py-torch/xnnpack.patch @@ -0,0 +1,47 @@ +diff --git a/caffe2/CMakeLists.txt b/caffe2/CMakeLists.txt +index 8025a7de3c..0da37079d6 100644 +--- a/caffe2/CMakeLists.txt ++++ b/caffe2/CMakeLists.txt +@@ -46,12 +46,19 @@ if (INTERN_BUILD_ATEN_OPS) + list(APPEND Caffe2_DEPENDENCY_INCLUDE ${ATen_THIRD_PARTY_INCLUDE}) + endif() + ++# {Q/X,etc} NPACK support is enabled by default, if none of these options ++# are selected, turn this flag ON to incidate the support is disabled ++set(NNPACK_AND_FAMILY_DISABLED OFF) ++if(NOT (USE_NNPACK OR USE_QNNPACK OR USE_PYTORCH_QNNPACK OR USE_XNNPACK)) ++ set(NNPACK_AND_FAMILY_DISABLED ON) ++endif() ++ + # ---[ Caffe2 build + # Note: the folders that are being commented out have not been properly + # addressed yet. + + # For pthreadpool_new_if_impl. TODO: Remove when threadpools are unitied. +-if (NOT MSVC) ++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED) + IF(NOT TARGET fxdiv) + SET(FXDIV_BUILD_TESTS OFF CACHE BOOL "") + SET(FXDIV_BUILD_BENCHMARKS OFF CACHE BOOL "") +@@ -710,7 +717,7 @@ ELSEIF(USE_CUDA) + ENDIF() + + +-if (NOT MSVC) ++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED) + TARGET_LINK_LIBRARIES(torch_cpu PRIVATE fxdiv) + endif() + +diff --git a/caffe2/utils/CMakeLists.txt b/caffe2/utils/CMakeLists.txt +index 27aabb1315..3c7845c67d 100644 +--- a/caffe2/utils/CMakeLists.txt ++++ b/caffe2/utils/CMakeLists.txt +@@ -36,7 +36,7 @@ list(APPEND Caffe2_CPU_SRCS + # ---[ threadpool/pthreadpool* is a local modification of the NNPACK + # pthreadpool with a very similar interface. Neither NNPACK, nor this + # thread pool supports Windows. 
+-if (NOT MSVC) ++if (NOT MSVC AND NOT NNPACK_AND_FAMILY_DISABLED) + add_definitions(-DUSE_INTERNAL_THREADPOOL_IMPL) + set(Caffe2_CPU_SRCS ${Caffe2_CPU_SRCS} + utils/threadpool/pthreadpool.cc -- GitLab From 93fc4562cf0a387ce9aa26193c756bada9a67c23 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 01:29:37 +0000 Subject: [PATCH 066/111] fix(py-torch): fixed sympy dependency for py-torch@2.5 --- packages/py-torch/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/py-torch/package.py b/packages/py-torch/package.py index e2bc15b6..6c7f9abd 100644 --- a/packages/py-torch/package.py +++ b/packages/py-torch/package.py @@ -159,6 +159,9 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): depends_on("py-typing-extensions@4.8:", when="@2.2:") depends_on("py-typing-extensions@3.6.2.1:", when="@1.7:") depends_on("py-sympy", when="@2:") + # begin EBRAINS (added): fix sympy version for py-torch@2.5: (from setup.py) + depends_on("py-sympy@1.13.1", when="@2.5:^python@3.9:") + # end EBRAINS depends_on("py-networkx", when="@2:") depends_on("py-jinja2", when="@2:") depends_on("py-fsspec", when="@2.1:") -- GitLab From 437f31b660bfde29cd36d3001146bf9250d79cee Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 01:30:19 +0000 Subject: [PATCH 067/111] fix(py-torch): fix wrong default install dir see https://github.com/pytorch/pytorch/issues/129304 --- packages/py-torch/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/py-torch/package.py b/packages/py-torch/package.py index 6c7f9abd..b274723b 100644 --- a/packages/py-torch/package.py +++ b/packages/py-torch/package.py @@ -478,6 +478,14 @@ class PyTorch(PythonPackage, CudaPackage, ROCmPackage): working_dir="third_party/fbgemm", ) + # begin EBRAINS (added): see https://github.com/pytorch/pytorch/issues/129304 + patch( + 
"https://github.com/pytorch/pytorch/commit/9174d14551c4c6f594bd1532ab00fb7158b1bbfa.patch?full_index=1", + sha256="25204236888f25ea74c081787a01deae1a6fa66ecd77ecbe280e1ce8af6b8116", + when="@2.4", + ) + # end EBRAINS + @when("@1.5.0:") def patch(self): # https://github.com/pytorch/pytorch/issues/52208 -- GitLab From b2e03f3e15ef613196746c595c10a52e8b92d122 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 26 Feb 2025 15:31:58 +0000 Subject: [PATCH 068/111] feat(wf-human-multi-area-model): relax notebook dependency version constraint --- packages/wf-human-multi-area-model/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/wf-human-multi-area-model/package.py b/packages/wf-human-multi-area-model/package.py index 7cafb2c1..5bcefe0d 100644 --- a/packages/wf-human-multi-area-model/package.py +++ b/packages/wf-human-multi-area-model/package.py @@ -28,7 +28,7 @@ class WfHumanMultiAreaModel(Package): depends_on("py-nnmt@1.3.0:", type=("run", "test")) depends_on("py-dicthash@0.0.1:", type=("run", "test")) depends_on("py-networkx@3.1:", type=("run", "test")) - depends_on("py-notebook@6.5.4:", type=("run", "test")) + depends_on("py-notebook@6.4:", type=("run", "test")) depends_on("py-future@0.18.2:", type=("run", "test")) def install(self, spec, prefix): -- GitLab From 4ab6eb51593c252b15e70d47242b8d0910625333 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 02:17:52 +0000 Subject: [PATCH 069/111] fix(py-vbi): fix installation of tests --- packages/py-vbi/fix-install.patch | 12 ++++++++++++ packages/py-vbi/package.py | 2 ++ 2 files changed, 14 insertions(+) create mode 100644 packages/py-vbi/fix-install.patch diff --git a/packages/py-vbi/fix-install.patch b/packages/py-vbi/fix-install.patch new file mode 100644 index 00000000..d84cceb0 --- /dev/null +++ b/packages/py-vbi/fix-install.patch @@ -0,0 +1,12 @@ +diff --git a/pyproject.toml b/pyproject.toml +index 
debc24e..2e0500b 100644 +--- a/pyproject.toml ++++ b/pyproject.toml +@@ -58,7 +58,6 @@ path = "vbi/_version.py" + + + [tool.setuptools] +-packages = ["vbi"] + include-package-data = true + + [tool.setuptools.package-data] diff --git a/packages/py-vbi/package.py b/packages/py-vbi/package.py index ba2cbebb..138893c5 100644 --- a/packages/py-vbi/package.py +++ b/packages/py-vbi/package.py @@ -15,6 +15,8 @@ class PyVbi(PythonPackage): version("0.1.3", "54fa2062f44c9ec8219fae3c13c52a4bd17141b5467b982987673de0662c5255") version("0.1.2", "6ccfeeec718be62a480002a8370130a3e3344955186f99ecbb15b646b68210d6") + patch('fix-install.patch') + depends_on("python@3.8:", type=("build","run")) depends_on("py-setuptools", type="build") depends_on("py-setuptools-scm", type="build") -- GitLab From f93d7c434af5cfce526ef0f7a4927e4334618354 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 11:24:36 +0000 Subject: [PATCH 070/111] feat(py-blessed): copy package from upstream --- packages/py-blessed/package.py | 39 ++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 packages/py-blessed/package.py diff --git a/packages/py-blessed/package.py b/packages/py-blessed/package.py new file mode 100644 index 00000000..d2415b18 --- /dev/null +++ b/packages/py-blessed/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyBlessed(PythonPackage): + """Blessed is a thin, practical wrapper around terminal capabilities in + Python.""" + + homepage = "https://github.com/jquast/blessed" + pypi = "blessed/blessed-1.15.0.tar.gz" + + license("MIT") + + version("1.19.0", sha256="4db0f94e5761aea330b528e84a250027ffe996b5a94bf03e502600c9a5ad7a61") + version("1.18.1", sha256="8b09936def6bc06583db99b65636b980075733e13550cb6af262ce724a55da23") + version("1.18.0", sha256="1312879f971330a1b7f2c6341f2ae7e2cbac244bfc9d0ecfbbecd4b0293bc755") + version("1.17.12", sha256="580429e7e0c6f6a42ea81b0ae5a4993b6205c6ccbb635d034b4277af8175753e") + version("1.17.11", sha256="7d4914079a6e8e14fbe080dcaf14dee596a088057cdc598561080e3266123b48") + version("1.17.10", sha256="58b9464609f54e2eca5f5926db590a5b01fefef882844ce05064f483b8f96c26") + version("1.17.9", sha256="0d497a5be8a808b7300c00bf8303e7ba9fd11f6063a67bb924a475e5bfa7a9bb") + version("1.17.8", sha256="7671d057b2df6ddbefd809009fb08feb2f8d2d163d240b5e765088a90519b2f1") + version("1.17.7", sha256="0329a3d1db91328986a6dfd36475dbc498c867090f0433cdcc1a45a5eb2067e4") + version("1.17.6", sha256="a9a774fc6eda05248735b0d86e866d640ca2fef26038878f7e4d23f7749a1e40") + version("1.17.5", sha256="926916492220af741657ec4668aba95f54a8c32445e765cfa38c7ccd3343cc6f") + version("1.17.4", sha256="320a619c83298a9c9d632dbd8fafbb90ba9a38b83c7e64726c572fb186dd0781") + version("1.17.3", sha256="cc38547175ae0a3a3d4e5dcc7e7478a5a6bf0a6b5f4d9c6b2e5eadbe4475cb0e") + version("1.17.0", sha256="38632d60dd384de9e9be0ee5b6e1c6130f96efd0767c6ca530a453da36238c25") + version("1.16.1", sha256="a222783b09f266cf76f5a01f4dfd9de79650f07cbefe2cbc67ec7bb9577c1dfa") + version("1.16.0", sha256="34b78e9b56c2ba2f6a9a625cc989d6cf4ae8ae87dcc4ed8ad144660ae4cf7784") + version("1.15.0", sha256="777b0b6b5ce51f3832e498c22bc6a093b6b5f99148c7cbf866d26e2dec51ef21") + + depends_on("py-setuptools", type="build") + 
depends_on("py-wcwidth@0.1.4:", type=("build", "run")) + depends_on("py-six@1.9.0:", type=("build", "run")) -- GitLab From efb7995b1da53bb8d3bd301a54cf85f87c6f835a Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 11:24:56 +0000 Subject: [PATCH 071/111] feat(py-blessed): add version 1.20 --- packages/py-blessed/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/py-blessed/package.py b/packages/py-blessed/package.py index d2415b18..c40b13ca 100644 --- a/packages/py-blessed/package.py +++ b/packages/py-blessed/package.py @@ -16,6 +16,9 @@ class PyBlessed(PythonPackage): license("MIT") + # begin EBRAINS (added): add version + version("1.20.0", sha256="2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680") + # end EBRAINS version("1.19.0", sha256="4db0f94e5761aea330b528e84a250027ffe996b5a94bf03e502600c9a5ad7a61") version("1.18.1", sha256="8b09936def6bc06583db99b65636b980075733e13550cb6af262ce724a55da23") version("1.18.0", sha256="1312879f971330a1b7f2c6341f2ae7e2cbac244bfc9d0ecfbbecd4b0293bc755") -- GitLab From da35a770e5548c1d0066001646a5c8d343aa28e5 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 11:25:16 +0000 Subject: [PATCH 072/111] feat(py-exceptiongroup): copy package from upstream --- packages/py-exceptiongroup/package.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 packages/py-exceptiongroup/package.py diff --git a/packages/py-exceptiongroup/package.py b/packages/py-exceptiongroup/package.py new file mode 100644 index 00000000..7dfb5da6 --- /dev/null +++ b/packages/py-exceptiongroup/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyExceptiongroup(PythonPackage): + """A backport of the BaseExceptionGroup and ExceptionGroup classes from Python 3.11.""" + + homepage = "https://github.com/agronholm/exceptiongroup" + pypi = "exceptiongroup/exceptiongroup-1.0.4.tar.gz" + + version("1.1.1", sha256="d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785") + version("1.0.4", sha256="bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec") + + depends_on("py-flit-scm", type="build") -- GitLab From fe0421c74d54dee9cde3968b96ba2082b7f8104b Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 11:26:04 +0000 Subject: [PATCH 073/111] feat(py-exceptiongroup): add version 1.2.0 --- packages/py-exceptiongroup/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/py-exceptiongroup/package.py b/packages/py-exceptiongroup/package.py index 7dfb5da6..1fe03cd4 100644 --- a/packages/py-exceptiongroup/package.py +++ b/packages/py-exceptiongroup/package.py @@ -13,6 +13,9 @@ class PyExceptiongroup(PythonPackage): homepage = "https://github.com/agronholm/exceptiongroup" pypi = "exceptiongroup/exceptiongroup-1.0.4.tar.gz" + # begin EBRAINS (added): add version + version("1.2.0", sha256="91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68") + # end EBRAINS version("1.1.1", sha256="d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785") version("1.0.4", sha256="bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec") -- GitLab From 4e27a96baf88fa340ab9b42707444586df47c4f3 Mon Sep 17 00:00:00 2001 From: filimarc <oppilif576@msn.com> Date: Mon, 13 Jan 2025 14:55:57 +0100 Subject: [PATCH 074/111] feat(py-bsb): bump packages for bsb 4.4 Bump bsb-core and bsb-hdf5 to 5.0.2 Add bsb suite python packages and dependencies Add workflow package for bsb --- packages/py-arborize/package.py | 26 ++++++++ 
packages/py-bsb-core/package.py | 59 +++++++++++++++++++ packages/py-bsb-hdf5/package.py | 12 ++-- packages/py-bsb-json/package.py | 22 +++++++ packages/py-bsb-nest/package.py | 24 ++++++++ packages/py-bsb-neuron/package.py | 26 ++++++++ packages/py-bsb-yaml/package.py | 23 ++++++++ packages/py-bsb/package.py | 51 ---------------- packages/py-dashing/package.py | 19 ++++++ packages/py-mpilock/package.py | 21 +++++++ packages/py-mpipool/package.py | 24 ++++++++ packages/py-nmodl-glia/package.py | 25 ++++++++ .../py-nmodl/fix-setup-requirements.patch | 21 +++++++ packages/py-nmodl/package.py | 38 ++++++++++++ packages/py-nrn-patch/package.py | 29 +++++++++ packages/wf-bsb/package.py | 29 +++++++++ 16 files changed, 392 insertions(+), 57 deletions(-) create mode 100644 packages/py-arborize/package.py create mode 100644 packages/py-bsb-core/package.py create mode 100644 packages/py-bsb-json/package.py create mode 100644 packages/py-bsb-nest/package.py create mode 100644 packages/py-bsb-neuron/package.py create mode 100644 packages/py-bsb-yaml/package.py delete mode 100644 packages/py-bsb/package.py create mode 100644 packages/py-dashing/package.py create mode 100644 packages/py-mpilock/package.py create mode 100644 packages/py-mpipool/package.py create mode 100644 packages/py-nmodl-glia/package.py create mode 100644 packages/py-nmodl/fix-setup-requirements.patch create mode 100644 packages/py-nmodl/package.py create mode 100644 packages/py-nrn-patch/package.py create mode 100644 packages/wf-bsb/package.py diff --git a/packages/py-arborize/package.py b/packages/py-arborize/package.py new file mode 100644 index 00000000..f897963f --- /dev/null +++ b/packages/py-arborize/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyArborize(PythonPackage): + """ + A framework to package NEURON cell models following a clear separation between cell model description and its implementation in NEURON. + Cell models described using this framework are highly portable, testable and distributable. + """ + + homepage = "https://github.com/dbbs-lab/arborize" + pypi = "arborize/arborize-4.1.0.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("4.1.0", sha256="2cb88b890fa69de42a49b38ea4defd0caa4ee91b34f9b75216e1536d9058f57f") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-numpy@1.21:") + depends_on("py-errr@1.2:") + depends_on("py-morphio@3.3.6:4") diff --git a/packages/py-bsb-core/package.py b/packages/py-bsb-core/package.py new file mode 100644 index 00000000..eacb83a4 --- /dev/null +++ b/packages/py-bsb-core/package.py @@ -0,0 +1,59 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBsbCore(PythonPackage): + """ + The BSB is a component framework for neural modeling, which focuses on component + declarations to piece together a model. 
+ """ + + homepage = "https://bsb.readthedocs.io" + pypi = "bsb-core/bsb_core-5.0.0.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("5.0.2", sha256="414be0f3ba72b2f656b89f8e4636e4a1d19b1f4dc9ba9360cc984020cb1859dc") + version("5.0.1", sha256="7cb905ee38419709b4ead2ffb40e1005d813d2c6780706b3f5eb2696aabeb983") + version("5.0.0", sha256="08e1776d351a8bb5c056ffbd8108d0bd941f71518b475aecbad9f22050b7cc91") + + variant('parallel', default=True, + description='Build with MPI bindings') + + depends_on("python@3.9:3.12", type=("build", "run")) + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-numpy@1.19:") + depends_on("py-scipy@1.5:") + depends_on("py-scikit-learn@1.0:") + depends_on("py-rtree@1.0:") + depends_on("py-psutil@5.8:") + depends_on("py-pynrrd@1.0:") + depends_on("py-toml@0.10:") + depends_on("py-requests") + depends_on("py-urllib3@2:") + depends_on("py-appdirs@1.4:") + depends_on("py-neo") + depends_on("py-tqdm@4.50:") + depends_on("py-shortuuid") + depends_on("py-quantities@0.15.0:") + depends_on("py-morphio@3.3:") + depends_on("py-errr@1.2.0:") + depends_on("py-dashing@0.1.0:") + depends_on("py-exceptiongroup") + + depends_on('mpi', when='+parallel') + depends_on('py-mpi4py', when='+parallel') + depends_on('py-mpipool@2.2.1:3', when='+parallel') + depends_on('py-mpilock@1.1:', when='+parallel') + + + def setup_build_environment(self, env): + env.set("SPATIALINDEX_C_LIBRARY", self.spec["libspatialindex"].libs[0]) + + def setup_run_environment(self, env): + self.setup_build_environment(env) diff --git a/packages/py-bsb-hdf5/package.py b/packages/py-bsb-hdf5/package.py index 059f2df3..36fbc5d0 100644 --- a/packages/py-bsb-hdf5/package.py +++ b/packages/py-bsb-hdf5/package.py @@ -10,13 +10,13 @@ class PyBsbHdf5(PythonPackage): """An HDF-5 based storage engine for the BSB framework.""" homepage = "https://github.com/dbbs-lab/bsb-hdf5" - url = 
"https://pypi.org/packages/py3/b/bsb_hdf5/bsb_hdf5-0.8.3-py3-none-any.whl" + pypi = "bsb-hdf5/bsb_hdf5-5.0.4.tar.gz" - maintainers = ["helveg"] + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] - version('0.8.3', sha256="38162bfe9470b87cb30a2bff78dce68fc1b97f2df7d7e3b288c16b671f7579e5", expand=False) + version('5.0.2', sha256='ed11177887848a3f177982201e1adb5770131bd541055a96935af38b39439fac') - depends_on("py-setuptools", type="build") - # depends_on("py-bsb@4.0.0a57:") + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-bsb-core@5.0.0:",when='@5.0.2') depends_on("py-shortuuid") - depends_on("py-h5py@3.0:") diff --git a/packages/py-bsb-json/package.py b/packages/py-bsb-json/package.py new file mode 100644 index 00000000..c692a2c5 --- /dev/null +++ b/packages/py-bsb-json/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBsbJson(PythonPackage): + """A plugin that allows the user to write their models' configuration in the json format, for the BSB framework.""" + + homepage = "https://github.com/dbbs-lab/bsb-json" + pypi = "bsb-json/bsb_json-4.2.2.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("4.2.2", sha256="0c9e0af2a50f8ebbce353ba19bd11bafaf2536d74f0a79af3b0b6d8241fa6937") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-bsb-core@5.0.0:") + depends_on("py-shortuuid") diff --git a/packages/py-bsb-nest/package.py b/packages/py-bsb-nest/package.py new file mode 100644 index 00000000..9d81556a --- /dev/null +++ b/packages/py-bsb-nest/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBsbNest(PythonPackage): + """ + The BSB-NEST is a component framework for neural modeling, used for simulate SNN with NEST software. + """ + + homepage = "https://github.com/dbbs-lab/bsb-nest" + pypi = "bsb-nest/bsb_nest-4.3.2.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("4.3.2", sha256="478aa2937ca554ff291ce726cc69e1c1b283d7353a56e3b6878b585ed0684041") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-bsb-core@5.0.2:") + depends_on("nest") diff --git a/packages/py-bsb-neuron/package.py b/packages/py-bsb-neuron/package.py new file mode 100644 index 00000000..6e4ec130 --- /dev/null +++ b/packages/py-bsb-neuron/package.py @@ -0,0 +1,26 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBsbNeuron(PythonPackage): + """ + The BSB-NEURON is a component framework for neural modeling, used for simulate with NEURON software. + """ + + homepage = "https://github.com/dbbs-lab/bsb-neuron" + pypi = "bsb-neuron/bsb_neuron-4.2.2.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("4.2.2", sha256="e7570c0cb17d31349eb8e88487e8ba48653f0fad0d7c232df8815cadde34a941") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-bsb-core@5.0.2:") + depends_on("neuron") + depends_on("py-arborize@4.1:") + depends_on("py-nrn-patch@4:") diff --git a/packages/py-bsb-yaml/package.py b/packages/py-bsb-yaml/package.py new file mode 100644 index 00000000..d91b3301 --- /dev/null +++ b/packages/py-bsb-yaml/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyBsbYaml(PythonPackage): + """A plugin that allows the user to write their models' configuration in the yaml format, for the BSB framework.""" + + homepage = "https://github.com/dbbs-lab/bsb-yaml" + pypi = "bsb-yaml/bsb_yaml-4.2.2.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("4.2.2", sha256="c5614bc5fe57b78a445303756819a8d4ba032924484f88a07f6c26dd7e5afbec") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-bsb-core@5.0.0:") + depends_on("py-pyyaml@6.0:") + depends_on("py-shortuuid") diff --git a/packages/py-bsb/package.py b/packages/py-bsb/package.py deleted file mode 100644 index 397bdbd3..00000000 --- a/packages/py-bsb/package.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other -# Spack Project Developers. See the top-level COPYRIGHT file for details. -# -# SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack.package import * - - -class PyBsb(PythonPackage): - """ - The BSB is a component framework for neural modeling, which focuses on component - declarations to piece together a model. 
- """ - - homepage = "https://bsb.readthedocs.io" - url = "https://pypi.org/packages/py3/b/bsb/bsb-4.0.0a57-py3-none-any.whl" - - maintainers = ["helveg"] - - version("4.0.0a57", sha256="5da15799aa8994894ff5371561d534b43beffaa79461189c94080071359f4076", expand=False) - - depends_on("python@3.8:", type=("build", "run")) - depends_on("py-setuptools", type="build") - depends_on("py-numpy@1.19:") - depends_on("py-scipy@1.5:") - depends_on("py-scikit-learn@1.0:") - depends_on("py-plotly") - depends_on("py-rtree@1.0:") - depends_on("py-psutil@5.8:") - depends_on("py-pynrrd@1.0:") - depends_on("py-toml") - depends_on("py-requests") - depends_on("py-appdirs@1.4:") - depends_on("py-neo") - depends_on("py-tqdm@4.50:") - depends_on("py-shortuuid") - depends_on("py-quantities") - depends_on("py-pyyaml@6.0:") - depends_on("py-morphio@3.3:") - depends_on("py-bsb-hdf5@0.8.3:") - depends_on("py-errr@1.2.0:") - depends_on("py-colour@0.1.5:") - - def setup_build_environment(self, env): - env.set("SPATIALINDEX_C_LIBRARY", self.spec["libspatialindex"].libs[0]) - - def setup_run_environment(self, env): - self.setup_build_environment(env) - - skip_modules = ['bsb.simulators.arbor', 'bsb.simulators.arbor.devices'] - diff --git a/packages/py-dashing/package.py b/packages/py-dashing/package.py new file mode 100644 index 00000000..48d63dc4 --- /dev/null +++ b/packages/py-dashing/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyDashing(PythonPackage): + """Dashing is a library to quickly create terminal-based dashboards in Python.""" + + homepage = "https://github.com/FedericoCeratto/dashing" + pypi = "dashing-next/dashing_next-0.1.0.tar.gz" + + version("0.1.0", sha256="9d48e97fce430a9cfb47d5627041b001ab306b65e97d6967fe86e2c25e324612") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-blessed@1.20.0:") diff --git a/packages/py-mpilock/package.py b/packages/py-mpilock/package.py new file mode 100644 index 00000000..16e9a60c --- /dev/null +++ b/packages/py-mpilock/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyMpilock(PythonPackage): + """mpilock offers a WindowController class with a high-level API for parallel access to resources. """ + + homepage = "https://github.com/Helveg/mpilock" + pypi = "mpilock/mpilock-1.1.0-py3-none-any.whl" + + version("1.1.0", sha256="0902ef859a7b3dfb4312a3c46332302493aa14fa398b610554706b0b9e7cb57c", expand=False) + + maintainers=["helveg"] + + depends_on("py-setuptools", type="build") + depends_on("py-mpi4py@3.0.3:") + depends_on("py-numpy@1.20.0:") diff --git a/packages/py-mpipool/package.py b/packages/py-mpipool/package.py new file mode 100644 index 00000000..fb6f112a --- /dev/null +++ b/packages/py-mpipool/package.py @@ -0,0 +1,24 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyMpipool(PythonPackage): + """mpipool offers MPI based parallel execution of tasks through implementations of Python's standard library interfaces""" + + homepage = "https://github.com/mpipool/mpipool" + pypi = "mpipool/mpipool-2.2.1.tar.gz" + + version("2.2.1", sha256="dc735b994349ae3e06fce7c3601523ba062125ffa6dd4c6c51a94c168c9ff92c") + + maintainers=["helveg"] + + depends_on("py-flit-core@3.2:4", type="build") + depends_on("py-mpi4py@3.0.3:") + depends_on("py-errr@1.0:") + depends_on("py-tblib@1.7.0:") + depends_on("py-dill@0.3.3:") diff --git a/packages/py-nmodl-glia/package.py b/packages/py-nmodl-glia/package.py new file mode 100644 index 00000000..acd06eaf --- /dev/null +++ b/packages/py-nmodl-glia/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNmodlGlia(PythonPackage): + """ + Patch to use NMODL within the BSB + """ + + homepage = "https://github.com/dbbs-lab/glia" + pypi = "nmodl-glia/nmodl_glia-4.0.1.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("4.0.1", sha256="c3b3dad203eac1f394d6a4ca6e4f42d25d5eebc013970309f1453c7ca3e5c5a3") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-numpy@1.21:") + depends_on("py-errr@1.2:") + depends_on("py-nmodl@0.5:") diff --git a/packages/py-nmodl/fix-setup-requirements.patch b/packages/py-nmodl/fix-setup-requirements.patch new file mode 100644 index 00000000..52ec60c2 --- /dev/null +++ b/packages/py-nmodl/fix-setup-requirements.patch @@ -0,0 +1,21 @@ +diff --git a/setup.py b/setup.py +index c956004c9..671a6619e 100644 +--- a/setup.py ++++ b/setup.py +@@ -131,16 +131,6 @@ setup( + zip_safe=False, + setup_requires=[ + "jinja2>=2.9.3", +- 
"jupyter-client", +- "jupyter", +- "myst_parser", +- "mistune<3", # prevents a version conflict with nbconvert +- "nbconvert", +- "nbsphinx>=0.3.2", +- "pytest>=3.7.2", +- "sphinxcontrib-applehelp<1.0.3", +- "sphinx<6", +- "sphinx-rtd-theme", + ] + + install_requirements, + install_requires=install_requirements, diff --git a/packages/py-nmodl/package.py b/packages/py-nmodl/package.py new file mode 100644 index 00000000..fd5fb011 --- /dev/null +++ b/packages/py-nmodl/package.py @@ -0,0 +1,38 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNmodl(PythonPackage): + """The NMODL Framework is a code generation engine for NEURON MODeling Language (NMODL).""" + + homepage = "https://github.com/BlueBrain/nmodl" + + git = "https://github.com/BlueBrain/nmodl" + + license("Apache-2.0") + maintainers = ["bbp.opensource"] + + version("0.5", tag="0.5", commit="ac272785dc444c8444b085d121f08b7575bb6647", submodules=True) + + patch("fix-setup-requirements.patch", when="@:0.6") + + depends_on("flex@2.6:") + depends_on("bison@3.0:") + depends_on("cmake@3.15:", type="build") + depends_on("python@3.9:", type=("build","run")) + depends_on("py-setuptools", type="build") + depends_on("py-scikit-build", type="build") + depends_on("py-jinja2@2.9.3:", type="build") + depends_on("py-pyyaml@3.13:", type="build") + depends_on("py-pytest") + depends_on("py-sympy@1.3:", type=("build","run")) + depends_on("py-find-libpython", type=("build","run")) + depends_on("py-importlib-metadata", when="^python@:3.8", type=("build","run")) + depends_on("py-importlib-resources", when="^python@:3.8", type=("build","run")) + + def setup_build_environment(self, env): + env.set("NMODL_WHEEL_VERSION", self.version) diff --git a/packages/py-nrn-patch/package.py b/packages/py-nrn-patch/package.py new file mode 100644 index 
00000000..aaf40362 --- /dev/null +++ b/packages/py-nrn-patch/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class PyNrnPatch(PythonPackage): + """A patch to make the BSB interface with NEURON software""" + + homepage = "https://github.com/dbbs-lab/patch" + pypi = "nrn-patch/nrn_patch-4.0.0.tar.gz" + + license("GPL-3.0-only") + maintainers = ["helveg","filimarc","drodarie"] + + version("4.0.0", sha256="0f95243798c7363826d7835023f7c9215577edd8d6695cc7caeb65a7fe8a54c0") + + depends_on("py-flit-core@3.2:4.0", type="build") + depends_on("py-numpy@1.21:") + depends_on("py-errr@1.2:") + depends_on("py-click@8.0:") + depends_on("py-appdirs@1.0:") + depends_on("py-cookiecutter@2.0:") + depends_on("py-black@0.24:") + depends_on("py-toml@0.1:") + depends_on("py-nmodl-glia@4.0:") + depends_on("neuron@8:10") diff --git a/packages/wf-bsb/package.py b/packages/wf-bsb/package.py new file mode 100644 index 00000000..2e843818 --- /dev/null +++ b/packages/wf-bsb/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class WfBsb(BundlePackage): + """Meta-package to collect all dependencies for the BSB.""" + + homepage="https://github.com/dbbs-lab/bsb" + + maintainers = ["helveg","filimarc","drodarie"] + + version("4.4") + + variant('nest', default=False, + description='Build with NEST interface') + variant('neuron', default=False, + description='Build with NEURON interface') + + depends_on("py-bsb-core@5.0.2:") + depends_on("py-bsb-hdf5@5.0.2:") + depends_on("py-bsb-json@4.2.2:") + depends_on("py-bsb-yaml@4.2.2:") + + depends_on("py-bsb-nest",when="+nest") + depends_on("py-bsb-neuron",when="+neuron") -- GitLab From ab87efeb322144dea55f283e4b2181b25d7686ba Mon Sep 17 00:00:00 2001 From: filimarc <oppilif576@msn.com> Date: Mon, 13 Jan 2025 15:12:50 +0100 Subject: [PATCH 075/111] feat(py-bsb): add bsb suite to spack.yaml Add bsb subpackages --- spack.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/spack.yaml b/spack.yaml index b4322538..274a55d1 100644 --- a/spack.yaml +++ b/spack.yaml @@ -19,7 +19,12 @@ spack: - py-bluepyefe@2.3.6 - py-bluepymm@0.8.7 - py-bluepyopt@1.14.11 - - py-bsb@4.0.0a57 + - py-bsb-core@5.0.2 + - py-bsb-hdf5@5.0.2 + - py-bsb-yaml@4.2.2 + - py-bsb-json@4.2.2 + - py-bsb-nest@4.3.2 + - py-bsb-neuron@4.2.2 - py-ebrains-drive@0.6.0 - py-ebrains-kg-core@0.9.15 - py-ebrains-validation-client@0.9.1 @@ -65,6 +70,7 @@ spack: # Workflows (meta-packages) - wf-biobb - wf-brainscales2-demos@9.0-a8 + - wf-bsb@4.4 +nest +neuron - wf-protein-association-rates@0.1 - wf-multi-area-model@1.2.0 - wf-human-multi-area-model@2.0.1 -- GitLab From e290113064c8955562c65f592bbc7bfe509532e4 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 13:57:43 +0000 Subject: [PATCH 076/111] feat(py-annarchy): add version 4.8.2.3 --- packages/py-annarchy/package.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git 
a/packages/py-annarchy/package.py b/packages/py-annarchy/package.py index bf672bb8..4368d1e0 100644 --- a/packages/py-annarchy/package.py +++ b/packages/py-annarchy/package.py @@ -13,14 +13,16 @@ class PyAnnarchy(PythonPackage): """ homepage = "https://annarchy.readthedocs.io/en/latest/" - pypi = 'ANNarchy/ANNarchy-4.7.2.5.tar.gz' + pypi = 'ANNarchy/annarchy-4.8.2.3.tar.gz' maintainers = ['dionperd', 'paulapopa', "ldomide"] + version('4.8.2.3', '25a4d09905983ce27f7c6b4dd67a54831ea233b6b28943cb67dafd3c351d1dde') version('4.7.2.5', 'b7ef91cc4415e078e386eb30e595922c9f0ef90ad1340a12dc5ca46e728a7bb2') # python_requires - depends_on('python@3.8:3.10', type=('build', 'run')) + depends_on('python@3.8:3.10', when='@:4.7.2', type=('build', 'run')) + depends_on('python@3.10:', when='@4.7.3:', type=('build', 'run')) # setup_requires depends_on('py-pip', type='build') @@ -34,6 +36,8 @@ class PyAnnarchy(PythonPackage): depends_on('py-sympy', type=('build', 'run')) # >= 1.6 depends_on('py-matplotlib', type=('build', 'run')) # >= 2.0 depends_on('py-cython', type=('build', 'run')) # >= 0.20 + depends_on('py-tqdm', when='@4.8:', type=('build', 'run')) + depends_on('py-h5py', when='@4.8.2:', type=('build', 'run')) # Highly recommended: # pyqtgraph >= 0.9.8 (to visualize some of the provided examples. 
The OpenGL backend can also be needed) -- GitLab From 3885a77eb3eaa4d54ec28f89f81633aea023d1e7 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 13:59:45 +0000 Subject: [PATCH 077/111] feat(py-tvb-multiscale): update dependencies --- packages/py-tvb-multiscale/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/py-tvb-multiscale/package.py b/packages/py-tvb-multiscale/package.py index 7b78790a..b57b8f03 100644 --- a/packages/py-tvb-multiscale/package.py +++ b/packages/py-tvb-multiscale/package.py @@ -21,7 +21,7 @@ class PyTvbMultiscale(PythonPackage): patch('tvb-multiscale-2.1.0-version-pep440.patch', when='@2.1.0.ebrains') # python_requires - depends_on('python@3.8:3.10', type=('build', 'run')) + depends_on('python@3.8:3.11', type=('build', 'run')) # setup_requires depends_on('py-pip', type='build') @@ -39,7 +39,7 @@ class PyTvbMultiscale(PythonPackage): depends_on('py-ray', type=('build', 'run')) # Test dependency - depends_on('py-pytest@:7.1', type='test') + depends_on('py-pytest', type='test') @run_after('install') @on_package_attributes(run_tests=True) -- GitLab From 999bfd27be3f33d2aa9fab8166e6d0c7240f4447 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 13:58:58 +0000 Subject: [PATCH 078/111] feat(py-ray): add version 2.30.0 --- packages/py-ray/package.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/packages/py-ray/package.py b/packages/py-ray/package.py index 82ba34c4..f1bf4b72 100644 --- a/packages/py-ray/package.py +++ b/packages/py-ray/package.py @@ -15,7 +15,10 @@ class PyRay(PythonPackage): license("Apache-2.0") - # begin EBRAINS (added): ECM new node-js -> new react whatever -> new py-ray + # begin EBRAINS (added): new versions + # new version to allow building with newer bazel + version("2.30.0", sha256="854d549a77f0b0e810d1e9a18e7becf984279e2a0bfad5bed508f500ff770e34") 
+ # ECM: new node-js -> new react whatever -> new py-ray version("2.4.0", sha256="b0110a84630b2f6d10cd13e8ac955875c3658373eb6cabcc77cf316de3c28066") # end EBRAINS version("2.0.1", sha256="b8b2f0a99d2ac4c001ff11c78b4521b217e2a02df95fb6270fd621412143f28b") @@ -30,18 +33,20 @@ class PyRay(PythonPackage): conflicts("node-js@17:", when="@:2.0.1") # end EBRAINS - # begin EBRAINS (added): ElM add missing dependencies/constraints for added version 2.4.0 + # begin EBRAINS (added): ElM add missing dependencies/constraints for added versions + depends_on("python@3.9:3.12", when="@2.30.0", type=("build", "run")) depends_on("python@3.6:3.11", when="@2.4.0", type=("build", "run")) + depends_on("bazel@6.5", when="@2.30.0", type="build") depends_on("bazel@5", when="@2.4.0", type="build") - depends_on("py-cython@0.29.32:", when="@2.4.0", type="build") + depends_on("py-cython@0.29.32:", when="@2.4.0:", type="build") depends_on("py-attrs", when="@2.4.0", type=("build", "run")) - depends_on("py-click@7.0:", when="@2.4.0", type=("build", "run")) - depends_on("py-grpcio@1.32:1.51.3", when="@2.4.0 ^python@:3.9", type=("build", "run")) - depends_on("py-grpcio@1.42:1.51.3", when="@2.4.0 ^python@3.10:", type=("build", "run")) - depends_on("py-protobuf@3.15.3:", when="@2.4.0", type=("build", "run")) + depends_on("py-click@7.0:", when="@2.4.0:", type=("build", "run")) + depends_on("py-grpcio@1.32:1.51.3", when="@2.4.0: ^python@:3.9", type=("build", "run")) + depends_on("py-grpcio@1.42:1.51.3", when="@2.4.0: ^python@3.10:", type=("build", "run")) + depends_on("py-protobuf@3.15.3:", when="@2.4.0:", type=("build", "run")) depends_on("py-frozenlist", when="@2.4.0", type=("build", "run")) depends_on("py-typing-extensions", when="@2.4.0 ^python@:3.7", type=("build", "run")) - depends_on("py-virtualenv@20.0.24:", when="@2.4.0", type=("build", "run")) + depends_on("py-virtualenv@20.0.24:", when="@2.4.0:", type=("build", "run")) # end EBRAINS depends_on("python@3.6:3.10", when="@2.0.1", type=("build", 
"run")) @@ -113,7 +118,7 @@ class PyRay(PythonPackage): build_directory = "python" # begin EBRAINS (added): fix boost download url - patch("fix-url-boost.patch", when="@2.4.0:") + patch("fix-url-boost.patch", when="@2.4.0") # end EBRAINS def patch(self): -- GitLab From 69bd1d6c51752ee75a056ddf8cbe7699c01553ab Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Fri, 28 Feb 2025 14:03:27 +0000 Subject: [PATCH 079/111] feat(py-pyspike): patch for py-cython@3 and use source from github, because pypi archives are already patched --- packages/py-pyspike/cython3.patch | 24 ++++++++++++++++++++++++ packages/py-pyspike/package.py | 14 ++++++++------ 2 files changed, 32 insertions(+), 6 deletions(-) create mode 100644 packages/py-pyspike/cython3.patch diff --git a/packages/py-pyspike/cython3.patch b/packages/py-pyspike/cython3.patch new file mode 100644 index 00000000..e3e62eaf --- /dev/null +++ b/packages/py-pyspike/cython3.patch @@ -0,0 +1,24 @@ +diff --git a/setup.py b/setup.py +index 297746d..b52cf8b 100644 +--- a/setup.py ++++ b/setup.py +@@ -21,11 +21,14 @@ else: + use_cython = True + + +-class numpy_include(object): +- """Defers import of numpy until install_requires is through""" +- def __str__(self): +- import numpy +- return numpy.get_include() ++class numpy_include(os.PathLike): ++ """Defers import of numpy until install_requires is through""" ++ def __str__(self): ++ import numpy ++ return numpy.get_include() ++ ++ def __fspath__(self): ++ return str(self) + + + if os.path.isfile("pyspike/cython/cython_add.c") and \ diff --git a/packages/py-pyspike/package.py b/packages/py-pyspike/package.py index 9fbe4128..ddf17e7e 100644 --- a/packages/py-pyspike/package.py +++ b/packages/py-pyspike/package.py @@ -12,15 +12,17 @@ class PyPyspike(PythonPackage): """ homepage = "https://github.com/mariomulansky/PySpike" - pypi = 'pyspike/pyspike-0.7.0.tar.gz' + url = 'https://github.com/mariomulansky/PySpike/archive/refs/tags/0.8.0.tar.gz' 
maintainers = ['dionperd', 'paulapopa', "ldomide"] - version('0.8.0', '76137b861ed531608aaf55af1a5ebf8a586e98653dab2467b4c1da7b2d9aa4e5') - version('0.7.0', 'a5d1c1472d3e7c3ac85c8a4ce069d750cca02acf18f185677b29c0a757e78efe') + version('0.8.0', '199d41af097e0b6e6583e22d4a9c3cedab51ceba4da2d940682ffefe8120a414') + version('0.7.0', '47031ba10a5726845982b62dcae970449ca50c4be9985a1ed0d2a021456bf25a') + + patch("cython3.patch", when="^py-cython@3:") # python_requires - depends_on('python@3.8:3.10', type=('build', 'run')) + depends_on('python@3.8:', type=('build', 'run')) # setup_requires depends_on('py-pip', type='build') @@ -31,10 +33,10 @@ class PyPyspike(PythonPackage): depends_on('py-scipy', type=('build', 'run')) depends_on('py-matplotlib', type=('build', 'run')) depends_on('py-pytest', type=('build', 'run')) - depends_on('py-cython@:2', type=('build', 'run')) + depends_on('py-cython', type=('build', 'run')) # Test dependency - depends_on('py-pytest@:7.1', type='test') + depends_on('py-pytest', type='test') @run_after('install') @on_package_attributes(run_tests=True) -- GitLab From 8e9993028e6e28425e47834bada5016a600cbd91 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 5 Mar 2025 16:39:46 +0000 Subject: [PATCH 080/111] feat(py-tblib): copy package from upstream --- packages/py-tblib/package.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 packages/py-tblib/package.py diff --git a/packages/py-tblib/package.py b/packages/py-tblib/package.py new file mode 100644 index 00000000..b76e9b84 --- /dev/null +++ b/packages/py-tblib/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack.package import * + + +class PyTblib(PythonPackage): + """Traceback fiddling library. 
Allows you to pickle tracebacks.""" + + homepage = "https://github.com/ionelmc/python-tblib" + pypi = "tblib/tblib-1.6.0.tar.gz" + + license("BSD-2-Clause") + + version("1.6.0", sha256="229bee3754cb5d98b4837dd5c4405e80cfab57cb9f93220410ad367f8b352344") + version("1.4.0", sha256="bd1ad564564a158ff62c290687f3db446038f9ac11a0bf6892712e3601af3bcd") + + depends_on("python@2.7:2.8,3.5:", type=("build", "run")) + depends_on("py-setuptools", type="build") -- GitLab From d09bfb473adeb754005f5a8114aca6b89aa54218 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Wed, 5 Mar 2025 16:47:26 +0000 Subject: [PATCH 081/111] feat(py-tblib): add version 2.0.0 --- packages/py-tblib/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/py-tblib/package.py b/packages/py-tblib/package.py index b76e9b84..f9f8511b 100644 --- a/packages/py-tblib/package.py +++ b/packages/py-tblib/package.py @@ -15,6 +15,9 @@ class PyTblib(PythonPackage): license("BSD-2-Clause") + # begin EBRAINS (added): new version + version("2.0.0", sha256="a6df30f272c08bf8be66e0775fad862005d950a6b8449b94f7c788731d70ecd7") + # end EBRAINS version("1.6.0", sha256="229bee3754cb5d98b4837dd5c4405e80cfab57cb9f93220410ad367f8b352344") version("1.4.0", sha256="bd1ad564564a158ff62c290687f3db446038f9ac11a0bf6892712e3601af3bcd") -- GitLab From 615b2461648dfeb3fb389ce1a04abaa907214832 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Thu, 6 Mar 2025 18:06:54 +0000 Subject: [PATCH 082/111] fix(py-astropy): relax py-cython dependency version range see https://github.com/astropy/astropy/blob/v6.1.0/pyproject.toml#L128 --- packages/py-astropy/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/py-astropy/package.py b/packages/py-astropy/package.py index e780f21c..d62abe73 100644 --- a/packages/py-astropy/package.py +++ b/packages/py-astropy/package.py @@ -40,7 +40,9 @@ class PyAstropy(PythonPackage): # TODO: probably 
fix, unrealistic depends_on("py-cython@0.29.13:", type="build") depends_on("py-cython@0.29.30", when="@5.1:6.0", type="build") - depends_on("py-cython@3.0.0", when="@6.1.0:", type="build") + # begin EBRAINS (modified): relax dependency version constraint + depends_on("py-cython@3.0", when="@6:", type="build") + # end EBRAINS # in newer pip versions --install-option does not exist depends_on("py-pip@:23.0", type="build") -- GitLab From 5d459ce403ac413e379661cb115d5f0255dad668 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 23:14:08 +0000 Subject: [PATCH 083/111] feat(CI): update build env docker image --- .gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 858211ed..80b8a4d9 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -3,7 +3,7 @@ stages: - test variables: - BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:24.12 + BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:devel SPACK_PATH_GITLAB: /mnt/spack_v0.23.0 SYSTEMNAME: ebrainslab GIT_SUBMODULE_STRATEGY: recursive -- GitLab From ca5bdd3ef09674a8f984c43e80cb9631a2b398f5 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 23:26:20 +0000 Subject: [PATCH 084/111] feat(CI): no need for spack-installed compiler on lab image-based builds --- install_spack_env.sh | 46 -------------------------------------------- 1 file changed, 46 deletions(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index 8de4f7df..31232625 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -79,52 +79,6 @@ if [[ ! 
$(spack mirror list | grep local_cache) ]]; then spack mirror add local_cache ${SPACK_CACHE_BUILD} fi -# install platform compiler (extract version from packages.yaml) -if [ $SYSTEMNAME == ebrainslab ] -then - EBRAINS_SPACK_COMPILER=$(grep 'compiler' $EBRAINS_REPO/site-config/$SYSTEMNAME/packages.yaml | awk -F'[][]' '{ print $2 }') - spack compiler find - spack load $EBRAINS_SPACK_COMPILER || { - # dump dag to file - spack spec -y $EBRAINS_SPACK_COMPILER arch=x86_64 > "/tmp/req_compiler.yaml" - if [ -n "${OCI_CACHE_PREFIX}" ]; then - # fetch missing sources (if packages not yet installed) - python3 ${YASHCHIKI_HOME}/fetch_cached_sources.py \ - --local-cache=${SPACK_CACHE_SOURCE} \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ - --yashchiki-home=${YASHCHIKI_HOME} \ - /tmp/compiler_missing_paths_sources.dat /tmp/req_compiler.yaml - # fetch missing build results (if packages not yet installed) - python3 ${YASHCHIKI_HOME}/fetch_cached_buildresults.py \ - --local-cache=${SPACK_CACHE_BUILD}/build_cache \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ - --yashchiki-home=${YASHCHIKI_HOME} \ - /tmp/compiler_missing_paths_buildresults.dat /tmp/req_compiler.yaml - fi - spack install --no-check-signature -y -j$SPACK_JOBS $EBRAINS_SPACK_COMPILER arch=x86_64 - if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then - echo "Performing update of the source cache (for base compiler)" - python3 ${YASHCHIKI_HOME}/update_cached_sources.py \ - --local-cache=${SPACK_CACHE_SOURCE} \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/source_cache \ - /tmp/compiler_missing_paths_sources.dat - # push previously missing (but now installed) packages to the local cache - spack buildcache create --unsigned ${SPACK_CACHE_BUILD} ${EBRAINS_SPACK_COMPILER} && ret=$? || ret=$? 
- # upload packages from local to remote cache - echo "Performing update of the build cache (for base compiler)" - python3 ${YASHCHIKI_HOME}/update_cached_buildresults.py \ - --local-cache=${SPACK_CACHE_BUILD}/build_cache \ - --remote-cache-type=oci \ - --remote-cache=${OCI_CACHE_PREFIX}/build_cache \ - /tmp/compiler_missing_paths_buildresults.dat - fi - spack load $EBRAINS_SPACK_COMPILER - } -fi - spack compiler find # create environment if it does not exist -- GitLab From a20260fdf5e0637144c24888cce288c547a36027 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 23:15:30 +0000 Subject: [PATCH 085/111] feat: update Spack to v0.23.1 --- .gitlab-ci.yml | 2 +- README.md | 4 ++-- vendor/spack | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 80b8a4d9..17476bd7 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -4,7 +4,7 @@ stages: variables: BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:devel - SPACK_PATH_GITLAB: /mnt/spack_v0.23.0 + SPACK_PATH_GITLAB: /mnt/spack_v0.23.1 SYSTEMNAME: ebrainslab GIT_SUBMODULE_STRATEGY: recursive GIT_CLEAN_FLAGS: -ffdxq diff --git a/README.md b/README.md index 36bce891..cc0ed81e 100644 --- a/README.md +++ b/README.md @@ -38,9 +38,9 @@ Clone this repository. You can use the `ebrains-yy-mm` branches to install the E git clone --branch {branch-name} https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git ``` -Clone Spack. We currently use version v0.23.0: +Clone Spack. 
We currently use version v0.23.1: ``` -git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.23.0 https://github.com/spack/spack +git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch v0.23.1 https://github.com/spack/spack ``` Activate Spack: diff --git a/vendor/spack b/vendor/spack index 65abf4d1..712b36d5 160000 --- a/vendor/spack +++ b/vendor/spack @@ -1 +1 @@ -Subproject commit 65abf4d14071280c6d4a183e20c0f6991ed49986 +Subproject commit 712b36d5963a179615bf72c48e90acff3cf4f6b9 -- GitLab From 6125b2c7e95b9f44a66c583707911b7d1127b427 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 20:26:53 +0000 Subject: [PATCH 086/111] feat(python): copy package from upstream --- .../python/cpython-windows-externals.patch | 28 + packages/python/fj-rpath-3.1.patch | 13 + packages/python/fj-rpath-3.9.patch | 11 + packages/python/intel-3.7.patch | 38 + packages/python/package.py | 1318 +++++++++++++++++ .../python/python-3.11-distutils-C++.patch | 257 ++++ .../python/python-3.7.2-distutils-C++.patch | 241 +++ .../python/python-3.7.3-distutils-C++.patch | 256 ++++ ...ython-3.7.4+-distutils-C++-testsuite.patch | 138 ++ .../python/python-3.7.4+-distutils-C++.patch | 257 ++++ packages/python/rpath-non-gcc.patch | 15 + packages/python/tkinter-3.10.patch | 11 + packages/python/tkinter-3.11.patch | 25 + packages/python/tkinter-3.7.patch | 17 + packages/python/tkinter-3.8.patch | 12 + 15 files changed, 2637 insertions(+) create mode 100644 packages/python/cpython-windows-externals.patch create mode 100644 packages/python/fj-rpath-3.1.patch create mode 100644 packages/python/fj-rpath-3.9.patch create mode 100644 packages/python/intel-3.7.patch create mode 100644 packages/python/package.py create mode 100644 packages/python/python-3.11-distutils-C++.patch create mode 100644 packages/python/python-3.7.2-distutils-C++.patch create mode 100644 
packages/python/python-3.7.3-distutils-C++.patch create mode 100644 packages/python/python-3.7.4+-distutils-C++-testsuite.patch create mode 100644 packages/python/python-3.7.4+-distutils-C++.patch create mode 100644 packages/python/rpath-non-gcc.patch create mode 100644 packages/python/tkinter-3.10.patch create mode 100644 packages/python/tkinter-3.11.patch create mode 100644 packages/python/tkinter-3.7.patch create mode 100644 packages/python/tkinter-3.8.patch diff --git a/packages/python/cpython-windows-externals.patch b/packages/python/cpython-windows-externals.patch new file mode 100644 index 00000000..c3bcce98 --- /dev/null +++ b/packages/python/cpython-windows-externals.patch @@ -0,0 +1,28 @@ +diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat +index b5a44e3..52941c7 100644 +--- a/PCbuild/get_externals.bat ++++ b/PCbuild/get_externals.bat +@@ -76,7 +76,7 @@ for %%e in (%libraries%) do ( + echo.Fetching external binaries... + + set binaries= +-if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi ++if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi-3.3.0 + if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1k-1 + if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.9.0 + if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 +diff --git a/PCbuild/python.props b/PCbuild/python.props +index 419d5eb..c66fb07 100644 +--- a/PCbuild/python.props ++++ b/PCbuild/python.props +@@ -59,8 +59,8 @@ + <sqlite3Dir>$(ExternalsDir)sqlite-3.35.5.0\</sqlite3Dir> + <bz2Dir>$(ExternalsDir)bzip2-1.0.6\</bz2Dir> + <lzmaDir>$(ExternalsDir)xz-5.2.2\</lzmaDir> +- <libffiDir>$(ExternalsDir)libffi\</libffiDir> +- <libffiOutDir>$(ExternalsDir)libffi\$(ArchName)\</libffiOutDir> ++ <libffiDir>$(ExternalsDir)libffi-3.3.0\</libffiDir> ++ <libffiOutDir>$(ExternalsDir)libffi-3.3.0\$(ArchName)\</libffiOutDir> + <libffiIncludeDir>$(libffiOutDir)include</libffiIncludeDir> + 
<opensslDir>$(ExternalsDir)openssl-1.1.1k\</opensslDir> + <opensslOutDir>$(ExternalsDir)openssl-bin-1.1.1k-1\$(ArchName)\</opensslOutDir> diff --git a/packages/python/fj-rpath-3.1.patch b/packages/python/fj-rpath-3.1.patch new file mode 100644 index 00000000..d25b58da --- /dev/null +++ b/packages/python/fj-rpath-3.1.patch @@ -0,0 +1,13 @@ +--- a/Lib/distutils/unixccompiler.py 2009-05-09 21:55:12.000000000 +1000 ++++ b/Lib/distutils/unixccompiler.py 2017-05-13 14:30:18.077518999 +1000 +@@ -215,7 +211,8 @@ + return "-L" + dir + + def _is_gcc(self, compiler_name): +- return "gcc" in compiler_name or "g++" in compiler_name ++ return "gcc" in compiler_name or "g++" in compiler_name \ ++ or "fcc" in compiler_name or "FCC" in compiler_name + + def runtime_library_dir_option(self, dir): + # XXX Hackish, at the very least. See Python bug #445902: + diff --git a/packages/python/fj-rpath-3.9.patch b/packages/python/fj-rpath-3.9.patch new file mode 100644 index 00000000..1542b367 --- /dev/null +++ b/packages/python/fj-rpath-3.9.patch @@ -0,0 +1,11 @@ +--- spack-src/Lib/distutils/unixccompiler.py.org 2022-01-31 14:42:34.000000000 +0900 ++++ spack-src/Lib/distutils/unixccompiler.py 2022-01-31 14:43:19.000000000 +0900 +@@ -212,7 +212,7 @@ + + def _is_gcc(self, compiler_name): + # clang uses same syntax for rpath as gcc +- return any(name in compiler_name for name in ("gcc", "g++", "clang")) ++ return any(name in compiler_name for name in ("gcc", "g++", "clang", "fcc", "FCC")) + + def runtime_library_dir_option(self, dir): + # XXX Hackish, at the very least. 
See Python bug #445902: diff --git a/packages/python/intel-3.7.patch b/packages/python/intel-3.7.patch new file mode 100644 index 00000000..f2277624 --- /dev/null +++ b/packages/python/intel-3.7.patch @@ -0,0 +1,38 @@ +From 87ed388f41d761ddddc8447e5104569f2436c005 Mon Sep 17 00:00:00 2001 +From: Victor Stinner <vstinner@python.org> +Date: Fri, 11 Oct 2019 15:13:51 +0200 +Subject: [PATCH] bpo-37415: Fix stdatomic.h header check for ICC compiler + +Fix stdatomic.h header check for ICC compiler: the ICC implementation +lacks atomic_uintptr_t type which is needed by Python. + +Test: + +* atomic_int and atomic_uintptr_t types +* atomic_load_explicit() and atomic_store_explicit() +* memory_order_relaxed and memory_order_seq_cst constants + +But don't test ATOMIC_VAR_INIT(): it's not used in Python. +--- + configure | 7 +++++-- + 1 file changed, 5 insertions(+), 2 deletions(-) + +diff --git a/configure b/configure +index f1979c1b8124c..1b30a848a77e7 100755 +--- a/configure ++++ b/configure +@@ -16734,9 +16722,12 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext + + + #include <stdatomic.h> +- atomic_int value = ATOMIC_VAR_INIT(1); ++ atomic_int int_var; ++ atomic_uintptr_t uintptr_var; + int main() { +- int loaded_value = atomic_load(&value); ++ atomic_store_explicit(&int_var, 5, memory_order_relaxed); ++ atomic_store_explicit(&uintptr_var, 0, memory_order_relaxed); ++ int loaded_value = atomic_load_explicit(&int_var, memory_order_seq_cst); + return 0; + } + diff --git a/packages/python/package.py b/packages/python/package.py new file mode 100644 index 00000000..3bfdc240 --- /dev/null +++ b/packages/python/package.py @@ -0,0 +1,1318 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import glob +import json +import os +import platform +import re +import subprocess +import sys +from shutil import copy +from typing import Dict, List + +import llnl.util.tty as tty +from llnl.util.lang import dedupe + +from spack.build_environment import dso_suffix, stat_suffix +from spack.package import * +from spack.util.prefix import Prefix + + +def make_pyvenv_cfg(python_spec: "spack.spec.Spec", venv_prefix: str) -> str: + """Make a pyvenv_cfg file for a given (real) python command and venv prefix.""" + python_cmd = python_spec.command.path + lines = [ + # directory containing python command + f"home = {os.path.dirname(python_cmd)}", + # venv should not allow site packages from the real python to be loaded + "include-system-site-packages = false", + # version of the python command + f"version = {python_spec.version}", + # the path to the python command + f"executable = {python_cmd}", + # command "used" to create the pyvenv.cfg + f"command = {python_cmd} -m venv --without-pip {venv_prefix}", + ] + + return "\n".join(lines) + "\n" + + +class Python(Package): + """The Python programming language.""" + + homepage = "https://www.python.org/" + url = "https://www.python.org/ftp/python/3.8.0/Python-3.8.0.tgz" + list_url = "https://www.python.org/ftp/python/" + list_depth = 1 + tags = ["windows"] + + maintainers("skosukhin", "scheibelp") + + phases = ["configure", "build", "install"] + + #: phase + install_targets = ["install"] + build_targets: List[str] = [] + + license("0BSD") + + version("3.13.0", sha256="12445c7b3db3126c41190bfdc1c8239c39c719404e844babbd015a1bc3fafcd4") + version("3.12.5", sha256="38dc4e2c261d49c661196066edbfb70fdb16be4a79cc8220c224dfeb5636d405") + version("3.12.4", sha256="01b3c1c082196f3b33168d344a9c85fb07bfe0e7ecfe77fee4443420d1ce2ad9") + version("3.12.3", sha256="a6b9459f45a6ebbbc1af44f5762623fa355a0c87208ed417628b379d762dddb0") + version("3.12.2", 
sha256="a7c4f6a9dc423d8c328003254ab0c9338b83037bd787d680826a5bf84308116e") + version("3.12.1", sha256="d01ec6a33bc10009b09c17da95cc2759af5a580a7316b3a446eb4190e13f97b2") + version("3.12.0", sha256="51412956d24a1ef7c97f1cb5f70e185c13e3de1f50d131c0aac6338080687afb") + version("3.11.9", sha256="e7de3240a8bc2b1e1ba5c81bf943f06861ff494b69fda990ce2722a504c6153d") + version("3.11.8", sha256="d3019a613b9e8761d260d9ebe3bd4df63976de30464e5c0189566e1ae3f61889") + version("3.11.7", sha256="068c05f82262e57641bd93458dfa883128858f5f4997aad7a36fd25b13b29209") + version("3.11.6", sha256="c049bf317e877cbf9fce8c3af902436774ecef5249a29d10984ca3a37f7f4736") + version("3.11.5", sha256="a12a0a013a30b846c786c010f2c19dd36b7298d888f7c4bd1581d90ce18b5e58") + version("3.11.4", sha256="85c37a265e5c9dd9f75b35f954e31fbfc10383162417285e30ad25cc073a0d63") + version("3.11.3", sha256="1a79f3df32265d9e6625f1a0b31c28eb1594df911403d11f3320ee1da1b3e048") + version("3.11.2", sha256="2411c74bda5bbcfcddaf4531f66d1adc73f247f529aee981b029513aefdbf849") + version("3.11.1", sha256="baed518e26b337d4d8105679caf68c5c32630d702614fc174e98cb95c46bdfa4") + version("3.11.0", sha256="64424e96e2457abbac899b90f9530985b51eef2905951febd935f0e73414caeb") + version("3.10.14", sha256="cefea32d3be89c02436711c95a45c7f8e880105514b78680c14fe76f5709a0f6") + version("3.10.13", sha256="698ec55234c1363bd813b460ed53b0f108877c7a133d48bde9a50a1eb57b7e65") + version("3.10.12", sha256="a43cd383f3999a6f4a7db2062b2fc9594fefa73e175b3aedafa295a51a7bb65c") + version("3.10.11", sha256="f3db31b668efa983508bd67b5712898aa4247899a346f2eb745734699ccd3859") + version("3.10.10", sha256="fba64559dde21ebdc953e4565e731573bb61159de8e4d4cedee70fb1196f610d") + version("3.10.9", sha256="4ccd7e46c8898f4c7862910a1703aa0e63525913a519abb2f55e26220a914d88") + version("3.10.8", sha256="f400c3fb394b8bef1292f6dc1292c5fadc3533039a5bc0c3e885f3e16738029a") + version("3.10.7", sha256="1b2e4e2df697c52d36731666979e648beeda5941d0f95740aafbf4163e5cc126") + version("3.10.6", 
sha256="848cb06a5caa85da5c45bd7a9221bb821e33fc2bdcba088c127c58fad44e6343") + version("3.10.5", sha256="18f57182a2de3b0be76dfc39fdcfd28156bb6dd23e5f08696f7492e9e3d0bf2d") + version("3.10.4", sha256="f3bcc65b1d5f1dc78675c746c98fcee823c038168fc629c5935b044d0911ad28") + version("3.10.3", sha256="5a3b029bad70ba2a019ebff08a65060a8b9b542ffc1a83c697f1449ecca9813b") + version("3.10.2", sha256="3c0ede893011319f9b0a56b44953a3d52c7abf9657c23fb4bc9ced93b86e9c97") + version("3.10.1", sha256="b76117670e7c5064344b9c138e141a377e686b9063f3a8a620ff674fa8ec90d3") + version("3.10.0", sha256="c4e0cbad57c90690cb813fb4663ef670b4d0f587d8171e2c42bd4c9245bd2758") + version("3.9.19", sha256="f5f9ec8088abca9e399c3b62fd8ef31dbd2e1472c0ccb35070d4d136821aaf71") + version("3.9.18", sha256="504ce8cfd59addc04c22f590377c6be454ae7406cb1ebf6f5a350149225a9354") + version("3.9.17", sha256="8ead58f669f7e19d777c3556b62fae29a81d7f06a7122ff9bc57f7dd82d7e014") + version("3.9.16", sha256="1ad539e9dbd2b42df714b69726e0693bc6b9d2d2c8e91c2e43204026605140c5") + version("3.9.15", sha256="48d1ccb29d5fbaf1fb8f912271d09f7450e426d4dfe95978ef6aaada70ece4d8") + version("3.9.14", sha256="9201836e2c16361b2b7408680502393737d44f227333fe2e5729c7d5f6041675") + version("3.9.13", sha256="829b0d26072a44689a6b0810f5b4a3933ee2a0b8a4bfc99d7c5893ffd4f97c44") + version("3.9.12", sha256="70e08462ebf265012bd2be88a63d2149d880c73e53f1712b7bbbe93750560ae8") + version("3.9.11", sha256="3442400072f582ac2f0df30895558f08883b416c8c7877ea55d40d00d8a93112") + version("3.9.10", sha256="1aa9c0702edbae8f6a2c95f70a49da8420aaa76b7889d3419c186bfc8c0e571e") + version("3.9.9", sha256="2cc7b67c1f3f66c571acc42479cdf691d8ed6b47bee12c9b68430413a17a44ea") + version("3.9.8", sha256="7447fb8bb270942d620dd24faa7814b1383b61fa99029a240025fd81c1db8283") + version("3.9.7", sha256="a838d3f9360d157040142b715db34f0218e535333696a5569dc6f854604eb9d1") + version("3.9.6", sha256="d0a35182e19e416fc8eae25a3dcd4d02d4997333e4ad1f2eee6010aadc3fe866") + version("3.9.5", 
sha256="e0fbd5b6e1ee242524430dee3c91baf4cbbaba4a72dd1674b90fda87b713c7ab") + version("3.9.4", sha256="66c4de16daa74a825cf9da9ddae1fe020b72c3854b73b1762011cc33f9e4592f") + version("3.9.3", sha256="3afeb61a45b5a2e6f1c0f621bd8cf925a4ff406099fdb3d8c97b993a5f43d048") + version("3.9.2", sha256="7899e8a6f7946748830d66739f2d8f2b30214dad956e56b9ba216b3de5581519") + version("3.9.1", sha256="29cb91ba038346da0bd9ab84a0a55a845d872c341a4da6879f462e94c741f117") + version("3.9.0", sha256="df796b2dc8ef085edae2597a41c1c0a63625ebd92487adaef2fed22b567873e8") + version("3.8.19", sha256="c7fa55a36e5c7a19ec37d8f90f60a2197548908c9ac8b31e7c0dbffdd470eeac") + version("3.8.18", sha256="7c5df68bab1be81a52dea0cc2e2705ea00553b67107a301188383d7b57320b16") + version("3.8.17", sha256="def428fa6cf61b66bcde72e3d9f7d07d33b2e4226f04f9d6fce8384c055113ae") + version("3.8.16", sha256="71ca9d935637ed2feb59e90a368361dc91eca472a90acb1d344a2e8178ccaf10") + version("3.8.15", sha256="924d46999df82aa2eaa1de5ca51d6800ffb56b4bf52486a28f40634e3362abc4") + version("3.8.14", sha256="41f959c480c59211feb55d5a28851a56c7e22d02ef91035606ebb21011723c31") + version("3.8.13", sha256="903b92d76354366b1d9c4434d0c81643345cef87c1600adfa36095d7b00eede4") + version("3.8.12", sha256="316aa33f3b7707d041e73f246efedb297a70898c4b91f127f66dc8d80c596f1a") + version("3.8.11", sha256="b77464ea80cec14581b86aeb7fb2ff02830e0abc7bcdc752b7b4bdfcd8f3e393") + version("3.8.10", sha256="b37ac74d2cbad2590e7cd0dd2b3826c29afe89a734090a87bf8c03c45066cb65") + version("3.8.9", sha256="9779ec1df000bf86914cdd40860b88da56c1e61db59d37784beca14a259ac9e9") + version("3.8.8", sha256="76c0763f048e4f9b861d24da76b7dd5c7a3ba7ec086f40caedeea359263276f7") + version("3.8.7", sha256="20e5a04262f0af2eb9c19240d7ec368f385788bba2d8dfba7e74b20bab4d2bac") + version("3.8.6", sha256="313562ee9986dc369cd678011bdfd9800ef62fbf7b1496228a18f86b36428c21") + version("3.8.5", sha256="015115023c382eb6ab83d512762fe3c5502fa0c6c52ffebc4831c4e1a06ffc49") + version("3.8.4", 
sha256="32c4d9817ef11793da4d0d95b3191c4db81d2e45544614e8449255ca9ae3cc18") + version("3.8.3", sha256="6af6d4d2e010f9655518d0fc6738c7ff7069f10a4d2fbd55509e467f092a8b90") + version("3.8.2", sha256="e634a7a74776c2b89516b2e013dda1728c89c8149b9863b8cea21946daf9d561") + version("3.8.1", sha256="c7cfa39a43b994621b245e029769e9126caa2a93571cee2e743b213cceac35fb") + version("3.8.0", sha256="f1069ad3cae8e7ec467aa98a6565a62a48ef196cb8f1455a245a08db5e1792df") + version( + "3.7.17", + sha256="fd50161bc2a04f4c22a0971ff0f3856d98b4bf294f89740a9f06b520aae63b49", + deprecated=True, + ) + version( + "3.7.16", + sha256="0cf2da07fa464636755215415909e22eb1d058817af4824bc15af8390d05fb38", + deprecated=True, + ) + version( + "3.7.15", + sha256="cf2993798ae8430f3af3a00d96d9fdf320719f4042f039380dca79967c25e436", + deprecated=True, + ) + version( + "3.7.14", + sha256="82b2abf8978caa61a9011d166eede831b32de9cbebc0db8162900fa23437b709", + deprecated=True, + ) + version( + "3.7.13", + sha256="e405417f50984bc5870c7e7a9f9aeb93e9d270f5ac67f667a0cd3a09439682b5", + deprecated=True, + ) + version( + "3.7.12", + sha256="33b4daaf831be19219659466d12645f87ecec6eb21d4d9f9711018a7b66cce46", + deprecated=True, + ) + version( + "3.7.11", + sha256="b4fba32182e16485d0a6022ba83c9251e6a1c14676ec243a9a07d3722cd4661a", + deprecated=True, + ) + version( + "3.7.10", + sha256="c9649ad84dc3a434c8637df6963100b2e5608697f9ba56d82e3809e4148e0975", + deprecated=True, + ) + version( + "3.7.9", + sha256="39b018bc7d8a165e59aa827d9ae45c45901739b0bbb13721e4f973f3521c166a", + deprecated=True, + ) + version( + "3.7.8", + sha256="0e25835614dc221e3ecea5831b38fa90788b5389b99b675a751414c858789ab0", + deprecated=True, + ) + version( + "3.7.7", + sha256="8c8be91cd2648a1a0c251f04ea0bb4c2a5570feb9c45eaaa2241c785585b475a", + deprecated=True, + ) + version( + "3.7.6", + sha256="aeee681c235ad336af116f08ab6563361a0c81c537072c1b309d6e4050aa2114", + deprecated=True, + ) + version( + "3.7.5", + 
sha256="8ecc681ea0600bbfb366f2b173f727b205bb825d93d2f0b286bc4e58d37693da", + deprecated=True, + ) + version( + "3.7.4", + sha256="d63e63e14e6d29e17490abbe6f7d17afb3db182dbd801229f14e55f4157c4ba3", + deprecated=True, + ) + version( + "3.7.3", + sha256="d62e3015f2f89c970ac52343976b406694931742fbde2fed8d1ce8ebb4e1f8ff", + deprecated=True, + ) + version( + "3.7.2", + sha256="f09d83c773b9cc72421abba2c317e4e6e05d919f9bcf34468e192b6a6c8e328d", + deprecated=True, + ) + version( + "3.7.1", + sha256="36c1b81ac29d0f8341f727ef40864d99d8206897be96be73dc34d4739c9c9f06", + deprecated=True, + ) + version( + "3.7.0", + sha256="85bb9feb6863e04fb1700b018d9d42d1caac178559ffa453d7e6a436e259fd0d", + deprecated=True, + ) + + depends_on("c", type="build") # generated + depends_on("cxx", type="build") # generated + + extendable = True + + # Variants to avoid cyclical dependencies for concretizer + variant("libxml2", default=True, description="Use a gettext library build with libxml2") + + variant( + "debug", default=False, description="debug build with extra checks (this is high overhead)" + ) + + variant("shared", default=True, description="Enable shared libraries") + variant("pic", default=True, description="Produce position-independent code (for shared libs)") + variant( + "optimizations", + default=False, + description="Enable expensive build-time optimizations, if available", + ) + # See https://legacy.python.org/dev/peps/pep-0394/ + variant( + "pythoncmd", + default=sys.platform != "win32", + description="Symlink 'python3' executable to 'python' (not PEP 394 compliant)", + ) + + # Optional Python modules + variant("readline", default=sys.platform != "win32", description="Build readline module") + variant("ssl", default=True, description="Build ssl module") + variant("sqlite3", default=True, description="Build sqlite3 module") + variant("dbm", default=True, description="Build dbm module") + variant("nis", default=False, description="Build nis module") + variant("zlib", default=True, 
description="Build zlib module") + variant("bz2", default=True, description="Build bz2 module") + variant("lzma", default=True, description="Build lzma module") + variant("pyexpat", default=True, description="Build pyexpat module") + variant("ctypes", default=True, description="Build ctypes module") + variant("tkinter", default=False, description="Build tkinter module") + variant("uuid", default=True, description="Build uuid module") + variant("tix", default=False, description="Build Tix module", when="+tkinter") + variant("crypt", default=True, description="Build crypt module", when="@:3.12 platform=linux") + variant("crypt", default=True, description="Build crypt module", when="@:3.12 platform=darwin") + + if sys.platform != "win32": + depends_on("gmake", type="build") + depends_on("pkgconfig@0.9.0:", type="build") + depends_on("gettext +libxml2", when="+libxml2") + depends_on("gettext ~libxml2", when="~libxml2") + + # Optional dependencies + # See detect_modules() in setup.py for details + depends_on("readline", when="+readline") + depends_on("ncurses", when="+readline") + depends_on("openssl", when="+ssl") + # https://docs.python.org/3/whatsnew/3.7.html#build-changes + depends_on("openssl@1.0.2:", when="+ssl") + # https://docs.python.org/3.10/whatsnew/3.10.html#build-changes + depends_on("openssl@1.1.1:", when="@3.10:+ssl") + depends_on("sqlite@3.0.8:", when="@:3.9+sqlite3") + # https://docs.python.org/3.10/whatsnew/3.10.html#build-changes + depends_on("sqlite@3.7.15:", when="@3.10:+sqlite3") + depends_on("gdbm", when="+dbm") # alternatively ndbm or berkeley-db + depends_on("libnsl", when="+nis") + depends_on("zlib-api", when="+zlib") + depends_on("bzip2", when="+bz2") + depends_on("xz libs=shared", when="+lzma") + depends_on("expat", when="+pyexpat") + depends_on("libffi", when="+ctypes") + # https://docs.python.org/3/whatsnew/3.11.html#build-changes + depends_on("tk@8.5.12:", when="@3.11: +tkinter") + depends_on("tk", when="+tkinter") + 
depends_on("tcl@8.5.12:", when="@3.11: +tkinter") + depends_on("tcl", when="+tkinter") + depends_on("uuid", when="+uuid") + depends_on("tix", when="+tix") + depends_on("libxcrypt", when="+crypt") + + # Python needs to be patched to build extensions w/ mixed C/C++ code: + # https://github.com/NixOS/nixpkgs/pull/19585/files + # https://bugs.python.org/issue1222585 + # + # NOTE: This patch puts Spack's default Python installation out of + # sync with standard Python installs. If you're using such an + # installation as an external and encountering build issues with mixed + # C/C++ modules, consider installing a Spack-managed Python with + # this patch instead. For more information, see: + # https://github.com/spack/spack/pull/16856 + patch("python-3.7.2-distutils-C++.patch", when="@3.7.2") + patch("python-3.7.3-distutils-C++.patch", when="@3.7.3") + patch("python-3.7.4+-distutils-C++.patch", when="@3.7.4:3.10") + patch("python-3.7.4+-distutils-C++-testsuite.patch", when="@3.7.4:3.11") + patch("python-3.11-distutils-C++.patch", when="@3.11.0:3.11") + patch("cpython-windows-externals.patch", when="@:3.9.6 platform=windows") + patch("tkinter-3.7.patch", when="@3.7 platform=darwin") + # Patch the setup script to deny that tcl/x11 exists rather than allowing + # autodetection of (possibly broken) system components + patch("tkinter-3.8.patch", when="@3.8:3.9 ~tkinter") + patch("tkinter-3.10.patch", when="@3.10.0:3.10 ~tkinter") + patch("tkinter-3.11.patch", when="@3.11.0:3.11 ~tkinter") + + # Ensure that distutils chooses correct compiler option for RPATH: + patch("rpath-non-gcc.patch", when="@:3.11") + + # Ensure that distutils chooses correct compiler option for RPATH on fj: + patch("fj-rpath-3.1.patch", when="@:3.9.7,3.10.0 %fj") + patch("fj-rpath-3.9.patch", when="@3.9.8:3.9,3.10.1:3.11 %fj") + + # Fixes build with the Intel compilers + # https://github.com/python/cpython/pull/16717 + patch("intel-3.7.patch", when="@3.7.1:3.7.5 %intel") + + # CPython tries to build an 
Objective-C file with GCC's C frontend + # https://github.com/spack/spack/pull/16222 + # https://github.com/python/cpython/pull/13306 + conflicts( + "%gcc platform=darwin", + msg="CPython does not compile with GCC on macOS yet, use clang. " + "See: https://github.com/python/cpython/pull/13306", + ) + conflicts("%nvhpc") + + # https://bugs.python.org/issue45405 + conflicts("@:3.7.12,3.8.0:3.8.12,3.9.0:3.9.7,3.10.0", when="%apple-clang@13:") + + # See https://github.com/python/cpython/issues/106424 + # datetime.now(timezone.utc) segfaults + conflicts("@3.9:", when="%oneapi@2022.2.1:2023") + + # Used to cache various attributes that are expensive to compute + _config_vars: Dict[str, Dict[str, str]] = {} + + # An in-source build with --enable-optimizations fails for python@3.X + build_directory = "spack-build" + + executables = [r"^python\d?$"] + + @classmethod + def determine_version(cls, exe): + # Newer versions of Python support `--version`, + # but older versions only support `-V` + # Output looks like: + # Python 3.7.7 + # On pre-production Ubuntu, this is also possible: + # Python 3.10.2+ + output = Executable(exe)("-V", output=str, error=str) + match = re.search(r"Python\s+([A-Za-z0-9_.-]+)", output) + return match.group(1) if match else None + + @classmethod + def determine_variants(cls, exes, version_str): + python = Executable(exes[0]) + + variants = "" + for exe in exes: + if os.path.basename(exe) == "python": + variants += "+pythoncmd" + break + else: + variants += "~pythoncmd" + + for module in [ + "readline", + "sqlite3", + "dbm", + "nis", + "zlib", + "bz2", + "lzma", + "ctypes", + "tkinter", + "uuid", + ]: + try: + python("-c", "import " + module, error=os.devnull) + variants += "+" + module + except ProcessError: + variants += "~" + module + + # Some variants enable multiple modules + try: + python("-c", "import ssl", error=os.devnull) + python("-c", "import hashlib", error=os.devnull) + variants += "+ssl" + except ProcessError: + variants += "~ssl" + + 
try: + python("-c", "import xml.parsers.expat", error=os.devnull) + python("-c", "import xml.etree.ElementTree", error=os.devnull) + variants += "+pyexpat" + except ProcessError: + variants += "~pyexpat" + + # Some variant names do not match module names + if "+tkinter" in variants: + try: + python("-c", "import tkinter.tix", error=os.devnull) + variants += "+tix" + except ProcessError: + variants += "~tix" + + # Some modules are platform-dependent + if sys.platform != "win32": + try: + python("-c", "import crypt", error=os.devnull) + variants += "+crypt" + except ProcessError: + variants += "~crypt" + + return variants + + def url_for_version(self, version): + url = "https://www.python.org/ftp/python/{0}/Python-{1}.tgz" + return url.format(re.split("[a-z]", str(version))[0], version) + + def patch(self): + # NOTE: Python's default installation procedure makes it possible for a + # user's local configurations to change the Spack installation. In + # order to prevent this behavior for a full installation, we must + # modify the installation script so that it ignores user files. + ff = FileFilter("Makefile.pre.in") + ff.filter( + r"^(.*)setup\.py(.*)((build)|(install))(.*)$", r"\1setup.py\2 --no-user-cfg \3\6" + ) + + def setup_build_environment(self, env): + spec = self.spec + + # TODO: Python has incomplete support for Python modules with mixed + # C/C++ source, and patches are required to enable building for these + # modules. All Python versions without a viable patch are installed + # with a warning message about this potentially erroneous behavior. + if not spec.satisfies("@3.7.2:"): + tty.warn( + ( + 'Python v{0} does not have the C++ "distutils" patch; ' + "errors may occur when installing Python modules w/ " + "mixed C/C++ source files." 
+ ).format(self.version) + ) + + env.unset("PYTHONPATH") + env.unset("PYTHONHOME") + + # avoid build error on fugaku + if spec.satisfies("@3.10.0 arch=linux-rhel8-a64fx"): + if spec.satisfies("%gcc") or spec.satisfies("%fj"): + env.unset("LC_ALL") + + # https://github.com/python/cpython/issues/87275 + if spec.satisfies("@:3.9.5 +optimizations %apple-clang"): + xcrun = Executable("/usr/bin/xcrun") + env.set("LLVM_AR", xcrun("-find", "ar", output=str).strip()) + + def flag_handler(self, name, flags): + # python 3.8 requires -fwrapv when compiled with intel + if self.spec.satisfies("@3.8: %intel"): + if name == "cflags": + flags.append("-fwrapv") + + # Fix for following issues for python with aocc%3.2.0: + # https://github.com/spack/spack/issues/29115 + # https://github.com/spack/spack/pull/28708 + if self.spec.satisfies("%aocc@3.2.0"): + if name == "cflags": + flags.extend(["-mllvm", "-disable-indvar-simplify=true"]) + + # allow flags to be passed through compiler wrapper + return (flags, None, None) + + @property + def plat_arch(self): + """ + String referencing platform architecture + filtered through Python's Windows build file + architecture support map + + Note: This function really only makes + sense to use on Windows, could be overridden to + cross compile however. + """ + + arch_map = {"AMD64": "x64", "x86": "Win32", "IA64": "Win32", "EM64T": "Win32"} + arch = platform.machine() + if arch in arch_map: + arch = arch_map[arch] + return arch + + @property + def win_build_params(self): + """ + Arguments must be passed to the Python build batch script + in order to configure it to spec and system. + A number of these toggle optional MSBuild Projects + directly corresponding to the python support of the same + name. 
+ """ + args = [] + args.append("-p %s" % self.plat_arch) + if self.spec.satisfies("+debug"): + args.append("-d") + if self.spec.satisfies("~ctypes"): + args.append("--no-ctypes") + if self.spec.satisfies("~ssl"): + args.append("--no-ssl") + if self.spec.satisfies("~tkinter"): + args.append("--no-tkinter") + return args + + def win_installer(self, prefix): + """ + Python on Windows does not export an install target + so we must handcraft one here. This structure + directly mimics the install tree of the Python + Installer on Windows. + + Parameters: + prefix (str): Install prefix for package + """ + proj_root = self.stage.source_path + pcbuild_root = os.path.join(proj_root, "PCbuild") + build_root = os.path.join(pcbuild_root, platform.machine().lower()) + include_dir = os.path.join(proj_root, "Include") + copy_tree(include_dir, prefix.include) + doc_dir = os.path.join(proj_root, "Doc") + copy_tree(doc_dir, prefix.Doc) + tools_dir = os.path.join(proj_root, "Tools") + copy_tree(tools_dir, prefix.Tools) + lib_dir = os.path.join(proj_root, "Lib") + copy_tree(lib_dir, prefix.Lib) + pyconfig = os.path.join(proj_root, "PC", "pyconfig.h") + copy(pyconfig, prefix.include) + shared_libraries = [] + shared_libraries.extend(glob.glob("%s\\*.exe" % build_root)) + shared_libraries.extend(glob.glob("%s\\*.dll" % build_root)) + shared_libraries.extend(glob.glob("%s\\*.pyd" % build_root)) + os.makedirs(prefix.DLLs) + for lib in shared_libraries: + file_name = os.path.basename(lib) + if ( + file_name.endswith(".exe") + or (file_name.endswith(".dll") and "python" in file_name) + or "vcruntime" in file_name + ): + copy(lib, prefix) + else: + copy(lib, prefix.DLLs) + static_libraries = glob.glob("%s\\*.lib" % build_root) + os.makedirs(prefix.libs, exist_ok=True) + for lib in static_libraries: + copy(lib, prefix.libs) + + def configure_args(self): + spec = self.spec + config_args = [] + cflags = [] + + # setup.py needs to be able to read the CPPFLAGS and LDFLAGS + # as it scans for the 
library and headers to build + link_deps = spec.dependencies(deptype="link") + + if link_deps: + # Header files are often included assuming they reside in a + # subdirectory of prefix.include, e.g. #include <openssl/ssl.h>, + # which is why we don't use HeaderList here. The header files of + # libffi reside in prefix.lib but the configure script of Python + # finds them using pkg-config. + cppflags = " ".join("-I" + spec[dep.name].prefix.include for dep in link_deps) + + # Currently, the only way to get SpecBuildInterface wrappers of the + # dependencies (which we need to get their 'libs') is to get them + # using spec.__getitem__. + ldflags = " ".join(spec[dep.name].libs.search_flags for dep in link_deps) + + config_args.extend(["CPPFLAGS=" + cppflags, "LDFLAGS=" + ldflags]) + + if "+optimizations" in spec: + config_args.append("--enable-optimizations") + # Prefer thin LTO for faster compilation times. + if "@3.11.0: %clang@3.9:" in spec or "@3.11.0: %apple-clang@8:" in spec: + config_args.append("--with-lto=thin") + else: + config_args.append("--with-lto") + config_args.append("--with-computed-gotos") + + if spec.satisfies("@3.7 %intel"): + config_args.append("--with-icc={0}".format(spack_cc)) + + if "+debug" in spec: + config_args.append("--with-pydebug") + else: + config_args.append("--without-pydebug") + + if "+shared" in spec: + config_args.append("--enable-shared") + else: + config_args.append("--disable-shared") + + config_args.append("--without-ensurepip") + + if "+pic" in spec: + cflags.append(self.compiler.cc_pic_flag) + + if "+ssl" in spec: + config_args.append("--with-openssl={0}".format(spec["openssl"].prefix)) + + if "+dbm" in spec: + # Default order is ndbm:gdbm:bdb + config_args.append("--with-dbmliborder=gdbm") + else: + config_args.append("--with-dbmliborder=") + + if "+pyexpat" in spec: + config_args.append("--with-system-expat") + else: + config_args.append("--without-system-expat") + + if self.version < Version("3.12.0"): + if "+ctypes" in 
spec: + config_args.append("--with-system-ffi") + else: + config_args.append("--without-system-ffi") + + if "+tkinter" in spec: + config_args.extend( + [ + "--with-tcltk-includes=-I{0} -I{1}".format( + spec["tcl"].prefix.include, spec["tk"].prefix.include + ), + "--with-tcltk-libs={0} {1}".format( + spec["tcl"].libs.ld_flags, spec["tk"].libs.ld_flags + ), + ] + ) + + # https://docs.python.org/3.8/library/sqlite3.html#f1 + if spec.satisfies("+sqlite3 ^sqlite+dynamic_extensions"): + config_args.append("--enable-loadable-sqlite-extensions") + + if spec.satisfies("%oneapi"): + cflags.append("-fp-model=strict") + + if cflags: + config_args.append("CFLAGS={0}".format(" ".join(cflags))) + + if self.version >= Version("3.12.0") and sys.platform == "darwin": + config_args.append("CURSES_LIBS={0}".format(spec["ncurses"].libs.link_flags)) + + return config_args + + def configure(self, spec, prefix): + """Runs configure with the arguments specified in + :meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args` + and an appropriately set prefix. 
+ """ + with working_dir(self.stage.source_path, create=True): + if sys.platform == "win32": + pass + else: + options = getattr(self, "configure_flag_args", []) + options += ["--prefix={0}".format(prefix)] + options += self.configure_args() + configure(*options) + + def build(self, spec, prefix): + """Makes the build targets specified by + :py:attr:``~.AutotoolsPackage.build_targets`` + """ + # Windows builds use a batch script to drive + # configure and build in one step + with working_dir(self.stage.source_path): + if sys.platform == "win32": + pcbuild_root = os.path.join(self.stage.source_path, "PCbuild") + builder_cmd = os.path.join(pcbuild_root, "build.bat") + try: + subprocess.check_output( # novermin + " ".join([builder_cmd] + self.win_build_params), stderr=subprocess.STDOUT + ) + except subprocess.CalledProcessError as e: + raise ProcessError( + "Process exited with status %d" % e.returncode, + long_message=e.output.decode("utf-8"), + ) + else: + # See https://autotools.io/automake/silent.html + params = ["V=1"] + params += self.build_targets + make(*params) + + def install(self, spec, prefix): + """Makes the install targets specified by + :py:attr:``~.AutotoolsPackage.install_targets`` + """ + with working_dir(self.stage.source_path): + if sys.platform == "win32": + self.win_installer(prefix) + else: + # See https://github.com/python/cpython/issues/102007 + make(*self.install_targets, parallel=False) + + @run_after("install") + def filter_compilers(self): + """Run after install to tell the configuration files and Makefiles + to use the compilers that Spack built the package with. + + If this isn't done, they'll have CC and CXX set to Spack's generic + cc and c++. 
We want them to be bound to whatever compiler + they were built with.""" + if sys.platform == "win32": + return + kwargs = {"ignore_absent": True, "backup": False, "string": True} + + filenames = [self.get_sysconfigdata_name(), self.config_vars["makefile_filename"]] + + filter_file(spack_cc, self.compiler.cc, *filenames, **kwargs) + if spack_cxx and self.compiler.cxx: + filter_file(spack_cxx, self.compiler.cxx, *filenames, **kwargs) + + @run_after("install") + def symlink(self): + if sys.platform == "win32": + return + spec = self.spec + prefix = self.prefix + + if spec.satisfies("+pythoncmd"): + os.symlink(os.path.join(prefix.bin, "python3"), os.path.join(prefix.bin, "python")) + os.symlink( + os.path.join(prefix.bin, "python3-config"), + os.path.join(prefix.bin, "python-config"), + ) + + @run_after("install") + def install_python_gdb(self): + # https://devguide.python.org/gdb/ + src = os.path.join("Tools", "gdb", "libpython.py") + if os.path.exists(src): + install(src, self.command.path + "-gdb.py") + + @run_after("install") + @on_package_attributes(run_tests=True) + def import_tests(self): + """Test that basic Python functionality works.""" + + spec = self.spec + + with working_dir("spack-test", create=True): + # Ensure that readline module works + if "+readline" in spec: + self.command("-c", "import readline") + + # Ensure that ssl module works + if "+ssl" in spec: + self.command("-c", "import ssl") + self.command("-c", "import hashlib") + + # Ensure that sqlite3 module works + if "+sqlite3" in spec: + self.command("-c", "import sqlite3") + + # Ensure that dbm module works + if "+dbm" in spec: + self.command("-c", "import dbm") + + # Ensure that nis module works + if "+nis" in spec: + self.command("-c", "import nis") + + # Ensure that zlib module works + if "+zlib" in spec: + self.command("-c", "import zlib") + + # Ensure that bz2 module works + if "+bz2" in spec: + self.command("-c", "import bz2") + + # Ensure that lzma module works + if "+lzma" in spec: + 
self.command("-c", "import lzma") + + # Ensure that pyexpat module works + if "+pyexpat" in spec: + self.command("-c", "import xml.parsers.expat") + self.command("-c", "import xml.etree.ElementTree") + + # Ensure that ctypes module works + if "+ctypes" in spec: + self.command("-c", "import ctypes") + + # Ensure that tkinter module works + # https://wiki.python.org/moin/TkInter + if "+tkinter" in spec: + # Only works if ForwardX11Trusted is enabled, i.e. `ssh -Y` + if "DISPLAY" in env: + self.command("-c", "import tkinter; tkinter._test()") + else: + self.command("-c", "import tkinter") + + # Ensure that uuid module works + if "+uuid" in spec: + self.command("-c", "import uuid") + + # Ensure that tix module works + if "+tix" in spec: + self.command("-c", "import tkinter.tix") + + # Ensure that crypt module works + if "+crypt" in spec: + self.command("-c", "import crypt") + + # ======================================================================== + # Set up environment to make install easy for python extensions. + # ======================================================================== + + @property + def command(self): + """Returns the Python command, which may vary depending + on the version of Python and how it was installed. + + In general, Python 3 only comes with a ``python3`` command. However, some + package managers will symlink ``python`` to ``python3``, while others + may contain ``python3.11``, ``python3.10``, and ``python3.9`` in the + same directory. + + Returns: + Executable: the Python command + """ + # We need to be careful here. If the user is using an externally + # installed python, several different commands could be located + # in the same directory. Be as specific as possible. Search for: + # + # * python3.11 + # * python3 + # * python + # + # in that order if using python@3.11.0, for example. 
+ suffixes = [self.spec.version.up_to(2), self.spec.version.up_to(1), ""] + file_extension = "" if sys.platform != "win32" else ".exe" + patterns = [f"python{ver}{file_extension}" for ver in suffixes] + root = self.prefix.bin if sys.platform != "win32" else self.prefix + path = find_first(root, files=patterns) + + if path is not None: + return Executable(path) + + else: + # Give a last try at rhel8 platform python + if self.spec.external and self.prefix == "/usr" and self.spec.satisfies("os=rhel8"): + path = os.path.join(self.prefix, "libexec", "platform-python") + if os.path.exists(path): + return Executable(path) + + raise RuntimeError( + f"cannot to locate the '{self.name}' command in {root} or its subdirectories" + ) + + @property + def config_vars(self): + """Return a set of variable definitions associated with a Python installation. + + Wrapper around various ``sysconfig`` functions. To see these variables on the + command line, run: + + .. code-block:: console + + $ python -m sysconfig + + Returns: + dict: variable definitions + """ + cmd = """ +import json +from sysconfig import ( + get_config_vars, + get_config_h_filename, + get_makefile_filename, + get_paths, +) + +config = get_config_vars() +config['config_h_filename'] = get_config_h_filename() +config['makefile_filename'] = get_makefile_filename() +config.update(get_paths()) + +print(json.dumps(config)) +""" + + dag_hash = self.spec.dag_hash() + lib_prefix = "lib" if sys.platform != "win32" else "" + if dag_hash not in self._config_vars: + # Default config vars + version = self.version.up_to(2) + if sys.platform == "win32": + version = str(version).split(".")[0] + config = { + # get_config_vars + "BINDIR": self.prefix.bin, + "CC": "cc", + "CONFINCLUDEPY": self.prefix.include.join("python{}").format(version), + "CXX": "c++", + "INCLUDEPY": self.prefix.include.join("python{}").format(version), + "LIBDEST": self.prefix.lib.join("python{}").format(version), + "LIBDIR": self.prefix.lib, + "LIBPL": 
self.prefix.lib.join("python{0}") + .join("config-{0}-{1}") + .format(version, sys.platform), + "LDLIBRARY": "{}python{}.{}".format(lib_prefix, version, dso_suffix), + "LIBRARY": "{}python{}.{}".format(lib_prefix, version, stat_suffix), + "LDSHARED": "cc", + "LDCXXSHARED": "c++", + "PYTHONFRAMEWORKPREFIX": "/System/Library/Frameworks", + "base": self.prefix, + "installed_base": self.prefix, + "installed_platbase": self.prefix, + "platbase": self.prefix, + "prefix": self.prefix, + # get_config_h_filename + "config_h_filename": self.prefix.include.join("python{}") + .join("pyconfig.h") + .format(version), + # get_makefile_filename + "makefile_filename": self.prefix.lib.join("python{0}") + .join("config-{0}-{1}") + .Makefile.format(version, sys.platform), + # get_paths + "data": self.prefix, + "include": self.prefix.include.join("python{}".format(version)), + "platinclude": self.prefix.include64.join("python{}".format(version)), + "platlib": self.prefix.lib64.join("python{}".format(version)).join( + "site-packages" + ), + "platstdlib": self.prefix.lib64.join("python{}".format(version)), + "purelib": self.prefix.lib.join("python{}".format(version)).join("site-packages"), + "scripts": self.prefix.bin, + "stdlib": self.prefix.lib.join("python{}".format(version)), + } + + try: + config.update(json.loads(self.command("-c", cmd, output=str))) + except (ProcessError, RuntimeError): + pass + self._config_vars[dag_hash] = config + return self._config_vars[dag_hash] + + def get_sysconfigdata_name(self): + """Return the full path name of the sysconfigdata file.""" + + libdest = self.config_vars["LIBDEST"] + + cmd = "from sysconfig import _get_sysconfigdata_name; " + cmd += "print(_get_sysconfigdata_name())" + filename = self.command("-c", cmd, output=str).strip() + filename += ".py" + + return join_path(libdest, filename) + + @property + def home(self): + """Most of the time, ``PYTHONHOME`` is simply + ``spec['python'].prefix``. 
However, if the user is using an + externally installed python, it may be symlinked. For example, + Homebrew installs python in ``/usr/local/Cellar/python/2.7.12_2`` + and symlinks it to ``/usr/local``. Users may not know the actual + installation directory and add ``/usr/local`` to their + ``packages.yaml`` unknowingly. Query the python executable to + determine exactly where it is installed. + """ + return Prefix(self.config_vars["base"]) + + def find_library(self, library): + # Spack installs libraries into lib, except on openSUSE where it installs them + # into lib64. If the user is using an externally installed package, it may be + # in either lib or lib64, so we need to ask Python where its LIBDIR is. + libdir = self.config_vars["LIBDIR"] + + # Debian and derivatives use a triplet subdir under /usr/lib, LIBPL can be used + # to get the Python library directory + libpldir = self.config_vars["LIBPL"] + + # The system Python installation on macOS and Homebrew installations + # install libraries into a Frameworks directory + frameworkprefix = self.config_vars["PYTHONFRAMEWORKPREFIX"] + + # Get the active Xcode environment's Framework location. 
+ macos_developerdir = os.environ.get("DEVELOPER_DIR") + if macos_developerdir and os.path.exists(macos_developerdir): + macos_developerdir = os.path.join(macos_developerdir, "Library", "Frameworks") + else: + macos_developerdir = "" + + # Windows libraries are installed directly to BINDIR + win_bin_dir = self.config_vars["BINDIR"] + win_root_dir = self.config_vars["prefix"] + + directories = [ + libdir, + libpldir, + frameworkprefix, + macos_developerdir, + win_bin_dir, + win_root_dir, + ] + + if self.spec.satisfies("platform=windows"): + lib_dirs = ["libs"] + else: + # The Python shipped with Xcode command line tools isn't in any of these locations + lib_dirs = ["lib", "lib64"] + + for subdir in lib_dirs: + directories.append(os.path.join(self.config_vars["base"], subdir)) + + directories = dedupe(directories) + for directory in directories: + path = os.path.join(directory, library) + if os.path.exists(path): + return LibraryList(path) + + @property + def libs(self): + py_version = self.version.up_to(2) + if sys.platform == "win32": + py_version = str(py_version).replace(".", "") + lib_prefix = "lib" if sys.platform != "win32" else "" + # The values of LDLIBRARY and LIBRARY aren't reliable. Intel Python uses a + # static binary but installs shared libraries, so sysconfig reports + # libpythonX.Y.a but only libpythonX.Y.so exists. So we add our own paths, too. + + # With framework python on macOS, self.config_vars["LDLIBRARY"] can point + # to a library that is not linkable because it does not have the required + # suffix of a shared library (it is called "Python" without extention). + # The linker then falls back to libPython.tbd in the default macOS + # software tree, which security settings prohibit to link against + # (your binary is not an allowed client of /path/to/libPython.tbd). + # To avoid this, we replace the entry in config_vars with a default value. 
+ file_extension_shared = os.path.splitext(self.config_vars["LDLIBRARY"])[-1] + if file_extension_shared == "": + shared_libs = [] + else: + shared_libs = [self.config_vars["LDLIBRARY"]] + shared_libs += ["{}python{}.{}".format(lib_prefix, py_version, dso_suffix)] + # Like LDLIBRARY for Python on Mac OS, LIBRARY may refer to an un-linkable object + file_extension_static = os.path.splitext(self.config_vars["LIBRARY"])[-1] + if file_extension_static == "": + static_libs = [] + else: + static_libs = [self.config_vars["LIBRARY"]] + static_libs += ["{}python{}.{}".format(lib_prefix, py_version, stat_suffix)] + + # The +shared variant isn't reliable, as `spack external find` currently can't + # detect it. If +shared, prefer the shared libraries, but check for static if + # those aren't found. Vice versa for ~shared. + if self.spec.satisfies("platform=windows"): + # Since we are searching for link libraries, on Windows search only for + # ".Lib" extensions by default as those represent import libraries for implict links. + candidates = static_libs + elif self.spec.satisfies("+shared"): + candidates = shared_libs + static_libs + else: + candidates = static_libs + shared_libs + + for candidate in dedupe(candidates): + lib = self.find_library(candidate) + if lib: + return lib + + raise spack.error.NoLibrariesError( + "Unable to find {} libraries with the following names:\n\n* ".format(self.name) + + "\n* ".join(candidates) + ) + + @property + def headers(self): + # Locations where pyconfig.h could be + # This varies by system, especially on macOS where the command line tools are + # installed in a very different directory from the system python interpreter. 
+ py_version = str(self.version.up_to(2)) + candidates = [ + os.path.dirname(self.config_vars["config_h_filename"]), + self.config_vars["INCLUDEPY"], + self.config_vars["CONFINCLUDEPY"], + os.path.join(self.config_vars["base"], "include", py_version), + os.path.join(self.config_vars["base"], "Headers"), + ] + candidates = list(dedupe(candidates)) + + for directory in candidates: + headers = find_headers("pyconfig", directory) + if headers: + config_h = headers[0] + break + else: + raise spack.error.NoHeadersError( + "Unable to locate {} headers in any of these locations:\n\n* ".format(self.name) + + "\n* ".join(candidates) + ) + + headers.directories = [os.path.dirname(config_h)] + return headers + + # https://docs.python.org/3/library/sysconfig.html#installation-paths + # https://discuss.python.org/t/understanding-site-packages-directories/12959 + # https://github.com/pypa/pip/blob/22.1/src/pip/_internal/locations/__init__.py + # https://github.com/pypa/installer/pull/103 + + # NOTE: XCode Python's sysconfing module was incorrectly patched, and hard-codes + # everything to be installed in /Library/Python. Therefore, we need to use a + # fallback in the following methods. For more information, see: + # https://github.com/pypa/pip/blob/22.1/src/pip/_internal/locations/__init__.py#L486 + + @property + def platlib(self): + """Directory for site-specific, platform-specific files. + + Exact directory depends on platform/OS/Python version. 
Examples include: + + * ``lib/pythonX.Y/site-packages`` on most POSIX systems + * ``lib64/pythonX.Y/site-packages`` on RHEL/CentOS/Fedora with system Python + * ``lib/pythonX/dist-packages`` on Debian/Ubuntu with system Python + * ``lib/python/site-packages`` on macOS with framework Python + * ``Lib/site-packages`` on Windows + + Returns: + str: platform-specific site-packages directory + """ + prefix = self.config_vars["platbase"] + os.sep + path = self.config_vars["platlib"] + if path.startswith(prefix): + return path.replace(prefix, "") + return os.path.join("lib64", f"python{self.version.up_to(2)}", "site-packages") + + @property + def purelib(self): + """Directory for site-specific, non-platform-specific files. + + Exact directory depends on platform/OS/Python version. Examples include: + + * ``lib/pythonX.Y/site-packages`` on most POSIX systems + * ``lib/pythonX/dist-packages`` on Debian/Ubuntu with system Python + * ``lib/python/site-packages`` on macOS with framework Python + * ``Lib/site-packages`` on Windows + + Returns: + str: platform-independent site-packages directory + """ + prefix = self.config_vars["base"] + os.sep + path = self.config_vars["purelib"] + if path.startswith(prefix): + return path.replace(prefix, "") + return os.path.join("lib", f"python{self.version.up_to(2)}", "site-packages") + + @property + def include(self): + """Directory for non-platform-specific header files. + + Exact directory depends on platform/Python version/ABI flags. 
Examples include: + + * ``include/pythonX.Y`` on most POSIX systems + * ``include/pythonX.Yd`` for debug builds + * ``include/pythonX.Ym`` for malloc builds + * ``include/pythonX.Yu`` for wide unicode builds + * ``include`` on macOS with framework Python + * ``Include`` on Windows + + Returns: + str: platform-independent header file directory + """ + prefix = self.config_vars["installed_base"] + os.sep + path = self.config_vars["include"] + if path.startswith(prefix): + return path.replace(prefix, "") + return os.path.join("include", "python{}".format(self.version.up_to(2))) + + def setup_dependent_build_environment(self, env, dependent_spec): + """Set PYTHONPATH to include the site-packages directory for the + extension and any other python extensions it depends on. + """ + # We need to make sure that the extensions are compiled and linked with + # the Spack wrapper. Paths to the executables that are used for these + # operations are normally taken from the sysconfigdata file, which we + # modify after the installation (see method filter compilers). The + # modified file contains paths to the real compilers, not the wrappers. + # The values in the file, however, can be overridden with environment + # variables. The first variable, CC (CXX), which is used for + # compilation, is set by Spack for the dependent package by default. + # That is not 100% correct because the value for CC (CXX) in the + # sysconfigdata file often contains additional compiler flags (e.g. + # -pthread), which we lose by simply setting CC (CXX) to the path to the + # Spack wrapper. Moreover, the user might try to build an extension with + # a compiler that is different from the one that was used to build + # Python itself, which might have unexpected side effects. However, the + # experience shows that none of the above is a real issue and we will + # not try to change the default behaviour. 
Given that, we will simply + # try to modify LDSHARED (LDCXXSHARED), the second variable, which is + # used for linking, in a consistent manner. + + for compile_var, link_var in [("CC", "LDSHARED"), ("CXX", "LDCXXSHARED")]: + # First, we get the values from the sysconfigdata: + config_compile = self.config_vars[compile_var] + config_link = self.config_vars[link_var] + + # The dependent environment will have the compilation command set to + # the following: + new_compile = join_path( + spack.paths.build_env_path, + dependent_spec.package.compiler.link_paths[compile_var.lower()], + ) + + # Normally, the link command starts with the compilation command: + if config_link.startswith(config_compile): + new_link = new_compile + config_link[len(config_compile) :] + else: + # Otherwise, we try to replace the compiler command if it + # appears "in the middle" of the link command; to avoid + # mistaking some substring of a path for the compiler (e.g. to + # avoid replacing "gcc" in "-L/path/to/gcc/"), we require that + # the compiler command be surrounded by spaces. Note this may + # leave "config_link" unchanged if the compilation command does + # not appear in the link command at all, for example if "ld" is + # invoked directly (no change would be required in that case + # because Spack arranges for the Spack ld wrapper to be the + # first instance of "ld" in PATH). + new_link = config_link.replace(f" {config_compile} ", f" {new_compile} ") + + # There is logic in the sysconfig module that is sensitive to the + # fact that LDSHARED is set in the environment, therefore we export + # the variable only if the new value is different from what we got + # from the sysconfigdata file: + if config_link != new_link and sys.platform != "win32": + env.set(link_var, new_link) + + def setup_dependent_run_environment(self, env, dependent_spec): + """Set PYTHONPATH to include the site-packages directory for the + extension and any other python extensions it depends on. 
+ """ + if not dependent_spec.package.extends(self.spec) or dependent_spec.dependencies( + "python-venv" + ): + return + + # Packages may be installed in platform-specific or platform-independent site-packages + # directories + for directory in {self.platlib, self.purelib}: + env.prepend_path("PYTHONPATH", os.path.join(dependent_spec.prefix, directory)) + + def setup_dependent_package(self, module, dependent_spec): + """Called before python modules' install() methods.""" + module.python = self.command + module.python_include = join_path(dependent_spec.prefix, self.include) + module.python_platlib = join_path(dependent_spec.prefix, self.platlib) + module.python_purelib = join_path(dependent_spec.prefix, self.purelib) + + def add_files_to_view(self, view, merge_map, skip_if_exists=True): + """Make the view a virtual environment if it isn't one already. + + If `python-venv` is linked into the view, it will already be a virtual + environment. If not, then this is an older python that doesn't use the + python-venv support, or we may be using python packages that + use ``depends_on("python")`` but not ``extends("python")``. + + We used to copy the python interpreter in, but we can get the same effect in a + simpler way by adding a ``pyvenv.cfg`` to the environment. + + """ + super().add_files_to_view(view, merge_map, skip_if_exists=skip_if_exists) + + # location of python inside the view, where we will put the venv config + projection = view.get_projection_for_spec(self.spec) + pyvenv_cfg = os.path.join(projection, "pyvenv.cfg") + if os.path.lexists(pyvenv_cfg): + return + + # don't put a pyvenv.cfg in a copy view + if view.link_type == "copy": + return + + with open(pyvenv_cfg, "w") as cfg_file: + cfg_file.write(make_pyvenv_cfg(self.spec["python"], projection)) + + def test_hello_world(self): + """run simple hello world program""" + # do not use self.command because we are also testing the run env + python = self.spec["python"].command + + msg = "hello world!" 
+ out = python("-c", f'print("{msg}")', output=str.split, error=str.split) + assert msg in out + + def test_import_executable(self): + """ensure import of installed executable works""" + python = self.spec["python"].command + + out = python("-c", "import sys; print(sys.executable)", output=str.split, error=str.split) + assert self.spec.prefix in out diff --git a/packages/python/python-3.11-distutils-C++.patch b/packages/python/python-3.11-distutils-C++.patch new file mode 100644 index 00000000..335e06b9 --- /dev/null +++ b/packages/python/python-3.11-distutils-C++.patch @@ -0,0 +1,257 @@ +diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py +index aa66c8b9f4..71e6556bac 100644 +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py +index 66c12dd358..dddb9fd2d4 100644 +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -123,8 +123,10 @@ def __init__(self, verbose=0, dry_run=0, force=0): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + 
self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -138,9 +140,13 @@ def __init__(self, verbose=0, dry_run=0, force=0): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -164,8 +170,12 @@ def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -300,9 +310,14 @@ def __init__(self, verbose=0, dry_run=0, force=0): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 
3414a761e7..f1af560cc1 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -216,9 +216,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -233,19 +235,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: +- cflags = cflags + ' ' + os.environ['CFLAGS'] ++ cflags = os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -254,13 +264,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + 
compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index d00c48981e..4a3d271fee 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). +- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ def preprocess(self, source, output_file=None, macros=None, + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, 
'-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,30 +183,16 @@ def link(self, target_desc, objects, + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- +- if os.path.basename(linker[i]) == 'ld_so_aix': +- # AIX platforms prefix the compiler with the ld_so_aix +- # script, so we need to adjust our linker index +- offset = 1 ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] + else: +- offset = 0 +- +- linker[i+offset] = self.compiler_cxx[i] ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index f803391346..090f14c46c 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -732,9 +732,9 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt @LIBMPDEC_INTERNAL@ @LIBEXPAT_INTERNAL + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) 
CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + + diff --git a/packages/python/python-3.7.2-distutils-C++.patch b/packages/python/python-3.7.2-distutils-C++.patch new file mode 100644 index 00000000..5728fad6 --- /dev/null +++ b/packages/python/python-3.7.2-distutils-C++.patch @@ -0,0 +1,241 @@ +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ __all__ = [ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll 
-O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -170,9 +170,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 
'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -187,19 +189,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: +- cflags = opt + ' ' + os.environ['CFLAGS'] ++ cflags = os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -208,13 +218,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. 
using information discovered by the sysconfig about building + # Python extensions). +- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler): + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,22 +183,16 @@ class UnixCCompiler(CCompiler): + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set 
up the linker's environment. +- # This is needed on OSX. Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- linker[i] = self.compiler_cxx[i] ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] ++ else: ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -584,10 +584,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + diff --git a/packages/python/python-3.7.3-distutils-C++.patch b/packages/python/python-3.7.3-distutils-C++.patch new file mode 100644 index 00000000..e29323bf --- /dev/null +++ b/packages/python/python-3.7.3-distutils-C++.patch @@ -0,0 +1,256 @@ +diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py +index db6674e..ccbe09a 100644 +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ __all__ = [ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS') 
++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py +index 6c5d777..640fa2d 100644 +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', obj] + ++ 
extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 83160f8..b735369 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -183,9 +183,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, opt, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ ++ (cc, cxx, opt, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ + get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -200,19 +202,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in os.environ: + cflags = opt + ' ' + 
os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = opt + ' ' + os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -221,13 +231,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index d10a78d..7e88781 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
+- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler): + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,30 +183,16 @@ class UnixCCompiler(CCompiler): + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. 
Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- +- if os.path.basename(linker[i]) == 'ld_so_aix': +- # AIX platforms prefix the compiler with the ld_so_aix +- # script, so we need to adjust our linker index +- offset = 1 ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] + else: +- offset = 0 +- +- linker[i+offset] = self.compiler_cxx[i] ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 2d2e11f..8456e3f 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -615,10 +615,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + diff --git a/packages/python/python-3.7.4+-distutils-C++-testsuite.patch b/packages/python/python-3.7.4+-distutils-C++-testsuite.patch new file mode 100644 index 00000000..99361087 --- /dev/null +++ b/packages/python/python-3.7.4+-distutils-C++-testsuite.patch @@ -0,0 +1,138 @@ +This patch updates the distutils test suite for: +var/spack/repos/builtin/packages/python/python-3.7.4+-distutils-C++.patch + +That patch fixes several shortcomings in the distutils C++ support, +most prominently missing support for passing CXXFLAGS from the environment. 
+
+Since it does not update the distutils testsuite, it causes the testsuite
+to fail, which this patch updates to pass.
+
+-----------------------------------------------------------------------------
+Spack changelog
+- Added patch header to aid understanding the patch and maintenance
+- Updated the distutils testsuite in Lib/distutils/tests/test_sysconfig.py
+
+-----------------------------------------------------------------------------
+Upstream status
+
+Upstream bug: https://bugs.python.org/issue1222585
+
+Status: Closed, won't fix, comment by Eric Araujo, Python Core Dev:
+"setuptools and other active build tools are the better target for this feature."
+https://bugs.python.org/issue1222585#msg379348
+
+But according to the last-but-one comment, setuptools seems to be lacking there too.
+https://bugs.python.org/issue1222585#msg371840
+
+-----------------------------------------------------------------------------
+Description
+
+distutils has no support for CXXFLAGS, this patch adds it.
+
+Upstream distutils requires to pass all CXXFLAGS (C++-specific CFLAGS)
+as part of the CXX environment variable instead.
+
+This patch:
+- adds CXXFLAGS support
+- adds LDCXXSHARED like LDSHARED
+- passes cflags to CXX like it passes them to CC.
+
+The distutils testsuite is updated accordingly to pass the tests.
+Since it passes, it is the authoritative info of the current effects of this patch.
+
+See the update of the distutils testsuite in Lib/distutils/tests/test_sysconfig.py
+below for details on the effect of this patch.
+ +diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py +index db6674e..ccbe09a 100644 +--- a/Lib/distutils/tests/test_sysconfig.py ++++ b/Lib/distutils/tests/test_sysconfig.py +@@ -89,8 +89,10 @@ + 'CXX': 'sc_cxx', + 'ARFLAGS': '--sc-arflags', + 'CFLAGS': '--sc-cflags', ++ 'CXXFLAGS': '--sc-cxxflags', + 'CCSHARED': '--sc-ccshared', + 'LDSHARED': 'sc_ldshared', ++ 'LDCXXSHARED': 'sc_ldshared_cxx', + 'SHLIB_SUFFIX': 'sc_shutil_suffix', + + # On macOS, disable _osx_support.customize_compiler() +@@ -114,11 +116,13 @@ + os.environ['AR'] = 'env_ar' + os.environ['CC'] = 'env_cc' + os.environ['CPP'] = 'env_cpp' +- os.environ['CXX'] = 'env_cxx --env-cxx-flags' ++ os.environ['CXX'] = 'env_cxx' + os.environ['LDSHARED'] = 'env_ldshared' ++ os.environ['LDCXXSHARED'] = 'env_ldshared_cxx' + os.environ['LDFLAGS'] = '--env-ldflags' + os.environ['ARFLAGS'] = '--env-arflags' + os.environ['CFLAGS'] = '--env-cflags' ++ os.environ['CXXFLAGS'] = '--env-cxxflags' + os.environ['CPPFLAGS'] = '--env-cppflags' + + comp = self.customize_compiler() +@@ -128,16 +132,24 @@ + 'env_cpp --env-cppflags') + self.assertEqual(comp.exes['compiler'], + 'env_cc --sc-cflags --env-cflags --env-cppflags') ++ self.assertEqual(comp.exes['compiler_cxx'], ++ 'env_cxx --sc-cflags --env-cxxflags --env-cppflags') + self.assertEqual(comp.exes['compiler_so'], + ('env_cc --sc-cflags ' + '--env-cflags ''--env-cppflags --sc-ccshared')) +- self.assertEqual(comp.exes['compiler_cxx'], +- 'env_cxx --env-cxx-flags') ++ self.assertEqual(comp.exes['compiler_so_cxx'], ++ ('env_cxx --sc-cflags ' ++ '--env-cxxflags ''--env-cppflags --sc-ccshared')) + self.assertEqual(comp.exes['linker_exe'], + 'env_cc') ++ self.assertEqual(comp.exes['linker_exe_cxx'], ++ 'env_cxx') + self.assertEqual(comp.exes['linker_so'], + ('env_ldshared --env-ldflags --env-cflags' + ' --env-cppflags')) ++ self.assertEqual(comp.exes['linker_so_cxx'], ++ ('env_ldshared_cxx --env-ldflags --env-cxxflags' ++ ' 
--env-cppflags')) + self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') + + del os.environ['AR'] +@@ -145,9 +157,11 @@ + del os.environ['CPP'] + del os.environ['CXX'] + del os.environ['LDSHARED'] ++ del os.environ['LDCXXSHARED'] + del os.environ['LDFLAGS'] + del os.environ['ARFLAGS'] + del os.environ['CFLAGS'] ++ del os.environ['CXXFLAGS'] + del os.environ['CPPFLAGS'] + + comp = self.customize_compiler() +@@ -157,14 +171,21 @@ + 'sc_cc -E') + self.assertEqual(comp.exes['compiler'], + 'sc_cc --sc-cflags') ++ # TODO: Likely this sould get --sc-cxxflags instead: ++ self.assertEqual(comp.exes['compiler_cxx'], ++ 'sc_cxx --sc-cflags') + self.assertEqual(comp.exes['compiler_so'], + 'sc_cc --sc-cflags --sc-ccshared') +- self.assertEqual(comp.exes['compiler_cxx'], +- 'sc_cxx') ++ self.assertEqual(comp.exes['compiler_so_cxx'], ++ 'sc_cxx --sc-cflags --sc-ccshared') + self.assertEqual(comp.exes['linker_exe'], + 'sc_cc') ++ self.assertEqual(comp.exes['linker_exe_cxx'], ++ 'sc_cxx') + self.assertEqual(comp.exes['linker_so'], + 'sc_ldshared') ++ self.assertEqual(comp.exes['linker_so_cxx'], ++ 'sc_ldshared_cxx') + self.assertEqual(comp.shared_lib_extension, 'sc_shutil_suffix') + + def test_parse_makefile_base(self): diff --git a/packages/python/python-3.7.4+-distutils-C++.patch b/packages/python/python-3.7.4+-distutils-C++.patch new file mode 100644 index 00000000..02daf0a1 --- /dev/null +++ b/packages/python/python-3.7.4+-distutils-C++.patch @@ -0,0 +1,257 @@ +diff --git a/Lib/_osx_support.py b/Lib/_osx_support.py +index db6674e..ccbe09a 100644 +--- a/Lib/_osx_support.py ++++ b/Lib/_osx_support.py +@@ -14,13 +14,13 @@ __all__ = [ + # configuration variables that may contain universal build flags, + # like "-arch" or "-isdkroot", that may need customization for + # the user environment +-_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS', 'BASECFLAGS', +- 'BLDSHARED', 'LDSHARED', 'CC', 'CXX', +- 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS', +- 'PY_CORE_CFLAGS', 
'PY_CORE_LDFLAGS') ++_UNIVERSAL_CONFIG_VARS = ('CFLAGS', 'CXXFLAGS', 'LDFLAGS', 'CPPFLAGS', ++ 'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'CC', 'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', ++ 'PY_CPPFLAGS', 'PY_CORE_LDFLAGS', 'PY_CORE_CFLAGS') + + # configuration variables that may contain compiler calls +-_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'CC', 'CXX') ++_COMPILER_CONFIG_VARS = ('BLDSHARED', 'LDSHARED', 'LDCXXSHARED', 'CC', 'CXX') + + # prefix added to original configuration variable names + _INITPRE = '_OSX_SUPPORT_INITIAL_' +diff --git a/Lib/distutils/cygwinccompiler.py b/Lib/distutils/cygwinccompiler.py +index 6c5d777..640fa2d 100644 +--- a/Lib/distutils/cygwinccompiler.py ++++ b/Lib/distutils/cygwinccompiler.py +@@ -125,8 +125,10 @@ class CygwinCCompiler(UnixCCompiler): + # dllwrap 2.10.90 is buggy + if self.ld_version >= "2.10.90": + self.linker_dll = "gcc" ++ self.linker_dll_cxx = "g++" + else: + self.linker_dll = "dllwrap" ++ self.linker_dll_cxx = "dllwrap" + + # ld_version >= "2.13" support -shared so use it instead of + # -mdll -static +@@ -140,9 +142,13 @@ class CygwinCCompiler(UnixCCompiler): + self.set_executables(compiler='gcc -mcygwin -O -Wall', + compiler_so='gcc -mcygwin -mdll -O -Wall', + compiler_cxx='g++ -mcygwin -O -Wall', ++ compiler_so_cxx='g++ -mcygwin -mdll -O -Wall', + linker_exe='gcc -mcygwin', + linker_so=('%s -mcygwin %s' % +- (self.linker_dll, shared_option))) ++ (self.linker_dll, shared_option)), ++ linker_exe_cxx='g++ -mcygwin', ++ linker_so_cxx=('%s -mcygwin %s' % ++ (self.linker_dll_cxx, shared_option))) + + # cygwin and mingw32 need different sets of libraries + if self.gcc_version == "2.91.57": +@@ -166,8 +172,12 @@ class CygwinCCompiler(UnixCCompiler): + raise CompileError(msg) + else: # for other files use the C-compiler + try: +- self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(self.compiler_so_cxx + cc_args + [src, '-o', 
obj] + ++ extra_postargs) ++ else: ++ self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -302,9 +312,14 @@ class Mingw32CCompiler(CygwinCCompiler): + self.set_executables(compiler='gcc -O -Wall', + compiler_so='gcc -mdll -O -Wall', + compiler_cxx='g++ -O -Wall', ++ compiler_so_cxx='g++ -mdll -O -Wall', + linker_exe='gcc', + linker_so='%s %s %s' + % (self.linker_dll, shared_option, ++ entry_point), ++ linker_exe_cxx='g++', ++ linker_so_cxx='%s %s %s' ++ % (self.linker_dll_cxx, shared_option, + entry_point)) + # Maybe we should also append -mthreads, but then the finished + # dlls need another dll (mingwm10.dll see Mingw32 docs) +diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py +index 0a034ee..ecf4759 100644 +--- a/Lib/distutils/sysconfig.py ++++ b/Lib/distutils/sysconfig.py +@@ -188,9 +188,11 @@ def customize_compiler(compiler): + _osx_support.customize_compiler(_config_vars) + _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +- (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \ +- get_config_vars('CC', 'CXX', 'CFLAGS', +- 'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ (cc, cxx, cflags, ccshared, ldshared, ldcxxshared, shlib_suffix, ar, ar_flags) = \ ++ get_config_vars('CC', 'CXX', 'CFLAGS', 'CCSHARED', 'LDSHARED', 'LDCXXSHARED', ++ 'SHLIB_SUFFIX', 'AR', 'ARFLAGS') ++ ++ cxxflags = cflags + + if 'CC' in os.environ: + newcc = os.environ['CC'] +@@ -205,19 +207,27 @@ def customize_compiler(compiler): + cxx = os.environ['CXX'] + if 'LDSHARED' in os.environ: + ldshared = os.environ['LDSHARED'] ++ if 'LDCXXSHARED' in os.environ: ++ ldcxxshared = os.environ['LDCXXSHARED'] + if 'CPP' in os.environ: + cpp = os.environ['CPP'] + else: + cpp = cc + " -E" # not always + if 'LDFLAGS' in os.environ: + ldshared = ldshared + ' ' + os.environ['LDFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['LDFLAGS'] + if 'CFLAGS' in 
os.environ: + cflags = cflags + ' ' + os.environ['CFLAGS'] + ldshared = ldshared + ' ' + os.environ['CFLAGS'] ++ if 'CXXFLAGS' in os.environ: ++ cxxflags = cxxflags + ' ' + os.environ['CXXFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CXXFLAGS'] + if 'CPPFLAGS' in os.environ: + cpp = cpp + ' ' + os.environ['CPPFLAGS'] + cflags = cflags + ' ' + os.environ['CPPFLAGS'] ++ cxxflags = cxxflags + ' ' + os.environ['CPPFLAGS'] + ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] ++ ldcxxshared = ldcxxshared + ' ' + os.environ['CPPFLAGS'] + if 'AR' in os.environ: + ar = os.environ['AR'] + if 'ARFLAGS' in os.environ: +@@ -226,13 +236,17 @@ def customize_compiler(compiler): + archiver = ar + ' ' + ar_flags + + cc_cmd = cc + ' ' + cflags ++ cxx_cmd = cxx + ' ' + cxxflags + compiler.set_executables( + preprocessor=cpp, + compiler=cc_cmd, + compiler_so=cc_cmd + ' ' + ccshared, +- compiler_cxx=cxx, ++ compiler_cxx=cxx_cmd, ++ compiler_so_cxx=cxx_cmd + ' ' + ccshared, + linker_so=ldshared, + linker_exe=cc, ++ linker_so_cxx=ldcxxshared, ++ linker_exe_cxx=cxx, + archiver=archiver) + + compiler.shared_lib_extension = shlib_suffix +diff --git a/Lib/distutils/unixccompiler.py b/Lib/distutils/unixccompiler.py +index d10a78d..7e88781 100644 +--- a/Lib/distutils/unixccompiler.py ++++ b/Lib/distutils/unixccompiler.py +@@ -52,14 +52,17 @@ class UnixCCompiler(CCompiler): + # are pretty generic; they will probably have to be set by an outsider + # (eg. using information discovered by the sysconfig about building + # Python extensions). 
+- executables = {'preprocessor' : None, +- 'compiler' : ["cc"], +- 'compiler_so' : ["cc"], +- 'compiler_cxx' : ["cc"], +- 'linker_so' : ["cc", "-shared"], +- 'linker_exe' : ["cc"], +- 'archiver' : ["ar", "-cr"], +- 'ranlib' : None, ++ executables = {'preprocessor' : None, ++ 'compiler' : ["cc"], ++ 'compiler_so' : ["cc"], ++ 'compiler_cxx' : ["c++"], ++ 'compiler_so_cxx' : ["c++"], ++ 'linker_so' : ["cc", "-shared"], ++ 'linker_exe' : ["cc"], ++ 'linker_so_cxx' : ["c++", "-shared"], ++ 'linker_exe_cxx' : ["c++"], ++ 'archiver' : ["ar", "-cr"], ++ 'ranlib' : None, + } + + if sys.platform[:6] == "darwin": +@@ -110,12 +113,19 @@ class UnixCCompiler(CCompiler): + + def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): + compiler_so = self.compiler_so ++ compiler_so_cxx = self.compiler_so_cxx + if sys.platform == 'darwin': + compiler_so = _osx_support.compiler_fixup(compiler_so, + cc_args + extra_postargs) ++ compiler_so_cxx = _osx_support.compiler_fixup(compiler_so_cxx, ++ cc_args + extra_postargs) + try: +- self.spawn(compiler_so + cc_args + [src, '-o', obj] + +- extra_postargs) ++ if self.detect_language(src) == 'c++': ++ self.spawn(compiler_so_cxx + cc_args + [src, '-o', obj] + ++ extra_postargs) ++ else: ++ self.spawn(compiler_so + cc_args + [src, '-o', obj] + ++ extra_postargs) + except DistutilsExecError as msg: + raise CompileError(msg) + +@@ -173,30 +183,16 @@ class UnixCCompiler(CCompiler): + ld_args.extend(extra_postargs) + self.mkpath(os.path.dirname(output_filename)) + try: +- if target_desc == CCompiler.EXECUTABLE: +- linker = self.linker_exe[:] ++ if target_lang == "c++": ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe_cxx[:] ++ else: ++ linker = self.linker_so_cxx[:] + else: +- linker = self.linker_so[:] +- if target_lang == "c++" and self.compiler_cxx: +- # skip over environment variable settings if /usr/bin/env +- # is used to set up the linker's environment. +- # This is needed on OSX. 
Note: this assumes that the +- # normal and C++ compiler have the same environment +- # settings. +- i = 0 +- if os.path.basename(linker[0]) == "env": +- i = 1 +- while '=' in linker[i]: +- i += 1 +- +- if os.path.basename(linker[i]) == 'ld_so_aix': +- # AIX platforms prefix the compiler with the ld_so_aix +- # script, so we need to adjust our linker index +- offset = 1 ++ if target_desc == CCompiler.EXECUTABLE: ++ linker = self.linker_exe[:] + else: +- offset = 0 +- +- linker[i+offset] = self.compiler_cxx[i] ++ linker = self.linker_so[:] + + if sys.platform == 'darwin': + linker = _osx_support.compiler_fixup(linker, ld_args) +diff --git a/Makefile.pre.in b/Makefile.pre.in +index 35ca1a8..cfa79df 100644 +--- a/Makefile.pre.in ++++ b/Makefile.pre.in +@@ -618,10 +618,10 @@ sharedmods: $(BUILDPYTHON) pybuilddir.txt Modules/_math.o + *\ -s*|s*) quiet="-q";; \ + *) quiet="";; \ + esac; \ +- echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ echo "$(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build"; \ +- $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' OPT='$(OPT)' \ ++ $(RUNSHARED) CC='$(CC)' LDSHARED='$(BLDSHARED)' CFLAGS='$(PY_CFLAGS)' \ + _TCLTK_INCLUDES='$(TCLTK_INCLUDES)' _TCLTK_LIBS='$(TCLTK_LIBS)' \ + $(PYTHON_FOR_BUILD) $(srcdir)/setup.py $$quiet build + diff --git a/packages/python/rpath-non-gcc.patch b/packages/python/rpath-non-gcc.patch new file mode 100644 index 00000000..f203bbba --- /dev/null +++ b/packages/python/rpath-non-gcc.patch @@ -0,0 +1,15 @@ +--- a/Lib/distutils/unixccompiler.py 2009-05-09 21:55:12.000000000 +1000 ++++ b/Lib/distutils/unixccompiler.py 2017-05-13 14:30:18.077518999 +1000 +@@ -299,10 +299,8 @@ + else: + return "-Wl,-R" + dir + else: +- # No idea how --enable-new-dtags would be passed on to +- # ld if this system was using GNU ld. 
Don't know if a +- # system like this even exists. +- return "-R" + dir ++ # Patched by spack to use gcc syntax by default: ++ return "-Wl,-R" + dir + + def library_option(self, lib): + return "-l" + lib diff --git a/packages/python/tkinter-3.10.patch b/packages/python/tkinter-3.10.patch new file mode 100644 index 00000000..e06be826 --- /dev/null +++ b/packages/python/tkinter-3.10.patch @@ -0,0 +1,11 @@ +--- a/setup.py 2021-12-06 12:23:39.000000000 -0600 ++++ b/setup.py 2021-12-14 10:30:33.000000000 -0600 +@@ -2099,6 +2099,8 @@ + # + # Detection stops at the first successful method. + ++ return False ++ + # Check for Tcl and Tk at the locations indicated by _TCLTK_INCLUDES + # and _TCLTK_LIBS environment variables. + if self.detect_tkinter_fromenv(): diff --git a/packages/python/tkinter-3.11.patch b/packages/python/tkinter-3.11.patch new file mode 100644 index 00000000..fe2d54bd --- /dev/null +++ b/packages/python/tkinter-3.11.patch @@ -0,0 +1,25 @@ +From a49e95e44961a0b6703ef9cb577d2ae5334c4a62 Mon Sep 17 00:00:00 2001 +From: Harmen Stoppels <harmenstoppels@gmail.com> +Date: Thu, 3 Nov 2022 13:54:00 +0100 +Subject: [PATCH] disable tkinter explicitly + +--- + setup.py | 2 +- + 1 file changed, 1 insertion(+), 1 deletion(-) + +diff --git a/setup.py b/setup.py +index 15d0d45..642adb3 100644 +--- a/setup.py ++++ b/setup.py +@@ -1358,7 +1358,7 @@ class PyBuildExt(build_ext): + self.detect_decimal() + self.detect_ctypes() + self.detect_multiprocessing() +- self.detect_tkinter() ++ # self.detect_tkinter() + self.detect_uuid() + + # Uncomment the next line if you want to play with xxmodule.c +-- +2.38.1 + diff --git a/packages/python/tkinter-3.7.patch b/packages/python/tkinter-3.7.patch new file mode 100644 index 00000000..87e19018 --- /dev/null +++ b/packages/python/tkinter-3.7.patch @@ -0,0 +1,17 @@ +diff -Naur a/setup.py b/setup.py +--- a/setup.py 2019-01-13 18:59:14.000000000 -0600 ++++ b/setup.py 2019-01-13 19:00:31.000000000 -0600 +@@ -1787,13 +1787,6 @@ + if 
self.detect_tkinter_explicitly(): + return + +- # Rather than complicate the code below, detecting and building +- # AquaTk is a separate method. Only one Tkinter will be built on +- # Darwin - either AquaTk, if it is found, or X11 based Tk. +- if (host_platform == 'darwin' and +- self.detect_tkinter_darwin(inc_dirs, lib_dirs)): +- return +- + # Assume we haven't found any of the libraries or include files + # The versions with dots are used on Unix, and the versions without + # dots on Windows, for detection by cygwin. diff --git a/packages/python/tkinter-3.8.patch b/packages/python/tkinter-3.8.patch new file mode 100644 index 00000000..a1fc5729 --- /dev/null +++ b/packages/python/tkinter-3.8.patch @@ -0,0 +1,12 @@ +diff -Naur a/setup.py b/setup.py +--- a/setup.py.orig 2021-09-29 21:28:23.000000000 -0400 ++++ a/setup.py 2021-09-29 21:28:44.000000000 -0400 +@@ -1826,6 +1826,8 @@ + def detect_tkinter(self): + # The _tkinter module. + ++ return False ++ + # Check whether --with-tcltk-includes and --with-tcltk-libs were + # configured or passed into the make target. 
If so, use these values + # to build tkinter and bypass the searches for Tcl and TK in standard -- GitLab From ba0d0c4ed2b905dd2167150e105301790e011948 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 20:30:33 +0000 Subject: [PATCH 087/111] fix(python): fix lib(n)curses detection from https://github.com/spack/spack/pull/35092 fixes https://github.com/spack/spack/issues/34872 --- packages/python/curses.patch | 13 +++++++++++++ packages/python/package.py | 6 ++++++ 2 files changed, 19 insertions(+) create mode 100644 packages/python/curses.patch diff --git a/packages/python/curses.patch b/packages/python/curses.patch new file mode 100644 index 00000000..b83ec60e --- /dev/null +++ b/packages/python/curses.patch @@ -0,0 +1,13 @@ +diff --git a/setup.py b/setup.py +index 85a2b26357..8c83b9f175 100644 +--- a/setup.py ++++ b/setup.py +@@ -1088,7 +1088,7 @@ def detect_readline_curses(self): + if ret == 0: + with open(tmpfile) as fp: + for ln in fp: +- if 'curses' in ln: ++ if 'libcurses' in ln or 'libncurses' in ln: + readline_termcap_library = re.sub( + r'.*lib(n?cursesw?)\.so.*', r'\1', ln + ).rstrip() diff --git a/packages/python/package.py b/packages/python/package.py index 3bfdc240..85ba69c0 100644 --- a/packages/python/package.py +++ b/packages/python/package.py @@ -333,6 +333,12 @@ class Python(Package): # https://github.com/python/cpython/pull/16717 patch("intel-3.7.patch", when="@3.7.1:3.7.5 %intel") + # begin EBRAINS (added) + # Fix curses/readline detection logic to not be triggered by path name + # https://github.com/spack/spack/issues/34872 + patch("curses.patch", when="@:3.11") + # end EBRAINS + # CPython tries to build an Objective-C file with GCC's C frontend # https://github.com/spack/spack/pull/16222 # https://github.com/python/cpython/pull/13306 -- GitLab From a13d18ae9953349b0e2d303f3cc4b595882828fc Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 22:47:19 
+0000 Subject: [PATCH 088/111] feat(python): add version 3.11.10 --- packages/python/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/python/package.py b/packages/python/package.py index 85ba69c0..6b9add7e 100644 --- a/packages/python/package.py +++ b/packages/python/package.py @@ -66,6 +66,9 @@ class Python(Package): version("3.12.2", sha256="a7c4f6a9dc423d8c328003254ab0c9338b83037bd787d680826a5bf84308116e") version("3.12.1", sha256="d01ec6a33bc10009b09c17da95cc2759af5a580a7316b3a446eb4190e13f97b2") version("3.12.0", sha256="51412956d24a1ef7c97f1cb5f70e185c13e3de1f50d131c0aac6338080687afb") + # begin EBRAINS (added): add version + version("3.11.10", sha256="92f2faf242681bfa406d53a51e17d42c5373affe23a130cd9697e132ef574706") + # end EBRAINS version("3.11.9", sha256="e7de3240a8bc2b1e1ba5c81bf943f06861ff494b69fda990ce2722a504c6153d") version("3.11.8", sha256="d3019a613b9e8761d260d9ebe3bd4df63976de30464e5c0189566e1ae3f61889") version("3.11.7", sha256="068c05f82262e57641bd93458dfa883128858f5f4997aad7a36fd25b13b29209") -- GitLab From 94dbb0dcb4c690738af79ea0ff53fe1e4b50b224 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 4 Mar 2025 23:21:35 +0000 Subject: [PATCH 089/111] feat(ebrainslab): make gcc and python versions more specific --- site-config/ebrainslab/packages.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/site-config/ebrainslab/packages.yaml b/site-config/ebrainslab/packages.yaml index 44e6777d..90978f93 100644 --- a/site-config/ebrainslab/packages.yaml +++ b/site-config/ebrainslab/packages.yaml @@ -2,10 +2,10 @@ packages: all: # collab-specific settings target: [x86_64] - compiler: [gcc@13] + compiler: [gcc@13.3.0] python: # collab-specific settings - require: "@3.11" + require: "@3.11.10" r: # EM: +X fixes build for collab require: "@4.3.3+X" -- GitLab From b59780fbd3c05d8088633fce6fcd8186dca1b7f1 Mon Sep 17 00:00:00 2001 From: Abolfazl Ziaeemehr 
<abolfazl.ziaee-mehr@univ-amu.fr> Date: Fri, 7 Mar 2025 18:20:13 +0100 Subject: [PATCH 090/111] fix(py-vbi): add version 0.1.3 --- packages/py-vbi/fix-install.patch | 12 ------------ packages/py-vbi/package.py | 7 ++++--- 2 files changed, 4 insertions(+), 15 deletions(-) delete mode 100644 packages/py-vbi/fix-install.patch diff --git a/packages/py-vbi/fix-install.patch b/packages/py-vbi/fix-install.patch deleted file mode 100644 index d84cceb0..00000000 --- a/packages/py-vbi/fix-install.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/pyproject.toml b/pyproject.toml -index debc24e..2e0500b 100644 ---- a/pyproject.toml -+++ b/pyproject.toml -@@ -58,7 +58,6 @@ path = "vbi/_version.py" - - - [tool.setuptools] --packages = ["vbi"] - include-package-data = true - - [tool.setuptools.package-data] diff --git a/packages/py-vbi/package.py b/packages/py-vbi/package.py index 138893c5..0ac988a2 100644 --- a/packages/py-vbi/package.py +++ b/packages/py-vbi/package.py @@ -6,16 +6,16 @@ from spack import * -class PyVbi(PythonPackage): +class PyVbi(PythonPackage, CudaPackage): homepage = "https://vbi.readthedocs.io/latest/" git = "https://github.com/ins-amu/vbi" url = "https://github.com/ins-amu/vbi/archive/refs/tags/v0.1.3.tar.gz" - version("0.1.3", "54fa2062f44c9ec8219fae3c13c52a4bd17141b5467b982987673de0662c5255") + version("0.1.3", "8ccccf2bf0def2bf97f4706b8597c4cb3ac5f0cf2ac5f08566e22cd6273c1163") version("0.1.2", "6ccfeeec718be62a480002a8370130a3e3344955186f99ecbb15b646b68210d6") + - patch('fix-install.patch') depends_on("python@3.8:", type=("build","run")) depends_on("py-setuptools", type="build") @@ -37,6 +37,7 @@ class PyVbi(PythonPackage): depends_on("py-scikit-learn", type=("build", "run")) depends_on("py-pycatch22", type=("build", "run")) depends_on("py-pytest", type="test") + depends_on("py-cupy", type=("build", "run"), when="+cuda") @run_after("install") @on_package_attributes(run_tests=True) -- GitLab From 431ef1ea26ed436e64947fec7c4e882d0501733e Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Tue, 18 Feb 2025 19:31:45 +0100 Subject: [PATCH 091/111] fix(BSS2): build %gcc@13.3 --- packages/build-brainscales/package.py | 8 ++++++++ packages/hxtorch/package.py | 3 +++ packages/oppulance/package.py | 7 +++++++ packages/pynn-brainscales/package.py | 3 +++ packages/wf-brainscales2-demos/package.py | 1 + spack.yaml | 8 ++++---- 6 files changed, 26 insertions(+), 4 deletions(-) diff --git a/packages/build-brainscales/package.py b/packages/build-brainscales/package.py index 6ea4a370..cec40a9c 100644 --- a/packages/build-brainscales/package.py +++ b/packages/build-brainscales/package.py @@ -15,6 +15,13 @@ import spack.build_environment class BuildBrainscales(WafPackage): """Common stuff for BrainScaleS packages...""" + version( + "9.0-a9", + git="https://github.com/electronicvisions/releases-ebrains", + tag="ebrains-9.0-a9", + commit="5951428b7598ff9478fa834d7e991e3ff94709ee", + submodules=True, + ) version( "9.0-a8", git="https://github.com/electronicvisions/releases-ebrains", @@ -52,6 +59,7 @@ class BuildBrainscales(WafPackage): ) # common dependencies of BuildBrainscales-derived packages + depends_on('oppulance@9.0-a9', when='@9.0-a9', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a8', when='@9.0-a8', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a7', when='@9.0-a7', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a6', when='@9.0-a6', type=('build', 'link', 'run', 'test')) diff --git a/packages/hxtorch/package.py b/packages/hxtorch/package.py index 437a6886..ef237ceb 100644 --- a/packages/hxtorch/package.py +++ b/packages/hxtorch/package.py @@ -90,6 +90,9 @@ class Hxtorch(build_brainscales.BuildBrainscales): extends('python') + # some versions of dependencies are broken + conflicts("boost@1.86.0") # sha1 digest changed length, but boost::compute didn't adapt + patch("include-SparseTensorUtils.patch", when="@:8.0-a5") 
def install_test(self): diff --git a/packages/oppulance/package.py b/packages/oppulance/package.py index 2249a8b4..55d6b04e 100644 --- a/packages/oppulance/package.py +++ b/packages/oppulance/package.py @@ -21,6 +21,13 @@ class Oppulance(Package): depends_on('wget') depends_on('gmp') + version( + "9.0-a9", + git="https://github.com/electronicvisions/releases-ebrains", + tag="ebrains-9.0-a9", + commit="5951428b7598ff9478fa834d7e991e3ff94709ee", + submodules=True, + ) version( "9.0-a8", git="https://github.com/electronicvisions/releases-ebrains", diff --git a/packages/pynn-brainscales/package.py b/packages/pynn-brainscales/package.py index 13d850f7..3e68558d 100644 --- a/packages/pynn-brainscales/package.py +++ b/packages/pynn-brainscales/package.py @@ -82,6 +82,9 @@ class PynnBrainscales(build_brainscales.BuildBrainscales): depends_on('yaml-cpp+shared', type=('build', 'link', 'run')) extends('python') + # some versions of dependencies are broken + conflicts("boost@1.86.0") # sha1 digest changed length, but boost::compute didn't adapt + def install_test(self): with working_dir('spack-test', create=True): old_pythonpath = os.environ.get('PYTHONPATH', '') diff --git a/packages/wf-brainscales2-demos/package.py b/packages/wf-brainscales2-demos/package.py index 9226c76a..1dc5b24b 100644 --- a/packages/wf-brainscales2-demos/package.py +++ b/packages/wf-brainscales2-demos/package.py @@ -16,6 +16,7 @@ class WfBrainscales2Demos(Package): maintainers = ["emuller", "muffgaga"] # ECM: we probably should build the ipynb file in this package + version("9.0-a9", tag="jupyter-notebooks-9.0-a9") version("9.0-a8", tag="jupyter-notebooks-9.0-a8") version("9.0-a7", tag="jupyter-notebooks-9.0-a7") version("9.0-a6", tag="jupyter-notebooks-9.0-a6") diff --git a/spack.yaml b/spack.yaml index 274a55d1..c6d455fd 100644 --- a/spack.yaml +++ b/spack.yaml @@ -12,10 +12,10 @@ spack: - py-biobb-model@4.1.0 - py-biobb-structure-checking@3.13.4 - py-biobb-structure-utils@4.1.0 - - hxtorch@9.0-a8 + - 
hxtorch@9.0-a9 - nest@3.8 +sonata - neuron@8.2.3 +mpi - - jaxsnn@9.0-a8 + - jaxsnn@9.0-a9 - py-bluepyefe@2.3.6 - py-bluepymm@0.8.7 - py-bluepyopt@1.14.11 @@ -62,14 +62,14 @@ spack: - py-tvb-ext-xircuits@1.1.0 - py-viziphant@0.4.0 - py-vbi - - pynn-brainscales@9.0-a8 + - pynn-brainscales@9.0-a9 - r-rgsl@0.1.1 - r-sbtabvfgen@0.1 - r-uqsa@2.2 - sda@7.3.3d # Workflows (meta-packages) - wf-biobb - - wf-brainscales2-demos@9.0-a8 + - wf-brainscales2-demos@9.0-a9 - wf-bsb@4.4 +nest +neuron - wf-protein-association-rates@0.1 - wf-multi-area-model@1.2.0 -- GitLab From d5bb12658deaf53b06fbed167bdb31a5512a842d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Wed, 26 Feb 2025 13:14:55 +0100 Subject: [PATCH 092/111] fix(genpybind): build on llvm@15 --- packages/genpybind/package.py | 1 + packages/hxtorch/package.py | 2 +- packages/pynn-brainscales/package.py | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/genpybind/package.py b/packages/genpybind/package.py index f5398139..012d2e5b 100644 --- a/packages/genpybind/package.py +++ b/packages/genpybind/package.py @@ -21,6 +21,7 @@ class Genpybind(WafPackage): version('develop', branch='develop') version('visions', branch='master', git='https://github.com/electronicvisions/genpybind') + version('ebrains-llvm15', tag='ebrains-9.0-a9', git='https://github.com/electronicvisions/genpybind') version('ebrains', tag='ebrains_release-1-rc1', git='https://github.com/electronicvisions/genpybind') depends_on( diff --git a/packages/hxtorch/package.py b/packages/hxtorch/package.py index ef237ceb..d4a5ea66 100644 --- a/packages/hxtorch/package.py +++ b/packages/hxtorch/package.py @@ -53,7 +53,7 @@ class Hxtorch(build_brainscales.BuildBrainscales): ('boost@1.69.0: +graph+icu+mpi+numpy+coroutine+context+filesystem+python+serialization+system+thread+program_options cxxstd=17', { "type": ('build', 'link', 'run', 'test') } ), ('cereal', { "type": ('build', 'link', 'run', 
'test') } ), ('cppcheck', { "type": ('build', 'link', 'run') } ), - ('genpybind@ebrains', { "type": ('build', 'link') } ), + ('genpybind@ebrains-llvm15', { "type": ('build', 'link') } ), ('gflags', { "type": ('build', 'link', 'run') } ), ('googletest@1.11.0:+gmock', { "type": ('build', 'link', 'run') } ), # variadic templates needed ('inja', { "type": ('build', 'link', 'run', 'test') } ),# template engine for PPU source jit generation diff --git a/packages/pynn-brainscales/package.py b/packages/pynn-brainscales/package.py index 3e68558d..e0836671 100644 --- a/packages/pynn-brainscales/package.py +++ b/packages/pynn-brainscales/package.py @@ -51,7 +51,7 @@ class PynnBrainscales(build_brainscales.BuildBrainscales): depends_on('boost@1.69.0: +graph+icu+mpi+numpy+coroutine+context+filesystem+python+serialization+system+thread+program_options cxxstd=17', type=('build', 'link', 'run', 'test')) depends_on('cereal', type=('build', 'link', 'run', 'test')) depends_on('cppcheck', type=('build', 'link', 'run')) - depends_on('genpybind@ebrains', type=('build', 'link')) + depends_on('genpybind@ebrains-llvm15', type=('build', 'link')) depends_on('gflags', type=('build', 'link', 'run')) depends_on('googletest@1.11.0:+gmock', type=('build', 'link', 'run')) # variadic templates needed depends_on('inja', type=('build', 'link', 'run', 'test')) # template engine for PPU source jit generation -- GitLab From 4497875b0b58425c0832c8e56497054cf0d0d317 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Wed, 26 Feb 2025 17:43:37 +0100 Subject: [PATCH 093/111] fix(BSS2): build of python wrapper w/ llvm@15 --- packages/build-brainscales/package.py | 2 +- packages/oppulance/package.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/build-brainscales/package.py b/packages/build-brainscales/package.py index cec40a9c..d1ab34f9 100644 --- a/packages/build-brainscales/package.py +++ b/packages/build-brainscales/package.py 
@@ -19,7 +19,7 @@ class BuildBrainscales(WafPackage): "9.0-a9", git="https://github.com/electronicvisions/releases-ebrains", tag="ebrains-9.0-a9", - commit="5951428b7598ff9478fa834d7e991e3ff94709ee", + commit="41d2597bd6c1c20aee4d538c42c248195a133680", submodules=True, ) version( diff --git a/packages/oppulance/package.py b/packages/oppulance/package.py index 55d6b04e..7df3c70f 100644 --- a/packages/oppulance/package.py +++ b/packages/oppulance/package.py @@ -25,7 +25,7 @@ class Oppulance(Package): "9.0-a9", git="https://github.com/electronicvisions/releases-ebrains", tag="ebrains-9.0-a9", - commit="5951428b7598ff9478fa834d7e991e3ff94709ee", + commit="41d2597bd6c1c20aee4d538c42c248195a133680", submodules=True, ) version( -- GitLab From fef3489ef38befa24e3ce5071adef4ef3ae5bfe7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Thu, 27 Feb 2025 15:01:49 +0100 Subject: [PATCH 094/111] fix(llvm): backport patches for clang's Python bindings --- .../llvm15-clang-python-missing-kinds.patch | 238 ++++++++++++++++++ packages/llvm/package.py | 16 ++ 2 files changed, 254 insertions(+) create mode 100644 packages/llvm/llvm15-clang-python-missing-kinds.patch diff --git a/packages/llvm/llvm15-clang-python-missing-kinds.patch b/packages/llvm/llvm15-clang-python-missing-kinds.patch new file mode 100644 index 00000000..d45dc630 --- /dev/null +++ b/packages/llvm/llvm15-clang-python-missing-kinds.patch @@ -0,0 +1,238 @@ +From eb264d825beb048c6e673ddaf5aca069511fcfb3 Mon Sep 17 00:00:00 2001 +From: ykiko <ykikoykikoykiko@gmail.com> +Date: Mon, 18 Mar 2024 22:13:10 +0800 +Subject: [PATCH] Add some missing Kinds to libclang python bindings (#85571) + +Add some Kinds existing in Index.h but missing in cindex.py. 
+--- + clang/bindings/python/clang/cindex.py | 173 +++++++++++++++++++++++++- + clang/docs/ReleaseNotes.rst | 2 + + clang/include/clang-c/Index.h | 2 +- + 3 files changed, 175 insertions(+), 2 deletions(-) + +diff --git a/clang/bindings/python/clang/cindex.py b/clang/bindings/python/clang/cindex.py +index 44a34ca196274..302d99dccd77b 100644 +--- a/clang/bindings/python/clang/cindex.py ++++ b/clang/bindings/python/clang/cindex.py +@@ -1091,6 +1091,29 @@ def __repr__(self): + # Represents an @available(...) check. + CursorKind.OBJC_AVAILABILITY_CHECK_EXPR = CursorKind(148) + ++# Fixed point literal. ++CursorKind.FIXED_POINT_LITERAL = CursorKind(149) ++ ++# OpenMP 5.0 [2.1.4, Array Shaping]. ++CursorKind.OMP_ARRAY_SHAPING_EXPR = CursorKind(150) ++ ++# OpenMP 5.0 [2.1.6 Iterators]. ++CursorKind.OMP_ITERATOR_EXPR = CursorKind(151) ++ ++# OpenCL's addrspace_cast<> expression. ++CursorKind.CXX_ADDRSPACE_CAST_EXPR = CursorKind(152) ++ ++# Expression that references a C++20 concept. ++CursorKind.CONCEPT_SPECIALIZATION_EXPR = CursorKind(153) ++ ++# Expression that references a C++20 requires expression. ++CursorKind.REQUIRES_EXPR = CursorKind(154) ++ ++# Expression that references a C++20 parenthesized list aggregate initializer. ++CursorKind.CXX_PAREN_LIST_INIT_EXPR = CursorKind(155) ++ ++# Represents a C++26 pack indexing expression. ++CursorKind.PACK_INDEXING_EXPR = CursorKind(156) + + # A statement whose specific kind is not exposed via this interface. + # +@@ -1312,6 +1335,114 @@ def __repr__(self): + # OpenMP teams distribute directive. + CursorKind.OMP_TEAMS_DISTRIBUTE_DIRECTIVE = CursorKind(271) + ++# OpenMP teams distribute simd directive. ++CursorKind.OMP_TEAMS_DISTRIBUTE_DIRECTIVE = CursorKind(272) ++ ++# OpenMP teams distribute parallel for simd directive. ++CursorKind.OMP_TEAMS_DISTRIBUTE_PARALLEL_FOR_SIMD_DIRECTIVE = CursorKind(273) ++ ++# OpenMP teams distribute parallel for directive. 
++CursorKind.OMP_TEAMS_DISTRIBUTE_PARALLEL_FOR_DIRECTIVE = CursorKind(274) ++ ++# OpenMP target teams directive. ++CursorKind.OMP_TARGET_TEAMS_DIRECTIVE = CursorKind(275) ++ ++# OpenMP target teams distribute directive. ++CursorKind.OMP_TARGET_TEAMS_DISTRIBUTE_DIRECTIVE = CursorKind(276) ++ ++# OpenMP target teams distribute parallel for directive. ++CursorKind.OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_FOR_DIRECTIVE = CursorKind(277) ++ ++# OpenMP target teams distribute parallel for simd directive. ++CursorKind.OMP_TARGET_TEAMS_DISTRIBUTE_PARALLEL_FOR_SIMD_DIRECTIVE = CursorKind(278) ++ ++# OpenMP target teams distribute simd directive. ++CursorKind.OMP_TARGET_TEAMS_DISTRIBUTE_SIMD_DIRECTIVE = CursorKind(279) ++ ++# C++2a std::bit_cast expression. ++CursorKind.BUILTIN_BIT_CAST_EXPR = CursorKind(280) ++ ++# OpenMP master taskloop directive. ++CursorKind.OMP_MASTER_TASK_LOOP_DIRECTIVE = CursorKind(281) ++ ++# OpenMP parallel master taskloop directive. ++CursorKind.OMP_PARALLEL_MASTER_TASK_LOOP_DIRECTIVE = CursorKind(282) ++ ++# OpenMP master taskloop simd directive. ++CursorKind.OMP_MASTER_TASK_LOOP_SIMD_DIRECTIVE = CursorKind(283) ++ ++# OpenMP parallel master taskloop simd directive. ++CursorKind.OMP_PARALLEL_MASTER_TASK_LOOP_SIMD_DIRECTIVE = CursorKind(284) ++ ++# OpenMP parallel master directive. ++CursorKind.OMP_PARALLEL_MASTER_DIRECTIVE = CursorKind(285) ++ ++# OpenMP depobj directive. ++CursorKind.OMP_DEPOBJ_DIRECTIVE = CursorKind(286) ++ ++# OpenMP scan directive. ++CursorKind.OMP_SCAN_DIRECTIVE = CursorKind(287) ++ ++# OpenMP tile directive. ++CursorKind.OMP_TILE_DIRECTIVE = CursorKind(288) ++ ++# OpenMP canonical loop. ++CursorKind.OMP_CANONICAL_LOOP = CursorKind(289) ++ ++# OpenMP interop directive. ++CursorKind.OMP_INTEROP_DIRECTIVE = CursorKind(290) ++ ++# OpenMP dispatch directive. ++CursorKind.OMP_DISPATCH_DIRECTIVE = CursorKind(291) ++ ++# OpenMP masked directive. ++CursorKind.OMP_MASKED_DIRECTIVE = CursorKind(292) ++ ++# OpenMP unroll directive. 
++CursorKind.OMP_UNROLL_DIRECTIVE = CursorKind(293) ++ ++# OpenMP metadirective directive. ++CursorKind.OMP_META_DIRECTIVE = CursorKind(294) ++ ++# OpenMP loop directive. ++CursorKind.OMP_GENERIC_LOOP_DIRECTIVE = CursorKind(295) ++ ++# OpenMP teams loop directive. ++CursorKind.OMP_TEAMS_GENERIC_LOOP_DIRECTIVE = CursorKind(296) ++ ++# OpenMP target teams loop directive. ++CursorKind.OMP_TARGET_TEAMS_GENERIC_LOOP_DIRECTIVE = CursorKind(297) ++ ++# OpenMP parallel loop directive. ++CursorKind.OMP_PARALLEL_GENERIC_LOOP_DIRECTIVE = CursorKind(298) ++ ++# OpenMP target parallel loop directive. ++CursorKind.OMP_TARGET_PARALLEL_GENERIC_LOOP_DIRECTIVE = CursorKind(299) ++ ++# OpenMP parallel masked directive. ++CursorKind.OMP_PARALLEL_MASKED_DIRECTIVE = CursorKind(300) ++ ++# OpenMP masked taskloop directive. ++CursorKind.OMP_MASKED_TASK_LOOP_DIRECTIVE = CursorKind(301) ++ ++# OpenMP masked taskloop simd directive. ++CursorKind.OMP_MASKED_TASK_LOOP_SIMD_DIRECTIVE = CursorKind(302) ++ ++# OpenMP parallel masked taskloop directive. ++CursorKind.OMP_PARALLEL_MASKED_TASK_LOOP_DIRECTIVE = CursorKind(303) ++ ++# OpenMP parallel masked taskloop simd directive. ++CursorKind.OMP_PARALLEL_MASKED_TASK_LOOP_SIMD_DIRECTIVE = CursorKind(304) ++ ++# OpenMP error directive. ++CursorKind.OMP_ERROR_DIRECTIVE = CursorKind(305) ++ ++# OpenMP scope directive. ++CursorKind.OMP_SCOPE_DIRECTIVE = CursorKind(306) ++ ++# OpenACC Compute Construct. 
++CursorKind.OPEN_ACC_COMPUTE_DIRECTIVE = CursorKind(320) ++ + ### + # Other Kinds + +@@ -1349,6 +1480,24 @@ def __repr__(self): + + CursorKind.DLLEXPORT_ATTR = CursorKind(418) + CursorKind.DLLIMPORT_ATTR = CursorKind(419) ++CursorKind.NS_RETURNS_RETAINED = CursorKind(420) ++CursorKind.NS_RETURNS_NOT_RETAINED = CursorKind(421) ++CursorKind.NS_RETURNS_AUTORELEASED = CursorKind(422) ++CursorKind.NS_CONSUMES_SELF = CursorKind(423) ++CursorKind.NS_CONSUMED = CursorKind(424) ++CursorKind.OBJC_EXCEPTION = CursorKind(425) ++CursorKind.OBJC_NSOBJECT = CursorKind(426) ++CursorKind.OBJC_INDEPENDENT_CLASS = CursorKind(427) ++CursorKind.OBJC_PRECISE_LIFETIME = CursorKind(428) ++CursorKind.OBJC_RETURNS_INNER_POINTER = CursorKind(429) ++CursorKind.OBJC_REQUIRES_SUPER = CursorKind(430) ++CursorKind.OBJC_ROOT_CLASS = CursorKind(431) ++CursorKind.OBJC_SUBCLASSING_RESTRICTED = CursorKind(432) ++CursorKind.OBJC_EXPLICIT_PROTOCOL_IMPL = CursorKind(433) ++CursorKind.OBJC_DESIGNATED_INITIALIZER = CursorKind(434) ++CursorKind.OBJC_RUNTIME_VISIBLE = CursorKind(435) ++CursorKind.OBJC_BOXABLE = CursorKind(436) ++CursorKind.FLAG_ENUM = CursorKind(437) + CursorKind.CONVERGENT_ATTR = CursorKind(438) + CursorKind.WARN_UNUSED_ATTR = CursorKind(439) + CursorKind.WARN_UNUSED_RESULT_ATTR = CursorKind(440) +@@ -1395,6 +1544,11 @@ class TemplateArgumentKind(BaseEnumeration): + TemplateArgumentKind.DECLARATION = TemplateArgumentKind(2) + TemplateArgumentKind.NULLPTR = TemplateArgumentKind(3) + TemplateArgumentKind.INTEGRAL = TemplateArgumentKind(4) ++TemplateArgumentKind.TEMPLATE = TemplateArgumentKind(5) ++TemplateArgumentKind.TEMPLATE_EXPANSION = TemplateArgumentKind(6) ++TemplateArgumentKind.EXPRESSION = TemplateArgumentKind(7) ++TemplateArgumentKind.PACK = TemplateArgumentKind(8) ++TemplateArgumentKind.INVALID = TemplateArgumentKind(9) + + ### Exception Specification Kinds ### + class ExceptionSpecificationKind(BaseEnumeration): +@@ -2240,8 +2394,26 @@ def __repr__(self): + TypeKind.OCLQUEUE = 
TypeKind(159) + TypeKind.OCLRESERVEID = TypeKind(160) + ++TypeKind.OBJCOBJECT = TypeKind(161) ++TypeKind.OBJCCLASS = TypeKind(162) ++TypeKind.ATTRIBUTED = TypeKind(163) ++ ++TypeKind.OCLINTELSUBGROUPAVCMCEPAYLOAD = TypeKind(164) ++TypeKind.OCLINTELSUBGROUPAVCIMEPAYLOAD = TypeKind(165) ++TypeKind.OCLINTELSUBGROUPAVCREFPAYLOAD = TypeKind(166) ++TypeKind.OCLINTELSUBGROUPAVCSICPAYLOAD = TypeKind(167) ++TypeKind.OCLINTELSUBGROUPAVCMCERESULT = TypeKind(168) ++TypeKind.OCLINTELSUBGROUPAVCIMERESULT = TypeKind(169) ++TypeKind.OCLINTELSUBGROUPAVCREFRESULT = TypeKind(170) ++TypeKind.OCLINTELSUBGROUPAVCSICRESULT = TypeKind(171) ++TypeKind.OCLINTELSUBGROUPAVCIMERESULTSINGLEREFERENCESTREAMOUT = TypeKind(172) ++TypeKind.OCLINTELSUBGROUPAVCIMERESULTSDUALREFERENCESTREAMOUT = TypeKind(173) ++TypeKind.OCLINTELSUBGROUPAVCIMERESULTSSINGLEREFERENCESTREAMIN = TypeKind(174) ++TypeKind.OCLINTELSUBGROUPAVCIMEDUALREFERENCESTREAMIN = TypeKind(175) ++ + TypeKind.EXTVECTOR = TypeKind(176) + TypeKind.ATOMIC = TypeKind(177) ++TypeKind.BTFTAGATTRIBUTED = TypeKind(178) + + class RefQualifierKind(BaseEnumeration): + """Describes a specific ref-qualifier of a type.""" +diff --git a/clang/include/clang-c/Index.h b/clang/include/clang-c/Index.h +index 3f3620609b6dd..60db3cf0966c0 100644 +--- a/clang/include/clang-c/Index.h ++++ b/clang/include/clang-c/Index.h +@@ -1675,7 +1675,7 @@ enum CXCursorKind { + CXCursor_ConceptSpecializationExpr = 153, + + /** +- * Expression that references a C++20 concept. ++ * Expression that references a C++20 requires expression. 
+ */ + CXCursor_RequiresExpr = 154, + diff --git a/packages/llvm/package.py b/packages/llvm/package.py index b437b0ee..a0b9bd0e 100644 --- a/packages/llvm/package.py +++ b/packages/llvm/package.py @@ -548,6 +548,22 @@ class Llvm(CMakePackage, CudaPackage, LlvmDetection, CompilerPackage): sha256="c6ca6b925f150e8644ce756023797b7f94c9619c62507231f979edab1c09af78", when="@6:13", ) + + # begin EBRAINS + # add missing concept declaration CursorKind to clang's Python bindings + patch( + "https://github.com/llvm/llvm-project/commit/8b322895207c34b434698954dc67404e0bbf8d8e.patch?full_index=1", + sha256="173050a7f08c01bbc18d1145f455cac827754d0436befc2afd1add86e884fac0", + when="@15:16", + ) + # Add some Kinds existing in Index.h but missing in cindex.py (dropped release note changes) + patch( + # based on https://github.com/llvm/llvm-project/commit/eb264d825beb048c6e673ddaf5aca069511fcfb3.patch?full_index=1 + "llvm15-clang-python-missing-kinds.patch", + when="@15:18", + ) + # end EBRAINS + # fix building of older versions of llvm with newer versions of glibc for compiler_rt_as in ["project", "runtime"]: with when("compiler-rt={0}".format(compiler_rt_as)): -- GitLab From f53dab33c030369ba3723abb8e2326843d1c6a57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Thu, 6 Mar 2025 09:01:28 +0100 Subject: [PATCH 095/111] fix: small bug in install script --- install_spack_env.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install_spack_env.sh b/install_spack_env.sh index 31232625..a0ed1f67 100644 --- a/install_spack_env.sh +++ b/install_spack_env.sh @@ -130,7 +130,7 @@ spack-python -c "exit(not len(spack.environment.active_environment().uninstalled else echo "Updating of the source cache disabled." 
fi - if [ $ret -ne 0 ]; then + if [ "$ret" -ne 0 ]; then (exit $ret) fi ) -- GitLab From 429fc858aed713a5dcb086c89f4eb0563fe44aaf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Thu, 6 Mar 2025 09:01:40 +0100 Subject: [PATCH 096/111] fix(BSS2): cache content when downloading oppulance first --- packages/oppulance/package.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/oppulance/package.py b/packages/oppulance/package.py index 7df3c70f..7e0d22b6 100644 --- a/packages/oppulance/package.py +++ b/packages/oppulance/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +import unittest.mock class Oppulance(Package): @@ -137,6 +138,15 @@ class Oppulance(Package): expand=False, ) + # see build-brainscales package for a description; we need to duplicate + # here, as the cache content is shared between the two repositories + @when("@9:") + def do_fetch(self, mirror_only=False): + # in the configure step, we need access to all archived .git folders + def custom_archive(self, destination): + super(spack.fetch_strategy.GitFetchStrategy, self).archive(destination) + with unittest.mock.patch('spack.fetch_strategy.GitFetchStrategy.archive', new=custom_archive): + super().do_fetch(mirror_only) def install(self, spec, prefix): ln = which('ln') -- GitLab From 366d5d56ee5ee18f884e649ff9674c145a730918 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Sat, 8 Mar 2025 10:08:31 +0100 Subject: [PATCH 097/111] fix(BSS2): ignore test fails because of missing catchsegv --- packages/build-brainscales/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/build-brainscales/package.py b/packages/build-brainscales/package.py index d1ab34f9..2419107c 100644 --- a/packages/build-brainscales/package.py +++ b/packages/build-brainscales/package.py @@ -208,6 +208,7 @@ class BuildBrainscales(WafPackage): if (elem.tag == 
'failure') and not ( elem.get('message').startswith("pylint:") or elem.get('message').startswith("pycodestyle:") or + "catchsegv: not found" in elem.get('message') or ("OK" in elem.get('message') and "Segmentation fault" in elem.get('message'))): raise RuntimeError("Failed test found: {}".format(testcase.get('name'))) -- GitLab From 4ad76adb2ab36d449761ef20b362d1f2b210194e Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Mon, 10 Mar 2025 17:38:37 +0000 Subject: [PATCH 098/111] feat(apbs): relax python dependency version range --- packages/apbs/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/apbs/package.py b/packages/apbs/package.py index 63025099..f7f45c6e 100644 --- a/packages/apbs/package.py +++ b/packages/apbs/package.py @@ -40,7 +40,7 @@ class Apbs(CMakePackage): depends_on('arpack-ng', type=('build', 'run')) depends_on('suite-sparse', type=('build', 'run')) depends_on('maloc', type=('build', 'run')) - depends_on('python@3.8:3.10', type=('build', 'run')) + depends_on('python@3.8:3.11', type=('build', 'run')) def cmake_args(self): # Min and max Python versions need to be set as variables to pass tests. 
-- GitLab From ef9a764384ac97e9ea1bf2f55f7707bb7a9a401e Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Mon, 10 Mar 2025 17:41:03 +0000 Subject: [PATCH 099/111] feat(py-pdb2pqr): relax py-docutils dependency version range --- packages/py-pdb2pqr/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/py-pdb2pqr/package.py b/packages/py-pdb2pqr/package.py index 9e3a72e8..1e87023a 100644 --- a/packages/py-pdb2pqr/package.py +++ b/packages/py-pdb2pqr/package.py @@ -28,7 +28,7 @@ class PyPdb2pqr(PythonPackage): depends_on('python@3.8:', type=('build','run')) depends_on('py-setuptools', type=('build')) - depends_on('py-docutils@:0.18', type=('build','run')) + depends_on('py-docutils', type=('build','run')) depends_on('py-mmcif-pdbx@1.1.2:', type=('build','run')) depends_on('py-numpy', type=('build','run')) depends_on('py-propka@3.2:', type=('build','run')) -- GitLab From a9c6db5fbc21e0cfde8423398e75afc61f5affde Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Mon, 10 Mar 2025 17:39:56 +0000 Subject: [PATCH 100/111] feat(py-biobb): add version 4.1.0 --- packages/py-biobb-analysis/package.py | 15 ++------------- packages/py-biobb-chemistry/package.py | 15 ++------------- spack.yaml | 4 ++-- 3 files changed, 6 insertions(+), 28 deletions(-) diff --git a/packages/py-biobb-analysis/package.py b/packages/py-biobb-analysis/package.py index 799d4c36..9c580af0 100644 --- a/packages/py-biobb-analysis/package.py +++ b/packages/py-biobb-analysis/package.py @@ -12,14 +12,13 @@ class PyBiobbAnalysis(PythonPackage): # Homepage and download url homepage = "https://github.com/bioexcel/biobb_analysis" git = 'https://github.com/bioexcel/biobb_analysis.git' - url = 'https://github.com/bioexcel/biobb_analysis/archive/refs/tags/v4.0.1.tar.gz' + url = 'https://github.com/bioexcel/biobb_analysis/archive/refs/tags/v4.1.0.tar.gz' # Set the gitlab accounts of this package maintainers maintainers = 
['dbeltran'] # Versions - version('master', branch='master') - version('4.0.1', sha256='3d7190d0cae6af42b2e7c0ecf073930a609b699ef311e8b74afc1634958e09b0') + version('4.1.0', sha256='fecbb7ffa0e38f732fcc613adc7f1656e3c65af519a072269dabc244681f1791') # Dependencies depends_on('python@3.8:', type=('build', 'run')) @@ -27,16 +26,6 @@ class PyBiobbAnalysis(PythonPackage): depends_on('gromacs') depends_on('ambertools') - # Patching to enable python 3.10 (not official, might not be stable) - def patch(self): - filter_file(" python_requires='>=3.7,<3.10',", " python_requires='>=3.7,<3.11',", "setup.py") - filter_file( - "'Programming Language :: Python :: 3.9'", - "'Programming Language :: Python :: 3.9',\r\n " - "'Programming Language :: Python :: 3.10'", - "setup.py", - ) - # Test @run_after('install') @on_package_attributes(run_tests=True) diff --git a/packages/py-biobb-chemistry/package.py b/packages/py-biobb-chemistry/package.py index 6ec163b6..c30b8092 100644 --- a/packages/py-biobb-chemistry/package.py +++ b/packages/py-biobb-chemistry/package.py @@ -12,14 +12,13 @@ class PyBiobbChemistry(PythonPackage): # Homepage and download url homepage = "https://github.com/bioexcel/biobb_chemistry" git = 'https://github.com/bioexcel/biobb_chemistry.git' - url = 'https://github.com/bioexcel/biobb_chemistry/archive/refs/tags/v4.0.0.tar.gz' + url = 'https://github.com/bioexcel/biobb_chemistry/archive/refs/tags/v4.1.0.tar.gz' # Set the gitlab accounts of this package maintainers maintainers = ['dbeltran'] # Versions - version('master', branch='master') - version('4.0.0', sha256='40f65b4a93dff24e19995265e41fd6821f5ac2f35199d938f1d00fa035883e64') + version('4.1.0', sha256='8d04943bbfcd83eb4d5d3247949cf035977fd097d5014a39354b0502e1cd16c8') # Dependencies depends_on('python@3.8:', type=('build', 'run')) @@ -28,16 +27,6 @@ class PyBiobbChemistry(PythonPackage): depends_on('ambertools') depends_on('acpype') - # Patching to enable python 3.10 (not official, might not be stable) - def 
patch(self): - filter_file(" python_requires='>=3.7,<3.10',", " python_requires='>=3.7,<3.11',", "setup.py") - filter_file( - "'Programming Language :: Python :: 3.9'", - "'Programming Language :: Python :: 3.9',\r\n " - "'Programming Language :: Python :: 3.10'", - "setup.py", - ) - # Test @run_after('install') @on_package_attributes(run_tests=True) diff --git a/spack.yaml b/spack.yaml index c6d455fd..9d86fd2f 100644 --- a/spack.yaml +++ b/spack.yaml @@ -4,8 +4,8 @@ spack: specs: # EBRAINS tools - arbor@0.10.0 +python +mpi - - py-biobb-analysis@4.0.1 - - py-biobb-chemistry@4.0.0 + - py-biobb-analysis@4.1.0 + - py-biobb-chemistry@4.1.0 - py-biobb-common@4.1.0 - py-biobb-gromacs@4.1.1 - py-biobb-io@4.1.0 -- GitLab From 0cc20acdd73ce2461740fa0b01ebb7da7632811d Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Mon, 10 Mar 2025 17:43:53 +0000 Subject: [PATCH 101/111] feat(ambertools): copy package from upstream --- packages/ambertools/package.py | 88 ++++++++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100644 packages/ambertools/package.py diff --git a/packages/ambertools/package.py b/packages/ambertools/package.py new file mode 100644 index 00000000..f680b43c --- /dev/null +++ b/packages/ambertools/package.py @@ -0,0 +1,88 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Ambertools(CMakePackage): + """AmberTools is a free, useful standalone package and a prerequisite + for installing Amber itself. The AmberTools suite is free of charge, + and its components are mostly released under the GNU General Public + License (GPL). A few components are included that are in the public + domain or which have other, open-source, licenses. 
The libsander and + libpbsa libraries use the LGPL license.""" + + homepage = "https://ambermd.org/AmberTools.php" + url = "https://ambermd.org/downloads/AmberTools22jlmrcc.tar.bz2" + + maintainers("d-beltran") + + version("22jlmrcc", sha256="1571d4e0f7d45b2a71dce5999fa875aea8c90ee219eb218d7916bf30ea229121") + + depends_on("flex", type="build") + depends_on("bison", type="build") + depends_on("tcsh", type="build") + depends_on("zlib", type=("build", "run")) + depends_on("bzip2", type=("build", "run")) + depends_on("blas", type=("build", "run")) + depends_on("lapack", type=("build", "run")) + depends_on("arpack-ng", type=("build", "run")) + depends_on("netcdf-c", type=("build", "run")) + depends_on("netcdf-fortran", type=("build", "run")) + depends_on("fftw", type=("build", "run")) + depends_on("readline", type=("build", "run")) + depends_on("netlib-xblas~plain_blas", type=("build", "run")) + # Specific variants needed for boost according to build logs + depends_on( + "boost+thread+system+program_options+iostreams+regex+timer+chrono+filesystem+graph", + type=("build", "run"), + ) + # Python dependencies + depends_on("python@3.8:3.10 +tkinter", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-numpy", type=("build", "run")) + depends_on("py-matplotlib", type=("build", "run")) + depends_on("py-scipy", type=("build", "run")) + + def cmake_args(self): + # Translated from ambertools build/run_cmake script + # We also add the TRUST_SYSTEM_LIBS argument mentioned in the ambertools guide + # https://ambermd.org/pmwiki/pmwiki.php/Main/CMake-Guide-to-Options + args = [ + self.define("COMPILER", "GNU"), + self.define("MPI", False), + self.define("CUDA", False), + self.define("INSTALL_TESTS", True), + self.define("DOWNLOAD_MINICONDA", False), + self.define("TRUST_SYSTEM_LIBS", True), + # This is to avoid the x11 (X11_Xext_LIB) error + # It is equivalent to the "-noX11" flag accoridng to the docs: + # 
https://ambermd.org/pmwiki/pmwiki.php/Main/CMake-Common-Options + self.define("BUILD_GUI", False), + ] + return args + + def setup_run_environment(self, env): + env.set("AMBER_PREFIX", self.prefix) + env.set("AMBERHOME", self.prefix) + + def setup_build_environment(self, env): + env.set("AMBER_PREFIX", self.prefix) + env.set("AMBERHOME", self.prefix) + + @run_after("install") + @on_package_attributes(run_tests=True) + def check_install(self): + make("test.serial") + + # Temporarily copy netcdf.h header file to netcdf-fortran/include to pass the Ambertools + # cmake check (quickest fix, will probably cause problems, needs to change) + @run_before("cmake") + def fix_check(self): + cp = Executable("cp") + cp( + self.spec["netcdf-c"].headers.directories[0] + "/netcdf.h", + self.spec["netcdf-fortran"].headers.directories[0], + ) -- GitLab From 2cb260e64969b24b653aad73b9a4fbe7697a48c1 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Mon, 10 Mar 2025 17:45:39 +0000 Subject: [PATCH 102/111] feat(ambertools): add version 23 --- packages/ambertools/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/ambertools/package.py b/packages/ambertools/package.py index f680b43c..abe7df7d 100644 --- a/packages/ambertools/package.py +++ b/packages/ambertools/package.py @@ -19,6 +19,9 @@ class Ambertools(CMakePackage): maintainers("d-beltran") + # begin EBRAINS (added): add version + version("23_rc6", sha256="debb52e6ef2e1b4eaa917a8b4d4934bd2388659c660501a81ea044903bf9ee9d") + # end EBRAINS version("22jlmrcc", sha256="1571d4e0f7d45b2a71dce5999fa875aea8c90ee219eb218d7916bf30ea229121") depends_on("flex", type="build") @@ -40,7 +43,10 @@ class Ambertools(CMakePackage): type=("build", "run"), ) # Python dependencies - depends_on("python@3.8:3.10 +tkinter", type=("build", "run")) + # begin EBRAINS (modified): add version + depends_on("python@3.8:3.10 +tkinter", type=("build", "run"), when="@22jlmrcc") + 
depends_on("python@3.8: +tkinter", type=("build", "run"), when="@23_rc6") + # end EBRAINS depends_on("py-setuptools", type="build") depends_on("py-numpy", type=("build", "run")) depends_on("py-matplotlib", type=("build", "run")) -- GitLab From 3681ab01f457ab98ffdcad66f5db27b7fbeead3d Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 11 Mar 2025 00:28:55 +0000 Subject: [PATCH 103/111] feat(sbml): add version 5.20.4 --- packages/sbml/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/sbml/package.py b/packages/sbml/package.py index f54fbb6b..7b40ffd5 100644 --- a/packages/sbml/package.py +++ b/packages/sbml/package.py @@ -17,6 +17,7 @@ class Sbml(CMakePackage): license("LGPL-2.1-or-later") # begin EBRAINS (added): add version + version("5.20.4", sha256="02c225d3513e1f5d6e3c0168456f568e67f006eddaab82f09b4bdf0d53d2050e", url="https://github.com/sbmlteam/libsbml/archive/refs/tags/v5.20.4.tar.gz") version("5.19.0", sha256="a7f0e18be78ff0e064e4cdb1cd86634d08bc33be5250db4a1878bd81eeb8b547") # end EBRAINS version("5.18.0", sha256="6c01be2306ec0c9656b59cb082eb7b90176c39506dd0f912b02e08298a553360") -- GitLab From a26393e12b6cf8949fb5cdb8e4e8690cdd58c001 Mon Sep 17 00:00:00 2001 From: Adrian Ciu <adrian.ciu@codemart.ro> Date: Tue, 11 Mar 2025 02:05:22 +0100 Subject: [PATCH 104/111] feat: import spack.package explicitly in all packages --- packages/acpype/package.py | 2 +- packages/apbs/package.py | 2 +- packages/build-brainscales/package.py | 2 +- packages/clb-nb-utils/package.py | 2 +- packages/genpybind/package.py | 2 +- packages/hxtorch/package.py | 2 +- packages/inja/package.py | 2 +- packages/jaxsnn/package.py | 2 +- packages/nest/package.py | 2 +- packages/oppulance/package.py | 2 +- packages/psmisc/package.py | 2 +- packages/py-annarchy/package.py | 2 +- packages/py-bct/package.py | 2 +- packages/py-biobb-analysis/package.py | 2 +- packages/py-biobb-chemistry/package.py | 2 +- packages/py-cerebstats/package.py | 2 
+- packages/py-cerebunit/package.py | 2 +- packages/py-clang-format/package.py | 2 +- packages/py-cons/package.py | 2 +- packages/py-dicthash/package.py | 2 +- packages/py-etuples/package.py | 2 +- packages/py-flask-restx/package.py | 2 +- packages/py-formencode/package.py | 3 +-- packages/py-frites/package.py | 2 +- packages/py-hbp-archive/package.py | 2 +- packages/py-hbp-neuromorphic-platform/package.py | 2 +- packages/py-hippounit/package.py | 2 +- packages/py-junitparser/package.py | 2 +- packages/py-lems/package.py | 2 +- packages/py-lfpy/package.py | 2 +- packages/py-lfpykit/package.py | 2 +- packages/py-libneuroml/package.py | 2 +- packages/py-logical-unification/package.py | 2 +- packages/py-mini-kanren/package.py | 2 +- packages/py-mmcif-pdbx/package.py | 2 +- packages/py-morphounit/package.py | 2 +- packages/py-nameparser/package.py | 2 +- packages/py-neo/package.py | 2 +- packages/py-nested-dict/package.py | 2 +- packages/py-nestml/package.py | 2 +- packages/py-nflows/package.py | 2 +- packages/py-nnmt/package.py | 2 +- packages/py-odetoolbox/package.py | 2 +- packages/py-pdb2pqr/package.py | 2 +- packages/py-propka/package.py | 2 +- packages/py-pyaescrypt/package.py | 2 +- packages/py-pycatch22/package.py | 2 +- packages/py-pyknos/package.py | 2 +- packages/py-pymc/package.py | 2 +- packages/py-pynn/package.py | 2 +- packages/py-pyspike/package.py | 2 +- packages/py-pyswarms/package.py | 2 +- packages/py-pytensor/package.py | 2 +- packages/py-python-keycloak/package.py | 2 +- packages/py-pyunicore/package.py | 2 +- packages/py-quantities-scidash/package.py | 2 +- packages/py-quantities/package.py | 2 +- packages/py-sbi/package.py | 2 +- packages/py-sciunit/package.py | 2 +- packages/py-snudda/package.py | 2 +- packages/py-tvb-contrib/package.py | 2 +- packages/py-tvb-data/package.py | 2 +- packages/py-tvb-ext-bucket/package.py | 2 +- packages/py-tvb-framework/package.py | 2 +- packages/py-tvb-gdist/package.py | 2 +- packages/py-tvb-library/package.py | 
2 +- packages/py-tvb-multiscale/package.py | 2 +- packages/py-tvb-storage/package.py | 2 +- packages/py-tvb-widgets/package.py | 2 +- packages/py-umnn/package.py | 2 +- packages/py-vbi/package.py | 2 +- packages/py-viziphant/package.py | 2 +- packages/py-zuko/package.py | 2 +- packages/pynn-brainscales/package.py | 2 +- packages/sda/package.py | 2 +- packages/wf-biobb/package.py | 2 +- packages/wf-brainscales2-demos/package.py | 2 +- packages/wf-bsb/package.py | 2 +- packages/wf-custom-python/package.py | 2 +- packages/wf-human-multi-area-model/package.py | 2 +- packages/wf-multi-area-model/package.py | 2 +- packages/wf-protein-association-rates/package.py | 2 +- packages/wf-uq-akar4/package.py | 2 +- 83 files changed, 83 insertions(+), 84 deletions(-) diff --git a/packages/acpype/package.py b/packages/acpype/package.py index cff6bc8e..c6382b76 100644 --- a/packages/acpype/package.py +++ b/packages/acpype/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class Acpype(PythonPackage): """A tool based in Python to use Antechamber to generate topologies for chemical diff --git a/packages/apbs/package.py b/packages/apbs/package.py index f7f45c6e..bbfac340 100644 --- a/packages/apbs/package.py +++ b/packages/apbs/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class Apbs(CMakePackage): diff --git a/packages/build-brainscales/package.py b/packages/build-brainscales/package.py index 2419107c..327501d1 100644 --- a/packages/build-brainscales/package.py +++ b/packages/build-brainscales/package.py @@ -7,7 +7,7 @@ import os import unittest.mock import xml.etree.ElementTree as ET -from spack import * +from spack.package import * from spack.util.environment import EnvironmentModifications import spack.build_environment diff --git a/packages/clb-nb-utils/package.py b/packages/clb-nb-utils/package.py index d75ae48c..40660252 
100644 --- a/packages/clb-nb-utils/package.py +++ b/packages/clb-nb-utils/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * # Usage # from clb_nb_utils import oauth diff --git a/packages/genpybind/package.py b/packages/genpybind/package.py index 012d2e5b..bca2c483 100644 --- a/packages/genpybind/package.py +++ b/packages/genpybind/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class Genpybind(WafPackage): diff --git a/packages/hxtorch/package.py b/packages/hxtorch/package.py index d4a5ea66..7d3a26bf 100644 --- a/packages/hxtorch/package.py +++ b/packages/hxtorch/package.py @@ -7,7 +7,7 @@ import os import unittest.mock import xml.etree.ElementTree as ET -from spack import * +from spack.package import * from spack.util.environment import EnvironmentModifications import spack.build_environment diff --git a/packages/inja/package.py b/packages/inja/package.py index f8f653e4..47a20743 100644 --- a/packages/inja/package.py +++ b/packages/inja/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class Inja(CMakePackage): diff --git a/packages/jaxsnn/package.py b/packages/jaxsnn/package.py index b2b33296..8cca2481 100644 --- a/packages/jaxsnn/package.py +++ b/packages/jaxsnn/package.py @@ -7,7 +7,7 @@ import os import unittest.mock import xml.etree.ElementTree as ET -from spack import * +from spack.package import * from spack.util.environment import EnvironmentModifications import spack.build_environment diff --git a/packages/nest/package.py b/packages/nest/package.py index e97b81c8..d8e127da 100644 --- a/packages/nest/package.py +++ b/packages/nest/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * import re diff --git a/packages/oppulance/package.py 
b/packages/oppulance/package.py index 7e0d22b6..7dc37d0a 100644 --- a/packages/oppulance/package.py +++ b/packages/oppulance/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * import unittest.mock diff --git a/packages/psmisc/package.py b/packages/psmisc/package.py index f1a67530..4b727f19 100644 --- a/packages/psmisc/package.py +++ b/packages/psmisc/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class Psmisc(AutotoolsPackage): diff --git a/packages/py-annarchy/package.py b/packages/py-annarchy/package.py index 4368d1e0..9fb9cab9 100644 --- a/packages/py-annarchy/package.py +++ b/packages/py-annarchy/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyAnnarchy(PythonPackage): diff --git a/packages/py-bct/package.py b/packages/py-bct/package.py index bfa61d91..97ecec1b 100644 --- a/packages/py-bct/package.py +++ b/packages/py-bct/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyBct(PythonPackage): diff --git a/packages/py-biobb-analysis/package.py b/packages/py-biobb-analysis/package.py index 9c580af0..79ce6edc 100644 --- a/packages/py-biobb-analysis/package.py +++ b/packages/py-biobb-analysis/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyBiobbAnalysis(PythonPackage): """Biobb_analysis is the Biobb module collection to perform analysis diff --git a/packages/py-biobb-chemistry/package.py b/packages/py-biobb-chemistry/package.py index c30b8092..969a5058 100644 --- a/packages/py-biobb-chemistry/package.py +++ b/packages/py-biobb-chemistry/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class 
PyBiobbChemistry(PythonPackage): """Biobb_chemistry is the Biobb module collection to perform chemistry diff --git a/packages/py-cerebstats/package.py b/packages/py-cerebstats/package.py index 5b1d3865..5c3a6f8a 100644 --- a/packages/py-cerebstats/package.py +++ b/packages/py-cerebstats/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyCerebstats(PythonPackage): diff --git a/packages/py-cerebunit/package.py b/packages/py-cerebunit/package.py index d4a70054..62e29223 100644 --- a/packages/py-cerebunit/package.py +++ b/packages/py-cerebunit/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyCerebunit(PythonPackage): diff --git a/packages/py-clang-format/package.py b/packages/py-clang-format/package.py index 7be2c021..19d74273 100644 --- a/packages/py-clang-format/package.py +++ b/packages/py-clang-format/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyClangFormat(PythonPackage): diff --git a/packages/py-cons/package.py b/packages/py-cons/package.py index 77b2e1ab..e114b5b5 100644 --- a/packages/py-cons/package.py +++ b/packages/py-cons/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyCons(PythonPackage): diff --git a/packages/py-dicthash/package.py b/packages/py-dicthash/package.py index 2fd2daf0..55ae9198 100644 --- a/packages/py-dicthash/package.py +++ b/packages/py-dicthash/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyDicthash(PythonPackage): diff --git a/packages/py-etuples/package.py b/packages/py-etuples/package.py index d3462437..d8cca9f6 100644 --- a/packages/py-etuples/package.py +++ b/packages/py-etuples/package.py @@ -3,7 +3,7 @@ # # 
SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyEtuples(PythonPackage): diff --git a/packages/py-flask-restx/package.py b/packages/py-flask-restx/package.py index 171f1039..7f9c45ec 100644 --- a/packages/py-flask-restx/package.py +++ b/packages/py-flask-restx/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyFlaskRestx(PythonPackage): diff --git a/packages/py-formencode/package.py b/packages/py-formencode/package.py index f78e0e63..56c94e3d 100644 --- a/packages/py-formencode/package.py +++ b/packages/py-formencode/package.py @@ -3,8 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * - +from spack.package import * class PyFormencode(PythonPackage): """ diff --git a/packages/py-frites/package.py b/packages/py-frites/package.py index 67c8f204..59aaca93 100644 --- a/packages/py-frites/package.py +++ b/packages/py-frites/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyFrites(PythonPackage): diff --git a/packages/py-hbp-archive/package.py b/packages/py-hbp-archive/package.py index 580ec106..6f9921b0 100644 --- a/packages/py-hbp-archive/package.py +++ b/packages/py-hbp-archive/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyHbpArchive(PythonPackage): diff --git a/packages/py-hbp-neuromorphic-platform/package.py b/packages/py-hbp-neuromorphic-platform/package.py index 06502497..3308957b 100644 --- a/packages/py-hbp-neuromorphic-platform/package.py +++ b/packages/py-hbp-neuromorphic-platform/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyHbpNeuromorphicPlatform(PythonPackage): diff --git a/packages/py-hippounit/package.py b/packages/py-hippounit/package.py 
index 0317272c..0a6c294f 100644 --- a/packages/py-hippounit/package.py +++ b/packages/py-hippounit/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyHippounit(PythonPackage): diff --git a/packages/py-junitparser/package.py b/packages/py-junitparser/package.py index 5f93f3e2..b794a44c 100644 --- a/packages/py-junitparser/package.py +++ b/packages/py-junitparser/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyJunitparser(PythonPackage): diff --git a/packages/py-lems/package.py b/packages/py-lems/package.py index 456c8676..ff0173af 100644 --- a/packages/py-lems/package.py +++ b/packages/py-lems/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyLems(PythonPackage): diff --git a/packages/py-lfpy/package.py b/packages/py-lfpy/package.py index bc3a1ee4..27cba28b 100644 --- a/packages/py-lfpy/package.py +++ b/packages/py-lfpy/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyLfpy(PythonPackage): diff --git a/packages/py-lfpykit/package.py b/packages/py-lfpykit/package.py index 3768a8c4..ccabc623 100644 --- a/packages/py-lfpykit/package.py +++ b/packages/py-lfpykit/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyLfpykit(PythonPackage): diff --git a/packages/py-libneuroml/package.py b/packages/py-libneuroml/package.py index d660b23e..babb2b21 100644 --- a/packages/py-libneuroml/package.py +++ b/packages/py-libneuroml/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyLibneuroml(PythonPackage): diff --git a/packages/py-logical-unification/package.py 
b/packages/py-logical-unification/package.py index 7ba1620e..e4c453c1 100644 --- a/packages/py-logical-unification/package.py +++ b/packages/py-logical-unification/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyLogicalUnification(PythonPackage): diff --git a/packages/py-mini-kanren/package.py b/packages/py-mini-kanren/package.py index 2d24eb02..d777ee61 100644 --- a/packages/py-mini-kanren/package.py +++ b/packages/py-mini-kanren/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyMiniKanren(PythonPackage): diff --git a/packages/py-mmcif-pdbx/package.py b/packages/py-mmcif-pdbx/package.py index 02daf8fc..25dfc8ed 100644 --- a/packages/py-mmcif-pdbx/package.py +++ b/packages/py-mmcif-pdbx/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyMmcifPdbx(PythonPackage): diff --git a/packages/py-morphounit/package.py b/packages/py-morphounit/package.py index 3884482e..382a6d49 100644 --- a/packages/py-morphounit/package.py +++ b/packages/py-morphounit/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyMorphounit(PythonPackage): diff --git a/packages/py-nameparser/package.py b/packages/py-nameparser/package.py index a3e3eb71..f4eb768b 100644 --- a/packages/py-nameparser/package.py +++ b/packages/py-nameparser/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyNameparser(PythonPackage): diff --git a/packages/py-neo/package.py b/packages/py-neo/package.py index 90054098..3ce2a9bc 100644 --- a/packages/py-neo/package.py +++ b/packages/py-neo/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * 
class PyNeo(PythonPackage): diff --git a/packages/py-nested-dict/package.py b/packages/py-nested-dict/package.py index 5fc5ff00..96e9b79b 100644 --- a/packages/py-nested-dict/package.py +++ b/packages/py-nested-dict/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyNestedDict(PythonPackage): diff --git a/packages/py-nestml/package.py b/packages/py-nestml/package.py index a5ea9b1b..d3437f3b 100644 --- a/packages/py-nestml/package.py +++ b/packages/py-nestml/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyNestml(PythonPackage): diff --git a/packages/py-nflows/package.py b/packages/py-nflows/package.py index 72f82caf..b6c32ffe 100644 --- a/packages/py-nflows/package.py +++ b/packages/py-nflows/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyNflows(PythonPackage): """It is a comprehensive collection of normalizing flows using PyTorch.""" diff --git a/packages/py-nnmt/package.py b/packages/py-nnmt/package.py index 7b722a7f..c605bd72 100644 --- a/packages/py-nnmt/package.py +++ b/packages/py-nnmt/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyNnmt(PythonPackage): diff --git a/packages/py-odetoolbox/package.py b/packages/py-odetoolbox/package.py index 3ebd6e68..c5ccc459 100644 --- a/packages/py-odetoolbox/package.py +++ b/packages/py-odetoolbox/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyOdetoolbox(PythonPackage): diff --git a/packages/py-pdb2pqr/package.py b/packages/py-pdb2pqr/package.py index 1e87023a..b737eb38 100644 --- a/packages/py-pdb2pqr/package.py +++ b/packages/py-pdb2pqr/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: 
(Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPdb2pqr(PythonPackage): diff --git a/packages/py-propka/package.py b/packages/py-propka/package.py index edb466b4..b75a09b3 100644 --- a/packages/py-propka/package.py +++ b/packages/py-propka/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPropka(PythonPackage): diff --git a/packages/py-pyaescrypt/package.py b/packages/py-pyaescrypt/package.py index d0b38eb1..ab15e7f7 100644 --- a/packages/py-pyaescrypt/package.py +++ b/packages/py-pyaescrypt/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPyaescrypt(PythonPackage): diff --git a/packages/py-pycatch22/package.py b/packages/py-pycatch22/package.py index 68c4be09..6707282e 100644 --- a/packages/py-pycatch22/package.py +++ b/packages/py-pycatch22/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPycatch22(PythonPackage): diff --git a/packages/py-pyknos/package.py b/packages/py-pyknos/package.py index e9449a6d..151fd077 100644 --- a/packages/py-pyknos/package.py +++ b/packages/py-pyknos/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPyknos(PythonPackage): diff --git a/packages/py-pymc/package.py b/packages/py-pymc/package.py index b7dc999f..453829ab 100644 --- a/packages/py-pymc/package.py +++ b/packages/py-pymc/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPymc(PythonPackage): diff --git a/packages/py-pynn/package.py b/packages/py-pynn/package.py index f75382bc..73df7c20 100644 --- a/packages/py-pynn/package.py +++ b/packages/py-pynn/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack 
import * +from spack.package import * class PyPynn(PythonPackage): diff --git a/packages/py-pyspike/package.py b/packages/py-pyspike/package.py index ddf17e7e..c0ce25b6 100644 --- a/packages/py-pyspike/package.py +++ b/packages/py-pyspike/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPyspike(PythonPackage): diff --git a/packages/py-pyswarms/package.py b/packages/py-pyswarms/package.py index 60c85b39..f9d94540 100644 --- a/packages/py-pyswarms/package.py +++ b/packages/py-pyswarms/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPyswarms(PythonPackage): diff --git a/packages/py-pytensor/package.py b/packages/py-pytensor/package.py index 4c045393..b47935f4 100644 --- a/packages/py-pytensor/package.py +++ b/packages/py-pytensor/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPytensor(PythonPackage): """Python library that allows one to define, optimize, and efficiently evaluate mathematical expressions involving diff --git a/packages/py-python-keycloak/package.py b/packages/py-python-keycloak/package.py index 44c4d562..b7f3d7cc 100644 --- a/packages/py-python-keycloak/package.py +++ b/packages/py-python-keycloak/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPythonKeycloak(PythonPackage): """python-keycloak is a Python package providing access to the Keycloak API""" diff --git a/packages/py-pyunicore/package.py b/packages/py-pyunicore/package.py index 1d6b71b3..0eb54988 100644 --- a/packages/py-pyunicore/package.py +++ b/packages/py-pyunicore/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyPyunicore(PythonPackage): diff --git 
a/packages/py-quantities-scidash/package.py b/packages/py-quantities-scidash/package.py index 08fcee32..f075e7fa 100644 --- a/packages/py-quantities-scidash/package.py +++ b/packages/py-quantities-scidash/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyQuantitiesScidash(PythonPackage): diff --git a/packages/py-quantities/package.py b/packages/py-quantities/package.py index ffe38173..e5d14e95 100644 --- a/packages/py-quantities/package.py +++ b/packages/py-quantities/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyQuantities(PythonPackage): diff --git a/packages/py-sbi/package.py b/packages/py-sbi/package.py index a560cd93..5e1c50df 100644 --- a/packages/py-sbi/package.py +++ b/packages/py-sbi/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PySbi(PythonPackage): diff --git a/packages/py-sciunit/package.py b/packages/py-sciunit/package.py index 5684040b..ca8cf891 100644 --- a/packages/py-sciunit/package.py +++ b/packages/py-sciunit/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PySciunit(PythonPackage): diff --git a/packages/py-snudda/package.py b/packages/py-snudda/package.py index 621df60f..f8458a3f 100644 --- a/packages/py-snudda/package.py +++ b/packages/py-snudda/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PySnudda(PythonPackage): diff --git a/packages/py-tvb-contrib/package.py b/packages/py-tvb-contrib/package.py index 8f940f0c..7a136a1a 100644 --- a/packages/py-tvb-contrib/package.py +++ b/packages/py-tvb-contrib/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * 
class PyTvbContrib(PythonPackage): diff --git a/packages/py-tvb-data/package.py b/packages/py-tvb-data/package.py index 1e5e9353..abe486cd 100644 --- a/packages/py-tvb-data/package.py +++ b/packages/py-tvb-data/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyTvbData(PythonPackage): diff --git a/packages/py-tvb-ext-bucket/package.py b/packages/py-tvb-ext-bucket/package.py index 3d5e7c2b..29fdae03 100644 --- a/packages/py-tvb-ext-bucket/package.py +++ b/packages/py-tvb-ext-bucket/package.py @@ -1,4 +1,4 @@ -from spack import * +from spack.package import * class PyTvbExtBucket(PythonPackage): diff --git a/packages/py-tvb-framework/package.py b/packages/py-tvb-framework/package.py index fb1ebc2f..12c01e6a 100644 --- a/packages/py-tvb-framework/package.py +++ b/packages/py-tvb-framework/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyTvbFramework(PythonPackage): diff --git a/packages/py-tvb-gdist/package.py b/packages/py-tvb-gdist/package.py index 34f70127..38cfa771 100644 --- a/packages/py-tvb-gdist/package.py +++ b/packages/py-tvb-gdist/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyTvbGdist(PythonPackage): diff --git a/packages/py-tvb-library/package.py b/packages/py-tvb-library/package.py index c624a102..8386fb5a 100644 --- a/packages/py-tvb-library/package.py +++ b/packages/py-tvb-library/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyTvbLibrary(PythonPackage): diff --git a/packages/py-tvb-multiscale/package.py b/packages/py-tvb-multiscale/package.py index b57b8f03..176f5532 100644 --- a/packages/py-tvb-multiscale/package.py +++ b/packages/py-tvb-multiscale/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 
OR MIT) -from spack import * +from spack.package import * class PyTvbMultiscale(PythonPackage): diff --git a/packages/py-tvb-storage/package.py b/packages/py-tvb-storage/package.py index d25e7690..4c6f89ad 100644 --- a/packages/py-tvb-storage/package.py +++ b/packages/py-tvb-storage/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyTvbStorage(PythonPackage): diff --git a/packages/py-tvb-widgets/package.py b/packages/py-tvb-widgets/package.py index 855ac583..07dd8cf1 100644 --- a/packages/py-tvb-widgets/package.py +++ b/packages/py-tvb-widgets/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyTvbWidgets(PythonPackage): diff --git a/packages/py-umnn/package.py b/packages/py-umnn/package.py index 545b1ec0..6cc01282 100644 --- a/packages/py-umnn/package.py +++ b/packages/py-umnn/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyUmnn(PythonPackage): diff --git a/packages/py-vbi/package.py b/packages/py-vbi/package.py index 0ac988a2..9c4258de 100644 --- a/packages/py-vbi/package.py +++ b/packages/py-vbi/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyVbi(PythonPackage, CudaPackage): diff --git a/packages/py-viziphant/package.py b/packages/py-viziphant/package.py index 22281cb4..8f409f9c 100644 --- a/packages/py-viziphant/package.py +++ b/packages/py-viziphant/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyViziphant(PythonPackage): diff --git a/packages/py-zuko/package.py b/packages/py-zuko/package.py index 9c222273..5e712df7 100644 --- a/packages/py-zuko/package.py +++ b/packages/py-zuko/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 
OR MIT) -from spack import * +from spack.package import * class PyZuko(PythonPackage): diff --git a/packages/pynn-brainscales/package.py b/packages/pynn-brainscales/package.py index e0836671..2a4a0a10 100644 --- a/packages/pynn-brainscales/package.py +++ b/packages/pynn-brainscales/package.py @@ -7,7 +7,7 @@ import os import unittest.mock import xml.etree.ElementTree as ET -from spack import * +from spack.package import * from spack.util.environment import EnvironmentModifications import spack.build_environment diff --git a/packages/sda/package.py b/packages/sda/package.py index 77f19f54..3fb569c5 100644 --- a/packages/sda/package.py +++ b/packages/sda/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class Sda(MakefilePackage): diff --git a/packages/wf-biobb/package.py b/packages/wf-biobb/package.py index 28ff7c41..61d14695 100644 --- a/packages/wf-biobb/package.py +++ b/packages/wf-biobb/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class WfBiobb(BundlePackage): diff --git a/packages/wf-brainscales2-demos/package.py b/packages/wf-brainscales2-demos/package.py index 1dc5b24b..02592a1d 100644 --- a/packages/wf-brainscales2-demos/package.py +++ b/packages/wf-brainscales2-demos/package.py @@ -4,7 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from glob import glob -from spack import * +from spack.package import * import os diff --git a/packages/wf-bsb/package.py b/packages/wf-bsb/package.py index 2e843818..37ec15a7 100644 --- a/packages/wf-bsb/package.py +++ b/packages/wf-bsb/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class WfBsb(BundlePackage): diff --git a/packages/wf-custom-python/package.py b/packages/wf-custom-python/package.py index 84ebe4cc..6fae2ed1 100644 --- a/packages/wf-custom-python/package.py +++ 
b/packages/wf-custom-python/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class WfCustomPython(BundlePackage): diff --git a/packages/wf-human-multi-area-model/package.py b/packages/wf-human-multi-area-model/package.py index 5bcefe0d..d5246fc8 100644 --- a/packages/wf-human-multi-area-model/package.py +++ b/packages/wf-human-multi-area-model/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class WfHumanMultiAreaModel(Package): diff --git a/packages/wf-multi-area-model/package.py b/packages/wf-multi-area-model/package.py index 51d59bb3..fa4828a0 100644 --- a/packages/wf-multi-area-model/package.py +++ b/packages/wf-multi-area-model/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class WfMultiAreaModel(Package): diff --git a/packages/wf-protein-association-rates/package.py b/packages/wf-protein-association-rates/package.py index b20c409a..3d13eb8d 100644 --- a/packages/wf-protein-association-rates/package.py +++ b/packages/wf-protein-association-rates/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class WfProteinAssociationRates(BundlePackage): diff --git a/packages/wf-uq-akar4/package.py b/packages/wf-uq-akar4/package.py index 10c3fb8f..5e819976 100644 --- a/packages/wf-uq-akar4/package.py +++ b/packages/wf-uq-akar4/package.py @@ -3,7 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class WfUqAkar4(BundlePackage): -- GitLab From 11bd19f35b06131d3f8526cd13944910c949be89 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Fri, 7 Mar 2025 14:07:12 +0100 Subject: [PATCH 105/111] feat(BSS2): update packages (end-of-July 2024 -> "now") --- 
packages/build-brainscales/package.py | 8 ++++++++ packages/genpybind/package.py | 1 + packages/oppulance/package.py | 7 +++++++ packages/wf-brainscales2-demos/package.py | 1 + spack.yaml | 8 ++++---- 5 files changed, 21 insertions(+), 4 deletions(-) diff --git a/packages/build-brainscales/package.py b/packages/build-brainscales/package.py index 327501d1..9d78d8ce 100644 --- a/packages/build-brainscales/package.py +++ b/packages/build-brainscales/package.py @@ -15,6 +15,13 @@ import spack.build_environment class BuildBrainscales(WafPackage): """Common stuff for BrainScaleS packages...""" + version( + "10.0-a1", + git="https://github.com/electronicvisions/releases-ebrains", + tag="ebrains-10.0-a1", + commit="d9bd675b446be8f313972aef2d6657ffbbb91ed2", + submodules=True, + ) version( "9.0-a9", git="https://github.com/electronicvisions/releases-ebrains", @@ -59,6 +66,7 @@ class BuildBrainscales(WafPackage): ) # common dependencies of BuildBrainscales-derived packages + depends_on('oppulance@10.0-a1', when='@10.0-a1', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a9', when='@9.0-a9', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a8', when='@9.0-a8', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a7', when='@9.0-a7', type=('build', 'link', 'run', 'test')) diff --git a/packages/genpybind/package.py b/packages/genpybind/package.py index bca2c483..79e1d7e8 100644 --- a/packages/genpybind/package.py +++ b/packages/genpybind/package.py @@ -21,6 +21,7 @@ class Genpybind(WafPackage): version('develop', branch='develop') version('visions', branch='master', git='https://github.com/electronicvisions/genpybind') + # good for ebrains-10.0 too… version('ebrains-llvm15', tag='ebrains-9.0-a9', git='https://github.com/electronicvisions/genpybind') version('ebrains', tag='ebrains_release-1-rc1', git='https://github.com/electronicvisions/genpybind') diff --git a/packages/oppulance/package.py b/packages/oppulance/package.py index 
7dc37d0a..ede72e35 100644 --- a/packages/oppulance/package.py +++ b/packages/oppulance/package.py @@ -22,6 +22,13 @@ class Oppulance(Package): depends_on('wget') depends_on('gmp') + version( + "10.0-a1", + git="https://github.com/electronicvisions/releases-ebrains", + tag="ebrains-10.0-a1", + commit="d9bd675b446be8f313972aef2d6657ffbbb91ed2", + submodules=True, + ) version( "9.0-a9", git="https://github.com/electronicvisions/releases-ebrains", diff --git a/packages/wf-brainscales2-demos/package.py b/packages/wf-brainscales2-demos/package.py index 02592a1d..e98423a2 100644 --- a/packages/wf-brainscales2-demos/package.py +++ b/packages/wf-brainscales2-demos/package.py @@ -16,6 +16,7 @@ class WfBrainscales2Demos(Package): maintainers = ["emuller", "muffgaga"] # ECM: we probably should build the ipynb file in this package + version("10.0-a1", tag="jupyter-notebooks-10.0-a1") version("9.0-a9", tag="jupyter-notebooks-9.0-a9") version("9.0-a8", tag="jupyter-notebooks-9.0-a8") version("9.0-a7", tag="jupyter-notebooks-9.0-a7") diff --git a/spack.yaml b/spack.yaml index 9d86fd2f..beaf68f9 100644 --- a/spack.yaml +++ b/spack.yaml @@ -12,10 +12,10 @@ spack: - py-biobb-model@4.1.0 - py-biobb-structure-checking@3.13.4 - py-biobb-structure-utils@4.1.0 - - hxtorch@9.0-a9 + - hxtorch@10.0-a1 - nest@3.8 +sonata - neuron@8.2.3 +mpi - - jaxsnn@9.0-a9 + - jaxsnn@10.0-a1 - py-bluepyefe@2.3.6 - py-bluepymm@0.8.7 - py-bluepyopt@1.14.11 @@ -62,14 +62,14 @@ spack: - py-tvb-ext-xircuits@1.1.0 - py-viziphant@0.4.0 - py-vbi - - pynn-brainscales@9.0-a9 + - pynn-brainscales@10.0-a1 - r-rgsl@0.1.1 - r-sbtabvfgen@0.1 - r-uqsa@2.2 - sda@7.3.3d # Workflows (meta-packages) - wf-biobb - - wf-brainscales2-demos@9.0-a9 + - wf-brainscales2-demos@10.0-a1 - wf-bsb@4.4 +nest +neuron - wf-protein-association-rates@0.1 - wf-multi-area-model@1.2.0 -- GitLab From 7aa466477e4eb779707347ed77277dc15c66befc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Sat, 8 
Mar 2025 09:56:13 +0100 Subject: [PATCH 106/111] fix(BSS2): use old oppulance (unchanged in code) --- packages/build-brainscales/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/build-brainscales/package.py b/packages/build-brainscales/package.py index 9d78d8ce..f679dab0 100644 --- a/packages/build-brainscales/package.py +++ b/packages/build-brainscales/package.py @@ -66,7 +66,7 @@ class BuildBrainscales(WafPackage): ) # common dependencies of BuildBrainscales-derived packages - depends_on('oppulance@10.0-a1', when='@10.0-a1', type=('build', 'link', 'run', 'test')) + depends_on('oppulance@9.0-a9', when='@10.0-a1', type=('build', 'link', 'run', 'test')) # keep the old one for now depends_on('oppulance@9.0-a9', when='@9.0-a9', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a8', when='@9.0-a8', type=('build', 'link', 'run', 'test')) depends_on('oppulance@9.0-a7', when='@9.0-a7', type=('build', 'link', 'run', 'test')) -- GitLab From 830af90825cf2b7859ebeaff6919cf891fc6d6d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de> Date: Mon, 10 Mar 2025 15:12:30 +0100 Subject: [PATCH 107/111] fix(BSS2(hxtorch)): add missing dependency on py-quantities --- packages/hxtorch/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/hxtorch/package.py b/packages/hxtorch/package.py index 7d3a26bf..a74813aa 100644 --- a/packages/hxtorch/package.py +++ b/packages/hxtorch/package.py @@ -79,6 +79,7 @@ class Hxtorch(build_brainscales.BuildBrainscales): ('py-torch@1.9.1:', { "type": ('build', 'link', 'run', 'test') } ), ('py-torchvision', { "type": ('run') } ), # for demos ('py-pyyaml', { "type": ('build', 'link', 'run') } ), + ('py-quantities@0.12.1:', { "type": ('build', 'link', 'run') } ), # PyNN-like interfacing of things? 
('py-scipy', { "type": ('build', 'link', 'run') } ), ('py-sqlalchemy', { "type": ('build', 'link', 'run') } ), ('util-linux', { "type": ('build', 'link', 'run') } ), -- GitLab From 88ea272fa8ea4241d3623e249bf5f59f4042cf85 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 11 Mar 2025 06:35:08 +0000 Subject: [PATCH 108/111] feat(glfw): copy package from upstream --- packages/glfw/package.py | 53 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 packages/glfw/package.py diff --git a/packages/glfw/package.py b/packages/glfw/package.py new file mode 100644 index 00000000..ba005f77 --- /dev/null +++ b/packages/glfw/package.py @@ -0,0 +1,53 @@ +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack.package import * + + +class Glfw(CMakePackage): + """GLFW is an Open Source, multi-platform library for + OpenGL, OpenGL ES and Vulkan development on the desktop. 
It + provides a simple API for creating windows, contexts and + surfaces, receiving input and events.""" + + homepage = "https://www.glfw.org/" + url = "https://github.com/glfw/glfw/archive/3.3.2.tar.gz" + + license("Zlib") + + version("3.3.8", sha256="f30f42e05f11e5fc62483e513b0488d5bceeab7d9c5da0ffe2252ad81816c713") + version("3.3.2", sha256="98768e12e615fbe9f3386f5bbfeb91b5a3b45a8c4c77159cef06b1f6ff749537") + version("3.3.1", sha256="6bca16e69361798817a4b62a5239a77253c29577fcd5d52ae8b85096e514177f") + version("3.3", sha256="81bf5fde487676a8af55cb317830703086bb534c53968d71936e7b48ee5a0f3e") + version("3.2.1", sha256="e10f0de1384d75e6fc210c53e91843f6110d6c4f3afbfb588130713c2f9d8fe8") + version("3.2", sha256="cb3aab46757981a39ae108e5207a1ecc4378e68949433a2b040ce2e17d8f6aa6") + version("3.1.2", sha256="6ac642087682aaf7f8397761a41a99042b2c656498217a1c63ba9706d1eef122") + version("3.1.1", sha256="4de311ec9bf43bfdc8423ddf93b91dc54dc73dcfbedfb0991b6fbb3a9baf245f") + version("3.1", sha256="2140f4c532e7ce4c84cb7e4c419d0979d5954fa1ce204b7646491bd2cc5bf308") + version("3.0.4", sha256="a4e7c57db2086803de4fc853bd472ff8b6d2639b9aa16e6ac6b19ffb53958caf") + version("3.0.3", sha256="7a182047ba6b1fdcda778b79aac249bb2328b6d141188cb5df29560715d01693") + + depends_on("c", type="build") # generated + + variant("doc", default=False, description="Build documentation") + variant("shared", default=False, description="Builds a shared version of the library") + + # dependencies + depends_on("doxygen", type="build", when="+doc") + + # linux only dependencies + depends_on("libxrandr", when="platform=linux") + depends_on("libxinerama", when="platform=linux") + depends_on("libxcursor", when="platform=linux") + depends_on("libxdamage", when="platform=linux") + depends_on("libxft", when="platform=linux") + depends_on("libxi", when="platform=linux") + depends_on("libxmu", when="platform=linux") + depends_on("freetype", when="platform=linux") + depends_on("fontconfig", when="platform=linux") + 
depends_on("pkgconfig", type="build", when="platform=linux") + + def cmake_args(self): + return [self.define_from_variant("BUILD_SHARED_LIBS", "shared")] -- GitLab From aaeb83ce0c81ddeb15a4951a9da3c2ee7befafe6 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 11 Mar 2025 07:29:28 +0000 Subject: [PATCH 109/111] feat(glfw): add version 3.4 --- packages/glfw/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/packages/glfw/package.py b/packages/glfw/package.py index ba005f77..ab81c753 100644 --- a/packages/glfw/package.py +++ b/packages/glfw/package.py @@ -17,6 +17,9 @@ class Glfw(CMakePackage): license("Zlib") + # begin EBRAINS (added): new version + version("3.4", sha256="c038d34200234d071fae9345bc455e4a8f2f544ab60150765d7704e08f3dac01") + # end EBRAINS version("3.3.8", sha256="f30f42e05f11e5fc62483e513b0488d5bceeab7d9c5da0ffe2252ad81816c713") version("3.3.2", sha256="98768e12e615fbe9f3386f5bbfeb91b5a3b45a8c4c77159cef06b1f6ff749537") version("3.3.1", sha256="6bca16e69361798817a4b62a5239a77253c29577fcd5d52ae8b85096e514177f") @@ -49,5 +52,10 @@ class Glfw(CMakePackage): depends_on("fontconfig", when="platform=linux") depends_on("pkgconfig", type="build", when="platform=linux") + # begin EBRAINS (added): missing dependency + depends_on("wayland", when="platform=linux") + depends_on("libxkbcommon", when="platform=linux") + # end EBRAINS + def cmake_args(self): return [self.define_from_variant("BUILD_SHARED_LIBS", "shared")] -- GitLab From e6ab62d07c1d7e7b7141a103f0664a74c6d7d23a Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 11 Mar 2025 07:30:14 +0000 Subject: [PATCH 110/111] feat(open3d): add version 0.19.0 --- packages/open3d/package.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/open3d/package.py b/packages/open3d/package.py index c1faaa0b..3deedf47 100644 --- a/packages/open3d/package.py +++ b/packages/open3d/package.py @@ -20,6 
+20,9 @@ class Open3d(CMakePackage, CudaPackage): license("MIT") # begin EBRAINS (added): add version + version( + "0.19.0", tag="v0.19.0", commit="1e7b17438687a0b0c1e5a7187321ac7044afe275", submodules=True + ) version( "0.18.0", tag="v0.18.0", commit="0f06a149c4fb9406fd3e432a5cb0c024f38e2f0e", submodules=True ) @@ -66,12 +69,13 @@ class Open3d(CMakePackage, CudaPackage): depends_on("assimp", when="@0.15:") depends_on("jsoncpp", when="@0.15:") depends_on("msgpack-c", when="@0.15:") - # depends_on("tbb", when="@0.15:") + depends_on("tbb", when="@0.15:") depends_on("cppzmq", when="@0.15:") depends_on("curl", when="@0.17:") depends_on("openssl", when="@0.17:") # depends_on("vtk", when="@0.17:") - depends_on("embree@:3", when="@0.18:") + depends_on("embree@:3", when="@0.18") + depends_on("embree@4:", when="@0.19:") # end EBRAINS extends("python", when="+python", type=("build", "link", "run")) @@ -157,7 +161,7 @@ class Open3d(CMakePackage, CudaPackage): args.append(self.define("USE_SYSTEM_JSONCPP", True)) args.append(self.define("USE_SYSTEM_MSGPACK", True)) args.append(self.define("USE_SYSTEM_NANOFLANN", True)) - # args.append(self.define("USE_SYSTEM_TBB", True)) + args.append(self.define("USE_SYSTEM_TBB", True)) args.append(self.define("USE_SYSTEM_ZEROMQ", True)) if self.spec.satisfies("@0.17:"): args.append(self.define("USE_SYSTEM_CURL", True)) @@ -204,3 +208,4 @@ class Open3d(CMakePackage, CudaPackage): with working_dir("spack-test"): python = which(python.path) python("-c", "import open3d") + -- GitLab From b8293048a5da7fef5adf21f225063999816c7438 Mon Sep 17 00:00:00 2001 From: Eleni Mathioulaki <emathioulaki@athenarc.gr> Date: Tue, 11 Mar 2025 13:45:12 +0000 Subject: [PATCH 111/111] feat(CI): increase number of jobs --- .gitlab-ci.yml | 2 +- create_job.sh | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 17476bd7..e8524909 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -274,7 +274,7 @@ 
sync-gitlab-spack-instance: image: $BUILD_ENV_DOCKER_IMAGE variables: SPACK_REPO_PATH: $SPACK_PATH_GITLAB/ebrains-spack-builds - SPACK_JOBS: 4 + SPACK_JOBS: 16 OCI_CACHE_PREFIX: "" UPDATE_SPACK_OCI_CACHES: false script: diff --git a/create_job.sh b/create_job.sh index 60958e3b..56d80c8f 100644 --- a/create_job.sh +++ b/create_job.sh @@ -36,11 +36,11 @@ spec: imagePullPolicy: Always resources: limits: - cpu: '6' - memory: '18Gi' + cpu: '8' + memory: '32Gi' requests: cpu: '4' - memory: '12Gi' + memory: '20Gi' volumeMounts: - name: sharedbin mountPath: /srv @@ -99,7 +99,7 @@ spec: - name: EBRAINS_REPO_PATH value: $INSTALLATION_ROOT/ebrains-spack-builds - name: SPACK_JOBS - value: '4' + value: '6' volumes: - name: sharedbin persistentVolumeClaim: -- GitLab