Skip to content
Snippets Groups Projects
Commit 96fb0620 authored by Eleni Mathioulaki's avatar Eleni Mathioulaki
Browse files

Merge branch 'master' into 'experimental_rel'

create new experimental release

See merge request technical-coordination/project-internal/devops/platform/ebrains-spack-builds!153
parents 35431cce e12a1f55
No related branches found
No related tags found
2 merge requests!331BrainScaleS: Update software state,!153create new experimental release
Pipeline #18408 failed with stages
in 11 hours, 38 minutes, and 57 seconds
stages:
- build
- deploy
- sync
# start an OpenShift Job that will build the Spack environment
.deploy-build-environment:
......@@ -230,3 +232,108 @@ deploy-exp-release-prod-jsc:
variables:
- $RELEASE == "experimental"
allow_failure: false
# CI job: build the full Spack environment from scratch on a read-only Docker
# runner; runs only for merge requests targeting 'master'.
# NOTE(review): YAML indentation appears stripped in this capture — confirm
# nesting against the original .gitlab-ci.yml before reuse.
build-spack-env-on-runner:
stage: build
rules:
# only merge requests into master trigger this build
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'master'
tags:
- docker-runner
- read-only
image: docker-registry.ebrains.eu/tc/ebrains-spack-build-env:gitlab_runners_latest
variables:
SPACK_DEV_ENV: ebrains-dev
SPACK_DEV_PATH: $CI_PROJECT_DIR/spack
# keep Spack caches and user config inside the job workspace
SPACK_USER_CACHE_PATH: $CI_PROJECT_DIR/.spack
SPACK_USER_CONFIG_PATH: $CI_PROJECT_DIR/.spack
TMP: $CI_PROJECT_DIR/.spack-tmp
script:
# clone Spack and pin it to a fixed commit for reproducible builds
- git clone -c feature.manyFiles=true https://github.com/spack/spack $SPACK_DEV_PATH
- cd $SPACK_DEV_PATH
- git checkout -b ebrains_spack_commit a8d440d3ababcdec20d665ad938ab880cd9b9d17
- cd ../
- mkdir $TMP
# write packages.yaml (force the x86_64 target) and upstreams.yaml (reuse
# installs from the shared instance under /mnt/spack) before enabling Spack
- |
cat <<EOF > $SPACK_DEV_PATH/etc/spack/packages.yaml
packages:
all:
target: [x86_64]
EOF
cat <<EOF > $SPACK_DEV_PATH/etc/spack/defaults/upstreams.yaml
upstreams:
ebrains-gitlab-spack-instance:
install_tree: /mnt/spack/opt/spack
EOF
- . $SPACK_DEV_PATH/share/spack/setup-env.sh
- spack find
# pick up the gcc provided by the upstream install tree
- spack load gcc@10.3.0
- spack compiler find
- spack repo add .
- spack repo list
- spack env create $SPACK_DEV_ENV spack.yaml
- spack env activate $SPACK_DEV_ENV
# --reuse: prefer already-installed packages from the upstream instance
- spack concretize --reuse -f
- spack install --reuse --no-check-signature
# cache:
# key: spack-cache-$CI_COMMIT_REF_SLUG
# paths:
# - "$SPACK_DEV_PATH"
# - "$SPACK_USER_CONFIG_PATH"
# when: always
# keep build-stage logs as artifacts only when the job fails, for debugging
artifacts:
paths:
- .spack-tmp/spack/spack-stage
when: on_failure
# CI job: keep the shared Spack instance on the NFS mount (/mnt/spack) in
# sync with this repository; runs only from schedules when
# $RELEASE == "experimental".
# NOTE(review): YAML indentation appears stripped in this capture — confirm
# nesting against the original .gitlab-ci.yml before reuse.
sync_gitlab_spack_instance:
stage: sync
tags:
- docker-runner
- read-write
image: docker-registry.ebrains.eu/tc/ebrains-spack-build-env:gitlab_runners_nfs_latest
variables:
SPACK_NFS_ENV: ebrains-runner-build
SPACK_PATH: /mnt/spack
SPACK_USER_CACHE_PATH: $SPACK_PATH/.spack
SPACK_USER_CONFIG_PATH: $SPACK_PATH/.spack
script:
# bootstrap Spack on the NFS share on the first run only, pinned to a
# fixed commit, and force the x86_64 target via packages.yaml
- |
if [ ! -d $SPACK_PATH ]; then
# git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $SPACK_PATH
git clone -c feature.manyFiles=true https://github.com/spack/spack $SPACK_PATH
cd $SPACK_PATH
git checkout -b ebrains_spack_commit a8d440d3ababcdec20d665ad938ab880cd9b9d17
cd ../
cat <<EOF > $SPACK_PATH/etc/spack/packages.yaml
packages:
all:
target: [x86_64]
EOF
fi
- . $SPACK_PATH/share/spack/setup-env.sh
# - spack bootstrap untrust github-actions
- spack compiler find
- spack compiler list
# ensure gcc 10.3.0 exists (install on first run), then register it
- spack load gcc@10.3.0 || spack install gcc@10.3.0
- spack load gcc@10.3.0
- spack compiler find
- spack compiler list
- spack install python@3.8.11 %gcc@10.3.0
# - for section in $(spack config list); do spack config blame $section; done
# register the package repo and environment only once; later runs reuse them
- spack repo list | grep -q ebrains-spack-builds && echo "Repository registered already" || spack repo add $CI_PROJECT_DIR
- spack repo list
- spack env list | grep -q $SPACK_NFS_ENV && echo "Environment created already" || spack env create $SPACK_NFS_ENV $CI_PROJECT_DIR/spack.yaml
- spack env activate $SPACK_NFS_ENV
# refresh the environment spec from the repository, then re-concretize
- cp $CI_PROJECT_DIR/spack.yaml $SPACK_ROOT/var/spack/environments/$SPACK_NFS_ENV/spack.yaml
- spack concretize -f
# on install failure, preserve stage logs under /mnt/build_logs for inspection
- rm -rf /mnt/build_logs/
- spack install --no-check-signature || cp -r /tmp/spack/spack-stage /mnt/build_logs
timeout: 1 day
only:
refs:
- schedules
variables:
- $RELEASE == "experimental"
allow_failure: false
......@@ -13,7 +13,7 @@ class Hxtorch(WafPackage):
# This repo provides a waf binary used for the build below
git = "https://github.com/electronicvisions/pynn-brainscales.git"
version('2.0-rc8', branch='waf')
version('3.0-a1', branch='waf')
# PPU compiler dependencies
depends_on('oppulance@2.0:')
......
......@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os
class Oppulance(Package):
"""SDK for embedded processors on BrainScaleS-2"""
......@@ -21,8 +21,8 @@ class Oppulance(Package):
releases = [
{
'version': '2.0-rc5',
'tag': 'ebrains-2.0-rc5'
'version': '3.0-a1',
'tag': 'ebrains-3.0-a1'
},
]
......@@ -31,7 +31,6 @@ class Oppulance(Package):
release['version'],
git='https://github.com/electronicvisions/oppulance',
tag=release['tag'],
expand=False,
)
for res in ['binutils-gdb', 'gcc', 'newlib']:
......@@ -39,28 +38,46 @@ class Oppulance(Package):
name=res,
git='https://github.com/electronicvisions/{}'.format(res),
tag=release['tag'],
expand=False,
placement=res
)
def do_fetch(self, mirror_only=False):
    """Fetch the main sources and stage every declared resource beside them.

    After the regular fetch, each resource is placed into the stage
    directory: archives are extracted with tar, while freshly downloaded
    source trees are symlinked in under the resource name. Finally gcc's
    helper script is run to obtain the gcc build prerequisites.
    """
    super(Oppulance, self).do_fetch(mirror_only)
    mkdirp(self.stage.source_path)
    # external tools used below
    untar = which('tar')
    symlink = which('ln')
    run_script = which('bash')
    with working_dir(self.stage.source_path):
        for resource_group in self.resources.values():
            for resource in resource_group:
                archive = resource.fetcher.stage.archive_file
                if archive:
                    untar('xf', archive)
                else:
                    # freshly downloaded: link the source tree in place
                    symlink('-sf', resource.fetcher.stage.source_path, resource.name)
        # NOTE(review): assumes this runs inside working_dir — the script
        # path is relative to the stage; confirm against the original file
        run_script('gcc/ci/00_download_prerequisites.sh')
# defined by gcc/contrib/download_prerequisites
resource(name='gmp-6.1.0.tar.bz2',
url='http://gcc.gnu.org/pub/gcc/infrastructure/gmp-6.1.0.tar.bz2',
sha256='498449a994efeba527885c10405993427995d3f86b8768d8cdf8d9dd7c6b73e8',
expand=False,
)
resource(name='mpfr-3.1.4.tar.bz2',
url='http://gcc.gnu.org/pub/gcc/infrastructure/mpfr-3.1.4.tar.bz2',
sha256='d3103a80cdad2407ed581f3618c4bed04e0c92d1cf771a65ead662cc397f7775',
expand=False,
)
resource(name='mpc-1.0.3.tar.bz2',
url='http://gcc.gnu.org/pub/gcc/infrastructure/mpc-1.0.3.tar.gz',
sha256='617decc6ea09889fb08ede330917a00b16809b8db88c29c31bfbb49cbf88ecc3',
expand=False,
)
resource(name='isl-0.18.tar.bz2',
url='http://gcc.gnu.org/pub/gcc/infrastructure/isl-0.18.tar.bz2',
sha256='6b8b0fd7f81d0a957beb3679c81bbb34ccc7568d5682844d8924424a0dadcb1b',
expand=False,
)
def install(self, spec, prefix):
    """Build and install binutils-gdb, a freestanding gcc, and newlib.

    The pre-fetched gcc prerequisite tarballs (gmp/mpfr/mpc/isl) are first
    symlinked into the gcc tree so that gcc's download script finds them
    locally and skips the network download.
    """
    symlink = which('ln')
    prerequisite_prefixes = ('gmp-', 'mpfr-', 'mpc-', 'isl-')
    # move gcc resources into place
    for resource_group in self.resources.values():
        for resource in resource_group:
            if resource.name.startswith(prerequisite_prefixes):
                archive = resource.fetcher.stage.archive_file
                assert archive
                symlink('-sf', archive, 'gcc/')
    run_script = which('bash')
    # extracts the gcc resources via gcc/contrib/download_prerequisites
    # (download is skipped if file exists)
    run_script('gcc/ci/00_download_prerequisites.sh')
    run_script('binutils-gdb/ci/00_build_install.sh')
    run_script('gcc/ci/01_build_install_freestanding.sh')
    run_script('newlib/ci/00_build_install.sh')
......
......@@ -11,7 +11,7 @@ class PynnBrainscales(WafPackage):
homepage = "https://github.com/electronicvisions/pynn-brainscales"
git = "https://github.com/electronicvisions/pynn-brainscales.git"
version('2.0-rc8', branch='waf')
version('3.0-a1', branch='waf')
# PPU compiler dependencies
depends_on('oppulance@2.0:')
......
......@@ -29,8 +29,8 @@ spack:
- py-pyaescrypt
- py-formencode
- tvb-framework ^binutils+ld+gold
- pynn-brainscales@2.0-rc8 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
- hxtorch@2.0-rc8 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
- pynn-brainscales@3.0-a1 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
- hxtorch@3.0-a1 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
- py-neo
#- py-cerebstats
#- py-cerebunit
......
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment