# Commit 7ee4050d (Eric Müller): pulls in a quickfix that avoids
# `import yaml` during `waf setup`, i.e. during the `spack fetch` operation.
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from glob import glob
from spack import *
import os
class WfBrainscales2Demos(Package):
    """Meta-package to collect all dependencies for the BrainScaleS-2
    demos/tutorials.

    Installs the notebook sources into ``<prefix>/notebooks`` and provides
    install-time / stand-alone test hooks that execute the ``ts*``/``tp*``
    notebooks via ``jupyter nbconvert``.
    """

    homepage = "https://electronicvisions.github.io/documentation-brainscales2"
    git = "https://github.com/electronicvisions/brainscales2-demos"

    # FIX: Spack's recognized attribute is ``maintainers`` (plural);
    # the singular ``maintainer`` was silently ignored.
    maintainers = ["muffgaga"]

    # ECM: we probably should build the ipynb file in this package
    version("7.0-rc1-fixup3", tag="jupyter-notebooks-7.0-rc1-fixup3")
    version("23.6", branch="jupyter-notebooks-experimental")

    depends_on("hxtorch", type=("run", "test"))
    depends_on("pynn-brainscales", type=("run", "test"))
    depends_on("py-matplotlib@3.5.1:", type=("run", "test"))
    depends_on("py-ipywidgets@7.7.0:", type=("run", "test"))
    depends_on("py-ipycanvas@0.10.2:", type=("run", "test"))
    depends_on("py-numpy@1.21.6:", type=("run", "test"))
    depends_on("py-ipython@8.0.1:", type=("run", "test"))
    depends_on("py-quantities@0.13.0:", type=("run", "test"))
    depends_on("py-torch@1.11.0:", type=("run", "test"))
    depends_on("py-torchvision@0.12.0:", type=("run", "test"))
    depends_on("py-tqdm@4.62.3:", type=("run", "test"))
    depends_on("py-requests@2.26.0:", type=("run", "test"))
    depends_on("py-neo@0.11.1:", type=("run", "test"))
    depends_on("py-scipy@1.8.1:", type=("run", "test"))
    depends_on("py-deap@1.3:", type=("run", "test"))
    depends_on("py-pandas@1.4.2:", type=("run", "test"))

    def install(self, spec, prefix):
        """Copy the checked-out notebook sources into the install prefix."""
        install_tree(".", join_path(prefix, "notebooks"))

    def _nbconvert(self, nb, nb_out):
        """Execute notebook ``nb`` and write the executed copy to ``nb_out``.

        On failure other than a cell timeout, the notebook is re-run with
        ``--allow-errors`` so the output file records the error for later
        inspection; the original exception is re-raised either way.
        """
        jupyter = Executable("jupyter")
        args = [
            "nbconvert",
            "--ExecutePreprocessor.kernel_name=python3",
            "--ExecutePreprocessor.timeout=900",
            "--execute",
            "--to",
            "notebook",
            nb,
            "--output",
            nb_out,
        ]
        try:
            # execute notebook and save
            jupyter(*args, output=str.split, error=str.split)
        except Exception as e:
            # if the notebook execution fails, re-run notebook to produce
            # output with error; in case of a cell timeout, don't re-run
            if "CellTimeoutError" not in str(e):
                jupyter(*(args + ["--allow-errors"]))
            raise

    def _run_notebooks(self, output_dir):
        """Execute every installed ``ts*``/``tp*`` notebook into *output_dir*.

        All notebooks are attempted even if some fail; a single Exception
        summarizing every failure is raised at the end.
        """
        mkdirp(output_dir)
        # FIX: the original referenced the undefined name ``prefix`` here;
        # outside of ``install()`` the installation prefix is only available
        # as ``self.prefix``.  ``sorted`` gives a deterministic run order.
        notebooks = sorted(
            glob(join_path(self.prefix, "notebooks", "ts*.ipynb"))
            + glob(join_path(self.prefix, "notebooks", "tp*.ipynb"))
        )
        # try to run all notebooks, then fail if there are errors
        exceptions = []
        for fn in notebooks:
            try:
                self._nbconvert(fn, join_path(output_dir, os.path.basename(fn)))
            except Exception as e:
                exceptions.append((fn, e))
        if exceptions:
            # FIX: surface which notebooks failed and why instead of the
            # original bare "Errors during notebook execution" message.
            details = "; ".join(
                "{}: {}".format(os.path.basename(fn), e) for fn, e in exceptions
            )
            raise Exception("Errors during notebook execution: " + details)

    def _set_collab_things(self):
        """Set environment variables mimicking an EBRAINS Collaboratory run."""
        # enable "EBRAINS lab" mode
        os.environ["LAB_IMAGE_NAME"] = "EBRAINS"
        # select "EBRAINS experimental" upstream experiment service
        os.environ["LAB_KERNEL_NAME"] = "EBRAINS-experimental"
        os.environ["JUPYTERHUB_USER"] = "spack-test-wf-brainscales2-demos"

    @run_after("install")
    @on_package_attributes(run_tests=True)
    def installcheck(self):
        """Install-time check hook (currently a no-op beyond env setup)."""
        self._set_collab_things()
        # TODO (ECM): Provide a selection of notebooks that perform local-only tests.
        # self._run_notebooks(join_path(self.stage.path, ".install_time_tests"))
        # copy_tree(join_path(self.stage.path, ".install_time_tests"), join_path(self.prefix, '.build'))

    def test_notebooks(self):
        """Stand-alone test: execute all notebooks into the test stage."""
        self._set_collab_things()
        self._run_notebooks(
            join_path(
                self.test_suite.stage,
                self.spec.format("out-{name}-{version}-{hash:7}"),
            )
        )