From afa84031feca51daa00ac13d991ac27c415fc379 Mon Sep 17 00:00:00 2001
From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com>
Date: Tue, 6 May 2025 09:24:56 +0200
Subject: [PATCH 01/16] change maintainer for py-elephant
---
packages/py-elephant/package.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/py-elephant/package.py b/packages/py-elephant/package.py
index 5b556320..80109482 100644
--- a/packages/py-elephant/package.py
+++ b/packages/py-elephant/package.py
@@ -13,7 +13,7 @@ class PyElephant(PythonPackage, CudaPackage):
pypi = "elephant/elephant-0.11.0.tar.gz"
git = "https://github.com/NeuralEnsemble/elephant.git"
- maintainers = ["moritzkern"]
+ maintainers = ["denker"]
version('develop', branch='master')
--
GitLab
From 37db443a59e1a1948692ada2f4006952ba3d0e79 Mon Sep 17 00:00:00 2001
From: Moritz Kern <92092328+Moritz-Alexander-Kern@users.noreply.github.com>
Date: Tue, 6 May 2025 09:25:45 +0200
Subject: [PATCH 02/16] change maintainer for viziphant
---
packages/py-viziphant/package.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/py-viziphant/package.py b/packages/py-viziphant/package.py
index 8f409f9c..4603a131 100644
--- a/packages/py-viziphant/package.py
+++ b/packages/py-viziphant/package.py
@@ -12,7 +12,7 @@ class PyViziphant(PythonPackage):
homepage = "https://viziphant.readthedocs.io/en/latest/"
pypi = "viziphant/viziphant-0.1.0.tar.gz"
- maintainers = ['moritzkern']
+ maintainers = ['denker']
version('0.4.0', sha256='ae8c3df517d7781c184086909fad95f30c093534cfb35b74eadf330fa144e336')
version('0.3.0', sha256='40d2970b8384d8e1c5d324caf1b1188391d7e651375ce08dd518bcf4ac6477f2')
--
GitLab
From 99b8845330c9c43d643f30e422181ba86ae9bb63 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Fri, 14 Mar 2025 17:26:04 +0200
Subject: [PATCH 03/16] VT-94: migrate esd bash scripts to Dedal library
---
.gitlab-ci.yml | 6 +-
__init__.py | 0
install_spack_env.py | 137 +++++++++++++++++++++++++++++++++++++++++++
utils/__init__.py | 0
utils/utils.py | 13 ++++
utils/ymerge.py | 15 +++++
6 files changed, 168 insertions(+), 3 deletions(-)
create mode 100644 __init__.py
create mode 100644 install_spack_env.py
create mode 100644 utils/__init__.py
create mode 100644 utils/utils.py
create mode 100644 utils/ymerge.py
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 01afd2ff..f0a9f598 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -232,7 +232,7 @@ build-spack-env-on-runner:
- >
echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
# run installation script
- - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+ - python3 install_spack_env.py $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
after_script:
- mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
# for succesfully installed packages: keep the spack logs for any package modified during this CI job
@@ -274,7 +274,7 @@ sync-esd-image:
# => DAG concretization, subsequent cache access + fetching and actual build should be separate steps
- mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT}
- export APPTAINERENV_SYSTEMNAME=$SYSTEMNAME
- - apptainer exec --containall --bind /tmp:/tmp --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+ - apptainer exec --containall --bind /tmp:/tmp --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} python3 install_spack_env.py $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
- echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
- echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
# preparing to assemble the image: move in the CI project contents...
@@ -328,7 +328,7 @@ sync-gitlab-spack-instance:
- git reset --hard $CI_COMMIT_SHA
- git submodule update --force
# run installation script
- - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+ - python3 install_spack_env.py $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
# create kernel spec, so that the environment can be used in gitlab CI jobs
- RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac);
- bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/install_spack_env.py b/install_spack_env.py
new file mode 100644
index 00000000..31a6ab16
--- /dev/null
+++ b/install_spack_env.py
@@ -0,0 +1,137 @@
+import os
+import argparse
+import shutil
+import sys
+from pathlib import Path
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.enum.SpackConfigCommand import SpackConfigCommand
+from dedal.enum.SpackViewEnum import SpackViewEnum
+from dedal.model.SpackDescriptor import SpackDescriptor
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import set_bashrc_variable
+from utils.utils import find_first_upstream_prefix
+from utils.ymerge import merge_spack_envs
+
+parser = argparse.ArgumentParser(
+ prog='install_spack_env.py',
+ description='ESD install spack env.',
+ epilog='...')
+
+parser.add_argument(
+ "spack_jobs",
+ type=int,
+ help="number of jobs"
+)
+
+parser.add_argument(
+ "installation_root",
+ type=str,
+ help="where to set up the installation"
+)
+
+parser.add_argument(
+ "ebrains_repo",
+ type=str,
+ help="location of ebrains-spack-builds repository"
+)
+
+parser.add_argument(
+ "ebrains_spack_env",
+ type=str,
+ help="name of EBRAINS Spack environment to be created/updated"
+)
+
+parser.add_argument(
+ "upstream_instance",
+ type=str,
+ help="path to Spack instance to use as upstream (optional)"
+)
+
+parser.add_argument(
+ "update_spack_oci_caches",
+ type=bool,
+ help="true enables updating the OCI cache for spack sources and build results"
+)
+
+parser.add_argument(
+ "oci_cache_prefix",
+ type=str,
+)
+
+args = parser.parse_args()
+
+spack_jobs = args.spack_jobs
+installation_root = Path(args.installation_root).resolve()
+ebrains_repo = args.ebrains_repo
+ebrains_spack_env = args.ebrains_spack_env
+upstream_instance = args.upstream_instance
+update_spack_oci_caches = args.update_spack_oci_caches
+oci_cache_prefix = args.oci_cache_prefix
+
+set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+
+ci_spack_root = installation_root / 'spack'
+
+set_bashrc_variable('CACHE_SPECFILE', f'env_specfile.yaml')
+set_bashrc_variable('DEDAL_HOME', f'{ebrains_repo}/vendor/yashchiki')
+set_bashrc_variable('SPACK_CACHE_SOURCE', f'{ci_spack_root}/var/spack/cache')
+set_bashrc_variable('SPACK_CACHE_BUILD', f'{ci_spack_root}/var/spack/cache')
+
+ci_spack_root = installation_root / 'spack'
+spack_root_existed = 1
+if ci_spack_root and ebrains_repo:
+ if not os.path.isdir(ci_spack_root):
+ spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
+ try:
+ os.symlink(spack_source, ci_spack_root)
+ spack_root_existed = 0
+ except FileExistsError:
+ pass
+
+data_dir = installation_root / 'cashing'
+env_repo = SpackDescriptor('ebrains-spack-builds', data_dir, ebrains_repo)
+spack_config = SpackConfig(env=env_repo,
+ repos=[env_repo],
+ install_dir=installation_root,
+ upstream_instance=upstream_instance,
+ system_name='VBT',
+ concretization_dir=data_dir / 'concretize_cache',
+ buildcache_dir=data_dir / 'binary_cache',
+ use_spack_global=False,
+ cache_version_build='spack_cache',
+ view=SpackViewEnum.WITHOUT_VIEW)
+
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.install_spack()
+if upstream_instance:
+ upstream_prefix = find_first_upstream_prefix(upstream_instance)
+ spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
+if spack_root_existed == 0:
+ spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
+
+spack_operation.spec_pacakge('aida')
+spack_operation.reindex()
+spack_operation.add_mirror(mirror_name='local_cache', mirror_path=Path(os.getenv('SPACK_CACHE_BUILD')).resolve())
+spack_operation.setup_spack_env()
+
+site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
+if site_config_dest.exists():
+ site_config_dest.rmdir()
+ site_config_dest.mkdir(parents=True, exist_ok=True)
+ site_config_esd = Path(ebrains_spack_env) / 'site-config'
+ shutil.copy(site_config_esd, site_config_dest)
+ merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
+ tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
+ try:
+ with open(tmp_spack_yaml, "w") as f:
+ f.write(merged_envs)
+ print(f"Written merged spack.yaml to {tmp_spack_yaml}")
+ except Exception as e:
+ sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
+ sys.exit(1)
+ shutil.copy(tmp_spack_yaml, site_config_dest)
+
+spack_operation.concretize_spack_env()
+
+spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+spack_operation.reindex()
\ No newline at end of file
diff --git a/utils/__init__.py b/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/utils.py b/utils/utils.py
new file mode 100644
index 00000000..2edf146c
--- /dev/null
+++ b/utils/utils.py
@@ -0,0 +1,13 @@
+import os
+
+
+def find_first_upstream_prefix(upstream_instance):
+ """
+ Search for directories named '.spack-db' within the spack opt directory
+ under upstream_instance, and return a list of their parent directories.
+ """
+ base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
+ for upstream_prefix, dirs, _ in os.walk(base_path):
+ if ".spack-db" in dirs:
+ return upstream_prefix
+ return None
diff --git a/utils/ymerge.py b/utils/ymerge.py
new file mode 100644
index 00000000..79567d63
--- /dev/null
+++ b/utils/ymerge.py
@@ -0,0 +1,15 @@
+# spack-python script that merges two environment configuration files (spack.yaml) into one
+# Usage: spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
+# (note: if the second file does not exist, the output is the first file
+
+import os
+from spack.config import merge_yaml, read_config_file, syaml
+
+
+def merge_spack_envs(top_yaml_path, site_yaml_path):
+ if not os.path.exists(site_yaml_path):
+ merged = syaml.dump(read_config_file(top_yaml_path))
+ else:
+ merged = syaml.dump(merge_yaml(read_config_file(top_yaml_path), read_config_file(site_yaml_path)))
+ return merged
+
--
GitLab
From 1c6b384aac78457b45bb41e3e5be046a07a41ceb Mon Sep 17 00:00:00 2001
From: Eleni Mathioulaki <emathioulaki@athenarc.gr>
Date: Tue, 15 Apr 2025 11:03:14 +0300
Subject: [PATCH 04/16] feat: update dedal to 0.1.0
---
vendor/yashchiki | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 5690db7d..11785be8 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 5690db7dbccd78f3ceef2123a605e662bb8b2c0f
+Subproject commit 11785be8a6e914a8404757670f58b4e78fca2bf9
--
GitLab
From 278286e300763b63e92dc61588facfca2fbc24fb Mon Sep 17 00:00:00 2001
From: Eleni Mathioulaki <emathioulaki@athenarc.gr>
Date: Tue, 15 Apr 2025 11:05:01 +0300
Subject: [PATCH 05/16] fix(CI): install dedal python library
---
.gitlab-ci.yml | 2 ++
1 file changed, 2 insertions(+)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f0a9f598..2d923cf4 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -228,6 +228,8 @@ build-spack-env-on-runner:
OCI_CACHE_PREFIX: ""
UPDATE_SPACK_OCI_CACHES: false
script:
+ # install dedal python library
+ - pip3 install --break-system-packages vendor/yashchiki
# deactivate environment views (we don't need them for the test build-job)
- >
echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
--
GitLab
From d554f27cecd0ff0199caf1e5ab560c31a9443089 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Mon, 28 Apr 2025 15:23:47 +0300
Subject: [PATCH 06/16] VT-94: fixing incompatibilities
---
.gitmodules | 2 +-
install_spack_env.py | 40 +++++++++++++++++++++++++++-------------
utils/ymerge.py | 13 ++++++-------
vendor/yashchiki | 2 +-
4 files changed, 35 insertions(+), 22 deletions(-)
diff --git a/.gitmodules b/.gitmodules
index 30354655..0b9fab48 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -5,4 +5,4 @@
[submodule "vendor/yashchiki"]
path = vendor/yashchiki
url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal
- shallow = true
+ shallow = true
\ No newline at end of file
diff --git a/install_spack_env.py b/install_spack_env.py
index 31a6ab16..55628af4 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -8,9 +8,8 @@ from dedal.enum.SpackConfigCommand import SpackConfigCommand
from dedal.enum.SpackViewEnum import SpackViewEnum
from dedal.model.SpackDescriptor import SpackDescriptor
from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
-from dedal.utils.utils import set_bashrc_variable
+from dedal.utils.utils import set_bashrc_variable, run_command
from utils.utils import find_first_upstream_prefix
-from utils.ymerge import merge_spack_envs
parser = argparse.ArgumentParser(
prog='install_spack_env.py',
@@ -56,6 +55,8 @@ parser.add_argument(
parser.add_argument(
"oci_cache_prefix",
type=str,
+ nargs="?", # optional
+ default=None,
)
args = parser.parse_args()
@@ -68,6 +69,8 @@ upstream_instance = args.upstream_instance
update_spack_oci_caches = args.update_spack_oci_caches
oci_cache_prefix = args.oci_cache_prefix
+system_name = os.getenv('SYSTEMNAME')
+
set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
ci_spack_root = installation_root / 'spack'
@@ -77,32 +80,32 @@ set_bashrc_variable('DEDAL_HOME', f'{ebrains_repo}/vendor/yashchiki')
set_bashrc_variable('SPACK_CACHE_SOURCE', f'{ci_spack_root}/var/spack/cache')
set_bashrc_variable('SPACK_CACHE_BUILD', f'{ci_spack_root}/var/spack/cache')
-ci_spack_root = installation_root / 'spack'
-spack_root_existed = 1
+spack_root_existed = True
if ci_spack_root and ebrains_repo:
if not os.path.isdir(ci_spack_root):
spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
try:
os.symlink(spack_source, ci_spack_root)
- spack_root_existed = 0
+ spack_root_existed = False
except FileExistsError:
pass
-data_dir = installation_root / 'cashing'
-env_repo = SpackDescriptor('ebrains-spack-builds', data_dir, ebrains_repo)
+data_dir = installation_root / 'caching'
+print(installation_root.parent)
+env_repo = SpackDescriptor('ebrains-spack-builds', path=installation_root.parent)
spack_config = SpackConfig(env=env_repo,
repos=[env_repo],
- install_dir=installation_root,
+ install_dir=installation_root.parent,
upstream_instance=upstream_instance,
system_name='VBT',
concretization_dir=data_dir / 'concretize_cache',
buildcache_dir=data_dir / 'binary_cache',
use_spack_global=False,
cache_version_build='spack_cache',
- view=SpackViewEnum.WITHOUT_VIEW)
+ view=SpackViewEnum.WITHOUT_VIEW,
+ spack_dir=Path(ci_spack_root).resolve())
spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
-spack_operation.install_spack()
if upstream_instance:
upstream_prefix = find_first_upstream_prefix(upstream_instance)
spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
@@ -120,11 +123,22 @@ if site_config_dest.exists():
site_config_dest.mkdir(parents=True, exist_ok=True)
site_config_esd = Path(ebrains_spack_env) / 'site-config'
shutil.copy(site_config_esd, site_config_dest)
- merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
+ # spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
+ # merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
+ merged_envs = result = run_command(
+ "spack-python",
+ f'{ebrains_repo}/site-config/ymerge.py {ebrains_repo}/spack.yaml',
+ f'{ebrains_repo}/site-config/{system_name}/spack.yaml',
+ info_msg='Merging top-level and site-specific spack.yaml files.',
+ exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
+ capture_output=True,
+ text=True,
+ check=True
+ )
tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
try:
with open(tmp_spack_yaml, "w") as f:
- f.write(merged_envs)
+ f.write(merged_envs.stdout.strip())
print(f"Written merged spack.yaml to {tmp_spack_yaml}")
except Exception as e:
sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
@@ -134,4 +148,4 @@ if site_config_dest.exists():
spack_operation.concretize_spack_env()
spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
-spack_operation.reindex()
\ No newline at end of file
+spack_operation.reindex()
diff --git a/utils/ymerge.py b/utils/ymerge.py
index 79567d63..accb8779 100644
--- a/utils/ymerge.py
+++ b/utils/ymerge.py
@@ -2,14 +2,13 @@
# Usage: spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
# (note: if the second file does not exist, the output is the first file
-import os
+import sys, os
from spack.config import merge_yaml, read_config_file, syaml
+if not os.path.exists(sys.argv[2]):
+ merged = syaml.dump(read_config_file(sys.argv[1]))
+else:
+ merged = syaml.dump(merge_yaml(read_config_file(sys.argv[1]), read_config_file(sys.argv[2])))
-def merge_spack_envs(top_yaml_path, site_yaml_path):
- if not os.path.exists(site_yaml_path):
- merged = syaml.dump(read_config_file(top_yaml_path))
- else:
- merged = syaml.dump(merge_yaml(read_config_file(top_yaml_path), read_config_file(site_yaml_path)))
- return merged
+print(merged)
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 11785be8..7a3d1c14 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 11785be8a6e914a8404757670f58b4e78fca2bf9
+Subproject commit 7a3d1c1422fcec27b62c491dd03078b73b933a0b
--
GitLab
From d1e3bc1bb4fcbcbed2fbbb05468148b4242fc071 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Wed, 30 Apr 2025 17:25:37 +0300
Subject: [PATCH 07/16] VT-94: adapt to new changes from dedal
---
.gitlab-ci.yml | 6 ++
install_spack_env.py | 194 +++++++++++++++++++++++++++++++++++--------
vendor/yashchiki | 2 +-
3 files changed, 166 insertions(+), 36 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2d923cf4..dfd9afb2 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -247,6 +247,8 @@ build-spack-env-on-runner:
artifacts:
paths:
- spack_logs
+ - .dedal.log
+ - .generate_cache.log
# - spack_tests
when: always
timeout: 2 days
@@ -299,6 +301,8 @@ sync-esd-image:
artifacts:
paths:
- spack_logs
+ - .dedal.log
+ - .generate_cache.log
when: always
timeout: 2 days
resource_group: registry-esd-master-image
@@ -346,6 +350,8 @@ sync-gitlab-spack-instance:
artifacts:
paths:
- spack_logs
+ - .dedal.log
+ - .generate_cache.log
when: always
rules:
# branches that update the gitlab-runner upstream (read-only) installation
diff --git a/install_spack_env.py b/install_spack_env.py
index 55628af4..4e225163 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -69,17 +69,41 @@ upstream_instance = args.upstream_instance
update_spack_oci_caches = args.update_spack_oci_caches
oci_cache_prefix = args.oci_cache_prefix
-system_name = os.getenv('SYSTEMNAME')
+# define SYSTEMNAME variable in sites where it's not already defined
+system_name = (
+ os.getenv('SYSTEMNAME')
+ or os.getenv('HPC_SYSTEM')
+ or os.getenv('BSC_MACHINE')
+)
+
+# disable local configuration and cache directories
+set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
+os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
+set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
+os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
-set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+if oci_cache_prefix:
+ set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+ os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
+# make sure spack uses the symlinked folder as path
ci_spack_root = installation_root / 'spack'
-set_bashrc_variable('CACHE_SPECFILE', f'env_specfile.yaml')
-set_bashrc_variable('DEDAL_HOME', f'{ebrains_repo}/vendor/yashchiki')
-set_bashrc_variable('SPACK_CACHE_SOURCE', f'{ci_spack_root}/var/spack/cache')
-set_bashrc_variable('SPACK_CACHE_BUILD', f'{ci_spack_root}/var/spack/cache')
+# cache related variables
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
+set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
+os.environ['CACHE_SPECFILE'] = cache_specfile
+dedal_home = f'{ebrains_repo}/vendor/yashchiki'
+os.environ['DEDAL_HOME'] = dedal_home
+set_bashrc_variable('DEDAL_HOME', dedal_home)
+spack_cache_source = f'{ci_spack_root}/var/spack/cache'
+set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
+os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
+set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
+os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
+spack_cache_build = spack_cache_source
+# initial setup: use spack submodule if spack dir doesn't already exist
spack_root_existed = True
if ci_spack_root and ebrains_repo:
if not os.path.isdir(ci_spack_root):
@@ -91,61 +115,161 @@ if ci_spack_root and ebrains_repo:
pass
data_dir = installation_root / 'caching'
-print(installation_root.parent)
-env_repo = SpackDescriptor('ebrains-spack-builds', path=installation_root.parent)
+
+env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
+
+binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
spack_config = SpackConfig(env=env_repo,
- repos=[env_repo],
+ repos=[],
install_dir=installation_root.parent,
upstream_instance=upstream_instance,
- system_name='VBT',
- concretization_dir=data_dir / 'concretize_cache',
- buildcache_dir=data_dir / 'binary_cache',
+ system_name=system_name,
+ buildcache_dir=binary_cache_path,
use_spack_global=False,
cache_version_build='spack_cache',
view=SpackViewEnum.WITHOUT_VIEW,
spack_dir=Path(ci_spack_root).resolve())
spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.setup_spack_env()
+
if upstream_instance:
upstream_prefix = find_first_upstream_prefix(upstream_instance)
spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
if spack_root_existed == 0:
spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
+# make sure all fetching/clingo stuff happens before anything else
spack_operation.spec_pacakge('aida')
+# rebuild spack's database (could be an debugging session)
spack_operation.reindex()
-spack_operation.add_mirror(mirror_name='local_cache', mirror_path=Path(os.getenv('SPACK_CACHE_BUILD')).resolve())
-spack_operation.setup_spack_env()
+spack_operation.add_mirror(mirror_name='local_cache', mirror_path=binary_cache_path)
-site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
+site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env / 'site-config'
if site_config_dest.exists():
- site_config_dest.rmdir()
- site_config_dest.mkdir(parents=True, exist_ok=True)
- site_config_esd = Path(ebrains_spack_env) / 'site-config'
- shutil.copy(site_config_esd, site_config_dest)
- # spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
- # merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
- merged_envs = result = run_command(
- "spack-python",
- f'{ebrains_repo}/site-config/ymerge.py {ebrains_repo}/spack.yaml',
- f'{ebrains_repo}/site-config/{system_name}/spack.yaml',
- info_msg='Merging top-level and site-specific spack.yaml files.',
- exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
- capture_output=True,
- text=True,
- check=True
- )
+ shutil.rmtree(site_config_dest / 'site-config')
+ os.makedirs(site_config_dest / 'site-config')
+
+site_config = Path(ebrains_repo) / 'site-config'
+shutil.copytree(site_config, site_config_dest)
+
+y_merge_path = Path(ebrains_repo) / 'site-config/ymerge.py'
+merge_path_1 = Path(ebrains_repo) / 'spack.yaml'
+merge_path_2 = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
+
+# update environment site-configs
+merged_envs = run_command(
+ "bash", "-c",
+ f'{spack_operation.spack_setup_script} && spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
+ info_msg='Merging top-level and site-specific spack.yaml files.',
+ exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
+ capture_output=True,
+ text=True,
+ check=True
+).stdout
+
+if merged_envs is None:
+ sys.exit(-1)
+else:
tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
try:
with open(tmp_spack_yaml, "w") as f:
- f.write(merged_envs.stdout.strip())
- print(f"Written merged spack.yaml to {tmp_spack_yaml}")
+ f.write(merged_envs)
except Exception as e:
sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
sys.exit(1)
- shutil.copy(tmp_spack_yaml, site_config_dest)
+ shutil.copy(tmp_spack_yaml, site_config_dest.parent)
+
+# add repo if it does not exist
+spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
+spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
+
+dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
-spack_operation.concretize_spack_env()
+try:
+ with open(cache_specfile, "w") as f:
+ f.write(dump_dag)
+except Exception as e:
+ sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
+ sys.exit(1)
+
+if oci_cache_prefix:
+ # fetch missing sources (if packages not yet installed)
+ fetch_cached_sources = Path(dedal_home).resolve() / '/cli' / 'fetch_cached_sources.py'
+ run_command(
+ "bash", "-c",
+ f"{spack_operation.spack_setup_script} && python3 "
+ f"{fetch_cached_sources} "
+ f"--local-cache spack_cache_source "
+ f"--remote-cache-type oci "
+ f"--remote-cache {oci_cache_prefix}/source_cache "
+ f"--yashchiki-home {dedal_home} "
+ f"/tmp/missing_paths_sources.dat cache_specfile",
+ info_msg="Fetching missing sources",
+ exception_msg="Failed to fetch missing sources",
+ check=True
+ )
+ # fetch missing build results (if packages not yet installed)
+ fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.dat'
+ run_command(
+ "bash", "-c",
+ f"{spack_operation.spack_setup_script} && python3 "
+ f"{fetch_cached_buildresults} "
+ f"--local-cache",
+ f"{spack_cache_build}/build_cache "
+ f"--remote-cache-type oci "
+ f"--remote-cache {oci_cache_prefix}/build_cache "
+ f"--yashchiki-home {dedal_home} "
+ f"/tmp/missing_paths_buildresults.dat {cache_specfile}",
+ info_msg="Fetching missing build results",
+ exception_msg="Failed to fetch missing build results",
+ check=True
+ )
+
+run_command(
+ "bash", "-c",
+ f'{spack_operation.spack_setup_script} && spack-python',
+ f'exit(not len(spack.environment.active_environment().uninstalled_specs()))',
+ info_msg='Checking for uninstalled Spack specs',
+ check=True
+)
+
+spack_operation.fetch(dependencies=True, missing=True)
+
+if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+ print("Performing update of the source cache")
+ update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
+ cache_cmd = run_command(
+ "bash", "-c",
+ f'{spack_operation.spack_setup_script} && python3 {update_cached_sources} '
+ f'--local-cache {spack_cache_source}'
+ f'--remote-cache-type oci '
+ f'--remote-cache f"{oci_cache_prefix}/source_cache '
+ f'/tmp/missing_paths_sources.dat',
+ info_msg='Updating remote OCI cache',
+ check=True
+ )
+ if cache_cmd is None or cache_cmd.returncode != 0:
+ print("Cache update failed.")
+ sys.exit(cache_cmd.returncode if cache_cmd else 1)
+else:
+ print("Updating of the source cache disabled.")
+
+if oci_cache_prefix:
+ specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
+ dag_hashes_pre_install = run_command(
+ "bash", "-c",
+ f'{spack_operation.spack_setup_script} && spack-python specfile_dag_hash {cache_specfile}',
+ capture_output=True,
+ text=True,
+ check=True
+ ).stdout
spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+
+if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+ print('Some spack packages failed to install.')
+ sys.exit(-1)
+
+print('Installed all spack packages.')
spack_operation.reindex()
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 7a3d1c14..ffc59776 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 7a3d1c1422fcec27b62c491dd03078b73b933a0b
+Subproject commit ffc597764769b6b01e8f75b7275caa4693cb3ac4
--
GitLab
From 84da6208ff3e6099a6d8096612ce512bb3b660f6 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Fri, 9 May 2025 10:54:12 +0300
Subject: [PATCH 08/16] VT-94: Caching migration to python
---
install_spack_env.py | 56 ++++++++++++++++++++++++++++++++++++++------
vendor/yashchiki | 2 +-
2 files changed, 50 insertions(+), 8 deletions(-)
diff --git a/install_spack_env.py b/install_spack_env.py
index 4e225163..aef88437 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -1,6 +1,7 @@
import os
import argparse
import shutil
+import subprocess
import sys
from pathlib import Path
from dedal.configuration.SpackConfig import SpackConfig
@@ -195,13 +196,13 @@ except Exception as e:
if oci_cache_prefix:
# fetch missing sources (if packages not yet installed)
- fetch_cached_sources = Path(dedal_home).resolve() / '/cli' / 'fetch_cached_sources.py'
+ fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
run_command(
"bash", "-c",
f"{spack_operation.spack_setup_script} && python3 "
f"{fetch_cached_sources} "
f"--local-cache spack_cache_source "
- f"--remote-cache-type oci "
+ f"--remote-cache-type=oci "
f"--remote-cache {oci_cache_prefix}/source_cache "
f"--yashchiki-home {dedal_home} "
f"/tmp/missing_paths_sources.dat cache_specfile",
@@ -210,14 +211,14 @@ if oci_cache_prefix:
check=True
)
# fetch missing build results (if packages not yet installed)
- fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.dat'
+ fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
run_command(
"bash", "-c",
f"{spack_operation.spack_setup_script} && python3 "
f"{fetch_cached_buildresults} "
f"--local-cache",
f"{spack_cache_build}/build_cache "
- f"--remote-cache-type oci "
+ f"--remote-cache-type=oci "
f"--remote-cache {oci_cache_prefix}/build_cache "
f"--yashchiki-home {dedal_home} "
f"/tmp/missing_paths_buildresults.dat {cache_specfile}",
@@ -243,7 +244,7 @@ if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 't
"bash", "-c",
f'{spack_operation.spack_setup_script} && python3 {update_cached_sources} '
f'--local-cache {spack_cache_source}'
- f'--remote-cache-type oci '
+ f'--remote-cache-type=oci'
f'--remote-cache f"{oci_cache_prefix}/source_cache '
f'/tmp/missing_paths_sources.dat',
info_msg='Updating remote OCI cache',
@@ -255,18 +256,59 @@ if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 't
else:
print("Updating of the source cache disabled.")
+dag_hashes_pre_install = []
if oci_cache_prefix:
specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
dag_hashes_pre_install = run_command(
"bash", "-c",
- f'{spack_operation.spack_setup_script} && spack-python specfile_dag_hash {cache_specfile}',
+ f'{spack_operation.spack_setup_script} && spack-python {specfile_dag_hash} {cache_specfile}',
capture_output=True,
text=True,
check=True
- ).stdout
+ ).stdout.strip().split()
spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
+ for dag_hash in dag_hashes_pre_install:
+ package = Path(spack_cache_build).resolve() / dag_hash
+ print(package)
+ result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+ if result != 0:
+ print(f'Failed to push {dag_hash}, trying to call spack find on it:')
+ spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+
+ # upload packages from local to remote cache
+ print("Performing update of the build cache")
+ update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
+ local_cache = Path(spack_cache_build).resolve() / 'build_cache'
+ run_command(
+ "bash", "-c",
+ f'{spack_operation.spack_setup_script} && python3 {update_cached_buildresults} '
+ f'--local-cache {local_cache}'
+ f'--remote-cache-type=oci '
+ f'--remote-cache f"{local_cache} '
+ f'/tmp/missing_paths_sources.dat',
+ check=True
+ )
+else:
+ print('Updating of the build cache disabled.')
+
+spack_operation.reindex()
+
+exports = run_command("bash", "-c",
+ f'spack env activate --sh {ebrains_spack_env}',
+ check=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ capture_output=True,
+ text=True,
+ ).stdout
+
+out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
+with open(out_file, "w") as f:
+ f.write(exports)
+
if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
print('Some spack packages failed to install.')
sys.exit(-1)
diff --git a/vendor/yashchiki b/vendor/yashchiki
index ffc59776..e21c2c08 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit ffc597764769b6b01e8f75b7275caa4693cb3ac4
+Subproject commit e21c2c085cdf1712aedda75caea961f152d460f0
--
GitLab
From b9af6d87695f527a68072a0c3d2c532c9fb22f8c Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Mon, 12 May 2025 15:23:08 +0300
Subject: [PATCH 09/16] VT-94: Installing spack before running
install_spack_env.py script
---
.gitlab-ci.yml | 4 ++++
install_spack_env.py | 19 ++++++++++---------
2 files changed, 14 insertions(+), 9 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index dfd9afb2..d2084d50 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -230,6 +230,8 @@ build-spack-env-on-runner:
script:
# install dedal python library
- pip3 install --break-system-packages vendor/yashchiki
+ # install spack
+ - source vendor/spack/share/spack/setup-env.sh
# deactivate environment views (we don't need them for the test build-job)
- >
echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
@@ -333,6 +335,8 @@ sync-gitlab-spack-instance:
- git fetch origin
- git reset --hard $CI_COMMIT_SHA
- git submodule update --force
+ # install spack
+ - source vendor/spack/share/spack/setup-env.sh
# run installation script
- python3 install_spack_env.py $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
# create kernel spec, so that the environment can be used in gitlab CI jobs
diff --git a/install_spack_env.py b/install_spack_env.py
index aef88437..ea0ca80c 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -146,7 +146,8 @@ spack_operation.spec_pacakge('aida')
spack_operation.reindex()
spack_operation.add_mirror(mirror_name='local_cache', mirror_path=binary_cache_path)
-site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env / 'site-config'
+env_path = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
+site_config_dest = env_path / 'site-config'
if site_config_dest.exists():
shutil.rmtree(site_config_dest / 'site-config')
os.makedirs(site_config_dest / 'site-config')
@@ -161,7 +162,7 @@ merge_path_2 = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
# update environment site-configs
merged_envs = run_command(
"bash", "-c",
- f'{spack_operation.spack_setup_script} && spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
+ f'spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
info_msg='Merging top-level and site-specific spack.yaml files.',
exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
capture_output=True,
@@ -199,7 +200,7 @@ if oci_cache_prefix:
fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
run_command(
"bash", "-c",
- f"{spack_operation.spack_setup_script} && python3 "
+ f"python3 "
f"{fetch_cached_sources} "
f"--local-cache spack_cache_source "
f"--remote-cache-type=oci "
@@ -214,7 +215,7 @@ if oci_cache_prefix:
fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
run_command(
"bash", "-c",
- f"{spack_operation.spack_setup_script} && python3 "
+ f"python3 "
f"{fetch_cached_buildresults} "
f"--local-cache",
f"{spack_cache_build}/build_cache "
@@ -229,7 +230,7 @@ if oci_cache_prefix:
run_command(
"bash", "-c",
- f'{spack_operation.spack_setup_script} && spack-python',
+ f'spack-python',
f'exit(not len(spack.environment.active_environment().uninstalled_specs()))',
info_msg='Checking for uninstalled Spack specs',
check=True
@@ -242,7 +243,7 @@ if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 't
update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
cache_cmd = run_command(
"bash", "-c",
- f'{spack_operation.spack_setup_script} && python3 {update_cached_sources} '
+ f'python3 {update_cached_sources} '
f'--local-cache {spack_cache_source}'
f'--remote-cache-type=oci'
f'--remote-cache f"{oci_cache_prefix}/source_cache '
@@ -261,7 +262,7 @@ if oci_cache_prefix:
specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
dag_hashes_pre_install = run_command(
"bash", "-c",
- f'{spack_operation.spack_setup_script} && spack-python {specfile_dag_hash} {cache_specfile}',
+ f'spack-python {specfile_dag_hash} {cache_specfile}',
capture_output=True,
text=True,
check=True
@@ -284,7 +285,7 @@ if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
local_cache = Path(spack_cache_build).resolve() / 'build_cache'
run_command(
"bash", "-c",
- f'{spack_operation.spack_setup_script} && python3 {update_cached_buildresults} '
+ f'python3 {update_cached_buildresults} '
f'--local-cache {local_cache}'
f'--remote-cache-type=oci '
f'--remote-cache f"{local_cache} '
@@ -297,7 +298,7 @@ else:
spack_operation.reindex()
exports = run_command("bash", "-c",
- f'spack env activate --sh {ebrains_spack_env}',
+ f'spack env activate --sh {env_path}',
check=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
--
GitLab
From 6b19ab57388115e43321785c503a5540fc7c6743 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Tue, 13 May 2025 10:57:24 +0300
Subject: [PATCH 10/16] VT-94: compatibility with spack-python; dedal caching;
dynamic dedal version
---
.gitlab-ci.yml | 2 +-
utils/__init__.py => create_spack_env.py | 0
dedal_env/spack.yaml | 8 +
dedal_install.py | 44 ++++
dedal_install_spack_env.sh | 23 +++
install_spack_env.py | 243 +++++++++++------------
packages/py-dedal/package.py | 22 ++
packages/py-oras/package.py | 33 +++
tools/__init__.py | 0
tools/tools.py | 32 +++
{utils => tools}/ymerge.py | 0
utils/utils.py | 13 --
vendor/yashchiki | 2 +-
13 files changed, 275 insertions(+), 147 deletions(-)
rename utils/__init__.py => create_spack_env.py (100%)
create mode 100644 dedal_env/spack.yaml
create mode 100644 dedal_install.py
create mode 100755 dedal_install_spack_env.sh
create mode 100644 packages/py-dedal/package.py
create mode 100644 packages/py-oras/package.py
create mode 100644 tools/__init__.py
create mode 100644 tools/tools.py
rename {utils => tools}/ymerge.py (100%)
delete mode 100644 utils/utils.py
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d2084d50..b17fe3bc 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -236,7 +236,7 @@ build-spack-env-on-runner:
- >
echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
# run installation script
- - python3 install_spack_env.py $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+ - bash dedal_install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
after_script:
- mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
# for succesfully installed packages: keep the spack logs for any package modified during this CI job
diff --git a/utils/__init__.py b/create_spack_env.py
similarity index 100%
rename from utils/__init__.py
rename to create_spack_env.py
diff --git a/dedal_env/spack.yaml b/dedal_env/spack.yaml
new file mode 100644
index 00000000..9cd58b5c
--- /dev/null
+++ b/dedal_env/spack.yaml
@@ -0,0 +1,8 @@
+spack:
+ specs:
+ - python@3.13.0
+ - py-oras@0.2.31
+ - py-ruamel-yaml@0.17.32
+ - py-click@8.1.7
+ - py-jsonpickle@2.2.0
+ - py-pyyaml@5.4.1
\ No newline at end of file
diff --git a/dedal_install.py b/dedal_install.py
new file mode 100644
index 00000000..2b827162
--- /dev/null
+++ b/dedal_install.py
@@ -0,0 +1,44 @@
+from pathlib import Path
+from dedal.configuration.GpgConfig import GpgConfig
+from dedal.configuration.SpackConfig import SpackConfig
+import os
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import count_files_in_folder
+
+from dedal.model.SpackDescriptor import SpackDescriptor
+
+from vendor.yashchiki.dedal.spack_factory.SpackOperationUseCache import SpackOperationUseCache
+
+dedal_env = SpackDescriptor(name='dedal_env', path=Path('./'))
+ebrains_repo = SpackDescriptor(name='ebrains-spack-builds', path=Path('../'))
+dedal_dir = 'dedal_install_dir'
+install_dir = Path('./') / dedal_dir
+os.makedirs(install_dir, exist_ok=True)
+concretization_dir = install_dir / 'concretization'
+buildcache_dir = install_dir / 'buildcache'
+spack_config = SpackConfig(env=dedal_env,
+ repos=[ebrains_repo],
+ install_dir=install_dir,
+ upstream_instance=None,
+ system_name=None,
+ concretization_dir=concretization_dir,
+ buildcache_dir=buildcache_dir,
+ gpg=None,
+ use_spack_global=False,
+ cache_version_build='latest',
+ cache_version_concretize='latest',
+ override_cache=False,
+ spack_dir=Path('./vendor').resolve() / 'spack'
+ )
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=True)
+
+if isinstance(spack_operation, SpackOperationUseCache) and (len(spack_operation.cache_dependency.list_tags()) == 0 or len(spack_operation.build_cache.list_tags()) == 0):
+ print('No cache available')
+ gpg = GpgConfig('dedal', 'science@codemart.ro')
+ spack_config.gpg = gpg
+ spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+ print(type(spack_operation))
+
+spack_operation.setup_spack_env()
+spack_operation.concretize_spack_env()
+spack_operation.install_packages(os.cpu_count(), signed=False)
diff --git a/dedal_install_spack_env.sh b/dedal_install_spack_env.sh
new file mode 100755
index 00000000..83c73680
--- /dev/null
+++ b/dedal_install_spack_env.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+set -eo pipefail
+
+SPACK_JOBS=$1 # number of jobs
+INSTALLATION_ROOT=$2 # where to set up the installation
+EBRAINS_REPO=$3 # location of ebrains-spack-builds repository
+EBRAINS_SPACK_ENV=$4 # name of EBRAINS Spack environment to be created/updated
+UPSTREAM_INSTANCE=$5 # path to Spack instance to use as upstream (optional)
+UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results
+export OCI_CACHE_PREFIX=$7
+
+python3 dedal_install.py
+
+spack env activate -p ./dedal_env
+spack mirror remove local_cache
+spack clean -a
+spack concretize
+# install the latest version of dedal from the specified branch (the cache version might not be the latest yet)
+spack install --add py-dedal@experimental
+spack load py-dedal@experimental
+
+spack-python install_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO" "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX"
\ No newline at end of file
diff --git a/install_spack_env.py b/install_spack_env.py
index ea0ca80c..f00105e3 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -3,6 +3,7 @@ import argparse
import shutil
import subprocess
import sys
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from pathlib import Path
from dedal.configuration.SpackConfig import SpackConfig
from dedal.enum.SpackConfigCommand import SpackConfigCommand
@@ -10,7 +11,8 @@ from dedal.enum.SpackViewEnum import SpackViewEnum
from dedal.model.SpackDescriptor import SpackDescriptor
from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
from dedal.utils.utils import set_bashrc_variable, run_command
-from utils.utils import find_first_upstream_prefix
+from tools.tools import find_first_upstream_prefix
+import spack
parser = argparse.ArgumentParser(
prog='install_spack_env.py',
@@ -70,6 +72,8 @@ upstream_instance = args.upstream_instance
update_spack_oci_caches = args.update_spack_oci_caches
oci_cache_prefix = args.oci_cache_prefix
+print(f'oci_cache_prefix: {oci_cache_prefix}')
+
# define SYSTEMNAME variable in sites where it's not already defined
system_name = (
os.getenv('SYSTEMNAME')
@@ -115,8 +119,6 @@ if ci_spack_root and ebrains_repo:
except FileExistsError:
pass
-data_dir = installation_root / 'caching'
-
env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
@@ -125,9 +127,7 @@ spack_config = SpackConfig(env=env_repo,
install_dir=installation_root.parent,
upstream_instance=upstream_instance,
system_name=system_name,
- buildcache_dir=binary_cache_path,
use_spack_global=False,
- cache_version_build='spack_cache',
view=SpackViewEnum.WITHOUT_VIEW,
spack_dir=Path(ci_spack_root).resolve())
@@ -144,43 +144,7 @@ if spack_root_existed == 0:
spack_operation.spec_pacakge('aida')
# rebuild spack's database (could be an debugging session)
spack_operation.reindex()
-spack_operation.add_mirror(mirror_name='local_cache', mirror_path=binary_cache_path)
-
-env_path = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
-site_config_dest = env_path / 'site-config'
-if site_config_dest.exists():
- shutil.rmtree(site_config_dest / 'site-config')
- os.makedirs(site_config_dest / 'site-config')
-
-site_config = Path(ebrains_repo) / 'site-config'
-shutil.copytree(site_config, site_config_dest)
-
-y_merge_path = Path(ebrains_repo) / 'site-config/ymerge.py'
-merge_path_1 = Path(ebrains_repo) / 'spack.yaml'
-merge_path_2 = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
-
-# update environment site-configs
-merged_envs = run_command(
- "bash", "-c",
- f'spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
- info_msg='Merging top-level and site-specific spack.yaml files.',
- exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
- capture_output=True,
- text=True,
- check=True
-).stdout
-
-if merged_envs is None:
- sys.exit(-1)
-else:
- tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
- try:
- with open(tmp_spack_yaml, "w") as f:
- f.write(merged_envs)
- except Exception as e:
- sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
- sys.exit(1)
- shutil.copy(tmp_spack_yaml, site_config_dest.parent)
+spack_operation.add_mirror(mirror_name='local_cache_esd', mirror_path=binary_cache_path, autopush=True)
# add repo if it does not exist
spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
@@ -191,16 +155,23 @@ dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
try:
with open(cache_specfile, "w") as f:
f.write(dump_dag)
+ print(f'Dumped cache_specfile')
except Exception as e:
sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
sys.exit(1)
+print(spack_operation.env_path)
+
+##################### first part ##################
+
if oci_cache_prefix:
+ print('=======================Started fetching===================')
# fetch missing sources (if packages not yet installed)
fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
+ print(fetch_cached_sources)
run_command(
"bash", "-c",
- f"python3 "
+ f"{spack_operation.spack_command_on_env} && python3 "
f"{fetch_cached_sources} "
f"--local-cache spack_cache_source "
f"--remote-cache-type=oci "
@@ -213,9 +184,10 @@ if oci_cache_prefix:
)
# fetch missing build results (if packages not yet installed)
fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
+ print(fetch_cached_buildresults)
run_command(
"bash", "-c",
- f"python3 "
+ f"{spack_operation.spack_command_on_env} && python3 "
f"{fetch_cached_buildresults} "
f"--local-cache",
f"{spack_cache_build}/build_cache "
@@ -228,91 +200,98 @@ if oci_cache_prefix:
check=True
)
-run_command(
- "bash", "-c",
- f'spack-python',
- f'exit(not len(spack.environment.active_environment().uninstalled_specs()))',
- info_msg='Checking for uninstalled Spack specs',
- check=True
-)
-
-spack_operation.fetch(dependencies=True, missing=True)
-
-if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
- print("Performing update of the source cache")
- update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
- cache_cmd = run_command(
- "bash", "-c",
- f'python3 {update_cached_sources} '
- f'--local-cache {spack_cache_source}'
- f'--remote-cache-type=oci'
- f'--remote-cache f"{oci_cache_prefix}/source_cache '
- f'/tmp/missing_paths_sources.dat',
- info_msg='Updating remote OCI cache',
- check=True
- )
- if cache_cmd is None or cache_cmd.returncode != 0:
- print("Cache update failed.")
- sys.exit(cache_cmd.returncode if cache_cmd else 1)
-else:
- print("Updating of the source cache disabled.")
-
-dag_hashes_pre_install = []
-if oci_cache_prefix:
- specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
- dag_hashes_pre_install = run_command(
- "bash", "-c",
- f'spack-python {specfile_dag_hash} {cache_specfile}',
- capture_output=True,
- text=True,
- check=True
- ).stdout.strip().split()
-
-spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
-
-if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
- for dag_hash in dag_hashes_pre_install:
- package = Path(spack_cache_build).resolve() / dag_hash
- print(package)
- result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
- if result != 0:
- print(f'Failed to push {dag_hash}, trying to call spack find on it:')
- spack_operation.find_package(package=package, long=True, variants=True, paths=True)
-
- # upload packages from local to remote cache
- print("Performing update of the build cache")
- update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
- local_cache = Path(spack_cache_build).resolve() / 'build_cache'
- run_command(
- "bash", "-c",
- f'python3 {update_cached_buildresults} '
- f'--local-cache {local_cache}'
- f'--remote-cache-type=oci '
- f'--remote-cache f"{local_cache} '
- f'/tmp/missing_paths_sources.dat',
- check=True
- )
-else:
- print('Updating of the build cache disabled.')
-
-spack_operation.reindex()
-
-exports = run_command("bash", "-c",
- f'spack env activate --sh {env_path}',
- check=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- capture_output=True,
- text=True,
- ).stdout
-
-out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
-with open(out_file, "w") as f:
- f.write(exports)
-
-if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
- print('Some spack packages failed to install.')
- sys.exit(-1)
-
-print('Installed all spack packages.')
-spack_operation.reindex()
+# cmd = [
+# "spack-python",
+# "-c",
+# "exit(not len(spack.environment.active_environment().uninstalled_specs()))"
+# ]
+
+# packages_not_installed = subprocess.run(cmd).returncode == 0
+packages_not_installed = not len(spack.environment.active_environment().uninstalled_specs())
+
+
+print(f'=====================Packages_not_installed: {packages_not_installed}=====================')
+
+if packages_not_installed:
+ spack_operation.fetch(dependencies=True, missing=True)
+#
+# if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+# print("Performing update of the source cache")
+# update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
+# cache_cmd = run_command(
+# "bash", "-c",
+# f'{spack_operation.spack_command_on_env} && python3 {update_cached_sources} '
+# f'--local-cache {spack_cache_source}'
+# f'--remote-cache-type=oci'
+# f'--remote-cache f"{oci_cache_prefix}/source_cache '
+# f'/tmp/missing_paths_sources.dat',
+# info_msg='Updating remote OCI cache',
+# check=True
+# )
+# if cache_cmd is None or cache_cmd.returncode != 0:
+# print("Cache update failed.")
+# sys.exit(cache_cmd.returncode if cache_cmd else 1)
+# else:
+# print("Updating of the source cache disabled.")
+#
+# dag_hashes_pre_install = []
+# if oci_cache_prefix:
+# specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
+# dag_hashes_pre_install = run_command(
+# "bash", "-c",
+# f'{spack_operation.spack_command_on_env} && spack-python {specfile_dag_hash} {cache_specfile}',
+# capture_output=True,
+# text=True,
+# check=True
+# ).stdout.strip().split()
+#
+# print(dag_hashes_pre_install)
+
+# spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+#
+# if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
+# for dag_hash in dag_hashes_pre_install:
+# package = Path(spack_cache_build).resolve() / dag_hash
+# print(package)
+# result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+# if result != 0:
+# print(f'Failed to push {dag_hash}, trying to call spack find on it:')
+# spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+#
+# # upload packages from local to remote cache
+# print("Performing update of the build cache")
+# update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
+# local_cache_esd = Path(spack_cache_build).resolve() / 'build_cache'
+# run_command(
+# "bash", "-c",
+# f'{spack_operation.spack_command_on_env} && python3 {update_cached_buildresults} '
+# f'--local-cache {local_cache_esd}'
+# f'--remote-cache-type=oci '
+# f'--remote-cache f"{local_cache_esd} '
+# f'/tmp/missing_paths_sources.dat',
+# check=True
+# )
+# else:
+# print('Updating of the build cache disabled.')
+#
+# spack_operation.reindex()
+#
+# exports = run_command("bash", "-c",
+# f'spack env activate --sh {env_path}',
+# check=True,
+# stdout=subprocess.PIPE,
+# stderr=subprocess.PIPE,
+# capture_output=True,
+# text=True,
+# ).stdout
+#
+# out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
+# with open(out_file, "w") as f:
+# f.write(exports)
+#
+# if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+# print('Some spack packages failed to install.')
+# sys.exit(-1)
+#
+# print('Installed all spack packages.')
+# spack_operation.reindex()
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
new file mode 100644
index 00000000..07cf5a8f
--- /dev/null
+++ b/packages/py-dedal/package.py
@@ -0,0 +1,22 @@
+from spack.package import PythonPackage
+
+class PyDedal(PythonPackage):
+ homepage = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+ git = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+
+ version(
+ 'experimental',
+ commit='8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445'
+ )
+
+ maintainers("Brainiacs")
+
+ depends_on('py-setuptools', type='build')
+
+ depends_on("python@3.10:", type=("build", "run"))
+ depends_on("py-oras@0.2.31:", type=("build", "run"))
+
+ depends_on("py-ruamel-yaml", type=("build", "run"))
+ depends_on("py-click", type=("build", "run"))
+ depends_on("py-jsonpickle", type=("build", "run"))
+ depends_on("py-pyyaml", type=("build", "run"))
\ No newline at end of file
diff --git a/packages/py-oras/package.py b/packages/py-oras/package.py
new file mode 100644
index 00000000..9a5651da
--- /dev/null
+++ b/packages/py-oras/package.py
@@ -0,0 +1,33 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyOras(PythonPackage):
+ """ORAS Python SDK: OCI Registry as Storage Python SDK."""
+
+ homepage = "https://oras.land"
+ git = "https://github.com/oras-project/oras-py"
+ url = "https://files.pythonhosted.org/packages/28/86/cbae8797a1041fe3bef37f31aa1ecdd3f8914fbc7cfb663532255b6ec16e/oras-0.2.31.tar.gz"
+
+ maintainers("vsoch")
+
+ license("Apache 2.0 License")
+
+ version("0.2.31", sha256="95c0a341359458747c2946dab47d584cc444d1f9d379b6d63fb7a84cabc54de4")
+
+ depends_on('py-setuptools', type='build')
+
+ depends_on("python@3.7:", type=("build", "run"))
+
+ depends_on("py-jsonschema", type=("build", "run"))
+ depends_on("py-requests", type=("build", "run"))
+
+ variant("tests", default=False, description="Enable test suite")
+ depends_on("py-pytest@4.6.2:", when="+tests", type=("build", "run"))
+
+ variant("docker", default=False, description="Enable Docker extra support")
+ depends_on("py-docker@5.0.1", when="+docker", type=("build", "run"))
\ No newline at end of file
diff --git a/tools/__init__.py b/tools/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tools/tools.py b/tools/tools.py
new file mode 100644
index 00000000..035d80a4
--- /dev/null
+++ b/tools/tools.py
@@ -0,0 +1,32 @@
+import os
+import subprocess
+from pathlib import Path
+
+
+def find_first_upstream_prefix(upstream_instance):
+ """
+    Search for a directory named '.spack-db' within the spack opt directory
+    under upstream_instance; return the first matching parent directory, or None.
+ """
+ base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
+ for upstream_prefix, dirs, _ in os.walk(base_path):
+ if ".spack-db" in dirs:
+ return upstream_prefix
+ return None
+
+
+def get_submodule_commit_hash(submodule_path: Path):
+ try:
+ # Run git rev-parse to get the commit hash of the submodule
+ result = subprocess.run(
+ ['git', 'rev-parse', 'HEAD'],
+ cwd=submodule_path,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ text=True,
+ check=True
+ )
+ return result.stdout.strip()
+ except subprocess.CalledProcessError as e:
+ print(f"Error retrieving commit hash: {e.stderr}")
+ return None
diff --git a/utils/ymerge.py b/tools/ymerge.py
similarity index 100%
rename from utils/ymerge.py
rename to tools/ymerge.py
diff --git a/utils/utils.py b/utils/utils.py
deleted file mode 100644
index 2edf146c..00000000
--- a/utils/utils.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import os
-
-
-def find_first_upstream_prefix(upstream_instance):
- """
- Search for directories named '.spack-db' within the spack opt directory
- under upstream_instance, and return a list of their parent directories.
- """
- base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
- for upstream_prefix, dirs, _ in os.walk(base_path):
- if ".spack-db" in dirs:
- return upstream_prefix
- return None
diff --git a/vendor/yashchiki b/vendor/yashchiki
index e21c2c08..8d8bb40a 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit e21c2c085cdf1712aedda75caea961f152d460f0
+Subproject commit 8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445
--
GitLab
From be165a5bc34ff758a11a3c783e865f6a771bd2fe Mon Sep 17 00:00:00 2001
From: adrianciu <adrianciu25@gmail.com>
Date: Thu, 22 May 2025 11:43:45 +0300
Subject: [PATCH 11/16] VT-94: integrated original scripts of yashchiki
---
.gitlab-ci.yml | 4 +-
create_spack_env.py | 169 ++++++++++
..._spack_env.sh => dedal_manage_spack_env.sh | 6 +-
install_spack_env.py | 301 +++++-------------
packages/py-dedal/package.py | 5 +-
tools/tools.py | 14 -
vendor/yashchiki | 2 +-
7 files changed, 266 insertions(+), 235 deletions(-)
rename dedal_install_spack_env.sh => dedal_manage_spack_env.sh (60%)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b17fe3bc..1688b301 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -236,7 +236,7 @@ build-spack-env-on-runner:
- >
echo " view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
# run installation script
- - bash dedal_install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+ - bash dedal_manage_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
after_script:
- mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
# for succesfully installed packages: keep the spack logs for any package modified during this CI job
@@ -338,7 +338,7 @@ sync-gitlab-spack-instance:
# install spack
- source vendor/spack/share/spack/setup-env.sh
# run installation script
- - python3 install_spack_env.py $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+ - bash dedal_manage_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
# create kernel spec, so that the environment can be used in gitlab CI jobs
- RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac);
- bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
diff --git a/create_spack_env.py b/create_spack_env.py
index e69de29b..148a982a 100644
--- a/create_spack_env.py
+++ b/create_spack_env.py
@@ -0,0 +1,169 @@
+import os
+import argparse
+import sys
+
+from dedal.logger.logger_builder import get_logger
+from dedal.utils.spack_utils import find_first_upstream_prefix
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+from pathlib import Path
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.enum.SpackConfigCommand import SpackConfigCommand
+from dedal.enum.SpackViewEnum import SpackViewEnum
+from dedal.model.SpackDescriptor import SpackDescriptor
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import set_bashrc_variable
+import spack
+
+parser = argparse.ArgumentParser(
+ prog='install_spack_env.py',
+ description='ESD install spack env.',
+ epilog='...')
+
+parser.add_argument(
+ "spack_jobs",
+ type=int,
+ help="number of jobs"
+)
+
+parser.add_argument(
+ "installation_root",
+ type=str,
+ help="where to set up the installation"
+)
+
+parser.add_argument(
+ "ebrains_repo",
+ type=str,
+ help="location of ebrains-spack-builds repository"
+)
+
+parser.add_argument(
+ "ebrains_spack_env",
+ type=str,
+ help="name of EBRAINS Spack environment to be created/updated"
+)
+
+parser.add_argument(
+ "upstream_instance",
+ type=str,
+ help="path to Spack instance to use as upstream (optional)"
+)
+
+parser.add_argument(
+ "update_spack_oci_caches",
+ type=bool,
+ help="true enables updating the OCI cache for spack sources and build results"
+)
+
+parser.add_argument(
+ "oci_cache_prefix",
+ type=str,
+ nargs="?", # optional
+ default=None,
+)
+
+LOGGER = get_logger(__name__)
+
+args = parser.parse_args()
+
+spack_jobs = args.spack_jobs
+installation_root = Path(args.installation_root).resolve()
+ebrains_repo = args.ebrains_repo
+ebrains_spack_env = args.ebrains_spack_env
+upstream_instance = args.upstream_instance
+update_spack_oci_caches = args.update_spack_oci_caches
+oci_cache_prefix = args.oci_cache_prefix
+
+# define SYSTEMNAME variable in sites where it's not already defined
+system_name = (
+ os.getenv('SYSTEMNAME')
+ or os.getenv('HPC_SYSTEM')
+ or os.getenv('BSC_MACHINE')
+)
+
+# disable local configuration and cache directories
+set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
+os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
+set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
+os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
+
+if oci_cache_prefix:
+ set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+ os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
+
+# make sure spack uses the symlinked folder as path
+ci_spack_root = installation_root / 'spack'
+
+# cache related variables
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
+set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
+os.environ['CACHE_SPECFILE'] = cache_specfile
+dedal_home = f'{ebrains_repo}/vendor/yashchiki'
+os.environ['DEDAL_HOME'] = dedal_home
+set_bashrc_variable('DEDAL_HOME', dedal_home)
+spack_cache_source = f'{ci_spack_root}/var/spack/cache'
+set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
+os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
+set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
+os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
+spack_cache_build = spack_cache_source
+
+# initial setup: use spack submodule if spack dir doesn't already exist
+spack_root_existed = True
+if ci_spack_root and ebrains_repo:
+ if not os.path.isdir(ci_spack_root):
+ spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
+ try:
+ os.symlink(spack_source, ci_spack_root)
+ spack_root_existed = False
+ except FileExistsError:
+ pass
+
+env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
+
+binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
+spack_config = SpackConfig(env=env_repo,
+ repos=[],
+ install_dir=installation_root.parent,
+ upstream_instance=upstream_instance,
+ system_name=system_name,
+ use_spack_global=False,
+ view=SpackViewEnum.WITHOUT_VIEW,
+ spack_dir=Path(ci_spack_root).resolve(),
+ serialize_name='data_esd.pkl')
+
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.setup_spack_env()
+
+if upstream_instance:
+ upstream_prefix = find_first_upstream_prefix(upstream_instance)
+ spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
+if spack_root_existed == 0:
+ spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
+
+# make sure all fetching/clingo stuff happens before anything else
+spack_operation.spec_pacakge('aida')
+# rebuild spack's database (could be a debugging session)
+spack_operation.reindex()
+spack_operation.add_mirror(mirror_name='local_cache_esd', mirror_path=binary_cache_path, autopush=True)
+
+ebrains_env = Path(ebrains_repo) / 'spack.yaml'
+site_config = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
+spack_operation.merge_envs(ebrains_env, site_config)
+# add repo if it does not exist
+spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
+spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
+
+dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
+
+try:
+ with open(cache_specfile, "w") as f:
+ f.write(dump_dag)
+ LOGGER.info(f'Dumped cache_specfile')
+except Exception as e:
+ sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
+ sys.exit(1)
+
+print(spack_operation.spack_command_on_env)
+
+spack_operation.serialize()
\ No newline at end of file
diff --git a/dedal_install_spack_env.sh b/dedal_manage_spack_env.sh
similarity index 60%
rename from dedal_install_spack_env.sh
rename to dedal_manage_spack_env.sh
index 83c73680..28c0b0c7 100755
--- a/dedal_install_spack_env.sh
+++ b/dedal_manage_spack_env.sh
@@ -10,6 +10,8 @@ UPSTREAM_INSTANCE=$5 # path to Spack instance to use as upstream (optional)
UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results
export OCI_CACHE_PREFIX=$7
+source vendor/spack/share/spack/setup-env.sh
+
python3 dedal_install.py
spack env activate -p ./dedal_env
@@ -20,4 +22,6 @@ spack concretize
spack install --add py-dedal@experimental
spack load py-dedal@experimental
-spack-python install_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO" "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX"
\ No newline at end of file
+spack_env_cmd=$(spack-python create_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO" "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX" | tail -n 1)
+echo "Captured environment command: $spack_env_cmd"
+eval "$spack_env_cmd && spack-python install_spack_env.py \"$SPACK_JOBS\" \"$INSTALLATION_ROOT\" \"$EBRAINS_REPO\" \"$EBRAINS_SPACK_ENV\" \"$UPSTREAM_INSTANCE\" \"$UPDATE_SPACK_OCI_CACHES\" \"$OCI_CACHE_PREFIX\""
diff --git a/install_spack_env.py b/install_spack_env.py
index f00105e3..824d66e5 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -1,17 +1,16 @@
import os
import argparse
-import shutil
-import subprocess
import sys
+from dedal.build_cache.CachedBuildResultUploader import CachedBuildResultUploader
+from dedal.build_cache.CachedBuildResultsFetcher import CachedBuildResultsFetcher
+from dedal.build_cache.CachedSourceFetcher import CachedSourceFetcher
+from dedal.build_cache.CachedSourceUploader import CachedSourceUploader
+from dedal.build_cache.SpecfileDagHash import SpecfileDagHash
+from dedal.error_handling.exceptions import SpackInstallPackagesException
+from dedal.logger.logger_builder import get_logger
+from dedal.spack_factory.SpackOperation import SpackOperation
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from pathlib import Path
-from dedal.configuration.SpackConfig import SpackConfig
-from dedal.enum.SpackConfigCommand import SpackConfigCommand
-from dedal.enum.SpackViewEnum import SpackViewEnum
-from dedal.model.SpackDescriptor import SpackDescriptor
-from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
-from dedal.utils.utils import set_bashrc_variable, run_command
-from tools.tools import find_first_upstream_prefix
import spack
parser = argparse.ArgumentParser(
@@ -62,236 +61,106 @@ parser.add_argument(
default=None,
)
+LOGGER = get_logger(__name__)
+
args = parser.parse_args()
-spack_jobs = args.spack_jobs
installation_root = Path(args.installation_root).resolve()
+ci_spack_root = installation_root / 'spack'
+spack_jobs = args.spack_jobs
ebrains_repo = args.ebrains_repo
ebrains_spack_env = args.ebrains_spack_env
upstream_instance = args.upstream_instance
update_spack_oci_caches = args.update_spack_oci_caches
oci_cache_prefix = args.oci_cache_prefix
-
-print(f'oci_cache_prefix: {oci_cache_prefix}')
-
-# define SYSTEMNAME variable in sites where it's not already defined
-system_name = (
- os.getenv('SYSTEMNAME')
- or os.getenv('HPC_SYSTEM')
- or os.getenv('BSC_MACHINE')
-)
-
-# disable local configuration and cache directories
-set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
-os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
-set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
-os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
-
-if oci_cache_prefix:
- set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
- os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
-
-# make sure spack uses the symlinked folder as path
-ci_spack_root = installation_root / 'spack'
-
-# cache related variables
-cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
-set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
-os.environ['CACHE_SPECFILE'] = cache_specfile
dedal_home = f'{ebrains_repo}/vendor/yashchiki'
-os.environ['DEDAL_HOME'] = dedal_home
-set_bashrc_variable('DEDAL_HOME', dedal_home)
spack_cache_source = f'{ci_spack_root}/var/spack/cache'
-set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
-os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
-set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
-os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
spack_cache_build = spack_cache_source
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
-# initial setup: use spack submodule if spack dir doesn't already exist
-spack_root_existed = True
-if ci_spack_root and ebrains_repo:
- if not os.path.isdir(ci_spack_root):
- spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
- try:
- os.symlink(spack_source, ci_spack_root)
- spack_root_existed = False
- except FileExistsError:
- pass
-
-env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
-
-binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
-spack_config = SpackConfig(env=env_repo,
- repos=[],
- install_dir=installation_root.parent,
- upstream_instance=upstream_instance,
- system_name=system_name,
- use_spack_global=False,
- view=SpackViewEnum.WITHOUT_VIEW,
- spack_dir=Path(ci_spack_root).resolve())
-
-spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
-spack_operation.setup_spack_env()
-
-if upstream_instance:
- upstream_prefix = find_first_upstream_prefix(upstream_instance)
- spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
-if spack_root_existed == 0:
- spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
-
-# make sure all fetching/clingo stuff happens before anything else
-spack_operation.spec_pacakge('aida')
-# rebuild spack's database (could be an debugging session)
-spack_operation.reindex()
-spack_operation.add_mirror(mirror_name='local_cache_esd', mirror_path=binary_cache_path, autopush=True)
-
-# add repo if it does not exist
-spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
-spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
-
-dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
-
-try:
- with open(cache_specfile, "w") as f:
- f.write(dump_dag)
- print(f'Dumped cache_specfile')
-except Exception as e:
- sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
- sys.exit(1)
-
-print(spack_operation.env_path)
-
-##################### first part ##################
+spack_operation = SpackOperation.deserialize(file_location=installation_root.parent, file_name='data_esd.pkl')
if oci_cache_prefix:
- print('=======================Started fetching===================')
+ LOGGER.info('Fetching missing sources (if packages not yet installed)')
# fetch missing sources (if packages not yet installed)
- fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
- print(fetch_cached_sources)
- run_command(
- "bash", "-c",
- f"{spack_operation.spack_command_on_env} && python3 "
- f"{fetch_cached_sources} "
- f"--local-cache spack_cache_source "
- f"--remote-cache-type=oci "
- f"--remote-cache {oci_cache_prefix}/source_cache "
- f"--yashchiki-home {dedal_home} "
- f"/tmp/missing_paths_sources.dat cache_specfile",
- info_msg="Fetching missing sources",
- exception_msg="Failed to fetch missing sources",
- check=True
+ cache_source_fetcher = CachedSourceFetcher(
+ path_missing=Path("/tmp/missing_paths_sources.dat"),
+ specfiles=["cache_specfile"],
+ remote_cache=f"{oci_cache_prefix}/source_cache",
+ remote_cache_type="oci",
+ local_cache=spack_cache_source,
+ dedal_home=dedal_home
)
+
+ LOGGER.info('Fetching build results (if packages not yet installed)')
# fetch missing build results (if packages not yet installed)
- fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
- print(fetch_cached_buildresults)
- run_command(
- "bash", "-c",
- f"{spack_operation.spack_command_on_env} && python3 "
- f"{fetch_cached_buildresults} "
- f"--local-cache",
- f"{spack_cache_build}/build_cache "
- f"--remote-cache-type=oci "
- f"--remote-cache {oci_cache_prefix}/build_cache "
- f"--yashchiki-home {dedal_home} "
- f"/tmp/missing_paths_buildresults.dat {cache_specfile}",
- info_msg="Fetching missing build results",
- exception_msg="Failed to fetch missing build results",
- check=True
+ fetch_build_results = CachedBuildResultsFetcher(
+ path_missing=Path("/tmp/missing_paths_buildresults.dat"),
+ specfiles=[cache_specfile],
+ dedal_home=dedal_home,
+ remote_cache=f"{oci_cache_prefix}/build_cache",
+ remote_cache_type="oci",
+ local_cache=f"{spack_cache_build}/build_cache"
)
-
-# cmd = [
-# "spack-python",
-# "-c",
-# "exit(not len(spack.environment.active_environment().uninstalled_specs()))"
-# ]
+ fetch_build_results.fetch()
# packages_not_installed = subprocess.run(cmd).returncode == 0
packages_not_installed = not len(spack.environment.active_environment().uninstalled_specs())
-
-print(f'=====================Packages_not_installed: {packages_not_installed}=====================')
+LOGGER.info(f'Packages_not_installed: {packages_not_installed}')
if packages_not_installed:
spack_operation.fetch(dependencies=True, missing=True)
-#
-# if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
-# print("Performing update of the source cache")
-# update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
-# cache_cmd = run_command(
-# "bash", "-c",
-# f'{spack_operation.spack_command_on_env} && python3 {update_cached_sources} '
-# f'--local-cache {spack_cache_source}'
-# f'--remote-cache-type=oci'
-# f'--remote-cache f"{oci_cache_prefix}/source_cache '
-# f'/tmp/missing_paths_sources.dat',
-# info_msg='Updating remote OCI cache',
-# check=True
-# )
-# if cache_cmd is None or cache_cmd.returncode != 0:
-# print("Cache update failed.")
-# sys.exit(cache_cmd.returncode if cache_cmd else 1)
-# else:
-# print("Updating of the source cache disabled.")
-#
-# dag_hashes_pre_install = []
-# if oci_cache_prefix:
-# specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
-# dag_hashes_pre_install = run_command(
-# "bash", "-c",
-# f'{spack_operation.spack_command_on_env} && spack-python {specfile_dag_hash} {cache_specfile}',
-# capture_output=True,
-# text=True,
-# check=True
-# ).stdout.strip().split()
-#
-# print(dag_hashes_pre_install)
+ if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+ LOGGER.info("Performing update of the source cache")
+
+ update_cached_sources = CachedSourceUploader(
+ path_missing=Path("/tmp/missing_paths_sources.dat"),
+ remote_cache=f"{oci_cache_prefix}/source_cache",
+ remote_cache_type="oci",
+ local_cache=spack_cache_source
+ )
+ update_cached_sources.upload_missing_sources()
+ else:
+ LOGGER.info("Updating of the source cache disabled.")
+
+dag_hashes_pre_install = []
+if oci_cache_prefix:
+ dag_hashes_pre_install = SpecfileDagHash(path_specfile=cache_specfile)
+ dag_hashes_pre_install = dag_hashes_pre_install.run()
+
+LOGGER.info(dag_hashes_pre_install)
+
+spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+
+if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
+ for dag_hash in dag_hashes_pre_install:
+ package = Path(spack_cache_build).resolve() / dag_hash
+ LOGGER.info(package)
+ result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+ if result != 0:
+ LOGGER.info(f'Failed to push {dag_hash}, trying to call spack find on it:')
+ spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+
+ # upload packages from local to remote cache
+ LOGGER.info("Performing update of the build cache")
+ local_cache_esd = Path(spack_cache_build).resolve() / 'build_cache'
+ update_cached_build = CachedBuildResultUploader(
+ path_missing=Path("/tmp/missing_paths_sources.dat"),
+ remote_cache=local_cache_esd,
+ remote_cache_type="oci",
+ local_cache=local_cache_esd
+ )
+ update_cached_build.upload_missing_results()
+else:
+ LOGGER.info('Updating of the build cache disabled.')
+
+spack_operation.reindex()
+
+spack_operation.create_load_env_script()
+spack_operation.reindex()
+
+if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+ raise SpackInstallPackagesException('Some spack packages failed to install.')
-# spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
-#
-# if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
-# for dag_hash in dag_hashes_pre_install:
-# package = Path(spack_cache_build).resolve() / dag_hash
-# print(package)
-# result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
-# if result != 0:
-# print(f'Failed to push {dag_hash}, trying to call spack find on it:')
-# spack_operation.find_package(package=package, long=True, variants=True, paths=True)
-#
-# # upload packages from local to remote cache
-# print("Performing update of the build cache")
-# update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
-# local_cache_esd = Path(spack_cache_build).resolve() / 'build_cache'
-# run_command(
-# "bash", "-c",
-# f'{spack_operation.spack_command_on_env} && python3 {update_cached_buildresults} '
-# f'--local-cache {local_cache_esd}'
-# f'--remote-cache-type=oci '
-# f'--remote-cache f"{local_cache_esd} '
-# f'/tmp/missing_paths_sources.dat',
-# check=True
-# )
-# else:
-# print('Updating of the build cache disabled.')
-#
-# spack_operation.reindex()
-#
-# exports = run_command("bash", "-c",
-# f'spack env activate --sh {env_path}',
-# check=True,
-# stdout=subprocess.PIPE,
-# stderr=subprocess.PIPE,
-# capture_output=True,
-# text=True,
-# ).stdout
-#
-# out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
-# with open(out_file, "w") as f:
-# f.write(exports)
-#
-# if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
-# print('Some spack packages failed to install.')
-# sys.exit(-1)
-#
-# print('Installed all spack packages.')
-# spack_operation.reindex()
+LOGGER.info('Installed all spack packages.')
\ No newline at end of file
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
index 07cf5a8f..32ae7f9c 100644
--- a/packages/py-dedal/package.py
+++ b/packages/py-dedal/package.py
@@ -3,12 +3,15 @@ from spack.package import PythonPackage
class PyDedal(PythonPackage):
homepage = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
git = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+ pypi = 'dedal/dedal-0.9.1.tar.gz'
version(
'experimental',
- commit='8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445'
+ commit='ee84f83c44e13b7c439de20f3c9b0b03d44de5ab'
)
+ version('0.9.1', '8ab265f6d920e617025aba64603e68d42b2be7e483d459e0ab34bfa97213d020')
+
maintainers("Brainiacs")
depends_on('py-setuptools', type='build')
diff --git a/tools/tools.py b/tools/tools.py
index 035d80a4..1c1827ca 100644
--- a/tools/tools.py
+++ b/tools/tools.py
@@ -1,20 +1,6 @@
-import os
import subprocess
from pathlib import Path
-
-def find_first_upstream_prefix(upstream_instance):
- """
- Search for directories named '.spack-db' within the spack opt directory
- under upstream_instance, and return a list of their parent directories.
- """
- base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
- for upstream_prefix, dirs, _ in os.walk(base_path):
- if ".spack-db" in dirs:
- return upstream_prefix
- return None
-
-
def get_submodule_commit_hash(submodule_path: Path):
try:
# Run git rev-parse to get the commit hash of the submodule
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 8d8bb40a..ee84f83c 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445
+Subproject commit ee84f83c44e13b7c439de20f3c9b0b03d44de5ab
--
GitLab
From 5556726312e566cc08af76424d43bd9d7e8e3489 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Wed, 5 Nov 2025 16:41:34 +0200
Subject: [PATCH 12/16] update spack dedal to latest version (v1.0.0) from pypi
---
dedal_manage_spack_env.sh | 4 ++--
packages/py-dedal/package.py | 9 ++-------
2 files changed, 4 insertions(+), 9 deletions(-)
diff --git a/dedal_manage_spack_env.sh b/dedal_manage_spack_env.sh
index 28c0b0c7..615b04c4 100755
--- a/dedal_manage_spack_env.sh
+++ b/dedal_manage_spack_env.sh
@@ -19,8 +19,8 @@ spack mirror remove local_cache
spack clean -a
spack concretize
# install the latest version of dedal from the specified branch (the cache version might not be the latest yet)
-spack install --add py-dedal@experimental
-spack load py-dedal@experimental
+spack install --add py-dedal@1.0.0
+spack load py-dedal@1.0.0
spack_env_cmd=$(spack-python create_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO" "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX" | tail -n 1)
echo "Captured environment command: $spack_env_cmd"
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
index 32ae7f9c..85e068ad 100644
--- a/packages/py-dedal/package.py
+++ b/packages/py-dedal/package.py
@@ -3,14 +3,9 @@ from spack.package import PythonPackage
class PyDedal(PythonPackage):
homepage = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
git = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
- pypi = 'dedal/dedal-0.9.1.tar.gz'
+ pypi = 'dedal/dedal-1.0.0.tar.gz'
- version(
- 'experimental',
- commit='ee84f83c44e13b7c439de20f3c9b0b03d44de5ab'
- )
-
- version('0.9.1', '8ab265f6d920e617025aba64603e68d42b2be7e483d459e0ab34bfa97213d020')
+ version('1.0.0', '4925aab3d9804ba156cb105456b36bfe8aa42eb6660e2444bc48a39e6c068aae')
maintainers("Brainiacs")
--
GitLab
From f203e05b5660fe1bdb7d1c1d2659505d7f1860a1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de>
Date: Fri, 7 Nov 2025 16:11:42 +0100
Subject: [PATCH 13/16] fix: explicit --fresh
---
install_spack_env.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/install_spack_env.py b/install_spack_env.py
index 824d66e5..6292582e 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -131,7 +131,7 @@ if oci_cache_prefix:
LOGGER.info(dag_hashes_pre_install)
-spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, fresh=True, test='root')
if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
for dag_hash in dag_hashes_pre_install:
--
GitLab
From 3b5e32ed77ce018f31422a917acae402ab62498a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de>
Date: Fri, 7 Nov 2025 16:11:52 +0100
Subject: [PATCH 14/16] fix: update dedal
---
vendor/yashchiki | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/vendor/yashchiki b/vendor/yashchiki
index ee84f83c..d3d5d4ba 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit ee84f83c44e13b7c439de20f3c9b0b03d44de5ab
+Subproject commit d3d5d4ba13893abd9506eec70264ffeadf4b7d27
--
GitLab
From 72c6ce1b4c9b6aade2e43a1c1dc226473366f929 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Eric=20M=C3=BCller?= <mueller@kip.uni-heidelberg.de>
Date: Fri, 7 Nov 2025 16:17:19 +0100
Subject: [PATCH 15/16] fix: fixup for dedal update
---
dedal_install.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/dedal_install.py b/dedal_install.py
index 2b827162..00b94376 100644
--- a/dedal_install.py
+++ b/dedal_install.py
@@ -41,4 +41,4 @@ if isinstance(spack_operation, SpackOperationUseCache) and (len(spack_operation.
spack_operation.setup_spack_env()
spack_operation.concretize_spack_env()
-spack_operation.install_packages(os.cpu_count(), signed=False)
+spack_operation.install_packages(os.cpu_count())
--
GitLab
From f0799dd294c62fb691ba4a2e98263c06682151cf Mon Sep 17 00:00:00 2001
From: Eleni Mathioulaki <emathioulaki@athenarc.gr>
Date: Tue, 11 Nov 2025 16:16:03 +0100
Subject: [PATCH 16/16] fix: update dedal pypi hash
---
packages/py-dedal/package.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
index 85e068ad..a8414b1e 100644
--- a/packages/py-dedal/package.py
+++ b/packages/py-dedal/package.py
@@ -5,7 +5,7 @@ class PyDedal(PythonPackage):
git = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
pypi = 'dedal/dedal-1.0.0.tar.gz'
- version('1.0.0', '4925aab3d9804ba156cb105456b36bfe8aa42eb6660e2444bc48a39e6c068aae')
+ version('1.0.0', '7e16ca82117e842bc9e1a21432ed8e95e65132012e283b5b2e8890b35d3112ab')
maintainers("Brainiacs")
--
GitLab