From 7a9e9e4075df8149fcde545656356e6867d393e5 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Fri, 14 Mar 2025 17:26:04 +0200
Subject: [PATCH 1/9] VT-94: migrate esd bash scripts to Dedal library

---
 .gitlab-ci.yml       |   6 +-
 __init__.py          |   0
 install_spack_env.py | 137 +++++++++++++++++++++++++++++++++++++++++++
 utils/__init__.py    |   0
 utils/utils.py       |  13 ++++
 utils/ymerge.py      |  15 +++++
 6 files changed, 168 insertions(+), 3 deletions(-)
 create mode 100644 __init__.py
 create mode 100644 install_spack_env.py
 create mode 100644 utils/__init__.py
 create mode 100644 utils/utils.py
 create mode 100644 utils/ymerge.py

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 91b188c3..aefca059 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -231,7 +231,7 @@ build-spack-env-on-runner:
     - >
         echo "  view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
     # run installation script
-    - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - python3 install_spack_env.py $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
@@ -272,7 +272,7 @@ sync-esd-image:
     # run installation script inside future container environment
     #   => DAG concretization, subsequent cache access + fetching and actual build should be separate steps
     - mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT}
-    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} python3 install_spack_env.py $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     - echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     - echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     # preparing to assemble the image: move in the CI project contents...
@@ -326,7 +326,7 @@ sync-gitlab-spack-instance:
     - git reset --hard $CI_COMMIT_SHA
     - git submodule update --force
     # run installation script
-    - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - python3 install_spack_env.py $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     # create kernel spec, so that the environment can be used in gitlab CI jobs
     - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac);
     - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/install_spack_env.py b/install_spack_env.py
new file mode 100644
index 00000000..31a6ab16
--- /dev/null
+++ b/install_spack_env.py
@@ -0,0 +1,137 @@
+import os
+import argparse
+import shutil
+import sys
+from pathlib import Path
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.enum.SpackConfigCommand import SpackConfigCommand
+from dedal.enum.SpackViewEnum import SpackViewEnum
+from dedal.model.SpackDescriptor import SpackDescriptor
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import set_bashrc_variable
+from utils.utils import find_first_upstream_prefix
+from utils.ymerge import merge_spack_envs
+
+parser = argparse.ArgumentParser(
+    prog='install_spack_env.py',
+    description='ESD install spack env.',
+    epilog='...')
+
+parser.add_argument(
+    "spack_jobs",
+    type=int,
+    help="number of jobs"
+)
+
+parser.add_argument(
+    "installation_root",
+    type=str,
+    help="where to set up the installation"
+)
+
+parser.add_argument(
+    "ebrains_repo",
+    type=str,
+    help="location of ebrains-spack-builds repository"
+)
+
+parser.add_argument(
+    "ebrains_spack_env",
+    type=str,
+    help="name of EBRAINS Spack environment to be created/updated"
+)
+
+parser.add_argument(
+    "upstream_instance",
+    type=str,
+    help="path to Spack instance to use as upstream (optional)"
+)
+
+parser.add_argument(
+    "update_spack_oci_caches",
+    type=bool,
+    help="true enables updating the OCI cache for spack sources and build results"
+)
+
+parser.add_argument(
+    "oci_cache_prefix",
+    type=str,
+)
+
+args = parser.parse_args()
+
+spack_jobs = args.spack_jobs
+installation_root = Path(args.installation_root).resolve()
+ebrains_repo = args.ebrains_repo
+ebrains_spack_env = args.ebrains_spack_env
+upstream_instance = args.upstream_instance
+update_spack_oci_caches = args.update_spack_oci_caches
+oci_cache_prefix = args.oci_cache_prefix
+
+set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+
+ci_spack_root = installation_root / 'spack'
+
+set_bashrc_variable('CACHE_SPECFILE', f'env_specfile.yaml')
+set_bashrc_variable('DEDAL_HOME', f'{ebrains_repo}/vendor/yashchiki')
+set_bashrc_variable('SPACK_CACHE_SOURCE', f'{ci_spack_root}/var/spack/cache')
+set_bashrc_variable('SPACK_CACHE_BUILD', f'{ci_spack_root}/var/spack/cache')
+
+ci_spack_root = installation_root / 'spack'
+spack_root_existed = 1
+if ci_spack_root and ebrains_repo:
+    if not os.path.isdir(ci_spack_root):
+        spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
+        try:
+            os.symlink(spack_source, ci_spack_root)
+            spack_root_existed = 0
+        except FileExistsError:
+            pass
+
+data_dir = installation_root / 'cashing'
+env_repo = SpackDescriptor('ebrains-spack-builds', data_dir, ebrains_repo)
+spack_config = SpackConfig(env=env_repo,
+                           repos=[env_repo],
+                           install_dir=installation_root,
+                           upstream_instance=upstream_instance,
+                           system_name='VBT',
+                           concretization_dir=data_dir / 'concretize_cache',
+                           buildcache_dir=data_dir / 'binary_cache',
+                           use_spack_global=False,
+                           cache_version_build='spack_cache',
+                           view=SpackViewEnum.WITHOUT_VIEW)
+
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.install_spack()
+if upstream_instance:
+    upstream_prefix = find_first_upstream_prefix(upstream_instance)
+    spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
+if spack_root_existed == 0:
+    spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
+
+spack_operation.spec_pacakge('aida')
+spack_operation.reindex()
+spack_operation.add_mirror(mirror_name='local_cache', mirror_path=Path(os.getenv('SPACK_CACHE_BUILD')).resolve())
+spack_operation.setup_spack_env()
+
+site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
+if site_config_dest.exists():
+    site_config_dest.rmdir()
+    site_config_dest.mkdir(parents=True, exist_ok=True)
+    site_config_esd = Path(ebrains_spack_env) / 'site-config'
+    shutil.copy(site_config_esd, site_config_dest)
+    merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
+    tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
+    try:
+        with open(tmp_spack_yaml, "w") as f:
+            f.write(merged_envs)
+        print(f"Written merged spack.yaml to {tmp_spack_yaml}")
+    except Exception as e:
+        sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
+        sys.exit(1)
+    shutil.copy(tmp_spack_yaml, site_config_dest)
+
+spack_operation.concretize_spack_env()
+
+spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+spack_operation.reindex()
\ No newline at end of file
diff --git a/utils/__init__.py b/utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/utils/utils.py b/utils/utils.py
new file mode 100644
index 00000000..2edf146c
--- /dev/null
+++ b/utils/utils.py
@@ -0,0 +1,13 @@
+import os
+
+
+def find_first_upstream_prefix(upstream_instance):
+    """
+    Search for directories named '.spack-db' within the spack opt directory
+    under upstream_instance, and return a list of their parent directories.
+    """
+    base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
+    for upstream_prefix, dirs, _ in os.walk(base_path):
+        if ".spack-db" in dirs:
+            return upstream_prefix
+    return None
diff --git a/utils/ymerge.py b/utils/ymerge.py
new file mode 100644
index 00000000..79567d63
--- /dev/null
+++ b/utils/ymerge.py
@@ -0,0 +1,15 @@
+# spack-python script that merges two environment configuration files (spack.yaml) into one
+# Usage: spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
+# (note: if the second file does not exist, the output is the first file
+
+import os
+from spack.config import merge_yaml, read_config_file, syaml
+
+
+def merge_spack_envs(top_yaml_path, site_yaml_path):
+    if not os.path.exists(site_yaml_path):
+        merged = syaml.dump(read_config_file(top_yaml_path))
+    else:
+        merged = syaml.dump(merge_yaml(read_config_file(top_yaml_path), read_config_file(site_yaml_path)))
+    return merged
+
-- 
GitLab


From 817188aab39fb800c6fc16458556d9713892d3f0 Mon Sep 17 00:00:00 2001
From: Eleni Mathioulaki <emathioulaki@athenarc.gr>
Date: Tue, 15 Apr 2025 11:03:14 +0300
Subject: [PATCH 2/9] feat: update dedal to 0.1.0

---
 vendor/yashchiki | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vendor/yashchiki b/vendor/yashchiki
index 5690db7d..11785be8 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 5690db7dbccd78f3ceef2123a605e662bb8b2c0f
+Subproject commit 11785be8a6e914a8404757670f58b4e78fca2bf9
-- 
GitLab


From 2041e603e5ede0134decafd64e7a0514d7aa4544 Mon Sep 17 00:00:00 2001
From: Eleni Mathioulaki <emathioulaki@athenarc.gr>
Date: Tue, 15 Apr 2025 11:05:01 +0300
Subject: [PATCH 3/9] fix(CI): install dedal python library

---
 .gitlab-ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index aefca059..6f1ae550 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -227,6 +227,8 @@ build-spack-env-on-runner:
     OCI_CACHE_PREFIX: ""
     UPDATE_SPACK_OCI_CACHES: false
   script:
+    # install dedal python library
+    - pip3 install --break-system-packages vendor/yashchiki
     # deactivate environment views (we don't need them for the test build-job)
     - >
         echo "  view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
-- 
GitLab


From 1ff8f0898b515db091bcfe450b2d931080934211 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Mon, 28 Apr 2025 15:23:47 +0300
Subject: [PATCH 4/9] VT-94: fixing incompatibilities

---
 .gitmodules          |  2 +-
 install_spack_env.py | 40 +++++++++++++++++++++++++++-------------
 utils/ymerge.py      | 13 ++++++-------
 vendor/yashchiki     |  2 +-
 4 files changed, 35 insertions(+), 22 deletions(-)

diff --git a/.gitmodules b/.gitmodules
index 30354655..0b9fab48 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -5,4 +5,4 @@
 [submodule "vendor/yashchiki"]
 	path = vendor/yashchiki
 	url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal
-	shallow = true
+	shallow = true
\ No newline at end of file
diff --git a/install_spack_env.py b/install_spack_env.py
index 31a6ab16..55628af4 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -8,9 +8,8 @@ from dedal.enum.SpackConfigCommand import SpackConfigCommand
 from dedal.enum.SpackViewEnum import SpackViewEnum
 from dedal.model.SpackDescriptor import SpackDescriptor
 from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
-from dedal.utils.utils import set_bashrc_variable
+from dedal.utils.utils import set_bashrc_variable, run_command
 from utils.utils import find_first_upstream_prefix
-from utils.ymerge import merge_spack_envs
 
 parser = argparse.ArgumentParser(
     prog='install_spack_env.py',
@@ -56,6 +55,8 @@ parser.add_argument(
 parser.add_argument(
     "oci_cache_prefix",
     type=str,
+    nargs="?",  # optional
+    default=None,
 )
 
 args = parser.parse_args()
@@ -68,6 +69,8 @@ upstream_instance = args.upstream_instance
 update_spack_oci_caches = args.update_spack_oci_caches
 oci_cache_prefix = args.oci_cache_prefix
 
+system_name = os.getenv('SYSTEMNAME')
+
 set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
 
 ci_spack_root = installation_root / 'spack'
@@ -77,32 +80,32 @@ set_bashrc_variable('DEDAL_HOME', f'{ebrains_repo}/vendor/yashchiki')
 set_bashrc_variable('SPACK_CACHE_SOURCE', f'{ci_spack_root}/var/spack/cache')
 set_bashrc_variable('SPACK_CACHE_BUILD', f'{ci_spack_root}/var/spack/cache')
 
-ci_spack_root = installation_root / 'spack'
-spack_root_existed = 1
+spack_root_existed = True
 if ci_spack_root and ebrains_repo:
     if not os.path.isdir(ci_spack_root):
         spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
         try:
             os.symlink(spack_source, ci_spack_root)
-            spack_root_existed = 0
+            spack_root_existed = False
         except FileExistsError:
             pass
 
-data_dir = installation_root / 'cashing'
-env_repo = SpackDescriptor('ebrains-spack-builds', data_dir, ebrains_repo)
+data_dir = installation_root / 'caching'
+print(installation_root.parent)
+env_repo = SpackDescriptor('ebrains-spack-builds', path=installation_root.parent)
 spack_config = SpackConfig(env=env_repo,
                            repos=[env_repo],
-                           install_dir=installation_root,
+                           install_dir=installation_root.parent,
                            upstream_instance=upstream_instance,
                            system_name='VBT',
                            concretization_dir=data_dir / 'concretize_cache',
                            buildcache_dir=data_dir / 'binary_cache',
                            use_spack_global=False,
                            cache_version_build='spack_cache',
-                           view=SpackViewEnum.WITHOUT_VIEW)
+                           view=SpackViewEnum.WITHOUT_VIEW,
+                           spack_dir=Path(ci_spack_root).resolve())
 
 spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
-spack_operation.install_spack()
 if upstream_instance:
     upstream_prefix = find_first_upstream_prefix(upstream_instance)
     spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
@@ -120,11 +123,22 @@ if site_config_dest.exists():
     site_config_dest.mkdir(parents=True, exist_ok=True)
     site_config_esd = Path(ebrains_spack_env) / 'site-config'
     shutil.copy(site_config_esd, site_config_dest)
-    merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
+    # spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
+    # merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
+    merged_envs = result = run_command(
+        "spack-python",
+        f'{ebrains_repo}/site-config/ymerge.py {ebrains_repo}/spack.yaml',
+        f'{ebrains_repo}/site-config/{system_name}/spack.yaml',
+        info_msg='Merging top-level and site-specific spack.yaml files.',
+        exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
+        capture_output=True,
+        text=True,
+        check=True
+    )
     tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
     try:
         with open(tmp_spack_yaml, "w") as f:
-            f.write(merged_envs)
+            f.write(merged_envs.stdout.strip())
         print(f"Written merged spack.yaml to {tmp_spack_yaml}")
     except Exception as e:
         sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
@@ -134,4 +148,4 @@ if site_config_dest.exists():
 spack_operation.concretize_spack_env()
 
 spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
-spack_operation.reindex()
\ No newline at end of file
+spack_operation.reindex()
diff --git a/utils/ymerge.py b/utils/ymerge.py
index 79567d63..accb8779 100644
--- a/utils/ymerge.py
+++ b/utils/ymerge.py
@@ -2,14 +2,13 @@
 # Usage: spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
 # (note: if the second file does not exist, the output is the first file
 
-import os
+import sys, os
 from spack.config import merge_yaml, read_config_file, syaml
 
+if not os.path.exists(sys.argv[2]):
+    merged = syaml.dump(read_config_file(sys.argv[1]))
+else:
+    merged = syaml.dump(merge_yaml(read_config_file(sys.argv[1]), read_config_file(sys.argv[2])))
 
-def merge_spack_envs(top_yaml_path, site_yaml_path):
-    if not os.path.exists(site_yaml_path):
-        merged = syaml.dump(read_config_file(top_yaml_path))
-    else:
-        merged = syaml.dump(merge_yaml(read_config_file(top_yaml_path), read_config_file(site_yaml_path)))
-    return merged
+print(merged)
 
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 11785be8..7a3d1c14 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 11785be8a6e914a8404757670f58b4e78fca2bf9
+Subproject commit 7a3d1c1422fcec27b62c491dd03078b73b933a0b
-- 
GitLab


From 457bf472ec94502d62eb6de330351850faea9ee0 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Wed, 30 Apr 2025 17:25:37 +0300
Subject: [PATCH 5/9] VT-94: adapt to new changes from dedal

---
 .gitlab-ci.yml       |   6 ++
 install_spack_env.py | 194 +++++++++++++++++++++++++++++++++++--------
 vendor/yashchiki     |   2 +-
 3 files changed, 166 insertions(+), 36 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6f1ae550..fa2c7e8f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -246,6 +246,8 @@ build-spack-env-on-runner:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
       # - spack_tests
     when: always
   timeout: 2 days
@@ -297,6 +299,8 @@ sync-esd-image:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
     when: always
   timeout: 2 days
   resource_group: registry-esd-master-image
@@ -344,6 +348,8 @@ sync-gitlab-spack-instance:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
     when: always
   rules:
     # branches that update the gitlab-runner upstream (read-only) installation
diff --git a/install_spack_env.py b/install_spack_env.py
index 55628af4..4e225163 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -69,17 +69,41 @@ upstream_instance = args.upstream_instance
 update_spack_oci_caches = args.update_spack_oci_caches
 oci_cache_prefix = args.oci_cache_prefix
 
-system_name = os.getenv('SYSTEMNAME')
+# define SYSTEMNAME variable in sites where it's not already defined
+system_name = (
+        os.getenv('SYSTEMNAME')
+        or os.getenv('HPC_SYSTEM')
+        or os.getenv('BSC_MACHINE')
+)
+
+# disable local configuration and cache directories
+set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
+os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
+set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
+os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
 
-set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+if oci_cache_prefix:
+    set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+    os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
 
+# make sure spack uses the symlinked folder as path
 ci_spack_root = installation_root / 'spack'
 
-set_bashrc_variable('CACHE_SPECFILE', f'env_specfile.yaml')
-set_bashrc_variable('DEDAL_HOME', f'{ebrains_repo}/vendor/yashchiki')
-set_bashrc_variable('SPACK_CACHE_SOURCE', f'{ci_spack_root}/var/spack/cache')
-set_bashrc_variable('SPACK_CACHE_BUILD', f'{ci_spack_root}/var/spack/cache')
+# cache related variables
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
+set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
+os.environ['CACHE_SPECFILE'] = cache_specfile
+dedal_home = f'{ebrains_repo}/vendor/yashchiki'
+os.environ['DEDAL_HOME'] = dedal_home
+set_bashrc_variable('DEDAL_HOME', dedal_home)
+spack_cache_source = f'{ci_spack_root}/var/spack/cache'
+set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
+os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
+set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
+os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
+spack_cache_build = spack_cache_source
 
+# initial setup: use spack submodule if spack dir doesn't already exist
 spack_root_existed = True
 if ci_spack_root and ebrains_repo:
     if not os.path.isdir(ci_spack_root):
@@ -91,61 +115,161 @@ if ci_spack_root and ebrains_repo:
             pass
 
 data_dir = installation_root / 'caching'
-print(installation_root.parent)
-env_repo = SpackDescriptor('ebrains-spack-builds', path=installation_root.parent)
+
+env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
+
+binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
 spack_config = SpackConfig(env=env_repo,
-                           repos=[env_repo],
+                           repos=[],
                            install_dir=installation_root.parent,
                            upstream_instance=upstream_instance,
-                           system_name='VBT',
-                           concretization_dir=data_dir / 'concretize_cache',
-                           buildcache_dir=data_dir / 'binary_cache',
+                           system_name=system_name,
+                           buildcache_dir=binary_cache_path,
                            use_spack_global=False,
                            cache_version_build='spack_cache',
                            view=SpackViewEnum.WITHOUT_VIEW,
                            spack_dir=Path(ci_spack_root).resolve())
 
 spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.setup_spack_env()
+
 if upstream_instance:
     upstream_prefix = find_first_upstream_prefix(upstream_instance)
     spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
 if spack_root_existed == 0:
     spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
 
+# make sure all fetching/clingo stuff happens before anything else
 spack_operation.spec_pacakge('aida')
+# rebuild spack's database (could be an debugging session)
 spack_operation.reindex()
-spack_operation.add_mirror(mirror_name='local_cache', mirror_path=Path(os.getenv('SPACK_CACHE_BUILD')).resolve())
-spack_operation.setup_spack_env()
+spack_operation.add_mirror(mirror_name='local_cache', mirror_path=binary_cache_path)
 
-site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
+site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env / 'site-config'
 if site_config_dest.exists():
-    site_config_dest.rmdir()
-    site_config_dest.mkdir(parents=True, exist_ok=True)
-    site_config_esd = Path(ebrains_spack_env) / 'site-config'
-    shutil.copy(site_config_esd, site_config_dest)
-    # spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
-    # merged_envs = merge_spack_envs(Path(ebrains_spack_env) / 'spack.yaml', site_config_esd / os.getenv('SYSTEMNAME') / 'spack.yaml')
-    merged_envs = result = run_command(
-        "spack-python",
-        f'{ebrains_repo}/site-config/ymerge.py {ebrains_repo}/spack.yaml',
-        f'{ebrains_repo}/site-config/{system_name}/spack.yaml',
-        info_msg='Merging top-level and site-specific spack.yaml files.',
-        exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
-        capture_output=True,
-        text=True,
-        check=True
-    )
+    shutil.rmtree(site_config_dest / 'site-config')
+    os.makedirs(site_config_dest / 'site-config')
+
+site_config = Path(ebrains_repo) / 'site-config'
+shutil.copytree(site_config, site_config_dest)
+
+y_merge_path = Path(ebrains_repo) / 'site-config/ymerge.py'
+merge_path_1 = Path(ebrains_repo) / 'spack.yaml'
+merge_path_2 = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
+
+# update environment site-configs
+merged_envs = run_command(
+    "bash", "-c",
+    f'{spack_operation.spack_setup_script} && spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
+    info_msg='Merging top-level and site-specific spack.yaml files.',
+    exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
+    capture_output=True,
+    text=True,
+    check=True
+).stdout
+
+if merged_envs is None:
+    sys.exit(-1)
+else:
     tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
     try:
         with open(tmp_spack_yaml, "w") as f:
-            f.write(merged_envs.stdout.strip())
-        print(f"Written merged spack.yaml to {tmp_spack_yaml}")
+            f.write(merged_envs)
     except Exception as e:
         sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
         sys.exit(1)
-    shutil.copy(tmp_spack_yaml, site_config_dest)
+    shutil.copy(tmp_spack_yaml, site_config_dest.parent)
+
+# add repo if it does not exist
+spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
+spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
+
+dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
 
-spack_operation.concretize_spack_env()
+try:
+    with open(cache_specfile, "w") as f:
+        f.write(dump_dag)
+except Exception as e:
+    sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
+    sys.exit(1)
+
+if oci_cache_prefix:
+    # fetch missing sources (if packages not yet installed)
+    fetch_cached_sources = Path(dedal_home).resolve() / '/cli' / 'fetch_cached_sources.py'
+    run_command(
+        "bash", "-c",
+        f"{spack_operation.spack_setup_script} && python3 "
+        f"{fetch_cached_sources} "
+        f"--local-cache spack_cache_source "
+        f"--remote-cache-type oci "
+        f"--remote-cache {oci_cache_prefix}/source_cache "
+        f"--yashchiki-home {dedal_home} "
+        f"/tmp/missing_paths_sources.dat cache_specfile",
+        info_msg="Fetching missing sources",
+        exception_msg="Failed to fetch missing sources",
+        check=True
+    )
+    # fetch missing build results (if packages not yet installed)
+    fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.dat'
+    run_command(
+        "bash", "-c",
+        f"{spack_operation.spack_setup_script} && python3 "
+        f"{fetch_cached_buildresults} "
+        f"--local-cache",
+        f"{spack_cache_build}/build_cache "
+        f"--remote-cache-type oci "
+        f"--remote-cache {oci_cache_prefix}/build_cache "
+        f"--yashchiki-home {dedal_home} "
+        f"/tmp/missing_paths_buildresults.dat {cache_specfile}",
+        info_msg="Fetching missing build results",
+        exception_msg="Failed to fetch missing build results",
+        check=True
+    )
+
+run_command(
+    "bash", "-c",
+    f'{spack_operation.spack_setup_script} && spack-python',
+    f'exit(not len(spack.environment.active_environment().uninstalled_specs()))',
+    info_msg='Checking for uninstalled Spack specs',
+    check=True
+)
+
+spack_operation.fetch(dependencies=True, missing=True)
+
+if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+    print("Performing update of the source cache")
+    update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
+    cache_cmd = run_command(
+        "bash", "-c",
+        f'{spack_operation.spack_setup_script} && python3 {update_cached_sources} '
+        f'--local-cache {spack_cache_source}'
+        f'--remote-cache-type oci '
+        f'--remote-cache f"{oci_cache_prefix}/source_cache '
+        f'/tmp/missing_paths_sources.dat',
+        info_msg='Updating remote OCI cache',
+        check=True
+    )
+    if cache_cmd is None or cache_cmd.returncode != 0:
+        print("Cache update failed.")
+        sys.exit(cache_cmd.returncode if cache_cmd else 1)
+else:
+    print("Updating of the source cache disabled.")
+
+if oci_cache_prefix:
+    specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
+    dag_hashes_pre_install = run_command(
+        "bash", "-c",
+        f'{spack_operation.spack_setup_script} && spack-python specfile_dag_hash {cache_specfile}',
+        capture_output=True,
+        text=True,
+        check=True
+    ).stdout
 
 spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+
+if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+    print('Some spack packages failed to install.')
+    sys.exit(-1)
+
+print('Installed all spack packages.')
 spack_operation.reindex()
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 7a3d1c14..ffc59776 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 7a3d1c1422fcec27b62c491dd03078b73b933a0b
+Subproject commit ffc597764769b6b01e8f75b7275caa4693cb3ac4
-- 
GitLab


From 4ef5addbb98b1d7ffb53335bf5f1587d8daa9bab Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Fri, 9 May 2025 10:54:12 +0300
Subject: [PATCH 6/9] VT-94: Caching migration to python

---
 install_spack_env.py | 56 ++++++++++++++++++++++++++++++++++++++------
 vendor/yashchiki     |  2 +-
 2 files changed, 50 insertions(+), 8 deletions(-)

diff --git a/install_spack_env.py b/install_spack_env.py
index 4e225163..aef88437 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -1,6 +1,7 @@
 import os
 import argparse
 import shutil
+import subprocess
 import sys
 from pathlib import Path
 from dedal.configuration.SpackConfig import SpackConfig
@@ -195,13 +196,13 @@ except Exception as e:
 
 if oci_cache_prefix:
     # fetch missing sources (if packages not yet installed)
-    fetch_cached_sources = Path(dedal_home).resolve() / '/cli' / 'fetch_cached_sources.py'
+    fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
     run_command(
         "bash", "-c",
         f"{spack_operation.spack_setup_script} && python3 "
         f"{fetch_cached_sources} "
         f"--local-cache spack_cache_source "
-        f"--remote-cache-type oci "
+        f"--remote-cache-type=oci "
         f"--remote-cache {oci_cache_prefix}/source_cache "
         f"--yashchiki-home {dedal_home} "
         f"/tmp/missing_paths_sources.dat cache_specfile",
@@ -210,14 +211,14 @@ if oci_cache_prefix:
         check=True
     )
     # fetch missing build results (if packages not yet installed)
-    fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.dat'
+    fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
     run_command(
         "bash", "-c",
         f"{spack_operation.spack_setup_script} && python3 "
         f"{fetch_cached_buildresults} "
         f"--local-cache",
         f"{spack_cache_build}/build_cache "
-        f"--remote-cache-type oci "
+        f"--remote-cache-type=oci "
         f"--remote-cache {oci_cache_prefix}/build_cache "
         f"--yashchiki-home {dedal_home} "
         f"/tmp/missing_paths_buildresults.dat {cache_specfile}",
@@ -243,7 +244,7 @@ if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 't
         "bash", "-c",
         f'{spack_operation.spack_setup_script} && python3 {update_cached_sources} '
         f'--local-cache {spack_cache_source}'
-        f'--remote-cache-type oci '
+        f'--remote-cache-type=oci'
         f'--remote-cache f"{oci_cache_prefix}/source_cache '
         f'/tmp/missing_paths_sources.dat',
         info_msg='Updating remote OCI cache',
@@ -255,18 +256,59 @@ if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 't
 else:
     print("Updating of the source cache disabled.")
 
+dag_hashes_pre_install = []
 if oci_cache_prefix:
     specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
     dag_hashes_pre_install = run_command(
         "bash", "-c",
-        f'{spack_operation.spack_setup_script} && spack-python specfile_dag_hash {cache_specfile}',
+        f'{spack_operation.spack_setup_script} && spack-python {specfile_dag_hash} {cache_specfile}',
         capture_output=True,
         text=True,
         check=True
-    ).stdout
+    ).stdout.strip().split()
 
 spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
 
+if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
+    for dag_hash in dag_hashes_pre_install:
+        package = Path(spack_cache_build).resolve() / dag_hash
+        print(package)
+        result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+        if result != 0:
+            print(f'Failed to push {dag_hash}, trying to call spack find on it:')
+            spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+
+        # upload packages from local to remote cache
+        print("Performing update of the build cache")
+        update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
+        local_cache = Path(spack_cache_build).resolve() / 'build_cache'
+        run_command(
+            "bash", "-c",
+            f'{spack_operation.spack_setup_script} && python3 {update_cached_buildresults} '
+            f'--local-cache {local_cache}'
+            f'--remote-cache-type=oci '
+            f'--remote-cache f"{local_cache} '
+            f'/tmp/missing_paths_sources.dat',
+            check=True
+        )
+else:
+    print('Updating of the build cache disabled.')
+
+spack_operation.reindex()
+
+exports = run_command("bash", "-c",
+                      f'spack env activate --sh {ebrains_spack_env}',
+                      check=True,
+                      stdout=subprocess.PIPE,
+                      stderr=subprocess.PIPE,
+                      capture_output=True,
+                      text=True,
+                      ).stdout
+
+out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
+with open(out_file, "w") as f:
+    f.write(exports)
+
 if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
     print('Some spack packages failed to install.')
     sys.exit(-1)
diff --git a/vendor/yashchiki b/vendor/yashchiki
index ffc59776..e21c2c08 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit ffc597764769b6b01e8f75b7275caa4693cb3ac4
+Subproject commit e21c2c085cdf1712aedda75caea961f152d460f0
-- 
GitLab


From 5e53a433cbfab85749f3e442226c3f84a5ced4a0 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Mon, 12 May 2025 15:23:08 +0300
Subject: [PATCH 7/9] VT-94: Installing spack before running
 install_spack_env.py script

---
 .gitlab-ci.yml       |  6 ++++++
 install_spack_env.py | 19 ++++++++++---------
 2 files changed, 16 insertions(+), 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index fa2c7e8f..e4b80592 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -229,6 +229,8 @@ build-spack-env-on-runner:
   script:
     # install dedal python library
     - pip3 install --break-system-packages vendor/yashchiki
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
     # deactivate environment views (we don't need them for the test build-job)
     - >
         echo "  view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
@@ -276,6 +278,8 @@ sync-esd-image:
     # run installation script inside future container environment
     #   => DAG concretization, subsequent cache access + fetching and actual build should be separate steps
     - mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT}
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
     - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} python3 install_spack_env.py $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     - echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     - echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
@@ -331,6 +335,8 @@ sync-gitlab-spack-instance:
     - git fetch origin
     - git reset --hard $CI_COMMIT_SHA
     - git submodule update --force
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
     # run installation script
     - python3 install_spack_env.py $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     # create kernel spec, so that the environment can be used in gitlab CI jobs
diff --git a/install_spack_env.py b/install_spack_env.py
index aef88437..ea0ca80c 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -146,7 +146,8 @@ spack_operation.spec_pacakge('aida')
 spack_operation.reindex()
 spack_operation.add_mirror(mirror_name='local_cache', mirror_path=binary_cache_path)
 
-site_config_dest = ci_spack_root / 'var/spack/environments' / ebrains_spack_env / 'site-config'
+env_path = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
+site_config_dest = env_path / 'site-config'
 if site_config_dest.exists():
     shutil.rmtree(site_config_dest / 'site-config')
     os.makedirs(site_config_dest / 'site-config')
@@ -161,7 +162,7 @@ merge_path_2 = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
 # update environment site-configs
 merged_envs = run_command(
     "bash", "-c",
-    f'{spack_operation.spack_setup_script} && spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
+    f'spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
     info_msg='Merging top-level and site-specific spack.yaml files.',
     exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
     capture_output=True,
@@ -199,7 +200,7 @@ if oci_cache_prefix:
     fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
     run_command(
         "bash", "-c",
-        f"{spack_operation.spack_setup_script} && python3 "
+        f"python3 "
         f"{fetch_cached_sources} "
         f"--local-cache spack_cache_source "
         f"--remote-cache-type=oci "
@@ -214,7 +215,7 @@ if oci_cache_prefix:
     fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
     run_command(
         "bash", "-c",
-        f"{spack_operation.spack_setup_script} && python3 "
+        f"python3 "
         f"{fetch_cached_buildresults} "
         f"--local-cache",
         f"{spack_cache_build}/build_cache "
@@ -229,7 +230,7 @@ if oci_cache_prefix:
 
 run_command(
     "bash", "-c",
-    f'{spack_operation.spack_setup_script} && spack-python',
+    f'spack-python',
     f'exit(not len(spack.environment.active_environment().uninstalled_specs()))',
     info_msg='Checking for uninstalled Spack specs',
     check=True
@@ -242,7 +243,7 @@ if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 't
     update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
     cache_cmd = run_command(
         "bash", "-c",
-        f'{spack_operation.spack_setup_script} && python3 {update_cached_sources} '
+        f'python3 {update_cached_sources} '
         f'--local-cache {spack_cache_source}'
         f'--remote-cache-type=oci'
         f'--remote-cache f"{oci_cache_prefix}/source_cache '
@@ -261,7 +262,7 @@ if oci_cache_prefix:
     specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
     dag_hashes_pre_install = run_command(
         "bash", "-c",
-        f'{spack_operation.spack_setup_script} && spack-python {specfile_dag_hash} {cache_specfile}',
+        f'spack-python {specfile_dag_hash} {cache_specfile}',
         capture_output=True,
         text=True,
         check=True
@@ -284,7 +285,7 @@ if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
         local_cache = Path(spack_cache_build).resolve() / 'build_cache'
         run_command(
             "bash", "-c",
-            f'{spack_operation.spack_setup_script} && python3 {update_cached_buildresults} '
+            f'python3 {update_cached_buildresults} '
             f'--local-cache {local_cache}'
             f'--remote-cache-type=oci '
             f'--remote-cache f"{local_cache} '
@@ -297,7 +298,7 @@ else:
 spack_operation.reindex()
 
 exports = run_command("bash", "-c",
-                      f'spack env activate --sh {ebrains_spack_env}',
+                      f'spack env activate --sh {env_path}',
                       check=True,
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE,
-- 
GitLab


From b17ec2324b4c7c9000f20b2da43fb41beaa4c003 Mon Sep 17 00:00:00 2001
From: adrianciu <adrian.ciu@codemart.ro>
Date: Tue, 13 May 2025 10:57:24 +0300
Subject: [PATCH 8/9] VT-94: compatibility with spack-python; dedal caching;
 dynamic dedal version

---
 .gitlab-ci.yml                           |   2 +-
 utils/__init__.py => create_spack_env.py |   0
 dedal_env/spack.yaml                     |   8 +
 dedal_install.py                         |  44 ++++
 dedal_install_spack_env.sh               |  23 +++
 install_spack_env.py                     | 243 +++++++++++------------
 packages/py-dedal/package.py             |  22 ++
 packages/py-oras/package.py              |  33 +++
 tools/__init__.py                        |   0
 tools/tools.py                           |  32 +++
 {utils => tools}/ymerge.py               |   0
 utils/utils.py                           |  13 --
 vendor/yashchiki                         |   2 +-
 13 files changed, 275 insertions(+), 147 deletions(-)
 rename utils/__init__.py => create_spack_env.py (100%)
 create mode 100644 dedal_env/spack.yaml
 create mode 100644 dedal_install.py
 create mode 100755 dedal_install_spack_env.sh
 create mode 100644 packages/py-dedal/package.py
 create mode 100644 packages/py-oras/package.py
 create mode 100644 tools/__init__.py
 create mode 100644 tools/tools.py
 rename {utils => tools}/ymerge.py (100%)
 delete mode 100644 utils/utils.py

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e4b80592..140628ef 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -235,7 +235,7 @@ build-spack-env-on-runner:
     - >
         echo "  view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
     # run installation script
-    - python3 install_spack_env.py $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - bash dedal_install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
diff --git a/utils/__init__.py b/create_spack_env.py
similarity index 100%
rename from utils/__init__.py
rename to create_spack_env.py
diff --git a/dedal_env/spack.yaml b/dedal_env/spack.yaml
new file mode 100644
index 00000000..9cd58b5c
--- /dev/null
+++ b/dedal_env/spack.yaml
@@ -0,0 +1,8 @@
+spack:
+  specs:
+  - python@3.13.0
+  - py-oras@0.2.31
+  - py-ruamel-yaml@0.17.32
+  - py-click@8.1.7
+  - py-jsonpickle@2.2.0
+  - py-pyyaml@5.4.1
\ No newline at end of file
diff --git a/dedal_install.py b/dedal_install.py
new file mode 100644
index 00000000..2b827162
--- /dev/null
+++ b/dedal_install.py
@@ -0,0 +1,44 @@
+from pathlib import Path
+from dedal.configuration.GpgConfig import GpgConfig
+from dedal.configuration.SpackConfig import SpackConfig
+import os
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import count_files_in_folder
+
+from dedal.model.SpackDescriptor import SpackDescriptor
+
+from dedal.spack_factory.SpackOperationUseCache import SpackOperationUseCache
+
+dedal_env = SpackDescriptor(name='dedal_env', path=Path('./'))
+ebrains_repo = SpackDescriptor(name='ebrains-spack-builds', path=Path('../'))
+dedal_dir = 'dedal_install_dir'
+install_dir = Path('./') / dedal_dir
+os.makedirs(install_dir, exist_ok=True)
+concretization_dir = install_dir / 'concretization'
+buildcache_dir = install_dir / 'buildcache'
+spack_config = SpackConfig(env=dedal_env,
+                           repos=[ebrains_repo],
+                           install_dir=install_dir,
+                           upstream_instance=None,
+                           system_name=None,
+                           concretization_dir=concretization_dir,
+                           buildcache_dir=buildcache_dir,
+                           gpg=None,
+                           use_spack_global=False,
+                           cache_version_build='latest',
+                           cache_version_concretize='latest',
+                           override_cache=False,
+                           spack_dir=Path('./vendor').resolve() / 'spack'
+                           )
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=True)
+
+if isinstance(spack_operation, SpackOperationUseCache) and (len(spack_operation.cache_dependency.list_tags()) == 0 or len(spack_operation.build_cache.list_tags()) == 0):
+    print('No cache available')
+    gpg = GpgConfig('dedal', 'science@codemart.ro')
+    spack_config.gpg = gpg
+    spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+    print(type(spack_operation))
+
+spack_operation.setup_spack_env()
+spack_operation.concretize_spack_env()
+spack_operation.install_packages(os.cpu_count(), signed=False)
diff --git a/dedal_install_spack_env.sh b/dedal_install_spack_env.sh
new file mode 100755
index 00000000..83c73680
--- /dev/null
+++ b/dedal_install_spack_env.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+
+set -eo pipefail
+
+SPACK_JOBS=$1         # number of jobs
+INSTALLATION_ROOT=$2  # where to set up the installation
+EBRAINS_REPO=$3       # location of ebrains-spack-builds repository
+EBRAINS_SPACK_ENV=$4  # name of EBRAINS Spack environment to be created/updated
+UPSTREAM_INSTANCE=$5  # path to Spack instance to use as upstream (optional)
+UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results
+export OCI_CACHE_PREFIX=$7
+
+python3 dedal_install.py
+
+spack env activate -p ./dedal_env
+spack mirror remove local_cache
+spack clean -a
+spack concretize
+# install the latest version of dedal from the specified branch (the cache version might not be the latest yet)
+spack install --add py-dedal@experimental
+spack load py-dedal@experimental
+
+spack-python install_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO"  "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX"
\ No newline at end of file
diff --git a/install_spack_env.py b/install_spack_env.py
index ea0ca80c..f00105e3 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -3,6 +3,7 @@ import argparse
 import shutil
 import subprocess
 import sys
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
 from pathlib import Path
 from dedal.configuration.SpackConfig import SpackConfig
 from dedal.enum.SpackConfigCommand import SpackConfigCommand
@@ -10,7 +11,8 @@ from dedal.enum.SpackViewEnum import SpackViewEnum
 from dedal.model.SpackDescriptor import SpackDescriptor
 from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
 from dedal.utils.utils import set_bashrc_variable, run_command
-from utils.utils import find_first_upstream_prefix
+from tools.tools import find_first_upstream_prefix
+import spack
 
 parser = argparse.ArgumentParser(
     prog='install_spack_env.py',
@@ -70,6 +72,8 @@ upstream_instance = args.upstream_instance
 update_spack_oci_caches = args.update_spack_oci_caches
 oci_cache_prefix = args.oci_cache_prefix
 
+print(f'oci_cache_prefix: {oci_cache_prefix}')
+
 # define SYSTEMNAME variable in sites where it's not already defined
 system_name = (
         os.getenv('SYSTEMNAME')
@@ -115,8 +119,6 @@ if ci_spack_root and ebrains_repo:
         except FileExistsError:
             pass
 
-data_dir = installation_root / 'caching'
-
 env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
 
 binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
@@ -125,9 +127,7 @@ spack_config = SpackConfig(env=env_repo,
                            install_dir=installation_root.parent,
                            upstream_instance=upstream_instance,
                            system_name=system_name,
-                           buildcache_dir=binary_cache_path,
                            use_spack_global=False,
-                           cache_version_build='spack_cache',
                            view=SpackViewEnum.WITHOUT_VIEW,
                            spack_dir=Path(ci_spack_root).resolve())
 
@@ -144,43 +144,7 @@ if spack_root_existed == 0:
 spack_operation.spec_pacakge('aida')
 # rebuild spack's database (could be an debugging session)
 spack_operation.reindex()
-spack_operation.add_mirror(mirror_name='local_cache', mirror_path=binary_cache_path)
-
-env_path = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
-site_config_dest = env_path / 'site-config'
-if site_config_dest.exists():
-    shutil.rmtree(site_config_dest / 'site-config')
-    os.makedirs(site_config_dest / 'site-config')
-
-site_config = Path(ebrains_repo) / 'site-config'
-shutil.copytree(site_config, site_config_dest)
-
-y_merge_path = Path(ebrains_repo) / 'site-config/ymerge.py'
-merge_path_1 = Path(ebrains_repo) / 'spack.yaml'
-merge_path_2 = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
-
-# update environment site-configs
-merged_envs = run_command(
-    "bash", "-c",
-    f'spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
-    info_msg='Merging top-level and site-specific spack.yaml files.',
-    exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
-    capture_output=True,
-    text=True,
-    check=True
-).stdout
-
-if merged_envs is None:
-    sys.exit(-1)
-else:
-    tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
-    try:
-        with open(tmp_spack_yaml, "w") as f:
-            f.write(merged_envs)
-    except Exception as e:
-        sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
-        sys.exit(1)
-    shutil.copy(tmp_spack_yaml, site_config_dest.parent)
+spack_operation.add_mirror(mirror_name='local_cache_esd', mirror_path=binary_cache_path, autopush=True)
 
 # add repo if it does not exist
 spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
@@ -191,16 +155,23 @@ dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
 try:
     with open(cache_specfile, "w") as f:
         f.write(dump_dag)
+    print(f'Dumped cache_specfile')
 except Exception as e:
     sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
     sys.exit(1)
 
+print(spack_operation.env_path)
+
+##################### first part ##################
+
 if oci_cache_prefix:
+    print('=======================Started fetching===================')
     # fetch missing sources (if packages not yet installed)
     fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
+    print(fetch_cached_sources)
     run_command(
         "bash", "-c",
-        f"python3 "
+        f"{spack_operation.spack_command_on_env} && python3 "
         f"{fetch_cached_sources} "
         f"--local-cache spack_cache_source "
         f"--remote-cache-type=oci "
@@ -213,9 +184,10 @@ if oci_cache_prefix:
     )
     # fetch missing build results (if packages not yet installed)
     fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
+    print(fetch_cached_buildresults)
     run_command(
         "bash", "-c",
-        f"python3 "
+        f"{spack_operation.spack_command_on_env} && python3 "
         f"{fetch_cached_buildresults} "
         f"--local-cache",
         f"{spack_cache_build}/build_cache "
@@ -228,91 +200,98 @@ if oci_cache_prefix:
         check=True
     )
 
-run_command(
-    "bash", "-c",
-    f'spack-python',
-    f'exit(not len(spack.environment.active_environment().uninstalled_specs()))',
-    info_msg='Checking for uninstalled Spack specs',
-    check=True
-)
-
-spack_operation.fetch(dependencies=True, missing=True)
-
-if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
-    print("Performing update of the source cache")
-    update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
-    cache_cmd = run_command(
-        "bash", "-c",
-        f'python3 {update_cached_sources} '
-        f'--local-cache {spack_cache_source}'
-        f'--remote-cache-type=oci'
-        f'--remote-cache f"{oci_cache_prefix}/source_cache '
-        f'/tmp/missing_paths_sources.dat',
-        info_msg='Updating remote OCI cache',
-        check=True
-    )
-    if cache_cmd is None or cache_cmd.returncode != 0:
-        print("Cache update failed.")
-        sys.exit(cache_cmd.returncode if cache_cmd else 1)
-else:
-    print("Updating of the source cache disabled.")
-
-dag_hashes_pre_install = []
-if oci_cache_prefix:
-    specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
-    dag_hashes_pre_install = run_command(
-        "bash", "-c",
-        f'spack-python {specfile_dag_hash} {cache_specfile}',
-        capture_output=True,
-        text=True,
-        check=True
-    ).stdout.strip().split()
-
-spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
-
-if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
-    for dag_hash in dag_hashes_pre_install:
-        package = Path(spack_cache_build).resolve() / dag_hash
-        print(package)
-        result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
-        if result != 0:
-            print(f'Failed to push {dag_hash}, trying to call spack find on it:')
-            spack_operation.find_package(package=package, long=True, variants=True, paths=True)
-
-        # upload packages from local to remote cache
-        print("Performing update of the build cache")
-        update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
-        local_cache = Path(spack_cache_build).resolve() / 'build_cache'
-        run_command(
-            "bash", "-c",
-            f'python3 {update_cached_buildresults} '
-            f'--local-cache {local_cache}'
-            f'--remote-cache-type=oci '
-            f'--remote-cache f"{local_cache} '
-            f'/tmp/missing_paths_sources.dat',
-            check=True
-        )
-else:
-    print('Updating of the build cache disabled.')
-
-spack_operation.reindex()
-
-exports = run_command("bash", "-c",
-                      f'spack env activate --sh {env_path}',
-                      check=True,
-                      stdout=subprocess.PIPE,
-                      stderr=subprocess.PIPE,
-                      capture_output=True,
-                      text=True,
-                      ).stdout
-
-out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
-with open(out_file, "w") as f:
-    f.write(exports)
-
-if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
-    print('Some spack packages failed to install.')
-    sys.exit(-1)
-
-print('Installed all spack packages.')
-spack_operation.reindex()
+# cmd = [
+#     "spack-python",
+#     "-c",
+#     "exit(not len(spack.environment.active_environment().uninstalled_specs()))"
+# ]
+
+# packages_not_installed = subprocess.run(cmd).returncode == 0
+packages_not_installed = bool(spack.environment.active_environment().uninstalled_specs())
+
+
+print(f'=====================Packages_not_installed: {packages_not_installed}=====================')
+
+if packages_not_installed:
+    spack_operation.fetch(dependencies=True, missing=True)
+#
+#     if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+#         print("Performing update of the source cache")
+#         update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
+#         cache_cmd = run_command(
+#             "bash", "-c",
+#             f'{spack_operation.spack_command_on_env} && python3 {update_cached_sources} '
+#             f'--local-cache {spack_cache_source}'
+#             f'--remote-cache-type=oci'
+#             f'--remote-cache f"{oci_cache_prefix}/source_cache '
+#             f'/tmp/missing_paths_sources.dat',
+#             info_msg='Updating remote OCI cache',
+#             check=True
+#         )
+#         if cache_cmd is None or cache_cmd.returncode != 0:
+#             print("Cache update failed.")
+#             sys.exit(cache_cmd.returncode if cache_cmd else 1)
+#     else:
+#         print("Updating of the source cache disabled.")
+#
+# dag_hashes_pre_install = []
+# if oci_cache_prefix:
+#     specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
+#     dag_hashes_pre_install = run_command(
+#         "bash", "-c",
+#         f'{spack_operation.spack_command_on_env} && spack-python {specfile_dag_hash} {cache_specfile}',
+#         capture_output=True,
+#         text=True,
+#         check=True
+#     ).stdout.strip().split()
+#
+# print(dag_hashes_pre_install)
+
+# spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+#
+# if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
+#     for dag_hash in dag_hashes_pre_install:
+#         package = Path(spack_cache_build).resolve() / dag_hash
+#         print(package)
+#         result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+#         if result != 0:
+#             print(f'Failed to push {dag_hash}, trying to call spack find on it:')
+#             spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+#
+#         # upload packages from local to remote cache
+#         print("Performing update of the build cache")
+#         update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
+#         local_cache_esd = Path(spack_cache_build).resolve() / 'build_cache'
+#         run_command(
+#             "bash", "-c",
+#             f'{spack_operation.spack_command_on_env} && python3 {update_cached_buildresults} '
+#             f'--local-cache {local_cache_esd}'
+#             f'--remote-cache-type=oci '
+#             f'--remote-cache f"{local_cache_esd} '
+#             f'/tmp/missing_paths_sources.dat',
+#             check=True
+#         )
+# else:
+#     print('Updating of the build cache disabled.')
+#
+# spack_operation.reindex()
+#
+# exports = run_command("bash", "-c",
+#                       f'spack env activate --sh {env_path}',
+#                       check=True,
+#                       stdout=subprocess.PIPE,
+#                       stderr=subprocess.PIPE,
+#                       capture_output=True,
+#                       text=True,
+#                       ).stdout
+#
+# out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
+# with open(out_file, "w") as f:
+#     f.write(exports)
+#
+# if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+#     print('Some spack packages failed to install.')
+#     sys.exit(-1)
+#
+# print('Installed all spack packages.')
+# spack_operation.reindex()
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
new file mode 100644
index 00000000..07cf5a8f
--- /dev/null
+++ b/packages/py-dedal/package.py
@@ -0,0 +1,22 @@
+from spack.package import PythonPackage
+
+class PyDedal(PythonPackage):
+    homepage = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+    git      = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+
+    version(
+        'experimental',
+        commit='8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445'
+    )
+
+    maintainers("Brainiacs")
+
+    depends_on('py-setuptools', type='build')
+
+    depends_on("python@3.10:", type=("build", "run"))
+    depends_on("py-oras@0.2.31:", type=("build", "run"))
+
+    depends_on("py-ruamel-yaml", type=("build", "run"))
+    depends_on("py-click", type=("build", "run"))
+    depends_on("py-jsonpickle", type=("build", "run"))
+    depends_on("py-pyyaml", type=("build", "run"))
\ No newline at end of file
diff --git a/packages/py-oras/package.py b/packages/py-oras/package.py
new file mode 100644
index 00000000..9a5651da
--- /dev/null
+++ b/packages/py-oras/package.py
@@ -0,0 +1,33 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyOras(PythonPackage):
+    """ORAS Python SDK: OCI Registry as Storage Python SDK."""
+
+    homepage = "https://oras.land"
+    git = "https://github.com/oras-project/oras-py"
+    url = "https://files.pythonhosted.org/packages/28/86/cbae8797a1041fe3bef37f31aa1ecdd3f8914fbc7cfb663532255b6ec16e/oras-0.2.31.tar.gz"
+
+    maintainers("vsoch")
+
+    license("Apache-2.0")
+
+    version("0.2.31", sha256="95c0a341359458747c2946dab47d584cc444d1f9d379b6d63fb7a84cabc54de4")
+
+    depends_on('py-setuptools', type='build')
+
+    depends_on("python@3.7:", type=("build", "run"))
+
+    depends_on("py-jsonschema", type=("build", "run"))
+    depends_on("py-requests", type=("build", "run"))
+
+    variant("tests", default=False, description="Enable test suite")
+    depends_on("py-pytest@4.6.2:", when="+tests", type=("build", "run"))
+
+    variant("docker", default=False, description="Enable Docker extra support")
+    depends_on("py-docker@5.0.1", when="+docker", type=("build", "run"))
\ No newline at end of file
diff --git a/tools/__init__.py b/tools/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tools/tools.py b/tools/tools.py
new file mode 100644
index 00000000..035d80a4
--- /dev/null
+++ b/tools/tools.py
@@ -0,0 +1,32 @@
+import os
+import subprocess
+from pathlib import Path
+
+
+def find_first_upstream_prefix(upstream_instance):
+    """
+    Search for directories named '.spack-db' within the spack opt directory
+    under upstream_instance, and return a list of their parent directories.
+    """
+    base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
+    for upstream_prefix, dirs, _ in os.walk(base_path):
+        if ".spack-db" in dirs:
+            return upstream_prefix
+    return None
+
+
+def get_submodule_commit_hash(submodule_path: Path):
+    try:
+        # Run git rev-parse to get the commit hash of the submodule
+        result = subprocess.run(
+            ['git', 'rev-parse', 'HEAD'],
+            cwd=submodule_path,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            text=True,
+            check=True
+        )
+        return result.stdout.strip()
+    except subprocess.CalledProcessError as e:
+        print(f"Error retrieving commit hash: {e.stderr}")
+        return None
diff --git a/utils/ymerge.py b/tools/ymerge.py
similarity index 100%
rename from utils/ymerge.py
rename to tools/ymerge.py
diff --git a/utils/utils.py b/utils/utils.py
deleted file mode 100644
index 2edf146c..00000000
--- a/utils/utils.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import os
-
-
-def find_first_upstream_prefix(upstream_instance):
-    """
-    Search for directories named '.spack-db' within the spack opt directory
-    under upstream_instance, and return a list of their parent directories.
-    """
-    base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
-    for upstream_prefix, dirs, _ in os.walk(base_path):
-        if ".spack-db" in dirs:
-            return upstream_prefix
-    return None
diff --git a/vendor/yashchiki b/vendor/yashchiki
index e21c2c08..8d8bb40a 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit e21c2c085cdf1712aedda75caea961f152d460f0
+Subproject commit 8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445
-- 
GitLab


From 10cccdcddee68eaf6ffd7a48f1b9841e3fa3d5b0 Mon Sep 17 00:00:00 2001
From: adrianciu <adrianciu25@gmail.com>
Date: Thu, 22 May 2025 11:43:45 +0300
Subject: [PATCH 9/9] VT-94: integrated original scripts of yashchiki

---
 .gitlab-ci.yml                                |   6 +-
 create_spack_env.py                           | 169 ++++++++++
 ..._spack_env.sh => dedal_manage_spack_env.sh |   6 +-
 install_spack_env.py                          | 301 +++++-------------
 packages/py-dedal/package.py                  |   5 +-
 tools/tools.py                                |  14 -
 vendor/yashchiki                              |   2 +-
 7 files changed, 267 insertions(+), 236 deletions(-)
 rename dedal_install_spack_env.sh => dedal_manage_spack_env.sh (60%)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 140628ef..4e26d6ff 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -235,7 +235,7 @@ build-spack-env-on-runner:
     - >
         echo "  view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
     # run installation script
-    - bash dedal_install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - bash dedal_manage_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
@@ -280,7 +280,7 @@ sync-esd-image:
     - mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT}
     # install spack
     - source vendor/spack/share/spack/setup-env.sh
-    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} python3 install_spack_env.py $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash dedal_manage_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     - echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     - echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     # preparing to assemble the image: move in the CI project contents...
@@ -338,7 +338,7 @@ sync-gitlab-spack-instance:
     # install spack
     - source vendor/spack/share/spack/setup-env.sh
     # run installation script
-    - python3 install_spack_env.py $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - bash dedal_manage_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     # create kernel spec, so that the environment can be used in gitlab CI jobs
     - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac);
     - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
diff --git a/create_spack_env.py b/create_spack_env.py
index e69de29b..148a982a 100644
--- a/create_spack_env.py
+++ b/create_spack_env.py
@@ -0,0 +1,169 @@
+import os
+import argparse
+import sys
+
+from dedal.logger.logger_builder import get_logger
+from dedal.utils.spack_utils import find_first_upstream_prefix
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+from pathlib import Path
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.enum.SpackConfigCommand import SpackConfigCommand
+from dedal.enum.SpackViewEnum import SpackViewEnum
+from dedal.model.SpackDescriptor import SpackDescriptor
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import set_bashrc_variable
+import spack
+
+parser = argparse.ArgumentParser(
+    prog='install_spack_env.py',
+    description='ESD install spack env.',
+    epilog='...')
+
+parser.add_argument(
+    "spack_jobs",
+    type=int,
+    help="number of jobs"
+)
+
+parser.add_argument(
+    "installation_root",
+    type=str,
+    help="where to set up the installation"
+)
+
+parser.add_argument(
+    "ebrains_repo",
+    type=str,
+    help="location of ebrains-spack-builds repository"
+)
+
+parser.add_argument(
+    "ebrains_spack_env",
+    type=str,
+    help="name of EBRAINS Spack environment to be created/updated"
+)
+
+parser.add_argument(
+    "upstream_instance",
+    type=str,
+    help="path to Spack instance to use as upstream (optional)"
+)
+
+parser.add_argument(
+    "update_spack_oci_caches",
+    type=bool,
+    help="true enables updating the OCI cache for spack sources and build results"
+)
+
+parser.add_argument(
+    "oci_cache_prefix",
+    type=str,
+    nargs="?",  # optional
+    default=None,
+)
+
+LOGGER = get_logger(__name__)
+
+args = parser.parse_args()
+
+spack_jobs = args.spack_jobs
+installation_root = Path(args.installation_root).resolve()
+ebrains_repo = args.ebrains_repo
+ebrains_spack_env = args.ebrains_spack_env
+upstream_instance = args.upstream_instance
+update_spack_oci_caches = args.update_spack_oci_caches
+oci_cache_prefix = args.oci_cache_prefix
+
+# define SYSTEMNAME variable in sites where it's not already defined
+system_name = (
+        os.getenv('SYSTEMNAME')
+        or os.getenv('HPC_SYSTEM')
+        or os.getenv('BSC_MACHINE')
+)
+
+# disable local configuration and cache directories
+set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
+os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
+set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
+os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
+
+if oci_cache_prefix:
+    set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+    os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
+
+# make sure spack uses the symlinked folder as path
+ci_spack_root = installation_root / 'spack'
+
+# cache related variables
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
+set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
+os.environ['CACHE_SPECFILE'] = cache_specfile
+dedal_home = f'{ebrains_repo}/vendor/yashchiki'
+os.environ['DEDAL_HOME'] = dedal_home
+set_bashrc_variable('DEDAL_HOME', dedal_home)
+spack_cache_source = f'{ci_spack_root}/var/spack/cache'
+set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
+os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
+set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
+os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
+spack_cache_build = spack_cache_source
+
+# initial setup: use spack submodule if spack dir doesn't already exist
+spack_root_existed = True
+if ci_spack_root and ebrains_repo:
+    if not os.path.isdir(ci_spack_root):
+        spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
+        try:
+            os.symlink(spack_source, ci_spack_root)
+            spack_root_existed = False
+        except FileExistsError:
+            pass
+
+env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
+
+binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
+spack_config = SpackConfig(env=env_repo,
+                           repos=[],
+                           install_dir=installation_root.parent,
+                           upstream_instance=upstream_instance,
+                           system_name=system_name,
+                           use_spack_global=False,
+                           view=SpackViewEnum.WITHOUT_VIEW,
+                           spack_dir=Path(ci_spack_root).resolve(),
+                           serialize_name='data_esd.pkl')
+
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.setup_spack_env()
+
+if upstream_instance:
+    upstream_prefix = find_first_upstream_prefix(upstream_instance)
+    spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
+if spack_root_existed == 0:
+    spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
+
+# make sure all fetching/clingo stuff happens before anything else
+spack_operation.spec_pacakge('aida')
+# rebuild spack's database (could be a debugging session)
+spack_operation.reindex()
+spack_operation.add_mirror(mirror_name='local_cache_esd', mirror_path=binary_cache_path, autopush=True)
+
+ebrains_env = Path(ebrains_repo) / 'spack.yaml'
+site_config = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
+spack_operation.merge_envs(ebrains_env, site_config)
+# add repo if it does not exist
+spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
+spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
+
+dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
+
+try:
+    with open(cache_specfile, "w") as f:
+        f.write(dump_dag)
+    LOGGER.info(f'Dumped cache_specfile')
+except Exception as e:
+    sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
+    sys.exit(1)
+
+print(spack_operation.spack_command_on_env)
+
+spack_operation.serialize()
\ No newline at end of file
diff --git a/dedal_install_spack_env.sh b/dedal_manage_spack_env.sh
similarity index 60%
rename from dedal_install_spack_env.sh
rename to dedal_manage_spack_env.sh
index 83c73680..28c0b0c7 100755
--- a/dedal_install_spack_env.sh
+++ b/dedal_manage_spack_env.sh
@@ -10,6 +10,8 @@ UPSTREAM_INSTANCE=$5  # path to Spack instance to use as upstream (optional)
 UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results
 export OCI_CACHE_PREFIX=$7
 
+source vendor/spack/share/spack/setup-env.sh
+
 python3 dedal_install.py
 
 spack env activate -p ./dedal_env
@@ -20,4 +22,6 @@ spack concretize
 spack install --add py-dedal@experimental
 spack load py-dedal@experimental
 
-spack-python install_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO"  "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX"
\ No newline at end of file
+spack_env_cmd=$(spack-python create_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO" "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX" | tail -n 1)
+echo "Captured environment command: $spack_env_cmd"
+eval "$spack_env_cmd && spack-python install_spack_env.py \"$SPACK_JOBS\" \"$INSTALLATION_ROOT\" \"$EBRAINS_REPO\" \"$EBRAINS_SPACK_ENV\" \"$UPSTREAM_INSTANCE\" \"$UPDATE_SPACK_OCI_CACHES\" \"$OCI_CACHE_PREFIX\""
diff --git a/install_spack_env.py b/install_spack_env.py
index f00105e3..824d66e5 100644
--- a/install_spack_env.py
+++ b/install_spack_env.py
@@ -1,17 +1,16 @@
 import os
 import argparse
-import shutil
-import subprocess
 import sys
+from dedal.build_cache.CachedBuildResultUploader import CachedBuildResultUploader
+from dedal.build_cache.CachedBuildResultsFetcher import CachedBuildResultsFetcher
+from dedal.build_cache.CachedSourceFetcher import CachedSourceFetcher
+from dedal.build_cache.CachedSourceUploader import CachedSourceUploader
+from dedal.build_cache.SpecfileDagHash import SpecfileDagHash
+from dedal.error_handling.exceptions import SpackInstallPackagesException
+from dedal.logger.logger_builder import get_logger
+from dedal.spack_factory.SpackOperation import SpackOperation
 sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
 from pathlib import Path
-from dedal.configuration.SpackConfig import SpackConfig
-from dedal.enum.SpackConfigCommand import SpackConfigCommand
-from dedal.enum.SpackViewEnum import SpackViewEnum
-from dedal.model.SpackDescriptor import SpackDescriptor
-from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
-from dedal.utils.utils import set_bashrc_variable, run_command
-from tools.tools import find_first_upstream_prefix
 import spack
 
 parser = argparse.ArgumentParser(
@@ -62,236 +61,106 @@ parser.add_argument(
     default=None,
 )
 
+LOGGER = get_logger(__name__)
+
 args = parser.parse_args()
 
-spack_jobs = args.spack_jobs
 installation_root = Path(args.installation_root).resolve()
+ci_spack_root = installation_root / 'spack'
+spack_jobs = args.spack_jobs
 ebrains_repo = args.ebrains_repo
 ebrains_spack_env = args.ebrains_spack_env
 upstream_instance = args.upstream_instance
 update_spack_oci_caches = args.update_spack_oci_caches
 oci_cache_prefix = args.oci_cache_prefix
-
-print(f'oci_cache_prefix: {oci_cache_prefix}')
-
-# define SYSTEMNAME variable in sites where it's not already defined
-system_name = (
-        os.getenv('SYSTEMNAME')
-        or os.getenv('HPC_SYSTEM')
-        or os.getenv('BSC_MACHINE')
-)
-
-# disable local configuration and cache directories
-set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
-os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
-set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
-os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
-
-if oci_cache_prefix:
-    set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
-    os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
-
-# make sure spack uses the symlinked folder as path
-ci_spack_root = installation_root / 'spack'
-
-# cache related variables
-cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
-set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
-os.environ['CACHE_SPECFILE'] = cache_specfile
 dedal_home = f'{ebrains_repo}/vendor/yashchiki'
-os.environ['DEDAL_HOME'] = dedal_home
-set_bashrc_variable('DEDAL_HOME', dedal_home)
 spack_cache_source = f'{ci_spack_root}/var/spack/cache'
-set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
-os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
-set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
-os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
 spack_cache_build = spack_cache_source
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
 
-# initial setup: use spack submodule if spack dir doesn't already exist
-spack_root_existed = True
-if ci_spack_root and ebrains_repo:
-    if not os.path.isdir(ci_spack_root):
-        spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
-        try:
-            os.symlink(spack_source, ci_spack_root)
-            spack_root_existed = False
-        except FileExistsError:
-            pass
-
-env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
-
-binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
-spack_config = SpackConfig(env=env_repo,
-                           repos=[],
-                           install_dir=installation_root.parent,
-                           upstream_instance=upstream_instance,
-                           system_name=system_name,
-                           use_spack_global=False,
-                           view=SpackViewEnum.WITHOUT_VIEW,
-                           spack_dir=Path(ci_spack_root).resolve())
-
-spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
-spack_operation.setup_spack_env()
-
-if upstream_instance:
-    upstream_prefix = find_first_upstream_prefix(upstream_instance)
-    spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
-if spack_root_existed == 0:
-    spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
-
-# make sure all fetching/clingo stuff happens before anything else
-spack_operation.spec_pacakge('aida')
-# rebuild spack's database (could be an debugging session)
-spack_operation.reindex()
-spack_operation.add_mirror(mirror_name='local_cache_esd', mirror_path=binary_cache_path, autopush=True)
-
-# add repo if it does not exist
-spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
-spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
-
-dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
-
-try:
-    with open(cache_specfile, "w") as f:
-        f.write(dump_dag)
-    print(f'Dumped cache_specfile')
-except Exception as e:
-    sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
-    sys.exit(1)
-
-print(spack_operation.env_path)
-
-##################### first part ##################
+spack_operation = SpackOperation.deserialize(file_location=installation_root.parent, file_name='data_esd.pkl')
 
 if oci_cache_prefix:
-    print('=======================Started fetching===================')
+    LOGGER.info('Fetching missing sources (if packages not yet installed)')
     # fetch missing sources (if packages not yet installed)
-    fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
-    print(fetch_cached_sources)
-    run_command(
-        "bash", "-c",
-        f"{spack_operation.spack_command_on_env} && python3 "
-        f"{fetch_cached_sources} "
-        f"--local-cache spack_cache_source "
-        f"--remote-cache-type=oci "
-        f"--remote-cache {oci_cache_prefix}/source_cache "
-        f"--yashchiki-home {dedal_home} "
-        f"/tmp/missing_paths_sources.dat cache_specfile",
-        info_msg="Fetching missing sources",
-        exception_msg="Failed to fetch missing sources",
-        check=True
+    cache_source_fetcher = CachedSourceFetcher(
+        path_missing=Path("/tmp/missing_paths_sources.dat"),
+        specfiles=["cache_specfile"],
+        remote_cache=f"{oci_cache_prefix}/source_cache",
+        remote_cache_type="oci",
+        local_cache=spack_cache_source,
+        dedal_home=dedal_home
     )
+
+    LOGGER.info('Fetching build results (if packages not yet installed)')
     # fetch missing build results (if packages not yet installed)
-    fetch_cached_buildresults = Path(dedal_home).resolve() / 'cache' / 'fetch_cached_buildresults.py'
-    print(fetch_cached_buildresults)
-    run_command(
-        "bash", "-c",
-        f"{spack_operation.spack_command_on_env} && python3 "
-        f"{fetch_cached_buildresults} "
-        f"--local-cache",
-        f"{spack_cache_build}/build_cache "
-        f"--remote-cache-type=oci "
-        f"--remote-cache {oci_cache_prefix}/build_cache "
-        f"--yashchiki-home {dedal_home} "
-        f"/tmp/missing_paths_buildresults.dat {cache_specfile}",
-        info_msg="Fetching missing build results",
-        exception_msg="Failed to fetch missing build results",
-        check=True
+    fetch_build_results = CachedBuildResultsFetcher(
+        path_missing=Path("/tmp/missing_paths_buildresults.dat"),
+        specfiles=[cache_specfile],
+        dedal_home=dedal_home,
+        remote_cache=f"{oci_cache_prefix}/build_cache",
+        remote_cache_type="oci",
+        local_cache=f"{spack_cache_build}/build_cache"
     )
-
-# cmd = [
-#     "spack-python",
-#     "-c",
-#     "exit(not len(spack.environment.active_environment().uninstalled_specs()))"
-# ]
+    fetch_build_results.fetch()
 
 # packages_not_installed = subprocess.run(cmd).returncode == 0
 packages_not_installed = not len(spack.environment.active_environment().uninstalled_specs())
 
-
-print(f'=====================Packages_not_installed: {packages_not_installed}=====================')
+LOGGER.info(f'Packages_not_installed: {packages_not_installed}')
 
 if packages_not_installed:
     spack_operation.fetch(dependencies=True, missing=True)
-#
-#     if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
-#         print("Performing update of the source cache")
-#         update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
-#         cache_cmd = run_command(
-#             "bash", "-c",
-#             f'{spack_operation.spack_command_on_env} && python3 {update_cached_sources} '
-#             f'--local-cache {spack_cache_source}'
-#             f'--remote-cache-type=oci'
-#             f'--remote-cache f"{oci_cache_prefix}/source_cache '
-#             f'/tmp/missing_paths_sources.dat',
-#             info_msg='Updating remote OCI cache',
-#             check=True
-#         )
-#         if cache_cmd is None or cache_cmd.returncode != 0:
-#             print("Cache update failed.")
-#             sys.exit(cache_cmd.returncode if cache_cmd else 1)
-#     else:
-#         print("Updating of the source cache disabled.")
-#
-# dag_hashes_pre_install = []
-# if oci_cache_prefix:
-#     specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
-#     dag_hashes_pre_install = run_command(
-#         "bash", "-c",
-#         f'{spack_operation.spack_command_on_env} && spack-python {specfile_dag_hash} {cache_specfile}',
-#         capture_output=True,
-#         text=True,
-#         check=True
-#     ).stdout.strip().split()
-#
-# print(dag_hashes_pre_install)
+    if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+        LOGGER.info("Performing update of the source cache")
+
+        update_cached_sources = CachedSourceUploader(
+            path_missing=Path("/tmp/missing_paths_sources.dat"),
+            remote_cache=f"{oci_cache_prefix}/source_cache",
+            remote_cache_type="oci",
+            local_cache=spack_cache_source
+        )
+        update_cached_sources.upload_missing_sources()
+    else:
+        LOGGER.info("Updating of the source cache disabled.")
+
+dag_hashes_pre_install = []
+if oci_cache_prefix:
+    dag_hashes_pre_install = SpecfileDagHash(path_specfile=cache_specfile)
+    dag_hashes_pre_install = dag_hashes_pre_install.run()
+
+LOGGER.info(dag_hashes_pre_install)
+
+spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+
+if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
+    for dag_hash in dag_hashes_pre_install:
+        package = Path(spack_cache_build).resolve() / dag_hash
+        LOGGER.info(package)
+        result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+        if result != 0:
+            LOGGER.info(f'Failed to push {dag_hash}, trying to call spack find on it:')
+            spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+
+        # upload packages from local to remote cache
+        LOGGER.info("Performing update of the build cache")
+        local_cache_esd = Path(spack_cache_build).resolve() / 'build_cache'
+        update_cached_build = CachedBuildResultUploader(
+            path_missing=Path("/tmp/missing_paths_sources.dat"),
+            remote_cache=local_cache_esd,
+            remote_cache_type="oci",
+            local_cache=local_cache_esd
+        )
+        update_cached_build.upload_missing_results()
+else:
+    LOGGER.info('Updating of the build cache disabled.')
+
+spack_operation.reindex()
+
+spack_operation.create_load_env_script()
+spack_operation.reindex()
+
+if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+    raise SpackInstallPackagesException('Some spack packages failed to install.')
 
-# spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
-#
-# if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
-#     for dag_hash in dag_hashes_pre_install:
-#         package = Path(spack_cache_build).resolve() / dag_hash
-#         print(package)
-#         result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
-#         if result != 0:
-#             print(f'Failed to push {dag_hash}, trying to call spack find on it:')
-#             spack_operation.find_package(package=package, long=True, variants=True, paths=True)
-#
-#         # upload packages from local to remote cache
-#         print("Performing update of the build cache")
-#         update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
-#         local_cache_esd = Path(spack_cache_build).resolve() / 'build_cache'
-#         run_command(
-#             "bash", "-c",
-#             f'{spack_operation.spack_command_on_env} && python3 {update_cached_buildresults} '
-#             f'--local-cache {local_cache_esd}'
-#             f'--remote-cache-type=oci '
-#             f'--remote-cache f"{local_cache_esd} '
-#             f'/tmp/missing_paths_sources.dat',
-#             check=True
-#         )
-# else:
-#     print('Updating of the build cache disabled.')
-#
-# spack_operation.reindex()
-#
-# exports = run_command("bash", "-c",
-#                       f'spack env activate --sh {env_path}',
-#                       check=True,
-#                       stdout=subprocess.PIPE,
-#                       stderr=subprocess.PIPE,
-#                       capture_output=True,
-#                       text=True,
-#                       ).stdout
-#
-# out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
-# with open(out_file, "w") as f:
-#     f.write(exports)
-#
-# if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
-#     print('Some spack packages failed to install.')
-#     sys.exit(-1)
-#
-# print('Installed all spack packages.')
-# spack_operation.reindex()
+LOGGER.info('Installed all spack packages.')
\ No newline at end of file
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
index 07cf5a8f..32ae7f9c 100644
--- a/packages/py-dedal/package.py
+++ b/packages/py-dedal/package.py
@@ -3,12 +3,15 @@ from spack.package import PythonPackage
 class PyDedal(PythonPackage):
     homepage = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
     git      = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+    pypi = 'dedal/dedal-0.9.1.tar.gz'
 
     version(
         'experimental',
-        commit='8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445'
+        commit='ee84f83c44e13b7c439de20f3c9b0b03d44de5ab'
     )
 
+    version('0.9.1', '8ab265f6d920e617025aba64603e68d42b2be7e483d459e0ab34bfa97213d020')
+
     maintainers("Brainiacs")
 
     depends_on('py-setuptools', type='build')
diff --git a/tools/tools.py b/tools/tools.py
index 035d80a4..1c1827ca 100644
--- a/tools/tools.py
+++ b/tools/tools.py
@@ -1,20 +1,6 @@
-import os
 import subprocess
 from pathlib import Path
 
-
-def find_first_upstream_prefix(upstream_instance):
-    """
-    Search for directories named '.spack-db' within the spack opt directory
-    under upstream_instance, and return a list of their parent directories.
-    """
-    base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
-    for upstream_prefix, dirs, _ in os.walk(base_path):
-        if ".spack-db" in dirs:
-            return upstream_prefix
-    return None
-
-
 def get_submodule_commit_hash(submodule_path: Path):
     try:
         # Run git rev-parse to get the commit hash of the submodule
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 8d8bb40a..ee84f83c 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 8d8bb40a74fc7421cbd7bbd9abedebc1b9db7445
+Subproject commit ee84f83c44e13b7c439de20f3c9b0b03d44de5ab
-- 
GitLab