diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 91b188c38c1a9f21f7a408bfe52f278fa26a164b..4e26d6ff3778c7f2a7b01e7000bcbf306b8b580b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -227,11 +227,15 @@ build-spack-env-on-runner:
     OCI_CACHE_PREFIX: ""
     UPDATE_SPACK_OCI_CACHES: false
   script:
+    # install dedal python library
+    - pip3 install --break-system-packages vendor/yashchiki
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
     # deactivate environment views (we don't need them for the test build-job)
     - >
         echo "  view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
     # run installation script
-    - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - bash dedal_manage_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
@@ -244,6 +248,8 @@ build-spack-env-on-runner:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
       # - spack_tests
     when: always
   timeout: 2 days
@@ -272,7 +278,9 @@ sync-esd-image:
     # run installation script inside future container environment
     #   => DAG concretization, subsequent cache access + fetching and actual build should be separate steps
     - mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT}
-    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
+    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash dedal_manage_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     - echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     - echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     # preparing to assemble the image: move in the CI project contents...
@@ -295,6 +303,8 @@ sync-esd-image:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
     when: always
   timeout: 2 days
   resource_group: registry-esd-master-image
@@ -325,8 +335,10 @@ sync-gitlab-spack-instance:
     - git fetch origin
     - git reset --hard $CI_COMMIT_SHA
     - git submodule update --force
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
     # run installation script
-    - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - bash dedal_manage_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     # create kernel spec, so that the environment can be used in gitlab CI jobs
     - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac);
     - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
@@ -342,6 +354,8 @@ sync-gitlab-spack-instance:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
     when: always
   rules:
     # branches that update the gitlab-runner upstream (read-only) installation
diff --git a/.gitmodules b/.gitmodules
index 30354655734aa2d7408ccd13b0d9e23455436578..0b9fab48222e2dffacd6231481ae5ae423cd6450 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -5,4 +5,4 @@
 [submodule "vendor/yashchiki"]
 	path = vendor/yashchiki
 	url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal
-	shallow = true
+	shallow = true
\ No newline at end of file
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/create_spack_env.py b/create_spack_env.py
new file mode 100644
index 0000000000000000000000000000000000000000..148a982a1a333a64dd7c47934fe2f94497b9bfab
--- /dev/null
+++ b/create_spack_env.py
@@ -0,0 +1,169 @@
+import os
+import argparse
+import sys
+
+from dedal.logger.logger_builder import get_logger
+from dedal.utils.spack_utils import find_first_upstream_prefix
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+from pathlib import Path
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.enum.SpackConfigCommand import SpackConfigCommand
+from dedal.enum.SpackViewEnum import SpackViewEnum
+from dedal.model.SpackDescriptor import SpackDescriptor
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import set_bashrc_variable
+import spack
+
+parser = argparse.ArgumentParser(
+    prog='create_spack_env.py',  # fixed: was 'install_spack_env.py' (copy-paste from the sibling script)
+    description='ESD create spack env.',
+    epilog='...')
+
+parser.add_argument(
+    "spack_jobs",
+    type=int,
+    help="number of jobs"
+)
+
+parser.add_argument(
+    "installation_root",
+    type=str,
+    help="where to set up the installation"
+)
+
+parser.add_argument(
+    "ebrains_repo",
+    type=str,
+    help="location of ebrains-spack-builds repository"
+)
+
+parser.add_argument(
+    "ebrains_spack_env",
+    type=str,
+    help="name of EBRAINS Spack environment to be created/updated"
+)
+
+parser.add_argument(
+    "upstream_instance",
+    type=str,
+    help="path to Spack instance to use as upstream (optional)"
+)
+
+parser.add_argument(
+    "update_spack_oci_caches",
+    type=str,  # fixed: argparse type=bool is a trap — bool('false') is True; keep the string and compare it
+    help="true enables updating the OCI cache for spack sources and build results"
+)
+
+parser.add_argument(
+    "oci_cache_prefix",
+    type=str,
+    nargs="?",  # optional
+    default=None,
+)
+
+LOGGER = get_logger(__name__)
+
+args = parser.parse_args()
+
+spack_jobs = args.spack_jobs
+installation_root = Path(args.installation_root).resolve()
+ebrains_repo = args.ebrains_repo
+ebrains_spack_env = args.ebrains_spack_env
+upstream_instance = args.upstream_instance
+update_spack_oci_caches = args.update_spack_oci_caches
+oci_cache_prefix = args.oci_cache_prefix
+
+# define SYSTEMNAME variable in sites where it's not already defined
+system_name = (
+        os.getenv('SYSTEMNAME')
+        or os.getenv('HPC_SYSTEM')
+        or os.getenv('BSC_MACHINE')
+)
+
+# disable local configuration and cache directories
+set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
+os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
+set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
+os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
+
+if oci_cache_prefix:
+    set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+    os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
+
+# make sure spack uses the symlinked folder as path
+ci_spack_root = installation_root / 'spack'
+
+# cache related variables
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
+set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
+os.environ['CACHE_SPECFILE'] = cache_specfile
+dedal_home = f'{ebrains_repo}/vendor/yashchiki'
+os.environ['DEDAL_HOME'] = dedal_home
+set_bashrc_variable('DEDAL_HOME', dedal_home)
+spack_cache_source = f'{ci_spack_root}/var/spack/cache'
+set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
+os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
+set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
+os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
+spack_cache_build = spack_cache_source
+
+# initial setup: use spack submodule if spack dir doesn't already exist
+spack_root_existed = True
+if ci_spack_root and ebrains_repo:
+    if not os.path.isdir(ci_spack_root):
+        spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
+        try:
+            os.symlink(spack_source, ci_spack_root)
+            spack_root_existed = False
+        except FileExistsError:
+            pass
+
+env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
+
+binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
+spack_config = SpackConfig(env=env_repo,
+                           repos=[],
+                           install_dir=installation_root.parent,
+                           upstream_instance=upstream_instance,
+                           system_name=system_name,
+                           use_spack_global=False,
+                           view=SpackViewEnum.WITHOUT_VIEW,
+                           spack_dir=Path(ci_spack_root).resolve(),
+                           serialize_name='data_esd.pkl')
+
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.setup_spack_env()
+
+if upstream_instance:
+    upstream_prefix = find_first_upstream_prefix(upstream_instance)
+    spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
+if not spack_root_existed:  # pad install tree only when the spack root was created fresh by this run
+    spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
+
+# make sure all fetching/clingo stuff happens before anything else
+spack_operation.spec_pacakge('aida')
+# rebuild spack's database (could be an debugging session)
+spack_operation.reindex()
+spack_operation.add_mirror(mirror_name='local_cache_esd', mirror_path=binary_cache_path, autopush=True)
+
+ebrains_env = Path(ebrains_repo) / 'spack.yaml'
+site_config = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
+spack_operation.merge_envs(ebrains_env, site_config)
+# add repo if it does not exist
+spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
+spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
+
+dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
+
+try:
+    with open(cache_specfile, "w") as f:
+        f.write(dump_dag)
+    LOGGER.info(f'Dumped cache_specfile')
+except Exception as e:
+    sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
+    sys.exit(1)
+
+print(spack_operation.spack_command_on_env)  # must stay the last stdout line: the wrapper captures it via `tail -n 1`
+
+spack_operation.serialize()
diff --git a/dedal_env/spack.yaml b/dedal_env/spack.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9cd58b5c5996dfc346934317fff4e05ebba4b6cd
--- /dev/null
+++ b/dedal_env/spack.yaml
@@ -0,0 +1,8 @@
+spack:
+  specs:
+  - python@3.13.0
+  - py-oras@0.2.31
+  - py-ruamel-yaml@0.17.32
+  - py-click@8.1.7
+  - py-jsonpickle@2.2.0
+  - py-pyyaml@5.4.1
diff --git a/dedal_install.py b/dedal_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b827162796705a43fda01979005d97154aea7c5
--- /dev/null
+++ b/dedal_install.py
@@ -0,0 +1,44 @@
+from pathlib import Path
+from dedal.configuration.GpgConfig import GpgConfig
+from dedal.configuration.SpackConfig import SpackConfig
+import os
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import count_files_in_folder
+
+from dedal.model.SpackDescriptor import SpackDescriptor
+
+from dedal.spack_factory.SpackOperationUseCache import SpackOperationUseCache  # fixed: use the installed package path, consistent with the other dedal imports above
+
+dedal_env = SpackDescriptor(name='dedal_env', path=Path('./'))
+ebrains_repo = SpackDescriptor(name='ebrains-spack-builds', path=Path('../'))
+dedal_dir = 'dedal_install_dir'
+install_dir = Path('./') / dedal_dir
+os.makedirs(install_dir, exist_ok=True)
+concretization_dir = install_dir / 'concretization'
+buildcache_dir = install_dir / 'buildcache'
+spack_config = SpackConfig(env=dedal_env,
+                           repos=[ebrains_repo],
+                           install_dir=install_dir,
+                           upstream_instance=None,
+                           system_name=None,
+                           concretization_dir=concretization_dir,
+                           buildcache_dir=buildcache_dir,
+                           gpg=None,
+                           use_spack_global=False,
+                           cache_version_build='latest',
+                           cache_version_concretize='latest',
+                           override_cache=False,
+                           spack_dir=Path('./vendor').resolve() / 'spack'
+                           )
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=True)
+
+if isinstance(spack_operation, SpackOperationUseCache) and (len(spack_operation.cache_dependency.list_tags()) == 0 or len(spack_operation.build_cache.list_tags()) == 0):
+    print('No cache available')
+    gpg = GpgConfig('dedal', 'science@codemart.ro')
+    spack_config.gpg = gpg
+    spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+    print(type(spack_operation))
+
+spack_operation.setup_spack_env()
+spack_operation.concretize_spack_env()
+spack_operation.install_packages(os.cpu_count(), signed=False)
diff --git a/dedal_manage_spack_env.sh b/dedal_manage_spack_env.sh
new file mode 100755
index 0000000000000000000000000000000000000000..28c0b0c7fb790b99ba7d7774ef3dd1deb64b9a81
--- /dev/null
+++ b/dedal_manage_spack_env.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+set -eo pipefail
+
+SPACK_JOBS=$1         # number of jobs
+INSTALLATION_ROOT=$2  # where to set up the installation
+EBRAINS_REPO=$3       # location of ebrains-spack-builds repository
+EBRAINS_SPACK_ENV=$4  # name of EBRAINS Spack environment to be created/updated
+UPSTREAM_INSTANCE=$5  # path to Spack instance to use as upstream (optional)
+UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results
+export OCI_CACHE_PREFIX=$7
+
+source vendor/spack/share/spack/setup-env.sh
+
+python3 dedal_install.py
+
+spack env activate -p ./dedal_env
+spack mirror remove local_cache || true  # tolerate a missing mirror on fresh runs (script runs under set -e)
+spack clean -a
+spack concretize
+# install the latest version of dedal from the specified branch (the cache version might not be the latest yet)
+spack install --add py-dedal@experimental
+spack load py-dedal@experimental
+
+spack_env_cmd=$(spack-python create_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO" "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX" | tail -n 1)
+echo "Captured environment command: $spack_env_cmd"
+eval "$spack_env_cmd && spack-python install_spack_env.py \"$SPACK_JOBS\" \"$INSTALLATION_ROOT\" \"$EBRAINS_REPO\" \"$EBRAINS_SPACK_ENV\" \"$UPSTREAM_INSTANCE\" \"$UPDATE_SPACK_OCI_CACHES\" \"$OCI_CACHE_PREFIX\""
diff --git a/install_spack_env.py b/install_spack_env.py
new file mode 100644
index 0000000000000000000000000000000000000000..824d66e59034ae04ca8cbb98f1911531d7535b1b
--- /dev/null
+++ b/install_spack_env.py
@@ -0,0 +1,166 @@
+import os
+import argparse
+import sys
+from dedal.build_cache.CachedBuildResultUploader import CachedBuildResultUploader
+from dedal.build_cache.CachedBuildResultsFetcher import CachedBuildResultsFetcher
+from dedal.build_cache.CachedSourceFetcher import CachedSourceFetcher
+from dedal.build_cache.CachedSourceUploader import CachedSourceUploader
+from dedal.build_cache.SpecfileDagHash import SpecfileDagHash
+from dedal.error_handling.exceptions import SpackInstallPackagesException
+from dedal.logger.logger_builder import get_logger
+from dedal.spack_factory.SpackOperation import SpackOperation
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+from pathlib import Path
+import spack
+
+parser = argparse.ArgumentParser(
+    prog='install_spack_env.py',
+    description='ESD install spack env.',
+    epilog='...')
+
+parser.add_argument(
+    "spack_jobs",
+    type=int,
+    help="number of jobs"
+)
+
+parser.add_argument(
+    "installation_root",
+    type=str,
+    help="where to set up the installation"
+)
+
+parser.add_argument(
+    "ebrains_repo",
+    type=str,
+    help="location of ebrains-spack-builds repository"
+)
+
+parser.add_argument(
+    "ebrains_spack_env",
+    type=str,
+    help="name of EBRAINS Spack environment to be created/updated"
+)
+
+parser.add_argument(
+    "upstream_instance",
+    type=str,
+    help="path to Spack instance to use as upstream (optional)"
+)
+
+parser.add_argument(
+    "update_spack_oci_caches",
+    type=str,  # fixed: argparse type=bool is a trap — bool('false') is True; keep the string and compare it
+    help="true enables updating the OCI cache for spack sources and build results"
+)
+
+parser.add_argument(
+    "oci_cache_prefix",
+    type=str,
+    nargs="?",  # optional
+    default=None,
+)
+
+LOGGER = get_logger(__name__)
+
+args = parser.parse_args()
+
+installation_root = Path(args.installation_root).resolve()
+ci_spack_root = installation_root / 'spack'
+spack_jobs = args.spack_jobs
+ebrains_repo = args.ebrains_repo
+ebrains_spack_env = args.ebrains_spack_env
+upstream_instance = args.upstream_instance
+update_spack_oci_caches = args.update_spack_oci_caches
+oci_cache_prefix = args.oci_cache_prefix
+dedal_home = f'{ebrains_repo}/vendor/yashchiki'
+spack_cache_source = f'{ci_spack_root}/var/spack/cache'
+spack_cache_build = spack_cache_source
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
+
+spack_operation = SpackOperation.deserialize(file_location=installation_root.parent, file_name='data_esd.pkl')
+
+if oci_cache_prefix:
+    LOGGER.info('Fetching missing sources (if packages not yet installed)')
+    # fetch missing sources (if packages not yet installed)
+    cache_source_fetcher = CachedSourceFetcher(
+        path_missing=Path("/tmp/missing_paths_sources.dat"),
+        specfiles=[cache_specfile],  # fixed: was the literal string "cache_specfile" (compare the build-results fetcher below)
+        remote_cache=f"{oci_cache_prefix}/source_cache",
+        remote_cache_type="oci",
+        local_cache=spack_cache_source,
+        dedal_home=dedal_home)
+    cache_source_fetcher.fetch()  # fixed: was constructed but never run — assumes same .fetch() API as CachedBuildResultsFetcher; confirm
+
+    LOGGER.info('Fetching build results (if packages not yet installed)')
+    # fetch missing build results (if packages not yet installed)
+    fetch_build_results = CachedBuildResultsFetcher(
+        path_missing=Path("/tmp/missing_paths_buildresults.dat"),
+        specfiles=[cache_specfile],
+        dedal_home=dedal_home,
+        remote_cache=f"{oci_cache_prefix}/build_cache",
+        remote_cache_type="oci",
+        local_cache=f"{spack_cache_build}/build_cache"
+    )
+    fetch_build_results.fetch()
+
+# packages_not_installed = subprocess.run(cmd).returncode == 0
+packages_not_installed = len(spack.environment.active_environment().uninstalled_specs()) > 0  # fixed: previous 'not len(...)' was inverted — True when everything WAS installed
+
+LOGGER.info(f'Packages_not_installed: {packages_not_installed}')
+
+if packages_not_installed:
+    spack_operation.fetch(dependencies=True, missing=True)
+    if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+        LOGGER.info("Performing update of the source cache")
+
+        update_cached_sources = CachedSourceUploader(
+            path_missing=Path("/tmp/missing_paths_sources.dat"),
+            remote_cache=f"{oci_cache_prefix}/source_cache",
+            remote_cache_type="oci",
+            local_cache=spack_cache_source
+        )
+        update_cached_sources.upload_missing_sources()
+    else:
+        LOGGER.info("Updating of the source cache disabled.")
+
+dag_hashes_pre_install = []
+if oci_cache_prefix:
+    dag_hashes_pre_install = SpecfileDagHash(path_specfile=cache_specfile)
+    dag_hashes_pre_install = dag_hashes_pre_install.run()
+
+LOGGER.info(dag_hashes_pre_install)
+
+spack_operation.install_packages(jobs=spack_jobs, signed=False, test='root')  # fixed: use the parsed arg; int(os.getenv('SPACK_JOBS')) raises TypeError when the env var is unset
+
+if oci_cache_prefix and str(update_spack_oci_caches).lower() == "true":  # str() guards against argparse delivering a bool (.lower() would raise AttributeError)
+    for dag_hash in dag_hashes_pre_install:
+        package = Path(spack_cache_build).resolve() / dag_hash
+        LOGGER.info(package)
+        result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+        if result != 0:
+            LOGGER.info(f'Failed to push {dag_hash}, trying to call spack find on it:')
+            spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+
+        # NOTE(review): this uploader runs once per dag_hash inside the loop; consider hoisting it after the loop
+        LOGGER.info("Performing update of the build cache")
+        local_cache_esd = Path(spack_cache_build).resolve() / 'build_cache'
+        update_cached_build = CachedBuildResultUploader(
+            path_missing=Path("/tmp/missing_paths_buildresults.dat"),
+            remote_cache=f"{oci_cache_prefix}/build_cache",
+            remote_cache_type="oci",
+            local_cache=local_cache_esd
+        )
+        update_cached_build.upload_missing_results()
+else:
+    LOGGER.info('Updating of the build cache disabled.')
+
+spack_operation.reindex()
+
+spack_operation.create_load_env_script()
+spack_operation.reindex()
+
+if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+    raise SpackInstallPackagesException('Some spack packages failed to install.')
+
+LOGGER.info('Installed all spack packages.')
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..32ae7f9cb43962002b4b552506f9a9fda40616b7
--- /dev/null
+++ b/packages/py-dedal/package.py
@@ -0,0 +1,25 @@
+from spack.package import PythonPackage
+
+class PyDedal(PythonPackage):
+    homepage = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+    git      = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+    pypi = 'dedal/dedal-0.9.1.tar.gz'
+
+    version(
+        'experimental',
+        commit='ee84f83c44e13b7c439de20f3c9b0b03d44de5ab'
+    )
+
+    version('0.9.1', sha256='8ab265f6d920e617025aba64603e68d42b2be7e483d459e0ab34bfa97213d020')  # explicit sha256= — bare positional checksums are deprecated in Spack
+
+    maintainers("Brainiacs")
+
+    depends_on('py-setuptools', type='build')
+
+    depends_on("python@3.10:", type=("build", "run"))
+    depends_on("py-oras@0.2.31:", type=("build", "run"))
+
+    depends_on("py-ruamel-yaml", type=("build", "run"))
+    depends_on("py-click", type=("build", "run"))
+    depends_on("py-jsonpickle", type=("build", "run"))
+    depends_on("py-pyyaml", type=("build", "run"))
diff --git a/packages/py-oras/package.py b/packages/py-oras/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a5651da6c42ae2eb9d914ee5947d28de4c12c0b
--- /dev/null
+++ b/packages/py-oras/package.py
@@ -0,0 +1,33 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyOras(PythonPackage):
+    """ORAS Python SDK: OCI Registry as Storage Python SDK."""
+
+    homepage = "https://oras.land"
+    git = "https://github.com/oras-project/oras-py"
+    url = "https://files.pythonhosted.org/packages/28/86/cbae8797a1041fe3bef37f31aa1ecdd3f8914fbc7cfb663532255b6ec16e/oras-0.2.31.tar.gz"
+
+    maintainers("vsoch")
+
+    license("Apache-2.0")  # Spack's license() directive expects an SPDX identifier
+
+    version("0.2.31", sha256="95c0a341359458747c2946dab47d584cc444d1f9d379b6d63fb7a84cabc54de4")
+
+    depends_on('py-setuptools', type='build')
+
+    depends_on("python@3.7:", type=("build", "run"))
+
+    depends_on("py-jsonschema", type=("build", "run"))
+    depends_on("py-requests", type=("build", "run"))
+
+    variant("tests", default=False, description="Enable test suite")
+    depends_on("py-pytest@4.6.2:", when="+tests", type=("build", "run"))
+
+    variant("docker", default=False, description="Enable Docker extra support")
+    depends_on("py-docker@5.0.1", when="+docker", type=("build", "run"))
diff --git a/tools/__init__.py b/tools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tools/tools.py b/tools/tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c1827caae344a68e1c38b25e6a85a33e6474b75
--- /dev/null
+++ b/tools/tools.py
@@ -0,0 +1,18 @@
+import subprocess
+from pathlib import Path
+
+def get_submodule_commit_hash(submodule_path: Path):
+    try:
+        # Run git rev-parse to get the commit hash of the submodule
+        result = subprocess.run(
+            ['git', 'rev-parse', 'HEAD'],
+            cwd=submodule_path,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            text=True,
+            check=True
+        )
+        return result.stdout.strip()
+    except subprocess.CalledProcessError as e:
+        print(f"Error retrieving commit hash: {e.stderr}")
+        return None
diff --git a/tools/ymerge.py b/tools/ymerge.py
new file mode 100644
index 0000000000000000000000000000000000000000..accb877977fd1a95b83ea3ec44179d7928059ae8
--- /dev/null
+++ b/tools/ymerge.py
@@ -0,0 +1,14 @@
+# spack-python script that merges two environment configuration files (spack.yaml) into one
+# Usage: spack-python /path/to/first/spack.yaml /path/to/second/spack.yaml
+# (note: if the second file does not exist, the output is the first file
+
+import sys, os
+from spack.config import merge_yaml, read_config_file, syaml
+
+if not os.path.exists(sys.argv[2]):
+    merged = syaml.dump(read_config_file(sys.argv[1]))
+else:
+    merged = syaml.dump(merge_yaml(read_config_file(sys.argv[1]), read_config_file(sys.argv[2])))
+
+print(merged)
+
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 5690db7dbccd78f3ceef2123a605e662bb8b2c0f..ee84f83c44e13b7c439de20f3c9b0b03d44de5ab 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 5690db7dbccd78f3ceef2123a605e662bb8b2c0f
+Subproject commit ee84f83c44e13b7c439de20f3c9b0b03d44de5ab