diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 91b188c38c1a9f21f7a408bfe52f278fa26a164b..140628ef618e7aab43c2326537d08ee321334d73 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -227,11 +227,15 @@ build-spack-env-on-runner:
     OCI_CACHE_PREFIX: ""
     UPDATE_SPACK_OCI_CACHES: false
   script:
+    # install dedal python library
+    - pip3 install --break-system-packages vendor/yashchiki
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
     # deactivate environment views (we don't need them for the test build-job)
     - >
         echo "  view: False" >> $CI_PROJECT_DIR/site-config/$SYSTEMNAME/spack.yaml
     # run installation script
-    - bash install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - bash dedal_install_spack_env.sh $SPACK_JOBS $CI_PROJECT_DIR $CI_PROJECT_DIR $SPACK_DEV_ENV $SPACK_PATH_GITLAB $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
   after_script:
     - mkdir -p $CI_PROJECT_DIR/spack_logs/installed $CI_PROJECT_DIR/spack_logs/not_installed
       # for succesfully installed packages: keep the spack logs for any package modified during this CI job
@@ -244,6 +248,8 @@ build-spack-env-on-runner:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
       # - spack_tests
     when: always
   timeout: 2 days
@@ -272,7 +278,9 @@ sync-esd-image:
     # run installation script inside future container environment
     #   => DAG concretization, subsequent cache access + fetching and actual build should be separate steps
     - mkdir --mode=0777 -p ${SANDBOX_ROOT}/${INSTALLATION_ROOT}
-    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} bash ./install_spack_env.sh $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
+    - apptainer exec --writable --bind ${CI_PROJECT_DIR}:${INSTALLATION_ROOT} --cwd ${INSTALLATION_ROOT} ${SANDBOX_ROOT} python3 install_spack_env.py $SPACK_JOBS $INSTALLATION_ROOT ${INSTALLATION_ROOT} $CI_SPACK_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     - echo "export SYSTEMNAME=${SYSTEMNAME}" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     - echo ". ${INSTALLATION_ROOT}/vendor/spack/var/spack/environments/${CI_SPACK_ENV}/load_env.sh" >> ${SANDBOX_ROOT}/.singularity.d/env/90-environment.sh
     # preparing to assemble the image: move in the CI project contents...
@@ -295,6 +303,8 @@ sync-esd-image:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
     when: always
   timeout: 2 days
   resource_group: registry-esd-master-image
@@ -325,8 +335,10 @@ sync-gitlab-spack-instance:
     - git fetch origin
     - git reset --hard $CI_COMMIT_SHA
     - git submodule update --force
+    # install spack
+    - source vendor/spack/share/spack/setup-env.sh
     # run installation script
-    - bash install_spack_env.sh $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
+    - python3 install_spack_env.py $SPACK_JOBS $SPACK_PATH_GITLAB $SPACK_REPO_PATH $SPACK_NFS_ENV "" $UPDATE_SPACK_OCI_CACHES $OCI_CACHE_PREFIX
     # create kernel spec, so that the environment can be used in gitlab CI jobs
     - RELEASE_NAME=$(case $CI_COMMIT_BRANCH in experimental_rel) echo ebrains-experimental;; ebrains*) echo ${CI_COMMIT_BRANCH:0:10}.${CI_COMMIT_BRANCH:11};; *) echo $CI_COMMIT_BRANCH;; esac);
     - bash create_JupyterLab_kernel.sh $SPACK_PATH_GITLAB $SPACK_NFS_ENV $RELEASE_NAME /mnt/ebrains_env
@@ -342,6 +354,8 @@ sync-gitlab-spack-instance:
   artifacts:
     paths:
       - spack_logs
+      - .dedal.log
+      - .generate_cache.log
     when: always
   rules:
     # branches that update the gitlab-runner upstream (read-only) installation
diff --git a/.gitmodules b/.gitmodules
index 30354655734aa2d7408ccd13b0d9e23455436578..0b9fab48222e2dffacd6231481ae5ae423cd6450 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -5,4 +5,4 @@
 [submodule "vendor/yashchiki"]
 	path = vendor/yashchiki
 	url = https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal
-	shallow = true
+	shallow = true
\ No newline at end of file
diff --git a/__init__.py b/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/create_spack_env.py b/create_spack_env.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal_env/spack.yaml b/dedal_env/spack.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b618d6e73862a5aee66cd50dd86194bb5f167da2
--- /dev/null
+++ b/dedal_env/spack.yaml
@@ -0,0 +1,6 @@
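+# minimal bootstrap environment providing the py-dedal tooling used by the CI install scripts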
+spack:
+  specs:
+    - py-dedal
+
+  concretizer:
+    unify: true
diff --git a/dedal_install.py b/dedal_install.py
new file mode 100644
index 0000000000000000000000000000000000000000..98681041aefcfcff079ed57b38b8368595099e8a
--- /dev/null
+++ b/dedal_install.py
@@ -0,0 +1,41 @@
+from pathlib import Path
+from dedal.configuration.GpgConfig import GpgConfig
+from dedal.configuration.SpackConfig import SpackConfig
+import os
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import count_files_in_folder
+
+from dedal.model.SpackDescriptor import SpackDescriptor
+
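+# Bootstrap step: use the dedal library to set up, concretize and install the
+# 'dedal_env' Spack environment (which provides py-dedal), reusing the local
+# concretization/build caches when they are already populated.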
+if __name__ == "__main__":
+    dedal_env = SpackDescriptor(name='dedal_env', path=Path('./'))
+    ebrains_repo = SpackDescriptor(name='ebrains-spack-builds', path=Path('../'))
+    dedal_dir = 'dedal_install_dir'
+    install_dir = Path('./') / dedal_dir
+    os.makedirs(install_dir, exist_ok=True)
+    concretization_dir = install_dir / 'concretization'
+    buildcache_dir = install_dir / 'buildcache'
+    spack_config = SpackConfig(env=dedal_env,
+                               repos=[ebrains_repo],
+                               install_dir=install_dir,
+                               upstream_instance=None,
+                               system_name='Dedal',
+                               concretization_dir=concretization_dir,
+                               buildcache_dir=buildcache_dir,
+                               gpg=None,
+                               use_spack_global=False,
+                               cache_version_build='latest',
+                               cache_version_concretize='latest',
+                               override_cache=False,
+                               spack_dir=Path('./vendor').resolve() / 'spack'
+                               )
+    spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=True)
+    spack_operation.setup_spack_env()
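+    # if either cache is empty, switch to a cache-less dedal operation and configure
+    # GPG (presumably so newly built packages can be signed for a fresh cache)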
+    if count_files_in_folder(concretization_dir) == 0 or count_files_in_folder(buildcache_dir) == 0:
+        print('No cache available')
+        gpg = GpgConfig('dedal', 'science@codemart.ro')
+        spack_config.gpg = gpg
+        spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+
+    spack_operation.concretize_spack_env()
+    spack_operation.install_packages(os.cpu_count())
diff --git a/dedal_install_spack_env.sh b/dedal_install_spack_env.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ca48b31dad36e17239592cee59ba4572562aa982
--- /dev/null
+++ b/dedal_install_spack_env.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+set -eo pipefail
+
+SPACK_JOBS=$1         # number of jobs
+INSTALLATION_ROOT=$2  # where to set up the installation
+EBRAINS_REPO=$3       # location of ebrains-spack-builds repository
+EBRAINS_SPACK_ENV=$4  # name of EBRAINS Spack environment to be created/updated
+UPSTREAM_INSTANCE=$5  # path to Spack instance to use as upstream (optional)
+UPDATE_SPACK_OCI_CACHES=$6 # "true" enables updating the OCI cache for spack sources and build results
+export OCI_CACHE_PREFIX=$7
+#python3 create_spack_env.py
+
+#spack env activate -p ./spack_tools
+#spack repo add ./
+#spack concretize --force --fresh --test root
+#spack install -j$SPACK_JOBS --fresh --test root
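+# bootstrap: build and install the dedal tooling environment (dedal_env) via the dedal library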
+python3 dedal_install.py
+
+spack env activate -p ./dedal_env
+spack uninstall -y py-dedal
+spack mirror remove local_cache
+# install the latest version of py-dedal from the specified branch (the cache version might not be the latest yet)
+spack install py-dedal
+spack load py-dedal
+
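+# run the main installation via spack-python so the Spack Python API is available;
+# py-dedal was loaded above so that install_spack_env.py can import the dedal modules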
+spack-python install_spack_env.py "$SPACK_JOBS" "$INSTALLATION_ROOT" "$EBRAINS_REPO" "$EBRAINS_SPACK_ENV" "$UPSTREAM_INSTANCE" "$UPDATE_SPACK_OCI_CACHES" "$OCI_CACHE_PREFIX"
\ No newline at end of file
diff --git a/install_spack_env.py b/install_spack_env.py
new file mode 100644
index 0000000000000000000000000000000000000000..77ab6efda6499f5d3e72578e11232a1efd52dfb5
--- /dev/null
+++ b/install_spack_env.py
@@ -0,0 +1,326 @@
+import os
+import argparse
+import shutil
+import subprocess
+import sys
+from pathlib import Path
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.enum.SpackConfigCommand import SpackConfigCommand
+from dedal.enum.SpackViewEnum import SpackViewEnum
+from dedal.model.SpackDescriptor import SpackDescriptor
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.utils.utils import set_bashrc_variable, run_command
+from tools.tools import find_first_upstream_prefix
+
+parser = argparse.ArgumentParser(
+    prog='install_spack_env.py',
+    description='Install or update the EBRAINS Spack environment (ESD) using the dedal library.')
+
+parser.add_argument(
+    "spack_jobs",
+    type=int,
+    help="number of jobs"
+)
+
+parser.add_argument(
+    "installation_root",
+    type=str,
+    help="where to set up the installation"
+)
+
+parser.add_argument(
+    "ebrains_repo",
+    type=str,
+    help="location of ebrains-spack-builds repository"
+)
+
+parser.add_argument(
+    "ebrains_spack_env",
+    type=str,
+    help="name of EBRAINS Spack environment to be created/updated"
+)
+
+parser.add_argument(
+    "upstream_instance",
+    type=str,
+    help="path to Spack instance to use as upstream (optional)"
+)
+
+parser.add_argument(
+    "update_spack_oci_caches",
+    type=str,
+    help="'true' enables updating the OCI cache for spack sources and build results"
+)
+
+parser.add_argument(
+    "oci_cache_prefix",
+    type=str,
+    nargs="?",  # optional
+    default=None,
+)
+
+args = parser.parse_args()
+
+spack_jobs = args.spack_jobs
+installation_root = Path(args.installation_root).resolve()
+ebrains_repo = args.ebrains_repo
+ebrains_spack_env = args.ebrains_spack_env
+upstream_instance = args.upstream_instance
+update_spack_oci_caches = args.update_spack_oci_caches
+oci_cache_prefix = args.oci_cache_prefix
+
+# define SYSTEMNAME variable in sites where it's not already defined
+system_name = (
+        os.getenv('SYSTEMNAME')
+        or os.getenv('HPC_SYSTEM')
+        or os.getenv('BSC_MACHINE')
+)
+
+# disable local configuration and cache directories
+set_bashrc_variable('SPACK_DISABLE_LOCAL_CONFIG', 'true')
+os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
+set_bashrc_variable('SPACK_USER_CACHE_PATH', '/tmp/spack')
+os.environ['SPACK_USER_CACHE_PATH'] = '/tmp/spack'
+
+if oci_cache_prefix:
+    set_bashrc_variable('OCI_CACHE_PREFIX', oci_cache_prefix)
+    os.environ['OCI_CACHE_PREFIX'] = oci_cache_prefix
+
+# make sure spack uses the symlinked folder as path
+ci_spack_root = installation_root / 'spack'
+
+# cache related variables
+cache_specfile = os.environ.get("CACHE_SPECFILE", "env_specfile.yaml")
+set_bashrc_variable('CACHE_SPECFILE', cache_specfile)
+os.environ['CACHE_SPECFILE'] = cache_specfile
+dedal_home = f'{ebrains_repo}/vendor/yashchiki'
+os.environ['DEDAL_HOME'] = dedal_home
+set_bashrc_variable('DEDAL_HOME', dedal_home)
+spack_cache_source = f'{ci_spack_root}/var/spack/cache'
+set_bashrc_variable('SPACK_CACHE_SOURCE', spack_cache_source)
+os.environ['SPACK_CACHE_SOURCE'] = spack_cache_source
+set_bashrc_variable('SPACK_CACHE_BUILD', spack_cache_source)
+os.environ['SPACK_CACHE_BUILD'] = spack_cache_source
+spack_cache_build = spack_cache_source
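+# note: the source cache and the build cache currently share the same local directory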
+
+# initial setup: use spack submodule if spack dir doesn't already exist
+spack_root_existed = True
+if ci_spack_root and ebrains_repo:
+    if not os.path.isdir(ci_spack_root):
+        spack_source = os.path.join(ebrains_repo, 'vendor', 'spack')
+        try:
+            os.symlink(spack_source, ci_spack_root)
+            spack_root_existed = False
+        except FileExistsError:
+            pass
+
+data_dir = installation_root / 'caching'
+
+env_repo = SpackDescriptor(ebrains_spack_env, path=ci_spack_root / 'var/spack/environments')
+
+binary_cache_path = Path(os.getenv('SPACK_CACHE_BUILD'))
+spack_config = SpackConfig(env=env_repo,
+                           repos=[],
+                           install_dir=installation_root.parent,
+                           upstream_instance=upstream_instance,
+                           system_name=system_name,
+                           buildcache_dir=binary_cache_path,
+                           use_spack_global=False,
+                           cache_version_build='spack_cache',
+                           view=SpackViewEnum.WITHOUT_VIEW,
+                           spack_dir=Path(ci_spack_root).resolve())
+
+spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=False)
+spack_operation.setup_spack_env()
+
+if upstream_instance:
+    upstream_prefix = find_first_upstream_prefix(upstream_instance)
+    spack_operation.config(SpackConfigCommand.ADD, f'upstreams:upstream-spack-instance:install_tree:{upstream_prefix}')
+# on a freshly created spack tree, pad install paths so binaries stay relocatable via the build cache
+if not spack_root_existed:
+    spack_operation.config(SpackConfigCommand.ADD, 'config:install_tree:padded_length:128')
+
+# make sure all fetching/clingo stuff happens before anything else
+spack_operation.spec_pacakge('aida')
+# rebuild spack's database (it could be stale, e.g. after a debugging session)
+spack_operation.reindex()
+spack_operation.add_mirror(mirror_name='local_cache', mirror_path=binary_cache_path)
+
+env_path = ci_spack_root / 'var/spack/environments' / ebrains_spack_env
+site_config_dest = env_path / 'site-config'
+# replace any stale site-config from a previous run with a fresh copy
+if site_config_dest.exists():
+    shutil.rmtree(site_config_dest)
+
+site_config = Path(ebrains_repo) / 'site-config'
+shutil.copytree(site_config, site_config_dest)
+
+y_merge_path = Path(ebrains_repo) / 'site-config/ymerge.py'
+merge_path_1 = Path(ebrains_repo) / 'spack.yaml'
+merge_path_2 = Path(ebrains_repo) / f'site-config/{system_name}/spack.yaml'
+
+# update environment site-configs
+merged_envs = run_command(
+    "bash", "-c",
+    f'spack-python {y_merge_path} {merge_path_1} {merge_path_2}',
+    info_msg='Merging top-level and site-specific spack.yaml files.',
+    exception_msg='Failed to merge top-level and site-specific spack.yaml files.',
+    capture_output=True,
+    text=True,
+    check=True
+).stdout
+
+if merged_envs is None:
+    sys.exit(-1)
+else:
+    tmp_spack_yaml = Path("/tmp/spack.yaml").resolve()
+    try:
+        with open(tmp_spack_yaml, "w") as f:
+            f.write(merged_envs)
+    except Exception as e:
+        sys.stderr.write(f"Error writing {tmp_spack_yaml}: {e}\n")
+        sys.exit(1)
+    shutil.copy(tmp_spack_yaml, site_config_dest.parent)
+
+# add repo if it does not exist
+spack_operation.add_spack_repo(repo_name='ebrains-spack-builds', repo_path=installation_root.parent)
+spack_operation.concretize_spack_env(force=True, fresh=True, test='root')
+
+dump_dag = spack_operation.spack_env_operation.spec_pacakge('-y', True)
+
+try:
+    with open(cache_specfile, "w") as f:
+        f.write(dump_dag)
+except Exception as e:
+    sys.stderr.write(f"Failed to dump dag to file: {dump_dag}: {e}\n")
+    sys.exit(1)
+
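+# when a remote OCI cache is configured, pre-fetch any already-cached sources and
+# build results for the concretized specs before attempting to build them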
+if oci_cache_prefix:
+    # fetch missing sources (if packages not yet installed)
+    fetch_cached_sources = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_sources.py'
+    print(fetch_cached_sources)
+    run_command(
+        "bash", "-c",
+        f"{spack_operation.spack_command_on_env} && python3 "
+        f"{fetch_cached_sources} "
+        f"--local-cache spack_cache_source "
+        f"--remote-cache-type=oci "
+        f"--remote-cache {oci_cache_prefix}/source_cache "
+        f"--yashchiki-home {dedal_home} "
+        f"/tmp/missing_paths_sources.dat cache_specfile",
+        info_msg="Fetching missing sources",
+        exception_msg="Failed to fetch missing sources",
+        check=True
+    )
+    # fetch missing build results (if packages not yet installed)
+    fetch_cached_buildresults = Path(dedal_home).resolve() / 'cli' / 'fetch_cached_buildresults.py'
+    print(fetch_cached_buildresults)
+    run_command(
+        "bash", "-c",
+        f"{spack_operation.spack_command_on_env} && python3 "
+        f"{fetch_cached_buildresults} "
+        f"--local-cache",
+        f"{spack_cache_build}/build_cache "
+        f"--remote-cache-type=oci "
+        f"--remote-cache {oci_cache_prefix}/build_cache "
+        f"--yashchiki-home {dedal_home} "
+        f"/tmp/missing_paths_buildresults.dat {cache_specfile}",
+        info_msg="Fetching missing build results",
+        exception_msg="Failed to fetch missing build results",
+        check=True
+    )
+
+# check, inside the activated environment, whether any concretized specs are still uninstalled;
+# the embedded spack-python snippet exits with 0 exactly when uninstalled specs remain
+check_cmd = (
+    f"{spack_operation.spack_command_on_env} && spack-python -c "
+    "'exit(not len(spack.environment.active_environment().uninstalled_specs()))'"
+)
+packages_not_installed = subprocess.run(["bash", "-c", check_cmd]).returncode == 0
+
+print(f'Packages left to install: {packages_not_installed}')
+
+if packages_not_installed:
+    spack_operation.fetch(dependencies=True, missing=True)
+#
+#     if oci_cache_prefix and os.environ.get('UPDATE_SPACK_OCI_CACHES', 'false') == 'true':
+#         print("Performing update of the source cache")
+#         update_cached_sources = Path(dedal_home).resolve() / 'cli/update_cached_sources.py'
+#         cache_cmd = run_command(
+#             "bash", "-c",
+#             f'{spack_operation.spack_command_on_env} && python3 {update_cached_sources} '
+#             f'--local-cache {spack_cache_source} '
+#             f'--remote-cache-type=oci '
+#             f'--remote-cache {oci_cache_prefix}/source_cache '
+#             f'/tmp/missing_paths_sources.dat',
+#             info_msg='Updating remote OCI cache',
+#             check=True
+#         )
+#         if cache_cmd is None or cache_cmd.returncode != 0:
+#             print("Cache update failed.")
+#             sys.exit(cache_cmd.returncode if cache_cmd else 1)
+#     else:
+#         print("Updating of the source cache disabled.")
+#
+# dag_hashes_pre_install = []
+# if oci_cache_prefix:
+#     specfile_dag_hash = Path(dedal_home).resolve() / 'cli/specfile_dag_hash.py'
+#     dag_hashes_pre_install = run_command(
+#         "bash", "-c",
+#         f'{spack_operation.spack_command_on_env} && spack-python {specfile_dag_hash} {cache_specfile}',
+#         capture_output=True,
+#         text=True,
+#         check=True
+#     ).stdout.strip().split()
+#
+# print(dag_hashes_pre_install)
+
+# spack_operation.install_packages(jobs=int(os.getenv('SPACK_JOBS')), signed=False, test='root')
+#
+# if oci_cache_prefix and update_spack_oci_caches.lower() == "true":
+#     for dag_hash in dag_hashes_pre_install:
+#         package = Path(spack_cache_build).resolve() / dag_hash
+#         print(package)
+#         result = spack_operation.create_build_cache(package=package, unsigned=True, only=True)
+#         if result != 0:
+#             print(f'Failed to push {dag_hash}, trying to call spack find on it:')
+#             spack_operation.find_package(package=package, long=True, variants=True, paths=True)
+#
+#         # upload packages from local to remote cache
+#         print("Performing update of the build cache")
+#         update_cached_buildresults = Path(dedal_home).resolve() / 'cli/update_cached_buildresults.py'
+#         local_cache = Path(spack_cache_build).resolve() / 'build_cache'
+#         run_command(
+#             "bash", "-c",
+#             f'{spack_operation.spack_command_on_env} && python3 {update_cached_buildresults} '
+#             f'--local-cache {local_cache} '
+#             f'--remote-cache-type=oci '
+#             f'--remote-cache {oci_cache_prefix}/build_cache '
+#             f'/tmp/missing_paths_buildresults.dat',
+#             check=True
+#         )
+# else:
+#     print('Updating of the build cache disabled.')
+#
+# spack_operation.reindex()
+#
+# exports = run_command("bash", "-c",
+#                       f'spack env activate --sh {env_path}',
+#                       check=True,
+#                       stdout=subprocess.PIPE,
+#                       stderr=subprocess.PIPE,
+#                       capture_output=True,
+#                       text=True,
+#                       ).stdout
+#
+# out_file = Path(ci_spack_root).resolve() / f'var/spack/environments/{ebrains_spack_env}/load_env.sh'
+# with open(out_file, "w") as f:
+#     f.write(exports)
+#
+# if not spack_operation.check_installed_spack_packages(Path(ebrains_repo).resolve()):
+#     print('Some spack packages failed to install.')
+#     sys.exit(-1)
+#
+# print('Installed all spack packages.')
+# spack_operation.reindex()
diff --git a/packages/py-dedal/package.py b/packages/py-dedal/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..f59e5fcd286c9bb0f63cfe5cf3ea1b7171dc777f
--- /dev/null
+++ b/packages/py-dedal/package.py
@@ -0,0 +1,20 @@
+from spack.package import *
+
+class PyDedal(PythonPackage):
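+    """Python library (dedal) for setting up, concretizing, caching and installing Spack environments."""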
+    homepage = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+    git      = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/dedal.git'
+
+    version(
+        '1.0.0',
+        branch='VT-109-HPC'
+    )
+
+    depends_on("python@3.10:", type=("build", "run"))
+    depends_on("py-setuptools", type="build")
+    depends_on("py-oras@0.2.31:", type="run")
+    # depends_on("spack", type="run")
+
+    depends_on("py-ruamel-yaml", type=("build", "run"))
+    depends_on("py-click", type=("build", "run"))
+    depends_on("py-jsonpickle", type=("build", "run"))
+    depends_on("py-pyyaml", type=("build", "run"))
\ No newline at end of file
diff --git a/packages/py-oras/package.py b/packages/py-oras/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa92ed1181614fa7aa30178ffae9f92803b299d8
--- /dev/null
+++ b/packages/py-oras/package.py
@@ -0,0 +1,31 @@
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+from spack.package import *
+
+
+class PyOras(PythonPackage):
+    """ORAS Python SDK: OCI Registry as Storage Python SDK."""
+
+    homepage = "https://oras.land"
+    git = "https://github.com/oras-project/oras-py"
+    url = "https://files.pythonhosted.org/packages/28/86/cbae8797a1041fe3bef37f31aa1ecdd3f8914fbc7cfb663532255b6ec16e/oras-0.2.31.tar.gz"
+
+    maintainers("vsoch")
+
+    license("Apache 2.0 License")
+
+    version("0.2.31", sha256="95c0a341359458747c2946dab47d584cc444d1f9d379b6d63fb7a84cabc54de4")
+
+    depends_on("python@3.7:", type=("build", "run"))
+
+    depends_on("py-jsonschema", type=("build", "run"))
+    depends_on("py-requests", type=("build", "run"))
+
+    variant("tests", default=False, description="Enable test suite")
+    depends_on("py-pytest@4.6.2:", when="+tests", type=("build", "run"))
+
+    variant("docker", default=False, description="Enable Docker extra support")
+    depends_on("py-docker@5.0.1", when="+docker", type=("build", "run"))
\ No newline at end of file
diff --git a/tools/__init__.py b/tools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/tools/tools.py b/tools/tools.py
new file mode 100644
index 0000000000000000000000000000000000000000..2edf146c5c7b8bb9c002e4015479daada4007431
--- /dev/null
+++ b/tools/tools.py
@@ -0,0 +1,13 @@
+import os
+
+
+def find_first_upstream_prefix(upstream_instance):
+    """
+    Search for directories named '.spack-db' within the spack opt directory
+    under upstream_instance, and return a list of their parent directories.
+    """
+    base_path = os.path.join(upstream_instance, "spack", "opt", "spack")
+    for upstream_prefix, dirs, _ in os.walk(base_path):
+        if ".spack-db" in dirs:
+            return upstream_prefix
+    return None
diff --git a/tools/ymerge.py b/tools/ymerge.py
new file mode 100644
index 0000000000000000000000000000000000000000..accb877977fd1a95b83ea3ec44179d7928059ae8
--- /dev/null
+++ b/tools/ymerge.py
@@ -0,0 +1,14 @@
+# spack-python script that merges two environment configuration files (spack.yaml) into one
+# Usage: spack-python ymerge.py /path/to/first/spack.yaml /path/to/second/spack.yaml
+# (note: if the second file does not exist, the output is simply the first file)
+
+import sys, os
+from spack.config import merge_yaml, read_config_file, syaml
+
+if not os.path.exists(sys.argv[2]):
+    merged = syaml.dump(read_config_file(sys.argv[1]))
+else:
+    merged = syaml.dump(merge_yaml(read_config_file(sys.argv[1]), read_config_file(sys.argv[2])))
+
+print(merged)
+
diff --git a/vendor/yashchiki b/vendor/yashchiki
index 5690db7dbccd78f3ceef2123a605e662bb8b2c0f..7c1dccf1c19930245fd1e2ab9a9d38550f66d4b7 160000
--- a/vendor/yashchiki
+++ b/vendor/yashchiki
@@ -1 +1 @@
-Subproject commit 5690db7dbccd78f3ceef2123a605e662bb8b2c0f
+Subproject commit 7c1dccf1c19930245fd1e2ab9a9d38550f66d4b7