diff --git a/bin/yashchiki b/bin/yashchiki
index b56d83f21c66846d1e06ef85f8a08829790fc3d7..f679741f509b9c71e8285f313163d08bcb4e7e0a 100755
--- a/bin/yashchiki
+++ b/bin/yashchiki
@@ -165,9 +165,9 @@ tmpdir.mkdir(exist_ok=True, parents=True)
 # behavior
 env = os.environ.copy()
 env.update({
-    "DOCKER_BASE_IMAGE": config["docker_base_image"],
-    "BUILD_BASE_SANDBOX": str(int(config["build_base_sandbox"])),
-    "SPACK_ENVIRONMENT": str(int(config["spack_environment"])),
+    "DOCKER_BASE_IMAGE": config.get("docker_base_image", ""),
+    "BUILD_BASE_SANDBOX": str(int(config.get("build_base_sandbox", "false"))),
+    "SPACK_ENVIRONMENT": str(int(config.get("spack_environment", "false"))),
     # This needs to be here because otherwise the default python
     # (2.7.18) will pollute the spec and lead to a conflict
     # can be removed as soon as the explicit preferred version
@@ -193,6 +193,14 @@ env.update({
     "ROOT_DIR": root_dir,
 })
 
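+# Pass the optional cache configuration (source/build cache type) on to the stage scripts.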
+if config.get("cache", None):
+    if config["cache"].get("source", None):
+        if config["cache"]["source"].get("type", None):
+            env.update({"CACHE_SOURCE_TYPE": config["cache"]["source"]["type"]})
+    if config["cache"].get("build", None):
+        if config["cache"]["build"].get("type", None):
+            env.update({"CACHE_BUILD_TYPE": config["cache"]["build"]["type"]})
+
 needs_spackdir = False
 
 if "fetch" in args.stages:
diff --git a/lib/yashchiki/commons.sh b/lib/yashchiki/commons.sh
index aa2c6c46a6864156e79b88dc822a31ce79626fc7..30be20818b8ae4f86e52a3b80b64b3776423e62f 100755
--- a/lib/yashchiki/commons.sh
+++ b/lib/yashchiki/commons.sh
@@ -403,7 +403,7 @@ install_from_buildcache_from_specfiles() {
     done
 
     # install packages from buildcache
-    cat "${specfiles[@]}" | sed -n 's/.*hash:\s*\(.*\)/\1/p' > "${FILE_HASHES_SPACK_ALL}"
+    cat "${specfiles[@]}" | sed -n 's/.*hash:\s*\([A-Za-z0-9]*\).*/\1/p' > "${FILE_HASHES_SPACK_ALL}"
 
     # make each unique
     cat "${FILE_HASHES_SPACK_ALL}" | sort | uniq > "${FILE_HASHES_SPACK}"
diff --git a/lib/yashchiki/fetch.sh b/lib/yashchiki/fetch.sh
index 1fb4f094a64be00fe9fc9bd9be18801f4f1ca668..e8b0b0fea4cf71b3770a57b0ceb9ff059a27a52f 100755
--- a/lib/yashchiki/fetch.sh
+++ b/lib/yashchiki/fetch.sh
@@ -15,12 +15,14 @@ fi
 tmp_config_scope=("$(mktemp -d)")
 
 # set download mirror stuff to prefill outside of container
-export MY_SPACK_FOLDER="${YASHCHIKI_SPACK_PATH}"
+export MY_WORKSPACE="${PWD}"
+export MY_SPACK_FOLDER="${PWD}/${YASHCHIKI_SPACK_PATH}"
 # here we need the spack path outside of the container, but in commons.sh
 # the inside-container location is defined
 export MY_SPACK_BIN="${MY_SPACK_FOLDER}/bin/spack"
 # therefore we also need to redefine this command variable
 export MY_SPACK_CMD="${MY_SPACK_BIN} --config-scope ${YASHCHIKI_SPACK_CONFIG} --config-scope ${tmp_config_scope}"
+export MY_SPACK_PYTHON="${MY_SPACK_FOLDER}/bin/spack-python"
 
 cat >"${tmp_config_scope}/config.yaml" <<EOF
 config:
@@ -96,6 +98,11 @@ compilers::  # two colons to overwrite lower-precedence settings, i.e. system co
     spec: ${YASHCHIKI_SPACK_GCC}
 EOF
 
+cat "${tmp_config_scope}/compilers.yaml"
+PATH=${MY_SPACK_FOLDER}/bin:$PATH ${MY_SPACK_PYTHON} -c "import distro; print(distro.id(), distro.version())"
+${MY_SPACK_CMD} arch -o
+${MY_SPACK_CMD} arch -g
+${MY_SPACK_CMD} compilers
 
 echo "CONCRETIZE PACKAGES IN PARALLEL"
 packages_to_spec=(
@@ -134,7 +141,7 @@ if [ -n "${SPACK_ENVIRONMENT:-}" ]; then
     # FIXME: track concretizer errors here:
     (
         # FIXME: can we hash something to get a reproducible non-static specfile name?
-        specfile="env_specfile";
+        specfile="./env_specfile.yaml";
         # FIXME: style config!
         ${MY_SPACK_CMD} -e default --test=root --fresh concretize &&
         ${MY_SPACK_CMD} -e default spec -y > "${specfile}"
@@ -172,6 +179,7 @@ if [ ${YASHCHIKI_BUILD_SPACK_GCC} -eq 1 ]; then
 fi
 
 fetch_specfiles=()
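+# specfiles also handed to the cache helper scripts (may differ from fetch_specfiles for environments)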
+fetch_specfiles_cache=()
 for package in "${packages_to_fetch[@]}"; do
     specfile="$(get_specfile_name "${package}")"
     echo "Specfile for ${package} is ${specfile}."
@@ -185,6 +193,7 @@ for package in "${packages_to_fetch[@]}"; do
         exit 1
     fi
     fetch_specfiles+=( "${specfile}" )
+    fetch_specfiles_cache+=( "${specfile}" )
 done
 
 if [ -n "${SPACK_ENVIRONMENT:-}" ]; then
@@ -193,6 +202,31 @@ if [ -n "${SPACK_ENVIRONMENT:-}" ]; then
     for split_specfile in ./env_specfile_split*.yaml; do
         fetch_specfiles+=( ${split_specfile} )
     done
+    # pass the unsplit multi-document specfile; the cache helper scripts parse it with a plain YAML parser
+    fetch_specfiles_cache+=( "./env_specfile.yaml" )
+fi
+
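+# Pre-fill the local download cache from the remote OCI source cache and
+# remember which entries are missing so they can be uploaded after fetching.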
+missing_paths=()
+if [ -n "${CACHE_SOURCE_TYPE:-}" ]; then
+    if [ "${CACHE_SOURCE_TYPE}" != "oci" ]; then
+        echo "Unknown source cache type: ${CACHE_SOURCE_TYPE}"
+        exit 1
+    fi
+    echo "Source cache type: ${CACHE_SOURCE_TYPE}"
+    for fetch_specfile in "${fetch_specfiles_cache[@]}"; do
+        raw_paths=$(PATH=${MY_SPACK_FOLDER}/bin:$PATH ${MY_SPACK_PYTHON} ${MY_WORKSPACE}/lib/yashchiki/specfile_storage_path.py ${fetch_specfile})
+        fetch_paths=(${raw_paths})
+        pushd ${YASHCHIKI_CACHES_ROOT}/download_cache
+        for fetch_path in "${fetch_paths[@]}"; do
+            # FIXME: gitlab env vars!
+            oras pull ${HARBOR_HOST}/${HARBOR_PROJECT}/esd_source_cache:$(basename ${fetch_path}) 2>&1 && ret=$? || ret=$?
+            if [ ${ret} -ne 0 ]; then
+                missing_paths+=( "${fetch_path}" )
+            fi
+        done
+        popd
+    done
+    echo "Missing source cache entries: ${missing_paths[@]}"
 fi
 
 if ! ${MY_SPACK_CMD} fetch -D "${fetch_specfiles[@]/^/-f }"; then
@@ -200,4 +234,52 @@ if ! ${MY_SPACK_CMD} fetch -D "${fetch_specfiles[@]/^/-f }"; then
     exit 1
 fi
 
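+# Upload freshly fetched sources that were missing from the remote OCI source cache.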
+if [ -n "${CACHE_SOURCE_TYPE:-}" ]; then
+    if [ "${CACHE_SOURCE_TYPE}" != "oci" ]; then
+        echo "Unknown source cache type: ${CACHE_SOURCE_TYPE}"
+        exit 1
+    fi
+    pushd ${YASHCHIKI_CACHES_ROOT}/download_cache
+    for missing_path in "${missing_paths[@]}"; do
+        echo "Uploading to OCI cache: ${missing_path}"
+        # FIXME: gitlab env vars!
+        oras push \
+            --username ${HARBOR_USERNAME} \
+            --password ${HARBOR_PASSWORD} \
+            --annotation="path=${missing_path}" \
+            ${HARBOR_HOST}/${HARBOR_PROJECT}/esd_source_cache:$(basename ${missing_path}) \
+            ${missing_path} 2>&1 && ret=$? || ret=$?
+        if [ ${ret} -ne 0 ]; then
+            echo "Uploading of \"${fetch_path}\" to OCI cache failed."
+        fi
+    done
+    popd
+fi
+
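+# Pre-fill the local build cache with binary packages from the remote OCI build cache.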
+if [ -n "${CACHE_BUILD_TYPE:-}" ]; then
+    if [ "${CACHE_BUILD_TYPE}" != "oci" ]; then
+        echo "Unknown build cache type: ${CACHE_BUILD_TYPE}"
+        exit 1
+    fi
+    echo "Trying to download from remote build cache"
+    # cf. _install_from_buildcache...
+    FILE_HASHES_SPACK_ALL=$(mktemp)
+    cat "${fetch_specfiles[@]}" | sed -n 's/.*hash:\s*\([A-Za-z0-9]*\).*/\1/p' > "${FILE_HASHES_SPACK_ALL}"
+    cat "${FILE_HASHES_SPACK_ALL}"
+    FILE_HASHES_SPACK=$(mktemp)
+    cat "${FILE_HASHES_SPACK_ALL}" | sort | uniq > "${FILE_HASHES_SPACK}"
+    file_hashes=($(cat "${FILE_HASHES_SPACK}"))
+    test -d "${BUILD_CACHE_OUTSIDE}" || mkdir -p "${BUILD_CACHE_OUTSIDE}"
+    missing_in_buildcache=()
+    pushd "${BUILD_CACHE_OUTSIDE}"
+    for file_hash in "${file_hashes[@]}"; do
+        oras pull ${HARBOR_HOST}/${HARBOR_PROJECT}/esd_build_cache:${file_hash}.tar.gz 2>&1 && ret=$? || ret=$?
+        if [ ${ret} -ne 0 ]; then
+            missing_in_buildcache+=( "${file_hash}" )
+        fi
+    done
+    popd
+    echo "Missing build cache entries: ${missing_in_buildcache[@]}"
+fi
+
 echo
diff --git a/lib/yashchiki/install_spack_packages.sh b/lib/yashchiki/install_spack_packages.sh
index c655785b1545ffbc550463fd866977e62bebff20..8a6b5818733f9a910fc796d0e5d1baa55f058ad2 100755
--- a/lib/yashchiki/install_spack_packages.sh
+++ b/lib/yashchiki/install_spack_packages.sh
@@ -13,7 +13,10 @@ cd "$HOME"
 
 install_from_buildcache "${spack_packages[@]+"${spack_packages[@]}"}"
 
-# FIXME: install_from_buildcache for environments!
+if [ -n "${SPACK_ENVIRONMENT:-}" ]; then
+    # FIXME: hardcoded file name for envs
+    install_environment_from_buildcache "default"
+fi
 
 echo "INSTALLING PACKAGES"
 # heurisitic: let's use -j 8 typically… (and parallelize spack builds)
diff --git a/lib/yashchiki/specfile_dag_hash.py b/lib/yashchiki/specfile_dag_hash.py
new file mode 100644
index 0000000000000000000000000000000000000000..76a16826331c6516c72804ea3b1f2fed9a70eea7
--- /dev/null
+++ b/lib/yashchiki/specfile_dag_hash.py
@@ -0,0 +1,38 @@
+import argparse
+from collections.abc import Iterable
+import pathlib
+import ruamel.yaml as yaml
+from spack import spec
+import spack.binary_distribution as bindist
+import spack.traverse
+
+parser = argparse.ArgumentParser(
+        prog='specfile_dag_hash.py',
+        description='Extracting DAG hashes from a given specfile',
+        epilog='...')
+
+parser.add_argument(
+    "path_specfile", type=pathlib.Path,
+    help="Location of the specfile to parse")
+
+args = parser.parse_args()
+
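+# A specfile may contain multiple YAML documents (e.g. for a concretized
+# environment), hence safe_load_all.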
+with open(args.path_specfile, "r") as fd:
+    file_content = fd.read()
+    data = list(yaml.safe_load_all(file_content))
+
+to_be_fetched = []
+for rspec in data:
+    s = spec.Spec.from_dict(rspec)
+    if not isinstance(s, Iterable):
+        s = [s]
+
+    maybe_to_be_fetched = spack.traverse.traverse_nodes(s, key=spack.traverse.by_dag_hash)
+
+    for node in maybe_to_be_fetched:
+        build_cache_paths = [
+            bindist.tarball_path_name(node, ".spack"),
+            bindist.tarball_name(node, ".spec.json.sig"),
+            bindist.tarball_name(node, ".spec.json"),
+            bindist.tarball_name(node, ".spec.yaml"),
+        ]
+        print("\n".join(build_cache_paths))
diff --git a/lib/yashchiki/specfile_storage_path.py b/lib/yashchiki/specfile_storage_path.py
new file mode 100644
index 0000000000000000000000000000000000000000..3568cb631723e877e9f9bf6e2f60932bf905d149
--- /dev/null
+++ b/lib/yashchiki/specfile_storage_path.py
@@ -0,0 +1,36 @@
+import argparse
+from collections.abc import Iterable
+import pathlib
+import ruamel.yaml as yaml
+from spack import spec
+import spack.mirror
+import spack.traverse
+
+parser = argparse.ArgumentParser(
+        prog='specfile_storage_path.py',
+        description='Extracting storage paths from a given specfile',
+        epilog='...')
+
+parser.add_argument(
+    "path_specfile", type=pathlib.Path,
+    help="Location of the specfile to parse")
+
+args = parser.parse_args()
+
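+# The specfile may consist of several YAML documents (e.g. for a concretized
+# environment), hence safe_load_all.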
+with open(args.path_specfile, "r") as fd:
+    file_content = fd.read()
+    data = list(yaml.safe_load_all(file_content))
+
+to_be_fetched = []
+for rspec in data:
+    s = spec.Spec.from_dict(rspec)
+    if not isinstance(s, Iterable):
+        s = [s]
+
+    maybe_to_be_fetched = spack.traverse.traverse_nodes(s, key=spack.traverse.by_dag_hash)
+
+    for ss in maybe_to_be_fetched:
+        # we could skip sources for already installed packages?
+        #if ss.installed:
+        #    continue
+        pkg = ss.package
+        to_be_fetched.append(pkg)
+        print(spack.mirror.mirror_archive_paths(pkg.fetcher, 'whatever').storage_path)
diff --git a/lib/yashchiki/update_build_cache.sh b/lib/yashchiki/update_build_cache.sh
index 306fb6990e0236b875aefbf6c39d3dc687684793..76d3bbc5537c0881eed86551539f8d56df826f7e 100755
--- a/lib/yashchiki/update_build_cache.sh
+++ b/lib/yashchiki/update_build_cache.sh
@@ -33,3 +33,26 @@ apptainer exec\
     -B "${BUILD_CACHE_OUTSIDE}:${BUILD_CACHE_INSIDE}:rw"\
     "${IMAGE_NAME}" \
     /opt/spack_install_scripts/update_build_cache_in_container.sh -j ${YASHCHIKI_JOBS} -q || exit 0
+
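+# Push the contents of the local build cache to the remote OCI build cache, if configured.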
+if [ -n "${CACHE_BUILD_TYPE:-}" ]; then
+    if [ "${CACHE_BUILD_TYPE}" != "oci" ]; then
+        echo "Unknown build cache type: ${CACHE_BUILD_TYPE}"
+        exit 1
+    fi
+    echo "Trying to upload to remote build cache"
+    pushd ${BUILD_CACHE_OUTSIDE}
+    for path in $(find . -mindepth 1); do
+        # FIXME: only push entries for the dag_hashes of freshly built packages (there could be more/unrelated stuff in the build cache)
+        echo "Uploading to OCI build cache: ${path}"
+        oras push \
+            --annotation="path=${path}" \
+            --username ${HARBOR_USERNAME} \
+            --password ${HARBOR_PASSWORD} \
+            ${HARBOR_HOST}/${HARBOR_PROJECT}/esd_build_cache:$(basename ${path}) ${path} && ret=$? || ret=$?
+        if [ ${ret} -ne 0 ]; then
+            echo "Uploading of \"${path}\" to OCI build cache failed."
+        fi
+    done
+    popd
+fi