diff --git a/.ci/Jenkinsfile_asic b/.ci/Jenkinsfile_asic
index b4d58ec9ab21ed2622ff5c933ca8c9bb64b36884..eafcd599c5dfa2cb6a3da717d9b99c04665f1970 100755
--- a/.ci/Jenkinsfile_asic
+++ b/.ci/Jenkinsfile_asic
@@ -6,6 +6,11 @@ Closure cleanupSteps = {
 	sh "sudo /bin/rm -rf \"${WORKSPACE}/sandboxes/\" || exit 0"
 	// remove tmp (not only for spack)
 	sh "sudo /bin/rm -rf \"/tmp/${NODE_NAME}/\""
+	// the spack repository gets bind-mounted into the sandbox and owned by
+	// the spack user during the build -> revert ownership prior to cleaning the workspace
+	sh "[ -d \"$WORKSPACE/spack\" ] && sudo chown -R vis_jenkins \"$WORKSPACE/spack\" || true"
+	cleanWs(patterns: [[pattern: 'download_cache/', type: 'EXCLUDE']],
+	        deleteDirs: true)
 }
 
 pipeline {
@@ -16,6 +21,12 @@ pipeline {
 		skipDefaultCheckout()
 	}
 
+	parameters {
+		string(name: 'BUILD_CACHE_NAME',
+			   defaultValue: 'asic_init_from_2021-06-18_1',
+			   description: 'Which build cache to use? It resides under $HOME/build_caches/$BUILD_CACHE_NAME and will be created if it does not exist.')
+	}
+
 	environment {
 		CONTAINER_STYLE = "asic"
 	}
@@ -25,6 +36,11 @@ pipeline {
 			agent { label 'conviz1||conviz2' }
 			environment {
 				DOCKER_BASE_IMAGE = "centos:7"
+				// versions from system packages
+				DEPENDENCY_PYTHON = "python@2.7.5"
+				DEPENDENCY_PYTHON3 = "python@3.6.8"
+				VISIONARY_GCC_VERSION = "4.8.5"
+				VISIONARY_GCC = "gcc@${VISIONARY_GCC_VERSION}"
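+				// note: gcc@4.8.5 is the stock centos:7 compiler -> the asic container builds with the system gcc for now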
 				TMPDIR = "/tmp/${env.NODE_NAME}"
 				JOB_TMP_SPACK = sh(script: "mkdir -p ${env.TMPDIR} &>/dev/null; mktemp -d ${env.TMPDIR}/spack-XXXXXXXXXX",
 				                   returnStdout: true).trim()
@@ -44,6 +60,42 @@ pipeline {
 						checkout scm
 					}
 				}
+				stage('Validate environment') {
+					steps {
+						sh ".ci/validate_environment.sh"
+					}
+				}
+				stage('Spack Clone') {
+					steps {
+						sh ".ci/clone.sh"
+					}
+				}
+				stage('Dump Meta Info') {
+					steps {
+						sh ".ci/dump_meta_info.sh"
+						sh ".ci/notify_gerrit.sh -m 'Build containing this change started..'"
+					}
+				}
+				stage('Spack Fetch') {
+					steps {
+						script {
+							try {
+								sh ".ci/fetch.sh"
+							}
+							catch (Throwable t) {
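+								// keep the concretization log around for debugging failed concretizations/fetches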
+								archiveArtifacts "errors_concretization.log"
+								throw t
+							}
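+							// archive the concretized spec yamls from all sandboxes (archive may be empty if fetching failed early)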
+							spec_folder_in_container = sh(script: ".ci/get_jenkins_env.sh SPEC_FOLDER_IN_CONTAINER", returnStdout: true).trim()
+							archiveArtifacts(artifacts: "sandboxes/*/$spec_folder_in_container/*.yaml", allowEmptyArchive: true)
+						}
+					}
+				}
+				stage('Deploy utilities') {
+					steps {
+						sh ".ci/deploy_utilities.sh"
+					}
+				}
 				stage('Create asic recipe') {
 					steps {
 						sh ".ci/asic_create_recipe.sh"
@@ -65,6 +117,25 @@ pipeline {
 							// we only want the container name, tail everything else
 							CONTAINER_IMAGE = sh(script: ".ci/deploy_container.sh | tail -n 1", returnStdout: true).trim()
 						}
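+						// update the build cache with everything built in this run and report the image back to gerrit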
+						sh ".ci/update_build_cache.sh -c \"$CONTAINER_IMAGE\""
+						sh ".ci/notify_gerrit.sh -t Build -c \"$CONTAINER_IMAGE\""
+					}
+				}
+			}
+			post {
+				failure {
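+					// stash the successfully built packages in a temporary cache so a retry can resume from it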
+					script {
+						cache_failed = sh(script: ".ci/create_temporary_build_cache_after_failure.sh", returnStdout: true).trim()
+					}
+					sh ".ci/notify_gerrit.sh -v -1 -t Build -m \"Successfully built packages stored in cache. Resume by issuing:\nWITH_CACHE_NAME=${cache_failed}\n\nIn your next gerrit comment, NOT commit message!\""
+				}
+				cleanup {
+					archiveArtifacts "jenkins.env"
+					archiveArtifacts "out_singularity_build_asic_recipe.txt"
+					// Clean build artifacts because otherwise the latest build of each Jenkins job can take up to 50GB.
+					// With 2 executors and 5 Jenkins jobs (testing, testing-spack, testing-asic, stable, stable-asic) this would slowly but surely eat away disk space.
+					script {
+						cleanupSteps()
 					}
 				}
 			}
@@ -77,7 +148,7 @@ pipeline {
 					steps {
 						build(job: 'bld_gerrit_tools-xilinx_dependencies',
 						      parameters: [string(name: 'OVERWRITE_DEFAULT_ASIC_CONTAINER_IMAGE',
-						                          value: CONTAINER_NAME)])
+						                          value: CONTAINER_IMAGE)])
 					}
 				}
 			}
diff --git a/.ci/asic_create_recipe.sh b/.ci/asic_create_recipe.sh
index acbda861868b41475fc58ed6a5aed272082d9da7..f3c978a909ede1b9ffd5841ffade4571fae5b4ec 100755
--- a/.ci/asic_create_recipe.sh
+++ b/.ci/asic_create_recipe.sh
@@ -1,12 +1,8 @@
 #!/bin/bash
 
 SOURCE_DIR="$(dirname "$(readlink -m "${BASH_SOURCE[0]}")")"
-source "${SOURCE_DIR}/asic_dummy_variables.sh"
 source "${SOURCE_DIR}/commons.sh"
 
-GITLOG="git_log_yashchiki.txt"
-( cd ${SOURCE_DIR} && git log > "${WORKSPACE}/${GITLOG}" )
-
 RECIPE_FILENAME="${WORKSPACE}/asic_recipe.def"
 
 # create container description file
@@ -16,12 +12,48 @@ cat <<EOF >"${RECIPE_FILENAME}"
 Bootstrap: docker
 From: ${DOCKER_BASE_IMAGE}
 
+%setup
+    # bind-mount the spack folder, as moving it would mean copying the complete download cache
+    mkdir \${SINGULARITY_ROOTFS}/opt/spack
+    mount --no-mtab --bind "${WORKSPACE}/spack" "\${SINGULARITY_ROOTFS}/opt/spack"
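+    # (%setup runs as root on the build host, hence the plain mount calls in this section)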
+    # bind-mount ccache
+    mkdir \${SINGULARITY_ROOTFS}/opt/ccache
+    mount --no-mtab --bind "${HOME}/spack_ccache" "\${SINGULARITY_ROOTFS}/opt/ccache"
+    # bind-mount build_cache
+    mkdir -p "\${SINGULARITY_ROOTFS}${BUILD_CACHE_INSIDE}"
+    # create buildcache directory if it does not exist
+    [ ! -d "${BUILD_CACHE_OUTSIDE}" ] && mkdir -p "${BUILD_CACHE_OUTSIDE}"
+    # mount the full build cache folder into the container because some files might be symlinked to other build caches
+    mount --no-mtab --bind "${BASE_BUILD_CACHE_OUTSIDE}" "\${SINGULARITY_ROOTFS}${BASE_BUILD_CACHE_INSIDE}"
+    # bind-mount preserved packages in case the build fails
+    mkdir -p "\${SINGULARITY_ROOTFS}${PRESERVED_PACKAGES_INSIDE}"
+    mount --no-mtab --bind "${PRESERVED_PACKAGES_OUTSIDE}" "\${SINGULARITY_ROOTFS}${PRESERVED_PACKAGES_INSIDE}"
+    # bind-mount tmp-folder
+    mkdir -p "\${SINGULARITY_ROOTFS}/tmp/spack"
+    mount --no-mtab --bind "${JOB_TMP_SPACK}" "\${SINGULARITY_ROOTFS}/tmp/spack"
+    # copy install scripts
+    mkdir "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
+    rsync -av "${SOURCE_DIR}"/*.sh "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
+    rsync -av "${SOURCE_DIR}"/*.awk "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
+    rsync -av "${SOURCE_DIR}"/pinned "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
+    rsync -av "${SOURCE_DIR}"/patches "\${SINGULARITY_ROOTFS}/${SPACK_INSTALL_SCRIPTS}"
+    mkdir -p "\${SINGULARITY_ROOTFS}/${META_DIR_INSIDE}"
+    rsync -av "${META_DIR_OUTSIDE}"/* "\${SINGULARITY_ROOTFS}/${META_DIR_INSIDE}"
+    # init scripts for user convenience
+    mkdir -p "\${SINGULARITY_ROOTFS}/opt/init"
+    rsync -av "${WORKSPACE}"/misc-files/init/*.sh "\${SINGULARITY_ROOTFS}/opt/init"
+
 %files
     # NOTE: Due to a bug in singularity 2.6 all paths in this section _cannot_
     # be surrounded in quotes.. ergo there should be no spaces in filenames! If
     # there are, I pray for your poor soul that escaping them works..
     # --obreitwi, 17-02-19 # 23:45:51
-    ${WORKSPACE}/${GITLOG} ${GITLOG}
+    # provide spack command to login shells
+    ${WORKSPACE}/misc-files/setup-spack.sh /etc/profile.d/setup-spack.sh
+    ${WORKSPACE}/misc-files/locale.gen /etc/locale.gen
+    ${WORKSPACE}/misc-files/locale.alias /etc/locale.alias
+    ${WORKSPACE}/misc-files/sudoers /etc/sudoers
+    ${JENKINS_ENV_FILE} ${JENKINS_ENV_FILE_INSIDE}
 
 %post
     # Apparently, upon building the CentOS docker images it has been decided that
@@ -46,6 +78,33 @@ From: ${DOCKER_BASE_IMAGE}
     ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh
     /opt/conda/bin/conda install -y pylint pycodestyle nose pyyaml
 
+    # ECM: and now some abspacking
+    yum -y install ccache sudo parallel
+
+    # create a fingerprint by which we can identify the container from within
+    cat /proc/sys/kernel/random/uuid > /opt/fingerprint
+
+    ## prerequisites
+    #"${SPACK_INSTALL_SCRIPTS}/install_prerequisites.sh" || exit 1
+    ## cannot specify permissions in files-section
+    #chmod 440 /etc/sudoers
+    #chown root:root /etc/sudoers
+    ## install locales
+    #locale-gen
+    # propagate environment variables to container recipe
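+    # (the values below were substituted at recipe-generation time, since %post does not inherit the host environment)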
+    export DEPENDENCY_PYTHON="${DEPENDENCY_PYTHON}"
+    export DEPENDENCY_PYTHON3="${DEPENDENCY_PYTHON3}"
+    export VISIONARY_GCC="${VISIONARY_GCC}"
+    export VISIONARY_GCC_VERSION="${VISIONARY_GCC_VERSION}"
+    export CONTAINER_STYLE="${CONTAINER_STYLE}"
+    "${SPACK_INSTALL_SCRIPTS}/complete_spack_install_routine_called_in_post_as_root.sh"
+    wait
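+    # if the following install step fails, preserve the packages built so far
+    # (as the spack user) and still fail the build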
+    "${SPACK_INSTALL_SCRIPTS}/install_singularity_as_root.sh" || \
+    (
+        sudo -Eu spack "${SPACK_INSTALL_SCRIPTS}/preserve_built_spack_packages.sh" &&
+            exit 1  # propagate the error
+    )
+
 %environment
     # NOTE: We provide a MODULESHOME in all cases (otherwise a login shell is
     # required to load the module environment)
@@ -64,3 +123,46 @@ From: ${DOCKER_BASE_IMAGE}
     # ensure conda sees a clean env
     unset PYTHONHOME
 EOF
+
+# create an %appenv section for each spack view and append it to the recipe...
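+# usage: generate_appenv <app name> <view name>
+# (the app name may differ from the view name, e.g. when the visionary- prefix is stripped below)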
+generate_appenv() {
+local name_app="$1"
+local name_view="$2"
+cat <<EOF
+%appenv ${name_app}
+    # there can only be one app loaded at any time
+    export VISIONARY_ENV=${name_view}
+    SVF=/opt/spack_views/\${VISIONARY_ENV}
+    export PATH=\${SVF}/bin\${PATH:+:}\${PATH}
+    # there is no python in asic app for now
+    #export PYTHONHOME=\${SVF}
+    #export SPACK_PYTHON_BINARY=\${SVF}/bin/python
+    export MANPATH=\${SVF}/man:\${SVF}/share/man\${MANPATH:+:}\${MANPATH}
+    export LIBRARY_PATH=\${SVF}/lib:\${SVF}/lib64\${LIBRARY_PATH:+:}\${LIBRARY_PATH}
+    export LD_LIBRARY_PATH=\${SVF}/lib:\${SVF}/lib64\${LD_LIBRARY_PATH:+:}\${LD_LIBRARY_PATH}
+    export TCLLIBPATH=\${SVF}/lib\${TCLLIBPATH:+:}\${TCLLIBPATH}
+    export CPATH=\${SVF}/include\${CPATH:+:}\${CPATH}
+    export C_INCLUDE_PATH=\${SVF}/include\${C_INCLUDE_PATH:+:}\${C_INCLUDE_PATH}
+    export CPLUS_INCLUDE_PATH=\${SVF}/include\${CPLUS_INCLUDE_PATH:+:}\${CPLUS_INCLUDE_PATH}
+    export QUIET_CPATH=\${CPATH}
+    export QUIET_C_INCLUDE_PATH=\${C_INCLUDE_PATH}
+    export QUIET_CPLUS_INCLUDE_PATH=\${CPLUS_INCLUDE_PATH}
+    export PKG_CONFIG_PATH=\${SVF}/lib/pkgconfig:\${SVF}/lib64/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig\${PKG_CONFIG_PATH:+:}\${PKG_CONFIG_PATH}
+    export CMAKE_PREFIX_PATH=\${SVF}\${CMAKE_PREFIX_PATH:+:}\${CMAKE_PREFIX_PATH}
+EOF
+}
+for view in "${spack_views[@]}"; do
+    # generate two apps: one with the visionary- prefix for compatibility with
+    # old scripts and one with the visionary- prefix stripped
+    (
+        generate_appenv "${view}" "${view}"
+        [[ "${view}" =~ ^visionary- ]] && generate_appenv "${view#visionary-}" "${view}"
+    ) >> "${RECIPE_FILENAME}"
+
+    if [ "${view}" = "visionary-simulation" ];then
+cat <<EOF >>"${RECIPE_FILENAME}"
+    export NEST_MODULES=visionarymodule
+EOF
+    fi
+done
diff --git a/.ci/asic_dummy_variables.sh b/.ci/asic_dummy_variables.sh
deleted file mode 100755
index b790fd28ed2d05971a7c31d403135100ee2e11ae..0000000000000000000000000000000000000000
--- a/.ci/asic_dummy_variables.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-echo "Setting undefined required environment variables to 'undefined'." >&2
-
-export BUILD_CACHE_NAME="${BUILD_CACHE_NAME:-undefined}"
-export DEPENDENCY_PYTHON3="${DEPENDENCY_PYTHON3:-undefined}"
-export DEPENDENCY_PYTHON="${DEPENDENCY_PYTHON:-undefined}"
-export VISIONARY_GCC="${VISIONARY_GCC:-undefined}"
diff --git a/.ci/asic_spack_collection.sh b/.ci/asic_spack_collection.sh
new file mode 100644
index 0000000000000000000000000000000000000000..9ac03b82e8d8df1a38a150952bc794424875b91c
--- /dev/null
+++ b/.ci/asic_spack_collection.sh
@@ -0,0 +1,19 @@
+# All spack packages that should be fetched/installed in the container
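+# (spec syntax: "^" pins a dependency, "%" selects the compiler)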
+spack_packages=(
+#    "${SPEC_VIEW_VISIONARY_DEV_TOOLS}" # FIXME
+    "visionary-asic ^${DEPENDENCY_PYTHON} %${VISIONARY_GCC}"
+)
+
+spack_views=(\
+    visionary-asic
+)
+
+spack_views_no_default_gcc=(\
+    visionary-asic # ECM: system compiler for now
+)
+
+spack_gid="nobody"
+
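+# the fixed uid presumably keeps ownership of the bind-mounted /opt/spack consistent across builds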
+spack_create_user_cmd() {
+    adduser spack --uid 888 --gid nobody --no-create-home --no-user-group --home /opt/spack --system --shell /bin/bash
+}
diff --git a/.ci/asic_spack_custom_view.sh b/.ci/asic_spack_custom_view.sh
new file mode 100644
index 0000000000000000000000000000000000000000..794bbe6372517b28bcce79c1fd3dfcdae7817b31
--- /dev/null
+++ b/.ci/asic_spack_custom_view.sh
@@ -0,0 +1,4 @@
+cat <<EOF
+# just visionary-asic
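+# (-d yes: also link dependencies into the view; -i: ignore file conflicts between packages)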
+${MY_SPACK_BIN} ${SPACK_ARGS_VIEW[@]+"${SPACK_ARGS_VIEW[@]}"} view -d yes symlink -i ${MY_SPACK_VIEW_PREFIX}/visionary-asic $(get_latest_hash "visionary-asic")
+EOF
diff --git a/.ci/install_singularity_as_root.sh b/.ci/install_singularity_as_root.sh
index 26081710d54c86d9b4eaf971cbe20ea1fd6a7338..a1ad3025bae6cb067ef19a818559bd47868d5f11 100755
--- a/.ci/install_singularity_as_root.sh
+++ b/.ci/install_singularity_as_root.sh
@@ -27,7 +27,10 @@ rm_tmp_modules() {
 add_cleanup_step rm_tmp_modules
 {
     echo "source /opt/init/modules.sh"
-    spack module tcl loads -r "$(get_latest_hash "${VISIONARY_GCC}")"
+    if [ "${CONTAINER_STYLE}" != "asic" ]; then
+        # TODO: the ASIC container does not feature a spack compiler yet
+        spack module tcl loads -r "$(get_latest_hash "${VISIONARY_GCC}")"
+    fi
 } | tee "${TMP_MODULES}"
 source "${TMP_MODULES}"
 # --------- 8< ---------- 8< -------- 8< ---------
diff --git a/.ci/pinned/asic.list b/.ci/pinned/asic.list
new file mode 100644
index 0000000000000000000000000000000000000000..659622ef8e52f66a4327e5c1daf8f5072a8e1e44
--- /dev/null
+++ b/.ci/pinned/asic.list
@@ -0,0 +1,2 @@
+tk@8.5.19
+tcl@8.5.19