diff --git a/.env b/.env
new file mode 100644
index 0000000000000000000000000000000000000000..93e626530f31129609801d8e4bb3e61003ad3362
--- /dev/null
+++ b/.env
@@ -0,0 +1,9 @@
+BUILDCACHE_OCI_HOST=""
+BUILDCACHE_OCI_PASSWORD=""
+BUILDCACHE_OCI_PROJECT=""
+BUILDCACHE_OCI_USERNAME=""
+
+CONCRETIZE_OCI_HOST=""
+CONCRETIZE_OCI_PASSWORD=""
+CONCRETIZE_OCI_PROJECT=""
+CONCRETIZE_OCI_USERNAME=""
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7cdc2157784919724ef7e210b7f1bf9789334397..2b497048d572c8df8798ae2d62fc8e13589add33 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,10 +1,11 @@
 stages:
-  - build
   - test
+  - build
 
 variables:
   BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/esd/tmp:latest
 
+
 build-wheel:
   stage: build
   tags:
@@ -21,18 +22,22 @@ build-wheel:
     expire_in: 1 week
 
 
-testing:
+testing-pytest:
   stage: test
   tags:
     - docker-runner
-  image: python:latest
+  image: ubuntu:22.04
   script:
-    - pip install -e .
-    - pytest ./dedal/tests/ --junitxml=test-results.xml
+    - chmod +x dedal/utils/bootstrap.sh
+    - ./dedal/utils/bootstrap.sh
+    - pip install .
+    - pytest ./dedal/tests/ -s --junitxml=test-results.xml
   artifacts:
     when: always
     reports:
       junit: test-results.xml
     paths:
       - test-results.xml
-    expire_in: 1 week
\ No newline at end of file
+      - .dedal.log
+    expire_in: 1 week
+
diff --git a/README.md b/README.md
index 62e00c689da73463a22bb85cc9671f8634b25c06..dd68dcfa9fecfd80b6cf28a890a71a80c4bed9b1 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,53 @@
-# ~~Yashchiki~~Koutakia
+# Dedal
 
-For now, this repository provides helpers for the EBRAINS container image build flow.
+This repository provides functionalities to easily ```manage spack environments``` and ```helpers for the container image build flow```.
+
+This library runs only on Linux distributions.
+
+The lowest ```spack version``` compatible with this library is ```v0.23.0```.
+
+
+        This repository also provides a CLI interface. For more information, after installing this library, call dedal --help.
+
+
+**Setting up the needed environment variables**
+    The ````<checkout path>/dedal/.env```` file contains the environment variables required for the OCI registry used for caching.
+    Ensure that you edit the ````<checkout path>/dedal/.env```` file to match your environment.
+    The following provides an explanation of the various environment variables:
+
+
+       # OCI Registry Configuration Sample for concretization caches
+       # =============================
+       # The following variables configure the Harbor docker OCI registry (EBRAINS) used for caching.
+       
+       # The hostname of the OCI registry. e.g. docker-registry.ebrains.eu
+       CONCRETIZE_OCI_HOST="docker-registry.ebrains.eu"
+       
+       # The project name in the Docker registry.
+       CONCRETIZE_OCI_PROJECT="concretize_caches"
+       
+       # The username used for authentication with the Docker registry.
+       CONCRETIZE_OCI_USERNAME="robot$concretize-cache-test+user"
+       
+       # The password used for authentication with the Docker registry.
+       CONCRETIZE_OCI_PASSWORD="###ACCESS_TOKEN###"
+        
+
+       # OCI Registry Configuration Sample for binary caches
+       # =============================
+       # The following variables configure the Harbor docker OCI registry (EBRAINS) used for caching.
+       
+       # The hostname of the OCI registry. e.g. docker-registry.ebrains.eu
+       BUILDCACHE_OCI_HOST="docker-registry.ebrains.eu"
+       
+       # The project name in the Docker registry.
+       BUILDCACHE_OCI_PROJECT="binary-cache-test"
+       
+       # The username used for authentication with the Docker registry.
+       BUILDCACHE_OCI_USERNAME="robot$binary-cache-test+user"
+       
+       # The password used for authentication with the Docker registry.
+       BUILDCACHE_OCI_PASSWORD="###ACCESS_TOKEN###"
+
+For both concretization and binary caches, the cache version can be changed via the attributes ```cache_version_concretize``` and ```cache_version_build```. 
+The default values are ```v1```.
diff --git a/dedal/build_cache/BuildCacheManager.py b/dedal/build_cache/BuildCacheManager.py
index 2da39e252c8ebc3e0b9aa6d6a5612d9f1f4ee02e..55fa10cb3f6fc888fe43fcfbb055a49b6092a005 100644
--- a/dedal/build_cache/BuildCacheManager.py
+++ b/dedal/build_cache/BuildCacheManager.py
@@ -12,48 +12,51 @@ class BuildCacheManager(BuildCacheManagerInterface):
         This class aims to manage the push/pull/delete of build cache files
     """
 
-    def __init__(self, auth_backend='basic', insecure=False):
-        self.logger = get_logger(__name__, BuildCacheManager.__name__)
-        self.home_path = Path(os.environ.get("HOME_PATH", os.getcwd()))
-        self.registry_project = os.environ.get("REGISTRY_PROJECT")
+    def __init__(self, registry_host, registry_project, registry_username, registry_password, cache_version='cache',
+                 auth_backend='basic',
+                 insecure=False):
+        self._logger = get_logger(__name__, BuildCacheManager.__name__)
+        self._registry_project = registry_project
 
-        self._registry_username = str(os.environ.get("REGISTRY_USERNAME"))
-        self._registry_password = str(os.environ.get("REGISTRY_PASSWORD"))
+        self._registry_username = registry_username
+        self._registry_password = registry_password
 
-        self.registry_host = str(os.environ.get("REGISTRY_HOST"))
+        self._registry_host = registry_host
         # Initialize an OrasClient instance.
         # This method utilizes the OCI Registry for container image and artifact management.
         # Refer to the official OCI Registry documentation for detailed information on the available authentication methods.
         # Supported authentication types may include basic authentication (username/password), token-based authentication,
-        self.client = oras.client.OrasClient(hostname=self.registry_host, auth_backend=auth_backend, insecure=insecure)
-        self.client.login(username=self._registry_username, password=self._registry_password)
-        self.oci_registry_path = f'{self.registry_host}/{self.registry_project}/cache'
+        self._client = oras.client.OrasClient(hostname=self._registry_host, auth_backend=auth_backend,
+                                              insecure=insecure)
+        self._client.login(username=self._registry_username, password=self._registry_password)
+        self.cache_version = cache_version
+        self._oci_registry_path = f'{self._registry_host}/{self._registry_project}/{self.cache_version}'
 
     def upload(self, out_dir: Path):
         """
             This method pushed all the files from the build cache folder into the OCI Registry
         """
-        build_cache_path = self.home_path / out_dir
+        build_cache_path = out_dir.resolve()
         # build cache folder must exist before pushing all the artifacts
         if not build_cache_path.exists():
-            self.logger.error(f"Path {build_cache_path} not found.")
+            self._logger.error(f"Path {build_cache_path} not found.")
 
         for sub_path in build_cache_path.rglob("*"):
             if sub_path.is_file():
                 rel_path = str(sub_path.relative_to(build_cache_path)).replace(str(sub_path.name), "")
-                target = f"{self.registry_host}/{self.registry_project}/cache:{str(sub_path.name)}"
+                target = f"{self._registry_host}/{self._registry_project}/{self.cache_version}:{str(sub_path.name)}"
                 try:
-                    self.logger.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...")
-                    self.client.push(
+                    self._logger.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...")
+                    self._client.push(
                         files=[str(sub_path)],
                         target=target,
                         # save in manifest the relative path for reconstruction
                         manifest_annotations={"path": rel_path},
                         disable_path_validation=True,
                     )
-                    self.logger.info(f"Successfully pushed {sub_path.name}")
+                    self._logger.info(f"Successfully pushed {sub_path.name}")
                 except Exception as e:
-                    self.logger.error(
+                    self._logger.error(
                         f"An error occurred while pushing: {e}")
         # todo to be discussed hot to delete the build cache after being pushed to the OCI Registry
         # clean_up([str(build_cache_path)], self.logger)
@@ -63,37 +66,38 @@ class BuildCacheManager(BuildCacheManagerInterface):
             This method retrieves all tags from an OCI Registry
         """
         try:
-            return self.client.get_tags(self.oci_registry_path)
+            return self._client.get_tags(self._oci_registry_path)
         except Exception as e:
-            self.logger.error(f"Failed to list tags: {e}")
+            self._logger.error(f"Failed to list tags: {e}")
         return None
 
     def download(self, in_dir: Path):
         """
             This method pulls all the files from the OCI Registry into the build cache folder
         """
-        build_cache_path = self.home_path / in_dir
+        build_cache_path = in_dir.resolve()
         # create the buildcache dir if it does not exist
         os.makedirs(build_cache_path, exist_ok=True)
         tags = self.list_tags()
         if tags is not None:
             for tag in tags:
-                ref = f"{self.registry_host}/{self.registry_project}/cache:{tag}"
+                ref = f"{self._registry_host}/{self._registry_project}/{self.cache_version}:{tag}"
                 # reconstruct the relative path of each artifact by getting it from the manifest
                 cache_path = \
-                    self.client.get_manifest(f'{self.registry_host}/{self.registry_project}/cache:{tag}')[
+                    self._client.get_manifest(
+                        f'{self._registry_host}/{self._registry_project}/{self.cache_version}:{tag}')[
                         'annotations'][
                         'path']
                 try:
-                    self.client.pull(
+                    self._client.pull(
                         ref,
                         # missing dirs to output dir are created automatically by OrasClient pull method
                         outdir=str(build_cache_path / cache_path),
                         overwrite=True
                     )
-                    self.logger.info(f"Successfully pulled artifact {tag}.")
+                    self._logger.info(f"Successfully pulled artifact {tag}.")
                 except Exception as e:
-                    self.logger.error(
+                    self._logger.error(
                         f"Failed to pull artifact {tag} : {e}")
 
     def delete(self):
@@ -106,8 +110,8 @@ class BuildCacheManager(BuildCacheManagerInterface):
         tags = self.list_tags()
         if tags is not None:
             try:
-                self.client.delete_tags(self.oci_registry_path, tags)
-                self.logger.info(f"Successfully deleted all artifacts form OCI registry.")
+                self._client.delete_tags(self._oci_registry_path, tags)
+                self._logger.info("Successfully deleted all artifacts from the OCI registry.")
             except RuntimeError as e:
-                self.logger.error(
+                self._logger.error(
                     f"Failed to delete artifacts: {e}")
diff --git a/dedal/cli/SpackManager.py b/dedal/cli/SpackManager.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/configuration/GpgConfig.py b/dedal/configuration/GpgConfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..a8f0c2d3bc0f39db8c5b251d9ffc6f6fa3a577ec
--- /dev/null
+++ b/dedal/configuration/GpgConfig.py
@@ -0,0 +1,7 @@
+class GpgConfig:
+    """
+    Configuration for gpg key used by spack
+    """
+    def __init__(self, gpg_name='example', gpg_mail='example@example.com'):
+        self.name = gpg_name
+        self.mail = gpg_mail
diff --git a/dedal/configuration/SpackConfig.py b/dedal/configuration/SpackConfig.py
new file mode 100644
index 0000000000000000000000000000000000000000..0d4706796ec13d6bf733c3d68459e455a6da53b7
--- /dev/null
+++ b/dedal/configuration/SpackConfig.py
@@ -0,0 +1,34 @@
+import os
+from pathlib import Path
+
+from dedal.configuration.GpgConfig import GpgConfig
+from dedal.model import SpackDescriptor
+
+
+class SpackConfig:
+    def __init__(self, env: SpackDescriptor = None, repos: list[SpackDescriptor] = None,
+                 install_dir=Path(os.getcwd()).resolve(), upstream_instance=None, system_name=None,
+                 concretization_dir: Path = None, buildcache_dir: Path = None, gpg: GpgConfig = None):
+        self.env = env
+        if repos is None:
+            self.repos = []
+        else:
+            self.repos = repos
+        self.install_dir = install_dir
+        if self.install_dir:
+            os.makedirs(self.install_dir, exist_ok=True)
+        self.upstream_instance = upstream_instance
+        self.system_name = system_name
+        self.concretization_dir = concretization_dir
+        if self.concretization_dir:
+            os.makedirs(self.concretization_dir, exist_ok=True)
+        self.buildcache_dir = buildcache_dir
+        if self.buildcache_dir:
+            os.makedirs(self.buildcache_dir, exist_ok=True)
+        self.gpg = gpg
+
+    def add_repo(self, repo: SpackDescriptor):
+        if self.repos is None:
+            self.repos = [repo]
+        else:
+            self.repos.append(repo)
diff --git a/dedal/configuration/__init__.py b/dedal/configuration/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/error_handling/__init__.py b/dedal/error_handling/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/error_handling/exceptions.py b/dedal/error_handling/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..0256f886ab0cf4b958ac12d59d6fcea2d5f568ec
--- /dev/null
+++ b/dedal/error_handling/exceptions.py
@@ -0,0 +1,41 @@
+class SpackException(Exception):
+
+    def __init__(self, message):
+        super().__init__(message)
+        self.message = str(message)
+
+    def __str__(self):
+        return self.message
+
+
+class BashCommandException(SpackException):
+    """
+    To be thrown when a bash command has failed
+    """
+
+
+class NoSpackEnvironmentException(BashCommandException):
+    """
+    To be thrown when an operation on a spack environment is executed without the environment being activated or existent
+    """
+
+
+class SpackConcertizeException(BashCommandException):
+    """
+    To be thrown when the spack concretization step fails
+    """
+
+class SpackInstallPackagesException(BashCommandException):
+    """
+    To be thrown when the spack fails to install spack packages
+    """
+
+class SpackMirrorException(BashCommandException):
+    """
+    To be thrown when the spack add mirror command fails
+    """
+
+class SpackGpgException(BashCommandException):
+    """
+    To be thrown when the spack fails to create gpg keys
+    """
diff --git a/dedal/logger/logger_config.py b/dedal/logger/logger_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..3ca3b000fd171dde8ceafdc6731dfb02009e845c
--- /dev/null
+++ b/dedal/logger/logger_config.py
@@ -0,0 +1,33 @@
+import logging
+
+
+class LoggerConfig:
+    """
+        This class sets up logging with a file handler
+        and a stream handler, ensuring consistent
+        and formatted log messages.
+    """
+    def __init__(self, log_file):
+        self.log_file = log_file
+        self._configure_logger()
+
+    def _configure_logger(self):
+        formatter = logging.Formatter(
+            fmt='%(asctime)s - %(levelname)s - %(message)s',
+            datefmt='%Y-%m-%d %H:%M:%S'
+        )
+
+        file_handler = logging.FileHandler(self.log_file)
+        file_handler.setFormatter(formatter)
+
+        stream_handler = logging.StreamHandler()
+        stream_handler.setFormatter(formatter)
+
+        self.logger = logging.getLogger(__name__)
+        self.logger.setLevel(logging.DEBUG)
+
+        self.logger.addHandler(file_handler)
+        self.logger.addHandler(stream_handler)
+
+    def get_logger(self):
+        return self.logger
diff --git a/dedal/model/SpackDescriptor.py b/dedal/model/SpackDescriptor.py
new file mode 100644
index 0000000000000000000000000000000000000000..70e484fb3d39e4333389682d14a32ac46c08a912
--- /dev/null
+++ b/dedal/model/SpackDescriptor.py
@@ -0,0 +1,13 @@
+import os
+from pathlib import Path
+
+
+class SpackDescriptor:
+    """
+    Provides details about the spack environment
+    """
+
+    def __init__(self, env_name: str, path: Path = Path(os.getcwd()).resolve(), git_path: str = None):
+        self.env_name = env_name
+        self.path = path
+        self.git_path = git_path
diff --git a/dedal/model/__init__.py b/dedal/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/spack_factory/SpackOperation.py b/dedal/spack_factory/SpackOperation.py
new file mode 100644
index 0000000000000000000000000000000000000000..ecfbb8e5084fc1e49b32894aecd47190df36fd98
--- /dev/null
+++ b/dedal/spack_factory/SpackOperation.py
@@ -0,0 +1,269 @@
+import os
+import re
+import subprocess
+from pathlib import Path
+from dedal.error_handling.exceptions import BashCommandException, NoSpackEnvironmentException, \
+    SpackInstallPackagesException, SpackConcertizeException, SpackMirrorException, SpackGpgException
+from dedal.logger.logger_builder import get_logger
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.tests.testing_variables import SPACK_VERSION
+from dedal.wrapper.spack_wrapper import check_spack_env
+from dedal.utils.utils import run_command, git_clone_repo, log_command, set_bashrc_variable
+
+
+class SpackOperation:
+    """
+    This class should implement the methods necessary for installing spack, set up an environment, concretize and install packages.
+    Factory design pattern is used because there are 2 cases: creating an environment from scratch or creating an environment from the buildcache.
+
+    Attributes:
+    -----------
+    env : SpackDescriptor
+        spack environment details
+    repos : list[SpackDescriptor]
+    upstream_instance : str
+        path to Spack instance to use as upstream (optional)
+    """
+
+    def __init__(self, spack_config: SpackConfig = SpackConfig(), logger=get_logger(__name__)):
+        self.spack_config = spack_config
+        self.spack_config.install_dir.mkdir(parents=True, exist_ok=True)
+        self.spack_dir = self.spack_config.install_dir / 'spack'
+        self.spack_setup_script = self.spack_dir / 'share' / 'spack' / 'setup-env.sh'
+        self.logger = logger
+        if self.spack_config.env and spack_config.env.path:
+            self.spack_config.env.path = spack_config.env.path.resolve()
+            self.spack_config.env.path.mkdir(parents=True, exist_ok=True)
+            self.env_path = spack_config.env.path / spack_config.env.env_name
+            self.spack_command_on_env = f'source {self.spack_setup_script} && spack env activate -p {self.env_path}'
+
+    def create_fetch_spack_environment(self):
+        if self.spack_config.env.git_path:
+            git_clone_repo(self.spack_config.env.env_name, self.spack_config.env.path / self.spack_config.env.env_name,
+                           self.spack_config.env.git_path,
+                           logger=self.logger)
+        else:
+            os.makedirs(self.spack_config.env.path / self.spack_config.env.env_name, exist_ok=True)
+            run_command("bash", "-c",
+                        f'source {self.spack_setup_script} && spack env create -d {self.env_path}',
+                        check=True, logger=self.logger,
+                        info_msg=f"Created {self.spack_config.env.env_name} spack environment",
+                        exception_msg=f"Failed to create {self.spack_config.env.env_name} spack environment",
+                        exception=BashCommandException)
+
+    def setup_spack_env(self):
+        """
+        This method prepares a spack environment by fetching/creating the spack environment and adding the necessary repos
+        """
+        bashrc_path = os.path.expanduser("~/.bashrc")
+        if self.spack_config.system_name:
+            set_bashrc_variable('SYSTEMNAME', self.spack_config.system_name, bashrc_path, logger=self.logger)
+            os.environ['SYSTEMNAME'] = self.spack_config.system_name
+        if self.spack_dir.exists() and self.spack_dir.is_dir():
+            set_bashrc_variable('SPACK_USER_CACHE_PATH', str(self.spack_dir / ".spack"), bashrc_path,
+                                logger=self.logger)
+            set_bashrc_variable('SPACK_USER_CONFIG_PATH', str(self.spack_dir / ".spack"), bashrc_path,
+                                logger=self.logger)
+            self.logger.debug('Added env variables SPACK_USER_CACHE_PATH and SPACK_USER_CONFIG_PATH')
+        else:
+            self.logger.error(f'Invalid installation path: {self.spack_dir}')
+        # Restart the bash after adding environment variables
+        self.create_fetch_spack_environment()
+        if self.spack_config.install_dir.exists():
+            for repo in self.spack_config.repos:
+                repo_dir = self.spack_config.install_dir / repo.path / repo.env_name
+                git_clone_repo(repo.env_name, repo_dir, repo.git_path, logger=self.logger)
+                if not self.spack_repo_exists(repo.env_name):
+                    self.add_spack_repo(repo.path, repo.env_name)
+                    self.logger.debug(f'Added spack repository {repo.env_name}')
+                else:
+                    self.logger.debug(f'Spack repository {repo.env_name} already added')
+
+    def spack_repo_exists(self, repo_name: str) -> bool | None:
+        """Check if the given Spack repository exists."""
+        if self.spack_config.env is None:
+            result = run_command("bash", "-c",
+                                 f'source {self.spack_setup_script} && spack repo list',
+                                 check=True,
+                                 capture_output=True, text=True, logger=self.logger,
+                                 info_msg=f'Checking if {repo_name} exists')
+            if result is None:
+                return False
+        else:
+            if self.spack_env_exists():
+                result = run_command("bash", "-c",
+                                     f'{self.spack_command_on_env} && spack repo list',
+                                     check=True,
+                                     capture_output=True, text=True, logger=self.logger,
+                                     info_msg=f'Checking if repository {repo_name} was added')
+            else:
+                self.logger.debug('No spack environment defined')
+                raise NoSpackEnvironmentException('No spack environment defined')
+            if result is None:
+                return False
+        return any(line.strip().endswith(repo_name) for line in result.stdout.splitlines())
+
+    def spack_env_exists(self):
+        result = run_command("bash", "-c",
+                             self.spack_command_on_env,
+                             check=True,
+                             capture_output=True, text=True, logger=self.logger,
+                             info_msg=f'Checking if environment {self.spack_config.env.env_name} exists')
+        if result is None:
+            return False
+        return True
+
+    @check_spack_env
+    def add_spack_repo(self, repo_path: Path, repo_name: str):
+        """Add the Spack repository if it does not exist."""
+        run_command("bash", "-c",
+                    f'{self.spack_command_on_env} && spack repo add {repo_path}/{repo_name}',
+                    check=True, logger=self.logger,
+                    info_msg=f"Added {repo_name} to spack environment {self.spack_config.env.env_name}",
+                    exception_msg=f"Failed to add {repo_name} to spack environment {self.spack_config.env.env_name}",
+                    exception=BashCommandException)
+
+    @check_spack_env
+    def get_compiler_version(self):
+        result = run_command("bash", "-c",
+                             f'{self.spack_command_on_env} && spack compiler list',
+                             check=True, logger=self.logger,
+                             capture_output=True, text=True,
+                             info_msg=f"Checking spack environment compiler version for {self.spack_config.env.env_name}",
+                             exception_msg=f"Failed to checking spack environment compiler version for {self.spack_config.env.env_name}",
+                             exception=BashCommandException)
+        # todo add error handling and tests
+        if result.stdout is None:
+            self.logger.debug(f'No gcc found for {self.spack_config.env.env_name}')
+            return None
+
+        # Find the first occurrence of a GCC compiler using regex
+        match = re.search(r"gcc@([\d\.]+)", result.stdout)
+        gcc_version = match.group(1) if match else None
+        self.logger.debug(f'Found gcc for {self.spack_config.env.env_name}: {gcc_version}')
+        return gcc_version
+
+    def get_spack_installed_version(self):
+        spack_version = run_command("bash", "-c", f'source {self.spack_setup_script} && spack --version',
+                                    capture_output=True, text=True, check=True,
+                                    logger=self.logger,
+                                    info_msg=f"Getting spack version",
+                                    exception_msg=f"Error retrieving Spack version")
+        if spack_version:
+            return spack_version.stdout.strip().split()[0]
+        return None
+
+    @check_spack_env
+    def concretize_spack_env(self, force=True):
+        force = '--force' if force else ''
+        run_command("bash", "-c",
+                    f'{self.spack_command_on_env} && spack concretize {force}',
+                    check=True,
+                    logger=self.logger,
+                    info_msg=f'Concretization step for {self.spack_config.env.env_name}',
+                    exception_msg=f'Failed the concretization step for {self.spack_config.env.env_name}',
+                    exception=SpackConcertizeException)
+
+    def create_gpg_keys(self):
+        if self.spack_config.gpg:
+            run_command("bash", "-c",
+                        f'source {self.spack_setup_script} && spack gpg init && spack gpg create {self.spack_config.gpg.name} {self.spack_config.gpg.mail}',
+                        check=True,
+                        logger=self.logger,
+                        info_msg=f'Created gpg keys for {self.spack_config.env.env_name}',
+                        exception_msg=f'Failed to create gpg keys for {self.spack_config.env.env_name}',
+                        exception=SpackGpgException)
+        else:
+            raise SpackGpgException('No GPG configuration was defined in the spack configuration')
+
+    def add_mirror(self, mirror_name: str, mirror_path: Path, signed=False, autopush=False, global_mirror=False):
+        autopush = '--autopush' if autopush else ''
+        signed = '--signed' if signed else ''
+        if global_mirror:
+            run_command("bash", "-c",
+                        f'source {self.spack_setup_script} && spack mirror add {autopush} {signed} {mirror_name} {mirror_path}',
+                        check=True,
+                        logger=self.logger,
+                        info_msg=f'Added mirror {mirror_name}',
+                        exception_msg=f'Failed to add mirror {mirror_name}',
+                        exception=SpackMirrorException)
+        else:
+            # NOTE(review): check_spack_env is a decorator; wrapping the
+            # already-executed run_command result had no effect.
+            run_command("bash", "-c",
+                        f'{self.spack_command_on_env} && spack mirror add {autopush} {signed} {mirror_name} {mirror_path}',
+                        check=True, logger=self.logger,
+                        info_msg=f'Added mirror {mirror_name}',
+                        exception_msg=f'Failed to add mirror {mirror_name}',
+                        exception=SpackMirrorException)
+
+    def remove_mirror(self, mirror_name: str):
+        run_command("bash", "-c",
+                    f'source {self.spack_setup_script} && spack mirror rm {mirror_name}',
+                    check=True,
+                    logger=self.logger,
+                    info_msg=f'Removing mirror {mirror_name}',
+                    exception_msg=f'Failed to remove mirror {mirror_name}',
+                    exception=SpackMirrorException)
+
+    @check_spack_env
+    def install_packages(self, jobs: int, signed=True, fresh=False, debug=False):
+        signed = '' if signed else '--no-check-signature'
+        fresh = '--fresh' if fresh else ''
+        debug = '--debug' if debug else ''
+        install_result = run_command("bash", "-c",
+                                     f'{self.spack_command_on_env} && spack {debug} install -v {signed} -j {jobs} {fresh}',
+                                     stdout=subprocess.PIPE,
+                                     stderr=subprocess.PIPE,
+                                     text=True,
+                                     logger=self.logger,
+                                     info_msg=f"Installing spack packages for {self.spack_config.env.env_name}",
+                                     exception_msg=f"Error installing spack packages for {self.spack_config.env.env_name}",
+                                     exception=SpackInstallPackagesException)
+        log_command(install_result, str(Path(os.getcwd()).resolve() / ".generate_cache.log"))
+        return install_result
+
+    def install_spack(self, spack_version=f'v{SPACK_VERSION}', spack_repo='https://github.com/spack/spack'):
+        try:
+            user = os.getlogin()
+        except OSError:
+            user = None
+
+        self.logger.info(f"Starting to install Spack into {self.spack_dir} from branch {spack_version}")
+        if not self.spack_dir.exists():
+            run_command(
+                "git", "clone", "--depth", "1",
+                "-c", "advice.detachedHead=false",
+                "-c", "feature.manyFiles=true",
+                "--branch", spack_version, spack_repo, self.spack_dir
+                , check=True, logger=self.logger)
+            self.logger.debug("Cloned spack")
+        else:
+            self.logger.debug("Spack already cloned.")
+
+        bashrc_path = os.path.expanduser("~/.bashrc")
+        # ensure the file exists before opening it
+        if not os.path.exists(bashrc_path):
+            open(bashrc_path, "w").close()
+        # add spack setup commands to .bashrc
+        with open(bashrc_path, "a") as bashrc:
+            bashrc.write(f'export PATH="{self.spack_dir}/bin:$PATH"\n')
+            bashrc.write(f"source {self.spack_setup_script}\n")
+        self.logger.info("Added Spack PATH to .bashrc")
+        if user:
+            run_command("chown", "-R", f"{user}:{user}", self.spack_dir, check=True, logger=self.logger,
+                        info_msg='Adding permissions to the logged in user')
+        run_command("bash", "-c", f"source {bashrc_path}", check=True, logger=self.logger, info_msg='Restart bash')
+        self.logger.info("Spack install completed")
+        # Restart Bash after the installation ends
+        os.system("exec bash")
+
+        # Configure upstream Spack instance if specified
+        if self.spack_config.upstream_instance:
+            upstreams_yaml_path = os.path.join(self.spack_dir, "etc/spack/defaults/upstreams.yaml")
+            with open(upstreams_yaml_path, "w") as file:
+                file.write(f"""upstreams:
+                                  upstream-spack-instance:
+                                    install_tree: {self.spack_config.upstream_instance}/spack/opt/spack
+                                """)
+            self.logger.info("Added upstream spack instance")
diff --git a/dedal/spack_factory/SpackOperationCreator.py b/dedal/spack_factory/SpackOperationCreator.py
new file mode 100644
index 0000000000000000000000000000000000000000..54517a845bad14629c6019416f0e19581472991e
--- /dev/null
+++ b/dedal/spack_factory/SpackOperationCreator.py
@@ -0,0 +1,14 @@
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.spack_factory.SpackOperation import SpackOperation
+from dedal.spack_factory.SpackOperationUseCache import SpackOperationUseCache
+
+
class SpackOperationCreator:
    """Factory that picks the right SpackOperation flavour for a given config."""

    @staticmethod
    def get_spack_operator(spack_config: SpackConfig | None = None) -> SpackOperation:
        """
        Return a SpackOperation for *spack_config*.

        With no config, or one that defines neither a concretization dir nor a
        buildcache dir, a plain (no-cache) SpackOperation is returned;
        otherwise a cache-aware SpackOperationUseCache is created.
        """
        if spack_config is None:
            return SpackOperation(SpackConfig())
        if spack_config.concretization_dir is None and spack_config.buildcache_dir is None:
            return SpackOperation(spack_config)
        # NOTE(review): this branch is reached when at least ONE of the two
        # cache dirs is set — confirm a partially-configured cache is intended
        # to take the cache path rather than fall back to SpackOperation.
        return SpackOperationUseCache(spack_config)
diff --git a/dedal/spack_factory/SpackOperationUseCache.py b/dedal/spack_factory/SpackOperationUseCache.py
new file mode 100644
index 0000000000000000000000000000000000000000..41a9094ca550f1682082dbee4eced5b8183a9f7a
--- /dev/null
+++ b/dedal/spack_factory/SpackOperationUseCache.py
@@ -0,0 +1,32 @@
+import os
+from dedal.build_cache.BuildCacheManager import BuildCacheManager
+from dedal.logger.logger_builder import get_logger
+from dedal.spack_factory.SpackOperation import SpackOperation
+from dedal.configuration.SpackConfig import SpackConfig
+
+
class SpackOperationUseCache(SpackOperation):
    """
    SpackOperation variant that uses OCI build caches for the concretization
    step and for the installation step.

    Cache endpoints and credentials are read from the CONCRETIZE_OCI_* and
    BUILDCACHE_OCI_* environment variables.
    """

    def __init__(self, spack_config: SpackConfig | None = None, cache_version_concretize='v1',
                 cache_version_build='v1'):
        # Bug fix: the original default was `spack_config: SpackConfig = SpackConfig()`,
        # a mutable default evaluated once at import time and shared by every
        # instance created without an explicit config. Build a fresh one per call.
        super().__init__(spack_config if spack_config is not None else SpackConfig(),
                         logger=get_logger(__name__))
        # Cache holding concretization (spack.lock) artefacts.
        self.cache_dependency = BuildCacheManager(os.environ.get('CONCRETIZE_OCI_HOST'),
                                                  os.environ.get('CONCRETIZE_OCI_PROJECT'),
                                                  os.environ.get('CONCRETIZE_OCI_USERNAME'),
                                                  os.environ.get('CONCRETIZE_OCI_PASSWORD'),
                                                  cache_version=cache_version_concretize)
        # Cache holding built binary packages.
        self.build_cache = BuildCacheManager(os.environ.get('BUILDCACHE_OCI_HOST'),
                                             os.environ.get('BUILDCACHE_OCI_PROJECT'),
                                             os.environ.get('BUILDCACHE_OCI_USERNAME'),
                                             os.environ.get('BUILDCACHE_OCI_PASSWORD'),
                                             cache_version=cache_version_build)

    def setup_spack_env(self):
        """Create the spack environment, then (eventually) attach the build cache."""
        super().setup_spack_env()
        # todo add buildcache to the spack environment

    def concretize_spack_env(self, force=True):
        """Intentionally a no-op: concretization results are to come from the cache."""
        pass
diff --git a/dedal/spack_factory/__init__.py b/dedal/spack_factory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/specfile_storage_path_source.py b/dedal/specfile_storage_path_source.py
index 6e8a8889e809c0d8c5116670e19bd7fbd420904c..4d2ff65888aa6dcce2f2a9526575b85d8966d90d 100644
--- a/dedal/specfile_storage_path_source.py
+++ b/dedal/specfile_storage_path_source.py
@@ -49,14 +49,14 @@ for rspec in data:
 
         format_string = "{name}-{version}"
         pretty_name = pkg.spec.format_path(format_string)
-        cosmetic_path = os.path.join(pkg.name, pretty_name)
+        cosmetic_path = os.path.join(pkg.env_name, pretty_name)
         to_be_fetched.add(str(spack.mirror.mirror_archive_paths(pkg.fetcher, cosmetic_path).storage_path))
         for resource in pkg._get_needed_resources():
-            pretty_resource_name = fsys.polite_filename(f"{resource.name}-{pkg.version}")
+            pretty_resource_name = fsys.polite_filename(f"{resource.env_name}-{pkg.version}")
             to_be_fetched.add(str(spack.mirror.mirror_archive_paths(resource.fetcher, pretty_resource_name).storage_path))
         for patch in ss.patches:
             if isinstance(patch, spack.patch.UrlPatch):
-                to_be_fetched.add(str(spack.mirror.mirror_archive_paths(patch.stage.fetcher, patch.stage.name).storage_path))
+                to_be_fetched.add(str(spack.mirror.mirror_archive_paths(patch.stage.fetcher, patch.stage.env_name).storage_path))
 
 for elem in to_be_fetched:
     print(elem)
diff --git a/dedal/tests/integration_tests/__init__.py b/dedal/tests/integration_tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/tests/integration_tests/spack_from_scratch_test.py b/dedal/tests/integration_tests/spack_from_scratch_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..d080bc7b144c4530f375b1a265cba41ce936fc3d
--- /dev/null
+++ b/dedal/tests/integration_tests/spack_from_scratch_test.py
@@ -0,0 +1,202 @@
+from pathlib import Path
+import pytest
+from dedal.configuration.SpackConfig import SpackConfig
+from dedal.error_handling.exceptions import BashCommandException, NoSpackEnvironmentException
+from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
+from dedal.model.SpackDescriptor import SpackDescriptor
+from dedal.tests.testing_variables import test_spack_env_git, ebrains_spack_builds_git
+from dedal.utils.utils import file_exists_and_not_empty
+
+
def test_spack_repo_exists_1(tmp_path):
    """spack_repo_exists must raise when no spack environment was set up."""
    install_dir = tmp_path
    env = SpackDescriptor('ebrains-spack-builds', install_dir)
    config = SpackConfig(env=env, install_dir=install_dir)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    # setup_spack_env() is deliberately skipped, so the lookup must fail.
    with pytest.raises(NoSpackEnvironmentException):
        spack_operation.spack_repo_exists(env.env_name)


def test_spack_repo_exists_2(tmp_path):
    """After env setup (but without adding any repo) the repo must not exist."""
    install_dir = tmp_path
    env = SpackDescriptor('ebrains-spack-builds', install_dir)
    config = SpackConfig(env=env, install_dir=install_dir)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    # Stray debug print(...) removed; compare booleans directly (PEP 8 / E712).
    assert not spack_operation.spack_repo_exists(env.env_name)
+
+
def test_spack_from_scratch_setup_1(tmp_path):
    """Env setup on system 'ebrainslab' without adding repos registers no repo."""
    install_dir = tmp_path
    env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    assert not spack_operation.spack_repo_exists(env.env_name)


def test_spack_from_scratch_setup_2(tmp_path):
    """A registered repo is found after env setup.

    The repo is added twice — presumably to exercise duplicate handling
    (TODO confirm this is intentional rather than a copy-paste slip).
    """
    install_dir = tmp_path
    env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    repo = env
    config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir)
    config.add_repo(repo)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    assert spack_operation.spack_repo_exists(env.env_name)


def test_spack_from_scratch_setup_3(tmp_path):
    """Setting up an env whose repo does not exist must fail with BashCommandException."""
    install_dir = tmp_path
    env = SpackDescriptor('new_env1', install_dir)
    repo = env
    config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir)
    config.add_repo(repo)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    with pytest.raises(BashCommandException):
        spack_operation.setup_spack_env()


def test_spack_from_scratch_setup_4(tmp_path):
    """A brand-new env (no git source, no system name) can be created from scratch."""
    install_dir = tmp_path
    env = SpackDescriptor('new_env2', install_dir)
    config = SpackConfig(env=env, install_dir=install_dir)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    assert spack_operation.spack_env_exists()


def test_spack_not_a_valid_repo():
    """Adding a repo with an empty path must fail with BashCommandException."""
    env = SpackDescriptor('ebrains-spack-builds', Path(), None)
    repo = env
    config = SpackConfig(env=env, system_name='ebrainslab')
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    with pytest.raises(BashCommandException):
        spack_operation.add_spack_repo(repo.path, repo.env_name)
+
+
@pytest.mark.skip(
    reason="Skipping the concretization step because it may freeze when numerous Spack packages are added to the environment.")
def test_spack_from_scratch_concretize_1(tmp_path):
    """Forced concretization of a cloned env must produce a non-empty spack.lock."""
    install_dir = tmp_path
    env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    repo = env
    config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir)
    config.add_repo(repo)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    # install_spack() was accidentally invoked twice here; once is sufficient.
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    spack_operation.concretize_spack_env(force=True)
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert file_exists_and_not_empty(concretization_file_path)
+
+
@pytest.mark.skip(
    reason="Skipping the concretization step because it may freeze when numerous Spack packages are added to the environment.")
def test_spack_from_scratch_concretize_2(tmp_path):
    """Unforced concretization of a cloned env must produce a non-empty spack.lock."""
    install_dir = tmp_path
    env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    repo = env
    config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir)
    config.add_repo(repo)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    spack_operation.concretize_spack_env(force=False)
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert file_exists_and_not_empty(concretization_file_path)


def test_spack_from_scratch_concretize_3(tmp_path):
    """Without running the concretization step no spack.lock may exist."""
    install_dir = tmp_path
    env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    repo = env
    config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir)
    config.add_repo(repo)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert not file_exists_and_not_empty(concretization_file_path)


def test_spack_from_scratch_concretize_4(tmp_path):
    """Unforced concretization of the small test env produces spack.lock."""
    install_dir = tmp_path
    env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git)
    config = SpackConfig(env=env, install_dir=install_dir)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    spack_operation.concretize_spack_env(force=False)
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert file_exists_and_not_empty(concretization_file_path)


def test_spack_from_scratch_concretize_5(tmp_path):
    """Forced concretization of the small test env produces spack.lock."""
    install_dir = tmp_path
    env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git)
    config = SpackConfig(env=env, install_dir=install_dir)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    spack_operation.concretize_spack_env(force=True)
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert file_exists_and_not_empty(concretization_file_path)


def test_spack_from_scratch_concretize_6(tmp_path):
    """Concretization with an extra repo registered produces spack.lock."""
    install_dir = tmp_path
    env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git)
    repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    config = SpackConfig(env=env, install_dir=install_dir)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    spack_operation.concretize_spack_env(force=False)
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert file_exists_and_not_empty(concretization_file_path)


def test_spack_from_scratch_concretize_7(tmp_path):
    """Forced concretization works with a default (unset) install_dir too."""
    install_dir = tmp_path
    env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git)
    repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    config = SpackConfig(env=env)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    spack_operation.concretize_spack_env(force=True)
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert file_exists_and_not_empty(concretization_file_path)


def test_spack_from_scratch_install(tmp_path):
    """End-to-end: concretize then install all packages of the test env."""
    install_dir = tmp_path
    env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git)
    repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git)
    config = SpackConfig(env=env)
    config.add_repo(repo)
    spack_operation = SpackOperationCreator.get_spack_operator(config)
    spack_operation.install_spack()
    spack_operation.setup_spack_env()
    spack_operation.concretize_spack_env(force=True)
    concretization_file_path = spack_operation.env_path / 'spack.lock'
    assert file_exists_and_not_empty(concretization_file_path)
    install_result = spack_operation.install_packages(jobs=2, signed=False, fresh=True, debug=False)
    assert install_result.returncode == 0
diff --git a/dedal/tests/integration_tests/spack_install_test.py b/dedal/tests/integration_tests/spack_install_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..564d5c6aa2138e815cd7d092215a4f2eee8816f6
--- /dev/null
+++ b/dedal/tests/integration_tests/spack_install_test.py
@@ -0,0 +1,12 @@
+import pytest
+from dedal.spack_factory.SpackOperation import SpackOperation
+from dedal.tests.testing_variables import SPACK_VERSION
+
+
# Run this test first so that spack is installed only once for all the tests.
@pytest.mark.run(order=1)
def test_spack_install_scratch():
    """Installing spack from scratch yields exactly the pinned SPACK_VERSION."""
    operation = SpackOperation()
    operation.install_spack(spack_version=f'v{SPACK_VERSION}')
    assert SPACK_VERSION == operation.get_spack_installed_version()
diff --git a/dedal/tests/testing_variables.py b/dedal/tests/testing_variables.py
new file mode 100644
index 0000000000000000000000000000000000000000..e441a2864497edbca5e1f50619befe0c1a90863e
--- /dev/null
+++ b/dedal/tests/testing_variables.py
@@ -0,0 +1,6 @@
+import os
+
# Public clone URL of the ebrains-spack-builds repository, used as a spack repo in tests.
ebrains_spack_builds_git = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git'
# Spack release pinned by the tests.
SPACK_VERSION = "0.23.0"
# CI-provided token granting read access to the private test-spack-env repository
# (None when unset, e.g. on a local machine without the variable exported).
SPACK_ENV_ACCESS_TOKEN = os.getenv("SPACK_ENV_ACCESS_TOKEN")
# Authenticated clone URL for the private test spack environment.
test_spack_env_git = f'https://oauth2:{SPACK_ENV_ACCESS_TOKEN}@gitlab.ebrains.eu/ri/projects-and-initiatives/virtualbraintwin/tools/test-spack-env.git'
diff --git a/dedal/tests/unit_tests/__init__.py b/dedal/tests/unit_tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/tests/unit_tests/utils_test.py b/dedal/tests/unit_tests/utils_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd47860602b6624f59e8fb7c06c7c156608e3eb3
--- /dev/null
+++ b/dedal/tests/unit_tests/utils_test.py
@@ -0,0 +1,150 @@
+import subprocess
+
+import pytest
+from pathlib import Path
+from unittest.mock import mock_open, patch, MagicMock
+from dedal.utils.utils import clean_up, file_exists_and_not_empty, log_command, run_command
+
+
@pytest.fixture
def temp_directories(tmp_path):
    """
    Build three directories under tmp_path, each holding one text file and one
    populated subdirectory, and return their paths as strings.
    """
    created = []

    for idx in range(3):
        base = tmp_path / f"test_dir_{idx}"
        base.mkdir()
        created.append(str(base))

        # One file directly inside the directory.
        (base / f"file_{idx}.txt").write_text(f"This is a test file in {base}")

        # One subdirectory containing a file of its own.
        nested = base / f"subdir_{idx}"
        nested.mkdir()
        (nested / f"sub_file_{idx}.txt").write_text(f"This is a sub file in {nested}")

    return created
+
+
def test_clean_up(temp_directories, mocker):
    """clean_up must delete every given directory together with its contents."""
    logger_mock = mocker.MagicMock()

    # Sanity check: all fixtures exist before the call.
    for path in temp_directories:
        assert Path(path).exists()

    clean_up(temp_directories, logger_mock)

    # Everything is gone and each removal was logged.
    for path in temp_directories:
        assert not Path(path).exists()
    for path in temp_directories:
        logger_mock.info.assert_any_call(f"Removing {Path(path).resolve()}")


def test_clean_up_nonexistent_dirs(mocker):
    """clean_up must log (not fail) for directories that do not exist."""
    logger_mock = mocker.MagicMock()
    missing = ["nonexistent_dir_1", "nonexistent_dir_2"]

    clean_up(missing, logger_mock)

    for path in missing:
        logger_mock.info.assert_any_call(f"{Path(path).resolve()} does not exist")
+
+
def test_file_does_not_exist(tmp_path: Path):
    """A path that was never created is reported as missing/empty."""
    missing = tmp_path / "non_existent.txt"
    assert not file_exists_and_not_empty(missing)


def test_file_exists_but_empty(tmp_path: Path):
    """A zero-byte file is treated the same as a missing one."""
    empty = tmp_path / "empty.txt"
    empty.touch()  # create, but write nothing
    assert not file_exists_and_not_empty(empty)


def test_file_exists_and_not_empty(tmp_path: Path):
    """A file with content is reported as existing and non-empty."""
    populated = tmp_path / "non_empty.txt"
    populated.write_text("Some content")
    assert file_exists_and_not_empty(populated)
+
+
def test_log_command():
    """log_command writes stdout, a separator, then stderr to the log file."""
    results = MagicMock()
    results.stdout = "Test output"
    results.stderr = "Test error"
    opener = mock_open()

    with patch("builtins.open", opener):
        log_command(results, "logfile.log")

    opener.assert_called_once_with("logfile.log", "w")
    handle = opener()
    handle.write.assert_any_call("Test output")
    handle.write.assert_any_call("\n--- STDERR ---\n")
    handle.write.assert_any_call("Test error")
+
+
def test_run_command_success(mocker):
    """A successful command is logged and its CompletedProcess returned."""
    run_mock = mocker.patch("subprocess.run", return_value=MagicMock(returncode=0))
    logger = MagicMock()
    result = run_command('bash', '-c', 'echo hello', logger=logger, info_msg="Running echo")
    logger.info.assert_called_with("Running echo: args: ('bash', '-c', 'echo hello')")
    run_mock.assert_called_once_with(('bash', '-c', 'echo hello'))
    assert result.returncode == 0


def test_run_command_not_found(mocker):
    """FileNotFoundError is swallowed and reported via logger.error."""
    mocker.patch("subprocess.run", side_effect=FileNotFoundError)
    logger = MagicMock()
    run_command("invalid_command", logger=logger)
    logger.error.assert_called_with("Command not found. Please check the command syntax.")


def test_run_command_permission_error(mocker):
    """PermissionError is swallowed and reported via logger.error."""
    mocker.patch("subprocess.run", side_effect=PermissionError)
    logger = MagicMock()
    run_command("restricted_command", logger=logger)
    logger.error.assert_called_with("Permission denied. Try running with appropriate permissions.")


def test_run_command_timeout(mocker):
    """TimeoutExpired is swallowed and reported via logger.error."""
    mocker.patch("subprocess.run", side_effect=subprocess.TimeoutExpired(cmd="test", timeout=5))
    logger = MagicMock()
    run_command("test", logger=logger)
    logger.error.assert_called_with("Command timed out. Try increasing the timeout duration.")


def test_run_command_os_error(mocker):
    """OSError is swallowed and its message forwarded to logger.error."""
    mocker.patch("subprocess.run", side_effect=OSError("OS Error"))
    logger = MagicMock()
    run_command("test", logger=logger)
    logger.error.assert_called_with("OS error occurred: OS Error")


def test_run_command_unexpected_exception(mocker):
    """Any other exception is swallowed and reported via logger.error."""
    mocker.patch("subprocess.run", side_effect=Exception("Unexpected Error"))
    logger = MagicMock()
    run_command("test", logger=logger)
    logger.error.assert_called_with("An unexpected error occurred: Unexpected Error")


def test_run_command_called_process_error(mocker):
    """CalledProcessError is reported with the caller-supplied message prefix."""
    mocker.patch("subprocess.run", side_effect=subprocess.CalledProcessError(1, "test"))
    logger = MagicMock()
    run_command("test", logger=logger, exception_msg="Process failed")
    logger.error.assert_called_with("Process failed: Command 'test' returned non-zero exit status 1.")
diff --git a/dedal/tests/utils_test.py b/dedal/tests/utils_test.py
deleted file mode 100644
index 14795726ce81896eb03d9862a9c096d78123815c..0000000000000000000000000000000000000000
--- a/dedal/tests/utils_test.py
+++ /dev/null
@@ -1,63 +0,0 @@
-import pytest
-from pathlib import Path
-
-from dedal.utils.utils import clean_up
-
-
-@pytest.fixture
-def temp_directories(tmp_path):
-    """
-    Create temporary directories with files and subdirectories for testing.
-    """
-    test_dirs = []
-
-    for i in range(3):
-        dir_path = tmp_path / f"test_dir_{i}"
-        dir_path.mkdir()
-        test_dirs.append(str(dir_path))
-
-        # Add a file to the directory
-        file_path = dir_path / f"file_{i}.txt"
-        file_path.write_text(f"This is a test file in {dir_path}")
-
-        # Add a subdirectory with a file
-        sub_dir = dir_path / f"subdir_{i}"
-        sub_dir.mkdir()
-        sub_file = sub_dir / f"sub_file_{i}.txt"
-        sub_file.write_text(f"This is a sub file in {sub_dir}")
-
-    return test_dirs
-
-
-def test_clean_up(temp_directories, mocker):
-    """
-    Test the clean_up function to ensure directories and contents are removed.
-    """
-    # Mock the logger using pytest-mock's mocker fixture
-    mock_logger = mocker.MagicMock()
-
-    # Ensure directories exist before calling clean_up
-    for dir_path in temp_directories:
-        assert Path(dir_path).exists()
-
-    clean_up(temp_directories, mock_logger)
-
-    for dir_path in temp_directories:
-        assert not Path(dir_path).exists()
-
-    for dir_path in temp_directories:
-        mock_logger.info.assert_any_call(f"Removing {Path(dir_path).resolve()}")
-
-
-def test_clean_up_nonexistent_dirs(mocker):
-    """
-    Test the clean_up function with nonexistent directories.
-    """
-    # Mock the logger using pytest-mock's mocker fixture
-    mock_logger = mocker.MagicMock()
-    nonexistent_dirs = ["nonexistent_dir_1", "nonexistent_dir_2"]
-
-    clean_up(nonexistent_dirs, mock_logger)
-
-    for dir_path in nonexistent_dirs:
-        mock_logger.info.assert_any_call(f"{Path(dir_path).resolve()} does not exist")
diff --git a/dedal/utils/bootstrap.sh b/dedal/utils/bootstrap.sh
new file mode 100644
index 0000000000000000000000000000000000000000..9b7d0131e95a3be9b0f2cfc4dc82492517fb22dc
--- /dev/null
+++ b/dedal/utils/bootstrap.sh
@@ -0,0 +1,6 @@
#!/usr/bin/env bash
# Minimal prerequisites for installing the esd_library.
# pip must be installed on the OS.
# Fail fast: abort the bootstrap if any step fails.
set -e
echo "Bootstrapping..."
apt update
apt install -y bzip2 ca-certificates g++ gcc gfortran git gzip lsb-release patch python3 python3-pip tar unzip xz-utils zstd
python3 -m pip install --upgrade pip setuptools wheel
diff --git a/dedal/utils/utils.py b/dedal/utils/utils.py
index 811d258e7e5856f4b666bc3196996f3b24571112..9fc82ad520b4a22715c819d3ca11042b88b87c7f 100644
--- a/dedal/utils/utils.py
+++ b/dedal/utils/utils.py
@@ -1,20 +1,100 @@
+import logging
+import os
 import shutil
+import subprocess
 from pathlib import Path
 
+from dedal.error_handling.exceptions import BashCommandException
+import re
 
-def clean_up(dirs: list[str], logging, ignore_errors=True):
+
def clean_up(dirs: list[str], logger: logging.Logger = logging.getLogger(__name__), ignore_errors=True):
    """
    Remove every directory in *dirs* together with all of its contents.

    Args:
        dirs: Directory paths (absolute or relative) to delete.
        logger: Logger used for progress and error reporting. (The annotation
            was `logging` — the module — before; it is a Logger instance.)
        ignore_errors: When False, re-raise the OSError that aborted a removal.
    """
    for cleanup_dir in dirs:
        cleanup_dir = Path(cleanup_dir).resolve()
        if cleanup_dir.exists():
            logger.info(f"Removing {cleanup_dir}")
            try:
                shutil.rmtree(Path(cleanup_dir))
            except OSError as e:
                logger.error(f"Failed to remove {cleanup_dir}: {e}")
                if not ignore_errors:
                    raise e
        else:
            logger.info(f"{cleanup_dir} does not exist")
+
+
+def run_command(*args, logger=logging.getLogger(__name__), info_msg: str = '', exception_msg: str = None,
+                exception=None, **kwargs):
+    try:
+        logger.info(f'{info_msg}: args: {args}')
+        return subprocess.run(args, **kwargs)
+    except subprocess.CalledProcessError as e:
+        if exception_msg is not None:
+            logger.error(f"{exception_msg}: {e}")
+        if exception is not None:
+            raise exception(f'{exception_msg} : {e}')
+        else:
+            return None
+    except FileNotFoundError:
+        logger.error(f"Command not found. Please check the command syntax.")
+    except PermissionError:
+        logger.error(f"Permission denied. Try running with appropriate permissions.")
+    except subprocess.TimeoutExpired:
+        logger.error(f"Command timed out. Try increasing the timeout duration.")
+    except ValueError:
+        logger.error(f"Invalid argument passed to subprocess. Check function parameters.")
+    except OSError as e:
+        logger.error(f"OS error occurred: {e}")
+    except Exception as e:
+        logger.error(f"An unexpected error occurred: {e}")
+
+
def git_clone_repo(repo_name: str, dir: Path, git_path: str, logger: logging.Logger = logging.getLogger(__name__)):
    """
    Shallow-clone *git_path* into *dir* unless that directory already exists.

    Args:
        repo_name: Human-readable name, used only in log messages.
        dir: Target directory. (Name kept for backward compatibility even
            though it shadows the builtin ``dir``.)
        git_path: URL (or path) of the git repository to clone.
        logger: Logger used for progress reporting.

    Raises:
        BashCommandException: When the underlying ``git clone`` fails.
    """
    if not dir.exists():
        run_command(
            "git", "clone", "--depth", "1",
            "-c", "advice.detachedHead=false",
            "-c", "feature.manyFiles=true",
            git_path, dir,
            check=True, logger=logger,
            info_msg=f'Cloned repository {repo_name}',
            exception_msg=f'Failed to clone repository: {repo_name}',
            exception=BashCommandException)
    else:
        logger.info(f'Repository {repo_name} already cloned.')
+
+
def file_exists_and_not_empty(file: Path) -> bool:
    """Return True when *file* is an existing regular file with size > 0."""
    if not file.is_file():
        return False
    return file.stat().st_size > 0
+
+
def log_command(results, log_file: str):
    """
    Persist a command result's captured output to *log_file*.

    Writes ``results.stdout``, a ``--- STDERR ---`` separator, then
    ``results.stderr``; both attributes must be text (captured str output).
    """
    # Distinct handle name: the original shadowed the `log_file` parameter
    # with the file object inside the `with` block.
    with open(log_file, "w") as handle:
        handle.write(results.stdout)
        handle.write("\n--- STDERR ---\n")
        handle.write(results.stderr)
+
+
def set_bashrc_variable(var_name: str, value: str, bashrc_path: str = os.path.expanduser("~/.bashrc"),
                        logger: logging.Logger = logging.getLogger(__name__)):
    """Update or add an environment variable export in ~/.bashrc.

    Args:
        var_name: Name of the environment variable.
        value: Value to assign (written verbatim — quote it yourself if it
            contains spaces or shell metacharacters).
        bashrc_path: File to edit; must already exist and be readable.
        logger: Logger reporting whether the line was added or updated.
    """
    with open(bashrc_path, "r") as file:
        lines = file.readlines()
    # Match an existing 'export VAR=...' line. re.escape guards against
    # regex metacharacters in var_name (a no-op for normal variable names).
    pattern = re.compile(rf'^\s*export\s+{re.escape(var_name)}=.*$')
    updated = False
    # Modify the existing variable if found
    for i, line in enumerate(lines):
        if pattern.match(line):
            lines[i] = f'export {var_name}={value}\n'
            updated = True
            break
    if not updated:
        lines.append(f'\nexport {var_name}={value}\n')
        logger.info(f"Added in {bashrc_path} with: export {var_name}={value}")
    else:
        logger.info(f"Updated {bashrc_path} with: export {var_name}={value}")
    with open(bashrc_path, "w") as file:
        file.writelines(lines)
diff --git a/dedal/wrapper/__init__.py b/dedal/wrapper/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/wrapper/spack_wrapper.py b/dedal/wrapper/spack_wrapper.py
new file mode 100644
index 0000000000000000000000000000000000000000..018cad482083beda9ee0a278f4bbb2d107476849
--- /dev/null
+++ b/dedal/wrapper/spack_wrapper.py
@@ -0,0 +1,15 @@
+import functools
+
+from dedal.error_handling.exceptions import NoSpackEnvironmentException
+
+
def check_spack_env(method):
    """
    Decorator for SpackOperation methods that require an existing spack
    environment: the wrapped method runs only when ``self.spack_env_exists()``
    is truthy, otherwise NoSpackEnvironmentException is raised.
    """

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Guard clause: bail out early when no environment is configured.
        if not self.spack_env_exists():
            self.logger.debug('No spack environment defined')
            raise NoSpackEnvironmentException('No spack environment defined')
        return method(self, *args, **kwargs)

    return wrapper
index 757f370c12a7602c9b7011c98c8aae416087ca24..b6679ca1f6946fde61d1ec5d288a8b85ec92d7a3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,15 +1,15 @@
 [build-system]
-requires = ["setuptools", "setuptools-scm"]
+requires = ["setuptools>=64", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [project]
 name = "dedal"
+version = "0.1.0"
 authors = [
     {name = "Eric Müller", email = "mueller@kip.uni-heidelberg.de"},
     {name = "Adrian Ciu", email = "adrian.ciu@codemart.ro"},
 ]
 description = "This package provides all the necessary tools to create an Ebrains Software Distribution environment"
-version = "0.1.0"
 readme = "README.md"
 requires-python = ">=3.10"
 dependencies = [
@@ -18,6 +18,7 @@ dependencies = [
     "ruamel.yaml",
     "pytest",
     "pytest-mock",
+    "pytest-ordering",
 ]
 
 [tool.setuptools.data-files]