diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2b497048d572c8df8798ae2d62fc8e13589add33..0ce029072928ad5d86de3b9d502f139d353a1d8e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -5,7 +5,6 @@ stages:
 variables:
   BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/esd/tmp:latest
 
-
 build-wheel:
   stage: build
   tags:
@@ -28,16 +27,16 @@ testing-pytest:
     - docker-runner
   image: ubuntu:22.04
   script:
-    - chmod +x dedal/utils/bootstrap.sh
-    - ./dedal/utils/bootstrap.sh
+    - chmod +x esd/utils/bootstrap.sh
+    - ./esd/utils/bootstrap.sh
     - pip install .
-    - pytest ./dedal/tests/ -s --junitxml=test-results.xml
+    - pytest ./esd/tests/ -s --junitxml=test-results.xml
   artifacts:
     when: always
     reports:
       junit: test-results.xml
     paths:
       - test-results.xml
-      - .dedal.log
+      - .esd.log
     expire_in: 1 week
 
diff --git a/dedal/build_cache/BuildCacheManager.py b/dedal/build_cache/BuildCacheManager.py
index 55fa10cb3f6fc888fe43fcfbb055a49b6092a005..e1bd6824321c5a2b483800bc4b39824aa4715493 100644
--- a/dedal/build_cache/BuildCacheManager.py
+++ b/dedal/build_cache/BuildCacheManager.py
@@ -2,9 +2,9 @@ import os
 import oras.client
 from pathlib import Path
 
-from dedal.build_cache.BuildCacheManagerInterface import BuildCacheManagerInterface
-from dedal.logger.logger_builder import get_logger
-from dedal.utils.utils import clean_up
+from esd.build_cache.BuildCacheManagerInterface import BuildCacheManagerInterface
+from esd.logger.logger_builder import get_logger
+from esd.utils.utils import clean_up
 
 
 class BuildCacheManager(BuildCacheManagerInterface):
@@ -12,51 +12,48 @@ class BuildCacheManager(BuildCacheManagerInterface):
         This class aims to manage the push/pull/delete of build cache files
     """
 
-    def __init__(self, registry_host, registry_project, registry_username, registry_password, cache_version='cache',
-                 auth_backend='basic',
-                 insecure=False):
-        self._logger = get_logger(__name__, BuildCacheManager.__name__)
-        self._registry_project = registry_project
+    def __init__(self, auth_backend='basic', insecure=False):
+        self.logger = get_logger(__name__, BuildCacheManager.__name__)
+        self.home_path = Path(os.environ.get("HOME_PATH", os.getcwd()))
+        self.registry_project = os.environ.get("REGISTRY_PROJECT")
 
-        self._registry_username = registry_username
-        self._registry_password = registry_password
+        self._registry_username = str(os.environ.get("REGISTRY_USERNAME"))
+        self._registry_password = str(os.environ.get("REGISTRY_PASSWORD"))
 
-        self._registry_host = registry_host
+        self.registry_host = str(os.environ.get("REGISTRY_HOST"))
         # Initialize an OrasClient instance.
         # This method utilizes the OCI Registry for container image and artifact management.
         # Refer to the official OCI Registry documentation for detailed information on the available authentication methods.
         # Supported authentication types may include basic authentication (username/password), token-based authentication,
-        self._client = oras.client.OrasClient(hostname=self._registry_host, auth_backend=auth_backend,
-                                              insecure=insecure)
-        self._client.login(username=self._registry_username, password=self._registry_password)
-        self.cache_version = cache_version
-        self._oci_registry_path = f'{self._registry_host}/{self._registry_project}/{self.cache_version}'
+        self.client = oras.client.OrasClient(hostname=self.registry_host, auth_backend=auth_backend, insecure=insecure)
+        self.client.login(username=self._registry_username, password=self._registry_password)
+        self.oci_registry_path = f'{self.registry_host}/{self.registry_project}/cache'
 
     def upload(self, out_dir: Path):
         """
             This method pushed all the files from the build cache folder into the OCI Registry
         """
-        build_cache_path = out_dir.resolve()
+        build_cache_path = self.home_path / out_dir
         # build cache folder must exist before pushing all the artifacts
         if not build_cache_path.exists():
-            self._logger.error(f"Path {build_cache_path} not found.")
+            self.logger.error(f"Path {build_cache_path} not found.")
 
         for sub_path in build_cache_path.rglob("*"):
             if sub_path.is_file():
-                rel_path = str(sub_path.relative_to(build_cache_path)).replace(str(sub_path.name), "")
-                target = f"{self._registry_host}/{self._registry_project}/{self.cache_version}:{str(sub_path.name)}"
+                rel_path = str(sub_path.relative_to(build_cache_path)).replace(str(sub_path.name), "")
+                target = f"{self.registry_host}/{self.registry_project}/cache:{str(sub_path.name)}"
                 try:
-                    self._logger.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...")
-                    self._client.push(
+                    self.logger.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...")
+                    self.client.push(
                         files=[str(sub_path)],
                         target=target,
                         # save in manifest the relative path for reconstruction
                         manifest_annotations={"path": rel_path},
                         disable_path_validation=True,
                     )
-                    self._logger.info(f"Successfully pushed {sub_path.name}")
+                    self.logger.info(f"Successfully pushed {sub_path.name}")
                 except Exception as e:
-                    self._logger.error(
+                    self.logger.error(
                         f"An error occurred while pushing: {e}")
         # todo to be discussed hot to delete the build cache after being pushed to the OCI Registry
         # clean_up([str(build_cache_path)], self.logger)
@@ -66,38 +63,37 @@ class BuildCacheManager(BuildCacheManagerInterface):
             This method retrieves all tags from an OCI Registry
         """
         try:
-            return self._client.get_tags(self._oci_registry_path)
+            return self.client.get_tags(self.oci_registry_path)
         except Exception as e:
-            self._logger.error(f"Failed to list tags: {e}")
+            self.logger.error(f"Failed to list tags: {e}")
         return None
 
     def download(self, in_dir: Path):
         """
             This method pulls all the files from the OCI Registry into the build cache folder
         """
-        build_cache_path = in_dir.resolve()
+        build_cache_path = self.home_path / in_dir
         # create the buildcache dir if it does not exist
         os.makedirs(build_cache_path, exist_ok=True)
         tags = self.list_tags()
         if tags is not None:
             for tag in tags:
-                ref = f"{self._registry_host}/{self._registry_project}/{self.cache_version}:{tag}"
+                ref = f"{self.registry_host}/{self.registry_project}/cache:{tag}"
                 # reconstruct the relative path of each artifact by getting it from the manifest
                 cache_path = \
-                    self._client.get_manifest(
-                        f'{self._registry_host}/{self._registry_project}/{self.cache_version}:{tag}')[
+                    self.client.get_manifest(f'{self.registry_host}/{self.registry_project}/cache:{tag}')[
                         'annotations'][
                         'path']
                 try:
-                    self._client.pull(
+                    self.client.pull(
                         ref,
                         # missing dirs to output dir are created automatically by OrasClient pull method
                         outdir=str(build_cache_path / cache_path),
                         overwrite=True
                     )
-                    self._logger.info(f"Successfully pulled artifact {tag}.")
+                    self.logger.info(f"Successfully pulled artifact {tag}.")
                 except Exception as e:
-                    self._logger.error(
+                    self.logger.error(
                         f"Failed to pull artifact {tag} : {e}")
 
     def delete(self):
@@ -110,8 +106,8 @@ class BuildCacheManager(BuildCacheManagerInterface):
         tags = self.list_tags()
         if tags is not None:
             try:
-                self._client.delete_tags(self._oci_registry_path, tags)
-                self._logger.info(f"Successfully deleted all artifacts form OCI registry.")
+                self.client.delete_tags(self.oci_registry_path, tags)
+                self.logger.info(f"Successfully deleted all artifacts from OCI registry.")
             except RuntimeError as e:
-                self._logger.error(
+                self.logger.error(
                     f"Failed to delete artifacts: {e}")
diff --git a/dedal/logger/logger_config.py b/dedal/logger/logger_config.py
deleted file mode 100644
index 3ca3b000fd171dde8ceafdc6731dfb02009e845c..0000000000000000000000000000000000000000
--- a/dedal/logger/logger_config.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import logging
-
-
-class LoggerConfig:
-    """
-        This class sets up logging with a file handler
-        and a stream handler, ensuring consistent
-        and formatted log messages.
-    """
-    def __init__(self, log_file):
-        self.log_file = log_file
-        self._configure_logger()
-
-    def _configure_logger(self):
-        formatter = logging.Formatter(
-            fmt='%(asctime)s - %(levelname)s - %(message)s',
-            datefmt='%Y-%m-%d %H:%M:%S'
-        )
-
-        file_handler = logging.FileHandler(self.log_file)
-        file_handler.setFormatter(formatter)
-
-        stream_handler = logging.StreamHandler()
-        stream_handler.setFormatter(formatter)
-
-        self.logger = logging.getLogger(__name__)
-        self.logger.setLevel(logging.DEBUG)
-
-        self.logger.addHandler(file_handler)
-        self.logger.addHandler(stream_handler)
-
-    def get_logger(self):
-        return self.logger
diff --git a/dedal/tests/integration_tests/spack_from_scratch_test.py b/dedal/tests/integration_tests/spack_from_scratch_test.py
index d080bc7b144c4530f375b1a265cba41ce936fc3d..fa86230166d8c3bb70589b13cbc8f0bab8f2f056 100644
--- a/dedal/tests/integration_tests/spack_from_scratch_test.py
+++ b/dedal/tests/integration_tests/spack_from_scratch_test.py
@@ -65,14 +65,12 @@ def test_spack_from_scratch_setup_3(tmp_path):
         spack_operation.setup_spack_env()
 
 
-def test_spack_from_scratch_setup_4(tmp_path):
-    install_dir = tmp_path
-    env = SpackDescriptor('new_env2', install_dir)
-    config = SpackConfig(env=env, install_dir=install_dir)
-    spack_operation = SpackOperationCreator.get_spack_operator(config)
-    spack_operation.install_spack()
-    spack_operation.setup_spack_env()
-    assert spack_operation.spack_env_exists() == True
+def test_spack_from_scratch_setup_4():
+    install_dir = Path('./install').resolve()
+    env = SpackModel('new_environment', install_dir, )
+    spack_manager = SpackManagerScratch(env, system_name='ebrainslab')
+    spack_manager.setup_spack_env()
+    assert spack_manager.spack_repo_exists(env.env_name) == True
 
 
 def test_spack_not_a_valid_repo():
diff --git a/dedal/tests/integration_tests/spack_install_test.py b/dedal/tests/integration_tests/spack_install_test.py
index 564d5c6aa2138e815cd7d092215a4f2eee8816f6..34f683234b064d0f800bfa9376fcf6fec592d7ef 100644
--- a/dedal/tests/integration_tests/spack_install_test.py
+++ b/dedal/tests/integration_tests/spack_install_test.py
@@ -1,12 +1,21 @@
 import pytest
-from dedal.spack_factory.SpackOperation import SpackOperation
-from dedal.tests.testing_variables import SPACK_VERSION
 
+from esd.spack_manager.factory.SpackManagerBuildCache import SpackManagerBuildCache
+from esd.spack_manager.factory.SpackManagerScratch import SpackManagerScratch
 
-# run this test first so that spack is installed only once for all the tests
+
+# we need this test to run first so that spack is installed only once
 @pytest.mark.run(order=1)
 def test_spack_install_scratch():
-    spack_operation = SpackOperation()
-    spack_operation.install_spack(spack_version=f'v{SPACK_VERSION}')
-    installed_spack_version = spack_operation.get_spack_installed_version()
-    assert SPACK_VERSION == installed_spack_version
+    spack_manager = SpackManagerScratch()
+    spack_manager.install_spack(spack_version="v0.21.1")
+    installed_spack_version = spack_manager.get_spack_installed_version()
+    required_version = "0.21.1"
+    assert required_version == installed_spack_version
+
+
+def test_spack_install_buildcache():
+    spack_manager = SpackManagerBuildCache()
+    installed_spack_version = spack_manager.get_spack_installed_version()
+    required_version = "0.21.1"
+    assert required_version == installed_spack_version
diff --git a/esd/error_handling/exceptions.py b/esd/error_handling/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..2acca54ea6decc66da1bd0cc827ea5b890b2780d
--- /dev/null
+++ b/esd/error_handling/exceptions.py
@@ -0,0 +1,13 @@
+class SpackException(Exception):
+
+    def __init__(self, message):
+        super().__init__(message)
+        self.message = str(message)
+
+    def __str__(self):
+        return self.message
+
+class BashCommandException(SpackException):
+    """
+    To be thrown when a bash command has failed.
+    """
diff --git a/esd/model/SpackModel.py b/esd/model/SpackModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b065dba06f558ce21a0354257d77aa595bcaeb1
--- /dev/null
+++ b/esd/model/SpackModel.py
@@ -0,0 +1,12 @@
+from pathlib import Path
+
+
+class SpackModel:
+    """
+    Provides details about the spack environment
+    """
+
+    def __init__(self, env_name: str, path: Path, git_path: str = None):
+        self.env_name = env_name
+        self.path = path
+        self.git_path = git_path
diff --git a/esd/model/__init__.py b/esd/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/spack_manager/SpackManager.py b/esd/spack_manager/SpackManager.py
new file mode 100644
index 0000000000000000000000000000000000000000..cdf4a1d36d249017636798b05349a88b5e67f97e
--- /dev/null
+++ b/esd/spack_manager/SpackManager.py
@@ -0,0 +1,180 @@
+import os
+import subprocess
+from abc import ABC, abstractmethod
+from pathlib import Path
+
+from esd.error_handling.exceptions import BashCommandException
+from esd.logger.logger_builder import get_logger
+from esd.model.SpackModel import SpackModel
+from esd.utils.utils import run_command, git_clone_repo
+
+
+class SpackManager(ABC):
+    """
+    This class should implement the methods necessary for installing spack, set up an environment, concretize and install packages.
+    Factory design pattern is used because there are 2 cases: creating an environment from scratch or creating an environment from the buildcache.
+
+    Attributes:
+    -----------
+    env : SpackModel
+        spack environment details
+    repos : list[SpackModel]
+    upstream_instance : str
+        path to Spack instance to use as upstream (optional)
+    """
+
+    def __init__(self, env: SpackModel = None, repos=None,
+                 upstream_instance=None, system_name: str = None, logger=get_logger(__name__)):
+        # fall back to an empty list so setup_spack_env can always iterate self.repos
+        self.repos = repos if repos is not None else list()
+        self.env = env
+        self.upstream_instance = upstream_instance
+        self.install_dir = Path(os.environ.get("INSTALLATION_ROOT") or os.getcwd())
+        self.install_dir.mkdir(parents=True, exist_ok=True)
+        self.spack_dir = self.install_dir / "spack"
+        self.spack_setup_script = self.spack_dir / "share" / "spack" / "setup-env.sh"
+        self.logger = logger
+        self.system_name = system_name
+
+    @abstractmethod
+    def concretize_spack_env(self, force=True):
+        pass
+
+    @abstractmethod
+    def install_spack_packages(self, jobs: int = 3, verbose=False, debug=False):
+        pass
+
+    def create_fetch_spack_environment(self):
+        env_dir = self.install_dir / self.env.path / self.env.env_name
+        if self.env.git_path:
+            try:
+                git_clone_repo(self.env.env_name, env_dir, self.env.git_path, logger=self.logger)
+            except subprocess.CalledProcessError as e:
+                self.logger.exception(f'Failed to clone repository: {self.env.env_name}: {e}')
+                raise BashCommandException(f'Failed to clone repository: {self.env.env_name}: {e}')
+        else:
+            try:
+                os.makedirs(self.env.path / self.env.env_name, exist_ok=True)
+                run_command("bash", "-c",
+                            f'source {self.spack_setup_script} && spack env create -d {self.env.path}/{self.env.env_name}',
+                            check=True, logger=self.logger)
+                self.logger.debug(f"Created {self.env.env_name} spack environment")
+            except subprocess.CalledProcessError as e:
+                self.logger.error(f"Failed to create {self.env.env_name} spack environment")
+                raise BashCommandException(f"Failed to create {self.env.env_name} spack environment")
+
+    def setup_spack_env(self):
+        """
+        This method prepares a spack environment by fetching/creating the spack environment and adding the necessary repos
+        """
+        bashrc_path = os.path.expanduser("~/.bashrc")
+        if self.system_name:
+            with open(bashrc_path, "a") as bashrc:
+                bashrc.write(f'export SYSTEMNAME="{self.system_name}"\n')
+                os.environ['SYSTEMNAME'] = self.system_name
+        if self.spack_dir.exists() and self.spack_dir.is_dir():
+            with open(bashrc_path, "a") as bashrc:
+                bashrc.write(f'export SPACK_USER_CACHE_PATH="{str(self.spack_dir / ".spack")}"\n')
+                bashrc.write(f'export SPACK_USER_CONFIG_PATH="{str(self.spack_dir / ".spack")}"\n')
+            self.logger.debug('Added env variables SPACK_USER_CACHE_PATH and SPACK_USER_CONFIG_PATH')
+        else:
+            self.logger.error(f'Invalid installation path: {self.spack_dir}')
+        # Restart the bash after adding environment variables
+        self.create_fetch_spack_environment()
+        if self.install_dir.exists():
+            for repo in self.repos:
+                repo_dir = self.install_dir / repo.path / repo.env_name
+                git_clone_repo(repo.env_name, repo_dir, repo.git_path, logger=self.logger)
+                if not self.spack_repo_exists(repo.env_name):
+                    self.add_spack_repo(repo.path, repo.env_name)
+                    self.logger.debug(f'Added spack repository {repo.env_name}')
+                else:
+                    self.logger.debug(f'Spack repository {repo.env_name} already added')
+
+    def spack_repo_exists(self, repo_name: str) -> bool:
+        """Check if the given Spack repository exists."""
+        if self.env is None:
+            try:
+                result = run_command("bash", "-c",
+                                     f'source {self.spack_setup_script} && spack repo list',
+                                     check=True,
+                                     capture_output=True, text=True, logger=self.logger)
+            except subprocess.CalledProcessError:
+                return False
+        else:
+            try:
+                result = run_command("bash", "-c",
+                                     f'source {self.spack_setup_script} && spack env activate -p {self.env.path}/{self.env.env_name} && spack repo list',
+                                     check=True,
+                                     capture_output=True, text=True, logger=self.logger)
+            except subprocess.CalledProcessError:
+                return False
+        return any(line.strip().endswith(repo_name) for line in result.stdout.splitlines())
+
+    def add_spack_repo(self, repo_path: Path, repo_name: str):
+        """Add the Spack repository if it does not exist."""
+        repo_path = repo_path.resolve().as_posix()
+        try:
+            run_command("bash", "-c",
+                        f'source {self.spack_setup_script} && spack env activate -p {self.env.path}/{self.env.env_name} && spack repo add {repo_path}/{repo_name}',
+                        check=True, logger=self.logger)
+            self.logger.debug(f"Added {repo_name} to spack environment {self.env.env_name}")
+        except subprocess.CalledProcessError as e:
+            self.logger.error(f"Failed to add {repo_name} to spack environment {self.env.env_name}")
+            raise BashCommandException(f"Failed to add {repo_name} to spack environment {self.env.env_name}: {e}")
+
+    def get_spack_installed_version(self):
+        try:
+            spack_version = run_command("bash", "-c", f'source {self.spack_setup_script} && spack --version',
+                                        capture_output=True, text=True, check=True,
+                                        logger=self.logger)
+            spack_version = spack_version.stdout.strip().split()[0]
+            self.logger.debug(f"Getting spack version: {spack_version}")
+            return spack_version
+        except subprocess.SubprocessError as e:
+            self.logger.error(f"Error retrieving Spack version: {e}")
+            return None
+
+    def install_spack(self, spack_version="v0.21.1", spack_repo='https://github.com/spack/spack'):
+        try:
+            user = os.getlogin()
+        except OSError:
+            user = None
+
+        self.logger.info(f"Starting to install Spack into {self.spack_dir} from branch {spack_version}")
+        if not self.spack_dir.exists():
+            run_command(
+                "git", "clone", "--depth", "1",
+                "-c", "advice.detachedHead=false",
+                "-c", "feature.manyFiles=true",
+                "--branch", spack_version, spack_repo, self.spack_dir
+                , check=True, logger=self.logger)
+            self.logger.debug("Cloned spack")
+        else:
+            self.logger.debug("Spack already cloned.")
+
+        bashrc_path = os.path.expanduser("~/.bashrc")
+        # ensure the file exists before opening it
+        if not os.path.exists(bashrc_path):
+            open(bashrc_path, "w").close()
+        # add spack setup commands to .bashrc
+        with open(bashrc_path, "a") as bashrc:
+            bashrc.write(f'export PATH="{self.spack_dir}/bin:$PATH"\n')
+            bashrc.write(f"source {self.spack_setup_script}\n")
+        self.logger.info("Added Spack PATH to .bashrc")
+        if user:
+            run_command("chown", "-R", f"{user}:{user}", self.spack_dir, check=True, logger=self.logger)
+        run_command("bash", "-c", f"source {bashrc_path}", check=True, logger=self.logger)
+        self.logger.info("Spack install completed")
+        # Restart Bash after the installation ends
+        os.system("exec bash")
+
+        # Configure upstream Spack instance if specified
+        if self.upstream_instance:
+            upstreams_yaml_path = os.path.join(self.spack_dir, "etc/spack/defaults/upstreams.yaml")
+            with open(upstreams_yaml_path, "w") as file:
+                # write left-aligned YAML; a triple-quoted literal here would embed
+                # the Python indentation and produce an invalid upstreams.yaml
+                file.write("upstreams:\n  upstream-spack-instance:\n"
+                           f"    install_tree: {self.upstream_instance}/spack/opt/spack\n")
+            self.logger.info("Added upstream spack instance")
diff --git a/esd/spack_manager/__init__.py b/esd/spack_manager/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/spack_manager/enums/SpackManagerEnum.py b/esd/spack_manager/enums/SpackManagerEnum.py
new file mode 100644
index 0000000000000000000000000000000000000000..a24358394d19ee1903835f7eafea8f8e8c964fa6
--- /dev/null
+++ b/esd/spack_manager/enums/SpackManagerEnum.py
@@ -0,0 +1,6 @@
+from enum import Enum
+
+
+class SpackManagerEnum(Enum):
+    FROM_SCRATCH = "from_scratch",
+    FROM_BUILDCACHE = "from_buildcache",
diff --git a/esd/spack_manager/enums/__init__.py b/esd/spack_manager/enums/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/spack_manager/factory/SpackManagerBuildCache.py b/esd/spack_manager/factory/SpackManagerBuildCache.py
new file mode 100644
index 0000000000000000000000000000000000000000..38151c6d0741d16c7a06e64f4a2ee5bc9db2e376
--- /dev/null
+++ b/esd/spack_manager/factory/SpackManagerBuildCache.py
@@ -0,0 +1,19 @@
+from esd.model.SpackModel import SpackModel
+from esd.spack_manager.SpackManager import SpackManager
+from esd.logger.logger_builder import get_logger
+
+
+class SpackManagerBuildCache(SpackManager):
+    def __init__(self, env: SpackModel = None, repos=None,
+                 upstream_instance=None, system_name: str = None):
+        super().__init__(env, repos, upstream_instance, system_name, logger=get_logger(__name__))
+
+    def setup_spack_env(self):
+        super().setup_spack_env()
+        # todo add buildcache to the spack environment
+
+    def concretize_spack_env(self, force=True):
+        pass
+
+    def install_spack_packages(self, jobs: int = 3, verbose=False, debug=False):
+        pass
diff --git a/esd/spack_manager/factory/SpackManagerCreator.py b/esd/spack_manager/factory/SpackManagerCreator.py
new file mode 100644
index 0000000000000000000000000000000000000000..9728467f39c2753c9f12ef959c80def1150f0314
--- /dev/null
+++ b/esd/spack_manager/factory/SpackManagerCreator.py
@@ -0,0 +1,14 @@
+from esd.spack_manager.enums.SpackManagerEnum import SpackManagerEnum
+from esd.spack_manager.factory.SpackManagerBuildCache import SpackManagerBuildCache
+from esd.spack_manager.factory.SpackManagerScratch import SpackManagerScratch
+
+
+class SpackManagerCreator:
+    @staticmethod
+    def get_spack_manger(spack_manager_type: SpackManagerEnum, env_name: str, repo: str, repo_name: str,
+                         upstream_instance: str):
+        if spack_manager_type == SpackManagerEnum.FROM_SCRATCH:
+            return SpackManagerScratch(env_name, repo, repo_name, upstream_instance)
+        elif spack_manager_type == SpackManagerEnum.FROM_BUILDCACHE:
+            return SpackManagerBuildCache(env_name, repo, repo_name, upstream_instance)
+
diff --git a/esd/spack_manager/factory/SpackManagerScratch.py b/esd/spack_manager/factory/SpackManagerScratch.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a79797ce924af10d1eda003fd55cf78661314a0
--- /dev/null
+++ b/esd/spack_manager/factory/SpackManagerScratch.py
@@ -0,0 +1,15 @@
+from esd.model.SpackModel import SpackModel
+from esd.spack_manager.SpackManager import SpackManager
+from esd.logger.logger_builder import get_logger
+
+
+class SpackManagerScratch(SpackManager):
+    def __init__(self, env: SpackModel = None, repos=None,
+                 upstream_instance=None, system_name: str = None):
+        super().__init__(env, repos, upstream_instance, system_name, logger=get_logger(__name__))
+
+    def concretize_spack_env(self, force=True):
+        pass
+
+    def install_spack_packages(self, jobs: int = 3, verbose=False, debug=False):
+        pass
diff --git a/esd/spack_manager/factory/__init__.py b/esd/spack_manager/factory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/utils/bootstrap.sh b/esd/utils/bootstrap.sh
new file mode 100644
index 0000000000000000000000000000000000000000..9b7d0131e95a3be9b0f2cfc4dc82492517fb22dc
--- /dev/null
+++ b/esd/utils/bootstrap.sh
@@ -0,0 +1,6 @@
+# Minimal prerequisites for installing the esd_library
+# pip must be installed on the OS
+echo "Bootstrapping..."
+apt update
+apt install -y bzip2 ca-certificates g++ gcc gfortran git gzip lsb-release patch python3 python3-pip tar unzip xz-utils zstd
+python3 -m pip install --upgrade pip setuptools wheel
diff --git a/esd/utils/utils.py b/esd/utils/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..d229e3453cb3ba4b29319ec84eeeb68998851447
--- /dev/null
+++ b/esd/utils/utils.py
@@ -0,0 +1,42 @@
+import logging
+import shutil
+import subprocess
+from pathlib import Path
+
+
+def clean_up(dirs: list[str], logging, ignore_errors=True):
+    """
+        All the folders from the list dirs are removed with all the content in them
+    """
+    for cleanup_dir in dirs:
+        cleanup_dir = Path(cleanup_dir).resolve()
+        if cleanup_dir.exists():
+            logging.info(f"Removing {cleanup_dir}")
+            try:
+                shutil.rmtree(Path(cleanup_dir))
+            except OSError as e:
+                logging.error(f"Failed to remove {cleanup_dir}: {e}")
+                if not ignore_errors:
+                    raise e
+        else:
+            logging.info(f"{cleanup_dir} does not exist")
+
+
+def run_command(*args, logger=None, **kwargs):
+    if logger is None:
+        logger = logging.getLogger(__name__)
+    logger.debug(f'{args}')
+    return subprocess.run(args, **kwargs)
+
+
+def git_clone_repo(repo_name: str, dir: Path, git_path: str, logger: logging.Logger):
+    if not dir.exists():
+        run_command(
+            "git", "clone", "--depth", "1",
+            "-c", "advice.detachedHead=false",
+            "-c", "feature.manyFiles=true",
+            git_path, dir
+            , check=True, logger=logger)
+        logger.debug(f'Cloned repository {repo_name}')
+    else:
+        logger.debug(f'Repository {repo_name} already cloned.')
diff --git a/pyproject.toml b/pyproject.toml
index b6679ca1f6946fde61d1ec5d288a8b85ec92d7a3..abcbe05d6637b2832ab0c7b9878516380e0087b2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -3,7 +3,7 @@ requires = ["setuptools>=64", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [project]
-name = "dedal"
+name = "esd-tools"
 version = "0.1.0"
 authors = [
     {name = "Eric Müller", email = "mueller@kip.uni-heidelberg.de"},
@@ -22,4 +22,4 @@ dependencies = [
 ]
 
 [tool.setuptools.data-files]
-"dedal" = ["dedal/logger/logging.conf"]
\ No newline at end of file
+"esd-tools" = ["esd/logger/logging.conf"]
\ No newline at end of file