diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b8ce57f418905a3890762c7bdcf2ac14310d91e3..4512455ab6e22fd3aa693cfbf98ad27b8b735d9e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,20 +1,38 @@
 stages:
   - build
+  - test
 
 variables:
-  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/esd/tmp:latest
+  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/dedal/tmp:latest
 
-build-spack-env-on-runner:
+build-wheel:
   stage: build
   tags:
-    - esd_image
-  image: $BUILD_ENV_DOCKER_IMAGE
+    - docker-runner
+  image: python:latest
+  before_script:
+    - python -m pip install --upgrade pip setuptools wheel build
   script:
-    - /usr/sbin/sysctl user.max_user_namespaces
-    - /usr/sbin/sysctl kernel.unprivileged_userns_clone
-    #- buildah build --isolation=chroot --runtime=crun --network=host --storage-opt="network.network_backend=cni" --storage-opt="overlay.mount_program=/usr/bin/fuse-overlayfs" -f Dockerfile .
-    #- export APPTAINER_VERSION="1.3.6"
-    #- |
-    #  mkdir -p apptainer-install/
-    #  curl -s https://raw.githubusercontent.com/apptainer/apptainer/main/tools/install-unprivileged.sh | bash -s - apptainer-install/
-    - apptainer version
+    - python -m build --sdist --wheel
+  artifacts:
+    paths:
+      - dist/*.whl
+      - dist/*.tar.gz
+    expire_in: 1 week
+
+
+testing:
+  stage: test
+  tags:
+    - docker-runner
+  image: python:latest
+  script:
+    - pip install -e .
+    - pytest ./dedal/tests/ --junitxml=test-results.xml
+  artifacts:
+    when: always
+    reports:
+      junit: test-results.xml
+    paths:
+      - test-results.xml
+    expire_in: 1 week
\ No newline at end of file
diff --git a/dedal/__init__.py b/dedal/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/build_cache/BuildCacheManager.py b/dedal/build_cache/BuildCacheManager.py
new file mode 100644
index 0000000000000000000000000000000000000000..2da39e252c8ebc3e0b9aa6d6a5612d9f1f4ee02e
--- /dev/null
+++ b/dedal/build_cache/BuildCacheManager.py
@@ -0,0 +1,113 @@
+import os
+import oras.client
+from pathlib import Path
+
+from dedal.build_cache.BuildCacheManagerInterface import BuildCacheManagerInterface
+from dedal.logger.logger_builder import get_logger
+from dedal.utils.utils import clean_up
+
+
+class BuildCacheManager(BuildCacheManagerInterface):
+    """
+        This class aims to manage the push/pull/delete of build cache files
+    """
+
+    def __init__(self, auth_backend='basic', insecure=False):
+        self.logger = get_logger(__name__, BuildCacheManager.__name__)
+        self.home_path = Path(os.environ.get("HOME_PATH", os.getcwd()))
+        self.registry_project = os.environ.get("REGISTRY_PROJECT")
+
+        self._registry_username = str(os.environ.get("REGISTRY_USERNAME"))
+        self._registry_password = str(os.environ.get("REGISTRY_PASSWORD"))
+
+        self.registry_host = str(os.environ.get("REGISTRY_HOST"))
+        # Initialize an OrasClient instance.
+        # This method utilizes the OCI Registry for container image and artifact management.
+        # Refer to the official OCI Registry documentation for detailed information on the available authentication methods.
+        # Supported authentication types may include basic authentication (username/password) and token-based authentication.
+        self.client = oras.client.OrasClient(hostname=self.registry_host, auth_backend=auth_backend, insecure=insecure)
+        self.client.login(username=self._registry_username, password=self._registry_password)
+        self.oci_registry_path = f'{self.registry_host}/{self.registry_project}/cache'
+
+    def upload(self, out_dir: Path):
+        """
+            This method pushes all the files from the build cache folder into the OCI Registry
+        """
+        build_cache_path = self.home_path / out_dir
+        # build cache folder must exist before pushing all the artifacts
+        if not build_cache_path.exists():
+            self.logger.error(f"Path {build_cache_path} not found.")
+
+        for sub_path in build_cache_path.rglob("*"):
+            if sub_path.is_file():
+                rel_path = str(sub_path.relative_to(build_cache_path)).replace(str(sub_path.name), "")
+                target = f"{self.registry_host}/{self.registry_project}/cache:{str(sub_path.name)}"
+                try:
+                    self.logger.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...")
+                    self.client.push(
+                        files=[str(sub_path)],
+                        target=target,
+                        # store the relative path in the manifest so the directory layout can be rebuilt on pull
+                        manifest_annotations={"path": rel_path},
+                        disable_path_validation=True,
+                    )
+                    self.logger.info(f"Successfully pushed {sub_path.name}")
+                except Exception as e:
+                    self.logger.error(
+                        f"An error occurred while pushing: {e}")
+        # TODO: discuss how to delete the build cache after it has been pushed to the OCI Registry
+        # clean_up([str(build_cache_path)], self.logger)
+
+    def list_tags(self):
+        """
+            This method retrieves all tags from an OCI Registry
+        """
+        try:
+            return self.client.get_tags(self.oci_registry_path)
+        except Exception as e:
+            self.logger.error(f"Failed to list tags: {e}")
+        return None
+
+    def download(self, in_dir: Path):
+        """
+            This method pulls all the files from the OCI Registry into the build cache folder
+        """
+        build_cache_path = self.home_path / in_dir
+        # create the buildcache dir if it does not exist
+        os.makedirs(build_cache_path, exist_ok=True)
+        tags = self.list_tags()
+        if tags is not None:
+            for tag in tags:
+                ref = f"{self.registry_host}/{self.registry_project}/cache:{tag}"
+                # reconstruct the relative path of each artifact by getting it from the manifest
+                cache_path = \
+                    self.client.get_manifest(f'{self.registry_host}/{self.registry_project}/cache:{tag}')[
+                        'annotations'][
+                        'path']
+                try:
+                    self.client.pull(
+                        ref,
+                        # missing directories in the output path are created automatically by the OrasClient pull method
+                        outdir=str(build_cache_path / cache_path),
+                        overwrite=True
+                    )
+                    self.logger.info(f"Successfully pulled artifact {tag}.")
+                except Exception as e:
+                    self.logger.error(
+                        f"Failed to pull artifact {tag} : {e}")
+
+    def delete(self):
+        """
+            Deletes all artifacts from an OCI Registry based on their tags.
+            This method removes artifacts identified by their tags in the specified OCI Registry.
+            It requires appropriate permissions to delete artifacts from the registry.
+            If the registry or user does not have the necessary delete permissions, the operation might fail.
+        """
+        tags = self.list_tags()
+        if tags is not None:
+            try:
+                self.client.delete_tags(self.oci_registry_path, tags)
+                self.logger.info(f"Successfully deleted all artifacts form OCI registry.")
+            except RuntimeError as e:
+                self.logger.error(
+                    f"Failed to delete artifacts: {e}")
diff --git a/dedal/build_cache/BuildCacheManagerInterface.py b/dedal/build_cache/BuildCacheManagerInterface.py
new file mode 100644
index 0000000000000000000000000000000000000000..3016590bac0ac81aaef706dbdbf46b7a1598a3e6
--- /dev/null
+++ b/dedal/build_cache/BuildCacheManagerInterface.py
@@ -0,0 +1,17 @@
+from abc import ABC, abstractmethod
+from pathlib import Path
+
+
+class BuildCacheManagerInterface(ABC):
+
+    @abstractmethod
+    def upload(self, out_dir: Path):
+        pass
+
+    @abstractmethod
+    def download(self, in_dir: Path):
+        pass
+
+    @abstractmethod
+    def delete(self):
+        pass
diff --git a/dedal/build_cache/__init__.py b/dedal/build_cache/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/cli/__init__.py b/dedal/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/fetch_cached_buildresults.py b/dedal/cli/fetch_cached_buildresults.py
similarity index 100%
rename from fetch_cached_buildresults.py
rename to dedal/cli/fetch_cached_buildresults.py
diff --git a/fetch_cached_sources.py b/dedal/fetch_cached_sources.py
similarity index 100%
rename from fetch_cached_sources.py
rename to dedal/fetch_cached_sources.py
diff --git a/dedal/logger/__init__.py b/dedal/logger/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/logger/logger_builder.py b/dedal/logger/logger_builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..c15a2f0eda2ff63c39375f690042b2685c4ef659
--- /dev/null
+++ b/dedal/logger/logger_builder.py
@@ -0,0 +1,51 @@
+import os
+import inspect
+import weakref
+import logging
+import logging.config
+
+
+class LoggerBuilder(object):
+    """
+    Class taking care of uniform Python logger initialization.
+    It uses the Python native logging package.
+    Its purpose is just to offer a common mechanism for initializing all modules in a package.
+    """
+    _instance = None
+
+    def __new__(cls, *args, **kwargs):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __init__(self, config_file_name='logging.conf'):
+        """
+        Prepare Python logger based on a configuration file.
+        :param config_file_name: name of the logging configuration file, relative to the current package
+        """
+        current_folder = os.path.dirname(inspect.getfile(self.__class__))
+        config_file_path = os.path.join(current_folder, config_file_name)
+        logging.config.fileConfig(config_file_path, disable_existing_loggers=False)
+        self._loggers = weakref.WeakValueDictionary()
+
+    def build_logger(self, parent_module, parent_class):
+        """
+        Build a logger instance and return it
+        """
+        logger_key = f'{parent_module}.{parent_class}' if parent_class else parent_module
+        self._loggers[logger_key] = logger = logging.getLogger(logger_key)
+        return logger
+
+    def set_loggers_level(self, level):
+        for logger in self._loggers.values():
+            logger.setLevel(level)
+
+
+def get_logger(parent_module='', parent_class=None):
+    """
+    Function to retrieve a new Python logger instance for current module.
+
+    :param parent_module: module name for which to create logger.
+    :param parent_class: class name for which to create logger.
+    """
+    return LoggerBuilder().build_logger(parent_module, parent_class)
diff --git a/dedal/logger/logging.conf b/dedal/logger/logging.conf
new file mode 100644
index 0000000000000000000000000000000000000000..fd24c026b5563ee757fca411727b0e31adac050c
--- /dev/null
+++ b/dedal/logger/logging.conf
@@ -0,0 +1,55 @@
+############################################
+## Dedal - logging configuration.   ##
+############################################
+[loggers]
+keys=root, dedal, oras
+
+[handlers]
+keys=consoleHandler, fileHandler
+
+[formatters]
+keys=simpleFormatter
+
+[logger_root]
+level=WARNING
+handlers=consoleHandler, fileHandler
+propagate=0
+
+############################################
+## dedal specific logging            ##
+############################################
+[logger_dedal]
+level=DEBUG
+handlers=consoleHandler, fileHandler
+qualname=dedal
+propagate=0
+
+[logger_oras]
+level=ERROR
+handlers=consoleHandler
+qualname=oras
+propagate=0
+
+############################################
+## Handlers                               ##
+############################################
+
+[handler_consoleHandler]
+class=StreamHandler
+level=DEBUG
+formatter=simpleFormatter
+args=(sys.stdout,)
+
+[handler_fileHandler]
+class=handlers.TimedRotatingFileHandler
+level=INFO
+formatter=simpleFormatter
+args=('.dedal.log', 'midnight', 1, 30, None, False, False)
+
+############################################
+## Formatters                             ##
+############################################
+
+[formatter_simpleFormatter]
+format=%(asctime)s - %(levelname)s - %(name)s::%(funcName)s - %(message)s
+datefmt = %d-%m-%Y %I:%M:%S
\ No newline at end of file
diff --git a/specfile_dag_hash.py b/dedal/specfile_dag_hash.py
similarity index 96%
rename from specfile_dag_hash.py
rename to dedal/specfile_dag_hash.py
index 6e001b84accd5b4cd7c11a79842b7d88df306ec8..e44e1c6296c2499580f286d1b2ec7e753243f53a 100644
--- a/specfile_dag_hash.py
+++ b/dedal/specfile_dag_hash.py
@@ -3,7 +3,7 @@ from collections.abc import Iterable
 import pathlib
 import ruamel.yaml as yaml
 import spack
-import spack.binary_distribution as bindist
+
 
 parser = argparse.ArgumentParser(
         prog='specfile_dag_hash.py',
diff --git a/specfile_storage_path_build.py b/dedal/specfile_storage_path_build.py
similarity index 100%
rename from specfile_storage_path_build.py
rename to dedal/specfile_storage_path_build.py
diff --git a/specfile_storage_path_source.py b/dedal/specfile_storage_path_source.py
similarity index 100%
rename from specfile_storage_path_source.py
rename to dedal/specfile_storage_path_source.py
diff --git a/dedal/tests/__init__.py b/dedal/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/tests/utils_test.py b/dedal/tests/utils_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..14795726ce81896eb03d9862a9c096d78123815c
--- /dev/null
+++ b/dedal/tests/utils_test.py
@@ -0,0 +1,63 @@
+import pytest
+from pathlib import Path
+
+from dedal.utils.utils import clean_up
+
+
+@pytest.fixture
+def temp_directories(tmp_path):
+    """
+    Create temporary directories with files and subdirectories for testing.
+    """
+    test_dirs = []
+
+    for i in range(3):
+        dir_path = tmp_path / f"test_dir_{i}"
+        dir_path.mkdir()
+        test_dirs.append(str(dir_path))
+
+        # Add a file to the directory
+        file_path = dir_path / f"file_{i}.txt"
+        file_path.write_text(f"This is a test file in {dir_path}")
+
+        # Add a subdirectory with a file
+        sub_dir = dir_path / f"subdir_{i}"
+        sub_dir.mkdir()
+        sub_file = sub_dir / f"sub_file_{i}.txt"
+        sub_file.write_text(f"This is a sub file in {sub_dir}")
+
+    return test_dirs
+
+
+def test_clean_up(temp_directories, mocker):
+    """
+    Test the clean_up function to ensure directories and contents are removed.
+    """
+    # Mock the logger using pytest-mock's mocker fixture
+    mock_logger = mocker.MagicMock()
+
+    # Ensure directories exist before calling clean_up
+    for dir_path in temp_directories:
+        assert Path(dir_path).exists()
+
+    clean_up(temp_directories, mock_logger)
+
+    for dir_path in temp_directories:
+        assert not Path(dir_path).exists()
+
+    for dir_path in temp_directories:
+        mock_logger.info.assert_any_call(f"Removing {Path(dir_path).resolve()}")
+
+
+def test_clean_up_nonexistent_dirs(mocker):
+    """
+    Test the clean_up function with nonexistent directories.
+    """
+    # Mock the logger using pytest-mock's mocker fixture
+    mock_logger = mocker.MagicMock()
+    nonexistent_dirs = ["nonexistent_dir_1", "nonexistent_dir_2"]
+
+    clean_up(nonexistent_dirs, mock_logger)
+
+    for dir_path in nonexistent_dirs:
+        mock_logger.info.assert_any_call(f"{Path(dir_path).resolve()} does not exist")
diff --git a/update_cached_buildresults.py b/dedal/update_cached_buildresults.py
similarity index 99%
rename from update_cached_buildresults.py
rename to dedal/update_cached_buildresults.py
index caacf86e46efec1ddb0c8b8c044f4ff8506eace1..58af242b3bcfb03f153a0f4fa1fa22ff2c66d371 100644
--- a/update_cached_buildresults.py
+++ b/dedal/update_cached_buildresults.py
@@ -1,5 +1,4 @@
 import argparse
-import glob
 import os
 import pathlib
 import subprocess
diff --git a/update_cached_sources.py b/dedal/update_cached_sources.py
similarity index 100%
rename from update_cached_sources.py
rename to dedal/update_cached_sources.py
diff --git a/dedal/utils/__init__.py b/dedal/utils/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/dedal/utils/utils.py b/dedal/utils/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..811d258e7e5856f4b666bc3196996f3b24571112
--- /dev/null
+++ b/dedal/utils/utils.py
@@ -0,0 +1,20 @@
+import shutil
+from pathlib import Path
+
+
+def clean_up(dirs: list[str], logging, ignore_errors=True):
+    """
+        All the folders from the list dirs are removed with all the content in them
+    """
+    for cleanup_dir in dirs:
+        cleanup_dir = Path(cleanup_dir).resolve()
+        if cleanup_dir.exists():
+            logging.info(f"Removing {cleanup_dir}")
+            try:
+                shutil.rmtree(Path(cleanup_dir))
+            except OSError as e:
+                logging.error(f"Failed to remove {cleanup_dir}: {e}")
+                if not ignore_errors:
+                    raise e
+        else:
+            logging.info(f"{cleanup_dir} does not exist")
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..757f370c12a7602c9b7011c98c8aae416087ca24
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,24 @@
+[build-system]
+requires = ["setuptools", "setuptools-scm"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "dedal"
+authors = [
+    {name = "Eric Müller", email = "mueller@kip.uni-heidelberg.de"},
+    {name = "Adrian Ciu", email = "adrian.ciu@codemart.ro"},
+]
+description = "This package provides all the necessary tools to create an Ebrains Software Distribution environment"
+version = "0.1.0"
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = [
+    "oras",
+    "spack",
+    "ruamel.yaml",
+    "pytest",
+    "pytest-mock",
+]
+
+[tool.setuptools.data-files]
+"dedal" = ["dedal/logger/logging.conf"]
\ No newline at end of file
diff --git a/wscript b/wscript
deleted file mode 100755
index 7db8e31e5eaf3d25509eccfa89b7989ee004f682..0000000000000000000000000000000000000000
--- a/wscript
+++ /dev/null
@@ -1,21 +0,0 @@
-def depends(ctx):
-    ctx("spack", branch="visionary")
-
-def options(opt):
-    pass
-
-def configure(cfg):
-    pass
-
-def build(bld):
-    # install /bin
-    for bin in bld.path.ant_glob('bin/**/*'):
-        bld.install_as('${PREFIX}/%s' % bin.path_from(bld.path), bin)
-
-    # install /lib
-    for lib in bld.path.ant_glob('lib/**/*'):
-        bld.install_as('${PREFIX}/%s' % lib.path_from(bld.path), lib)
-
-    # install /share
-    for share in bld.path.ant_glob('share/**/*'):
-        bld.install_as('${PREFIX}/%s' % share.path_from(bld.path), share)