diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7dea24a3e3ed1baa20658a103d8cd17c9ab38ba4..0ce029072928ad5d86de3b9d502f139d353a1d8e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,37 +1,42 @@
 stages:
-  - build
   - test
+  - build
 
 variables:
   BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/esd/tmp:latest
 
-build-spack-env-on-runner:
+build-wheel:
   stage: build
   tags:
-    - docker-runner # esd_image
-  image: $BUILD_ENV_DOCKER_IMAGE
+    - docker-runner
+  image: python:latest
+  before_script:
+    - python -m pip install --upgrade pip setuptools wheel build
   script:
-    - /usr/sbin/sysctl user.max_user_namespaces
-    - /usr/sbin/sysctl kernel.unprivileged_userns_clone
-    #- buildah build --isolation=chroot --runtime=crun --network=host --storage-opt="network.network_backend=cni" --storage-opt="overlay.mount_program=/usr/bin/fuse-overlayfs" -f Dockerfile .
-    #- export APPTAINER_VERSION="1.3.6"
-    #- |
-    #  mkdir -p apptainer-install/
-    #  curl -s https://raw.githubusercontent.com/apptainer/apptainer/main/tools/install-unprivileged.sh | bash -s - apptainer-install/
-    - apptainer version
+    - python -m build --sdist --wheel
+  artifacts:
+    paths:
+      - dist/*.whl
+      - dist/*.tar.gz
+    expire_in: 1 week
+
-testing:
+testing-pytest:
   stage: test
   tags:
     - docker-runner
-  image: python:latest # $BUILD_ENV_DOCKER_IMAGE
+  image: ubuntu:22.04
   script:
-    - pip install -e .
-    - pytest ./esd/tests/ --junitxml=test-results.xml
+    - chmod +x esd/utils/bootstrap.sh
+    - ./esd/utils/bootstrap.sh
+    - pip install .
+    - pytest ./esd/tests/ -s --junitxml=test-results.xml
   artifacts:
     when: always
     reports:
       junit: test-results.xml
     paths:
       - test-results.xml
-    expire_in: 1 week
\ No newline at end of file
+      - .esd.log
+    expire_in: 1 week
+
diff --git a/esd/build_cache/manage_build_cache.py b/esd/build_cache/BuildCacheManager.py
similarity index 66%
rename from esd/build_cache/manage_build_cache.py
rename to esd/build_cache/BuildCacheManager.py
index bc0054d6cd9fb0d66d6f315fd2251f3f0f61cd75..e1bd6824321c5a2b483800bc4b39824aa4715493 100644
--- a/esd/build_cache/manage_build_cache.py
+++ b/esd/build_cache/BuildCacheManager.py
@@ -2,18 +2,19 @@ import os
 import oras.client
 from pathlib import Path
 
-from esd.logger.logger_config import LoggerConfig
+from esd.build_cache.BuildCacheManagerInterface import BuildCacheManagerInterface
+from esd.logger.logger_builder import get_logger
 from esd.utils.utils import clean_up
 
 
-class BuildCacheManager:
+class BuildCacheManager(BuildCacheManagerInterface):
     """
     This class aims to manage the push/pull/delete of build cache files
     """
 
-    def __init__(self, auth_backend='basic', log_path='./', insecure=False):
+    def __init__(self, auth_backend='basic', insecure=False):
+        self.logger = get_logger(__name__, BuildCacheManager.__name__)
         self.home_path = Path(os.environ.get("HOME_PATH", os.getcwd()))
-        self.log_file = Path(log_path) / "log_oras.txt"
         self.registry_project = os.environ.get("REGISTRY_PROJECT")
 
         self._registry_username = str(os.environ.get("REGISTRY_USERNAME"))
@@ -27,24 +28,22 @@ class BuildCacheManager:
         self.client = oras.client.OrasClient(hostname=self.registry_host, auth_backend=auth_backend, insecure=insecure)
         self.client.login(username=self._registry_username, password=self._registry_password)
         self.oci_registry_path = f'{self.registry_host}/{self.registry_project}/cache'
-        self.LOGGER = LoggerConfig(self.log_file).get_logger()
 
-    def oci_registry_upload_build_cache(self, cache_dir: Path):
+    def upload(self, out_dir: Path):
         """
         This method pushed all the files from the build cache folder into the OCI Registry
         """
-        build_cache_path = self.home_path / cache_dir
+        build_cache_path = self.home_path / out_dir
         # build cache folder must exist before pushing all the artifacts
         if not build_cache_path.exists():
-            raise FileNotFoundError(
-                f"BuildCacheManager::oci_registry_upload_build_cache::Path {build_cache_path} not found.")
+            self.logger.error(f"Path {build_cache_path} not found.")
 
         for sub_path in build_cache_path.rglob("*"):
             if sub_path.is_file():
-                rel_path = str(sub_path.relative_to(build_cache_path)).replace(str(sub_path.name), "")
-                target = f"{self.registry_host}/{self.registry_project}/cache:{str(sub_path.name)}"
+                rel_path = str(sub_path.relative_to(build_cache_path)).replace(str(sub_path.env_name), "")
+                target = f"{self.registry_host}/{self.registry_project}/cache:{str(sub_path.env_name)}"
                 try:
-                    self.LOGGER.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...")
+                    self.logger.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...")
                     self.client.push(
                         files=[str(sub_path)],
                         target=target,
@@ -52,31 +51,31 @@ class BuildCacheManager:
                         manifest_annotations={"path": rel_path},
                         disable_path_validation=True,
                     )
-                    self.LOGGER.info(f"Successfully pushed {sub_path.name}")
+                    self.logger.info(f"Successfully pushed {sub_path.env_name}")
                 except Exception as e:
-                    self.LOGGER.error(
-                        f"BuildCacheManager::registry_upload_build_cache::An error occurred while pushing: {e}")
-        # delete the build cache after being pushed to the OCI Registry
-        clean_up([str(build_cache_path)], self.LOGGER)
+                    self.logger.error(
+                        f"An error occurred while pushing: {e}")
+        # todo to be discussed how to delete the build cache after it has been pushed to the OCI Registry
+        # clean_up([str(build_cache_path)], self.logger)
 
-    def oci_registry_get_tags(self):
+    def list_tags(self):
         """
         This method retrieves all tags from an OCI Registry
         """
         try:
             return self.client.get_tags(self.oci_registry_path)
         except Exception as e:
-            self.LOGGER.error(f"BuildCacheManager::oci_registry_get_tags::Failed to list tags: {e}")
+            self.logger.error(f"Failed to list tags: {e}")
             return None
 
-    def oci_registry_download_build_cache(self, cache_dir: Path):
+    def download(self, in_dir: Path):
         """
         This method pulls all the files from the OCI Registry into the build cache folder
         """
-        build_cache_path = self.home_path / cache_dir
+        build_cache_path = self.home_path / in_dir
         # create the buildcache dir if it does not exist
         os.makedirs(build_cache_path, exist_ok=True)
-        tags = self.oci_registry_get_tags()
+        tags = self.list_tags()
         if tags is not None:
             for tag in tags:
                 ref = f"{self.registry_host}/{self.registry_project}/cache:{tag}"
@@ -88,27 +87,27 @@ class BuildCacheManager:
                 try:
                     self.client.pull(
                         ref,
-                        # missing dirs to outdir are created automatically by OrasClient pull method
+                        # missing dirs to output dir are created automatically by OrasClient pull method
                        outdir=str(build_cache_path / cache_path),
                         overwrite=True
                     )
-                    self.LOGGER.info(f"Successfully pulled artifact {tag}.")
+                    self.logger.info(f"Successfully pulled artifact {tag}.")
                 except Exception as e:
-                    self.LOGGER.error(
-                        f"BuildCacheManager::registry_download_build_cache::Failed to pull artifact {tag} : {e}")
+                    self.logger.error(
+                        f"Failed to pull artifact {tag}: {e}")
 
-    def oci_registry_delete_build_cache(self):
+    def delete(self):
         """
         Deletes all artifacts from an OCI Registry based on their tags.
         This method removes artifacts identified by their tags in the specified OCI Registry.
         It requires appropriate permissions to delete artifacts from the registry.
        If the registry or user does not have the necessary delete permissions, the operation might fail.
         """
-        tags = self.oci_registry_get_tags()
+        tags = self.list_tags()
         if tags is not None:
             try:
                 self.client.delete_tags(self.oci_registry_path, tags)
-                self.LOGGER.info(f"Successfully deleted all artifacts form OCI registry.")
+                self.logger.info(f"Successfully deleted all artifacts from the OCI registry.")
             except RuntimeError as e:
-                self.LOGGER.error(
-                    f"BuildCacheManager::registry_delete_build_cache::Failed to delete artifacts: {e}")
+                self.logger.error(
+                    f"Failed to delete artifacts: {e}")
diff --git a/esd/build_cache/BuildCacheManagerInterface.py b/esd/build_cache/BuildCacheManagerInterface.py
new file mode 100644
index 0000000000000000000000000000000000000000..3016590bac0ac81aaef706dbdbf46b7a1598a3e6
--- /dev/null
+++ b/esd/build_cache/BuildCacheManagerInterface.py
@@ -0,0 +1,17 @@
+from abc import ABC, abstractmethod
+from pathlib import Path
+
+
+class BuildCacheManagerInterface(ABC):
+
+    @abstractmethod
+    def upload(self, out_dir: Path):
+        pass
+
+    @abstractmethod
+    def download(self, in_dir: Path):
+        pass
+
+    @abstractmethod
+    def delete(self):
+        pass
diff --git a/esd/error_handling/exceptions.py b/esd/error_handling/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..2acca54ea6decc66da1bd0cc827ea5b890b2780d
--- /dev/null
+++ b/esd/error_handling/exceptions.py
@@ -0,0 +1,13 @@
+class SpackException(Exception):
+
+    def __init__(self, message):
+        super().__init__(message)
+        self.message = str(message)
+
+    def __str__(self):
+        return self.message
+
+class BashCommandException(SpackException):
+    """
+    To be raised when a bash command has failed.
+    """
diff --git a/esd/logger/logger_builder.py b/esd/logger/logger_builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..c15a2f0eda2ff63c39375f690042b2685c4ef659
--- /dev/null
+++ b/esd/logger/logger_builder.py
@@ -0,0 +1,51 @@
+import os
+import inspect
+import weakref
+import logging
+import logging.config
+
+
+class LoggerBuilder(object):
+    """
+    Class taking care of uniform Python logger initialization.
+    It uses the Python native logging package.
+    Its purpose is just to offer a common mechanism for initializing all modules in a package.
+    """
+    _instance = None
+
+    def __new__(cls, *args, **kwargs):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __init__(self, config_file_name='logging.conf'):
+        """
+        Prepare the Python logger based on a configuration file.
+        :param config_file_name: name of the logging configuration file, relative to the current package
+        """
+        current_folder = os.path.dirname(inspect.getfile(self.__class__))
+        config_file_path = os.path.join(current_folder, config_file_name)
+        logging.config.fileConfig(config_file_path, disable_existing_loggers=False)
+        self._loggers = weakref.WeakValueDictionary()
+
+    def build_logger(self, parent_module, parent_class):
+        """
+        Build a logger instance and return it.
+        """
+        logger_key = f'{parent_module}.{parent_class}' if parent_class else parent_module
+        self._loggers[logger_key] = logger = logging.getLogger(logger_key)
+        return logger
+
+    def set_loggers_level(self, level):
+        for logger in self._loggers.values():
+            logger.setLevel(level)
+
+
+def get_logger(parent_module='', parent_class=None):
+    """
+    Function to retrieve a new Python logger instance for the current module.
+
+    :param parent_module: module name for which to create the logger.
+    :param parent_class: class name for which to create the logger.
+    """
+    return LoggerBuilder().build_logger(parent_module, parent_class)
diff --git a/esd/logger/logger_config.py b/esd/logger/logger_config.py
deleted file mode 100644
index 3ca3b000fd171dde8ceafdc6731dfb02009e845c..0000000000000000000000000000000000000000
--- a/esd/logger/logger_config.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import logging
-
-
-class LoggerConfig:
-    """
-    This class sets up logging with a file handler
-    and a stream handler, ensuring consistent
-    and formatted log messages.
-    """
-    def __init__(self, log_file):
-        self.log_file = log_file
-        self._configure_logger()
-
-    def _configure_logger(self):
-        formatter = logging.Formatter(
-            fmt='%(asctime)s - %(levelname)s - %(message)s',
-            datefmt='%Y-%m-%d %H:%M:%S'
-        )
-
-        file_handler = logging.FileHandler(self.log_file)
-        file_handler.setFormatter(formatter)
-
-        stream_handler = logging.StreamHandler()
-        stream_handler.setFormatter(formatter)
-
-        self.logger = logging.getLogger(__name__)
-        self.logger.setLevel(logging.DEBUG)
-
-        self.logger.addHandler(file_handler)
-        self.logger.addHandler(stream_handler)
-
-    def get_logger(self):
-        return self.logger
diff --git a/esd/logger/logging.conf b/esd/logger/logging.conf
new file mode 100644
index 0000000000000000000000000000000000000000..d95ba87c9d23a99f1cf98c24f961ddb9ca0528c8
--- /dev/null
+++ b/esd/logger/logging.conf
@@ -0,0 +1,55 @@
+############################################
+##      ESD - logging configuration.      ##
+############################################
+[loggers]
+keys=root, esd, oras
+
+[handlers]
+keys=consoleHandler, fileHandler
+
+[formatters]
+keys=simpleFormatter
+
+[logger_root]
+level=WARNING
+handlers=consoleHandler, fileHandler
+propagate=0
+
+############################################
+##          esd specific logging          ##
+############################################
+[logger_esd]
+level=DEBUG
+handlers=consoleHandler, fileHandler
+qualname=esd
+propagate=0
+
+[logger_oras]
+level=ERROR
+handlers=consoleHandler
+qualname=oras
+propagate=0
+
+############################################
+##                Handlers                ##
+############################################
+
+[handler_consoleHandler]
+class=StreamHandler
+level=DEBUG
+formatter=simpleFormatter
+args=(sys.stdout,)
+
+[handler_fileHandler]
+class=handlers.TimedRotatingFileHandler
+level=INFO
+formatter=simpleFormatter
+args=('.esd.log', 'midnight', 1, 30, None, False, False)
+
+############################################
+##               Formatters               ##
+############################################
+
+[formatter_simpleFormatter]
+format=%(asctime)s - %(levelname)s - %(name)s::%(funcName)s - %(message)s
+datefmt = %d-%m-%Y %I:%M:%S
\ No newline at end of file
diff --git a/esd/model/SpackModel.py b/esd/model/SpackModel.py
new file mode 100644
index 0000000000000000000000000000000000000000..4b065dba06f558ce21a0354257d77aa595bcaeb1
--- /dev/null
+++ b/esd/model/SpackModel.py
@@ -0,0 +1,12 @@
+from pathlib import Path
+
+
+class SpackModel:
+    """
+    Provides details about the spack environment.
+    """
+
+    def __init__(self, env_name: str, path: Path, git_path: str = None):
+        self.env_name = env_name
+        self.path = path
+        self.git_path = git_path
diff --git a/esd/model/__init__.py b/esd/model/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/spack_manager/SpackManager.py b/esd/spack_manager/SpackManager.py
new file mode 100644
index 0000000000000000000000000000000000000000..cdf4a1d36d249017636798b05349a88b5e67f97e
--- /dev/null
+++ b/esd/spack_manager/SpackManager.py
@@ -0,0 +1,180 @@
+import os
+import subprocess
+from abc import ABC, abstractmethod
+from pathlib import Path
+
+from esd.error_handling.exceptions import BashCommandException
+from esd.logger.logger_builder import get_logger
+from esd.model.SpackModel import SpackModel
+from esd.utils.utils import run_command, git_clone_repo
+
+
+class SpackManager(ABC):
+    """
+    This class provides the methods necessary to install Spack, set up an environment,
+    and concretize and install packages.
+    A factory design pattern is used because there are two cases: creating an environment
+    from scratch or creating an environment from the build cache.
+
+    Attributes:
+    -----------
+    env : SpackModel
+        spack environment details
+    repos : list[SpackModel]
+        spack package repositories to add to the environment
+    upstream_instance : str
+        path to the Spack instance to use as upstream (optional)
+    """
+
+    def __init__(self, env: SpackModel = None, repos=None,
+                 upstream_instance=None, system_name: str = None, logger=get_logger(__name__)):
+        self.repos = list() if repos is None else repos
+        self.env = env
+        self.upstream_instance = upstream_instance
+        self.install_dir = Path(os.environ.get("INSTALLATION_ROOT", os.getcwd()))
+        self.install_dir.mkdir(parents=True, exist_ok=True)
+        self.spack_dir = self.install_dir / "spack"
+        self.spack_setup_script = self.spack_dir / "share" / "spack" / "setup-env.sh"
+        self.logger = logger
+        self.system_name = system_name
+
+    @abstractmethod
+    def concretize_spack_env(self, force=True):
+        pass
+
+    @abstractmethod
+    def install_spack_packages(self, jobs=3, verbose=False, debug=False):
+        pass
+
+    def create_fetch_spack_environment(self):
+        env_dir = self.install_dir / self.env.path / self.env.env_name
+        if self.env.git_path:
+            git_clone_repo(self.env.env_name, env_dir, self.env.git_path, logger=self.logger)
+        else:
+            try:
+                os.makedirs(self.env.path / self.env.env_name, exist_ok=True)
+                run_command("bash", "-c",
+                            f'source {self.spack_setup_script} && spack env create -d {self.env.path}/{self.env.env_name}',
+                            check=True, logger=self.logger)
+                self.logger.debug(f"Created {self.env.env_name} spack environment")
+            except subprocess.CalledProcessError as e:
+                self.logger.error(f"Failed to create {self.env.env_name} spack environment")
+                raise BashCommandException(f"Failed to create {self.env.env_name} spack environment: {e}")
+
+    def setup_spack_env(self):
+        """
+        This method prepares a spack environment by fetching or creating the spack environment and adding the necessary repositories
+        """
+        bashrc_path = os.path.expanduser("~/.bashrc")
+        if self.system_name:
+            with open(bashrc_path, "a") as bashrc:
+                bashrc.write(f'export SYSTEMNAME="{self.system_name}"\n')
+            os.environ['SYSTEMNAME'] = self.system_name
+        if self.spack_dir.exists() and self.spack_dir.is_dir():
+            with open(bashrc_path, "a") as bashrc:
+                bashrc.write(f'export SPACK_USER_CACHE_PATH="{str(self.spack_dir / ".spack")}"\n')
+                bashrc.write(f'export SPACK_USER_CONFIG_PATH="{str(self.spack_dir / ".spack")}"\n')
+            self.logger.debug('Added env variables SPACK_USER_CACHE_PATH and SPACK_USER_CONFIG_PATH')
+        else:
+            self.logger.error(f'Invalid installation path: {self.spack_dir}')
+        # Restart the bash after adding environment variables
+        self.create_fetch_spack_environment()
+        if self.install_dir.exists():
+            for repo in self.repos:
+                repo_dir = self.install_dir / repo.path / repo.env_name
+                git_clone_repo(repo.env_name, repo_dir, repo.git_path, logger=self.logger)
+                if not self.spack_repo_exists(repo.env_name):
+                    self.add_spack_repo(repo.path, repo.env_name)
+                    self.logger.debug(f'Added spack repository {repo.env_name}')
+                else:
+                    self.logger.debug(f'Spack repository {repo.env_name} already added')
+
+    def spack_repo_exists(self, repo_name: str) -> bool:
+        """Check if the given Spack repository exists."""
+        if self.env is None:
+            try:
+                result = run_command("bash", "-c",
+                                     f'source {self.spack_setup_script} && spack repo list',
+                                     check=True,
+                                     capture_output=True, text=True, logger=self.logger)
+            except subprocess.CalledProcessError:
+                return False
+        else:
+            try:
+                result = run_command("bash", "-c",
+                                     f'source {self.spack_setup_script} && spack env activate -p {self.env.path}/{self.env.env_name} && spack repo list',
+                                     check=True,
+                                     capture_output=True, text=True, logger=self.logger)
+            except subprocess.CalledProcessError:
+                return False
+        return any(line.strip().endswith(repo_name) for line in result.stdout.splitlines())
+
+    def add_spack_repo(self, repo_path: Path, repo_name: str):
+        """Add the Spack repository if it does not exist."""
+        try:
+            run_command("bash", "-c",
+                        f'source {self.spack_setup_script} && spack env activate -p {self.env.path}/{self.env.env_name} && spack repo add {repo_path}/{repo_name}',
+                        check=True, logger=self.logger)
+            self.logger.debug(f"Added {repo_name} to spack environment {self.env.env_name}")
+        except subprocess.CalledProcessError as e:
+            self.logger.error(f"Failed to add {repo_name} to spack environment {self.env.env_name}")
+            raise BashCommandException(f"Failed to add {repo_name} to spack environment {self.env.env_name}: {e}")
+
+    def get_spack_installed_version(self):
+        try:
+            spack_version = run_command("bash", "-c", f'source {self.spack_setup_script} && spack --version',
+                                        capture_output=True, text=True, check=True,
+                                        logger=self.logger)
+            spack_version = spack_version.stdout.strip().split()[0]
+            self.logger.debug(f"Getting spack version: {spack_version}")
+            return spack_version
+        except subprocess.SubprocessError as e:
+            self.logger.error(f"Error retrieving Spack version: {e}")
+            return None
+
+    def install_spack(self, spack_version="v0.21.1", spack_repo='https://github.com/spack/spack'):
+        try:
+            user = os.getlogin()
+        except OSError:
+            user = None
+
+        self.logger.info(f"Starting to install Spack into {self.spack_dir} from branch {spack_version}")
+        if not self.spack_dir.exists():
+            run_command(
+                "git", "clone", "--depth", "1",
+                "-c", "advice.detachedHead=false",
+                "-c", "feature.manyFiles=true",
+                "--branch", spack_version, spack_repo, self.spack_dir,
+                check=True, logger=self.logger)
+            self.logger.debug("Cloned spack")
+        else:
+            self.logger.debug("Spack already cloned.")
+
+        bashrc_path = os.path.expanduser("~/.bashrc")
+        # ensure the file exists before opening it
+        if not os.path.exists(bashrc_path):
+            open(bashrc_path, "w").close()
+        # add spack setup commands to .bashrc
+        with open(bashrc_path, "a") as bashrc:
+            bashrc.write(f'export PATH="{self.spack_dir}/bin:$PATH"\n')
+            bashrc.write(f"source {self.spack_setup_script}\n")
+        self.logger.info("Added Spack PATH to .bashrc")
+        if user:
+            run_command("chown", "-R", f"{user}:{user}", self.spack_dir, check=True, logger=self.logger)
+        run_command("bash", "-c", f"source {bashrc_path}",
+                    check=True, logger=self.logger)
+        self.logger.info("Spack install completed")
+        # Restart Bash after the installation ends
+        os.system("exec bash")
+
+        # Configure upstream Spack instance if specified
+        if self.upstream_instance:
+            upstreams_yaml_path = os.path.join(self.spack_dir, "etc/spack/defaults/upstreams.yaml")
+            with open(upstreams_yaml_path, "w") as file:
+                file.write(f"""upstreams:
+  upstream-spack-instance:
+    install_tree: {self.upstream_instance}/spack/opt/spack
+""")
+            self.logger.info("Added upstream spack instance")
diff --git a/esd/spack_manager/__init__.py b/esd/spack_manager/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/spack_manager/enums/SpackManagerEnum.py b/esd/spack_manager/enums/SpackManagerEnum.py
new file mode 100644
index 0000000000000000000000000000000000000000..a24358394d19ee1903835f7eafea8f8e8c964fa6
--- /dev/null
+++ b/esd/spack_manager/enums/SpackManagerEnum.py
@@ -0,0 +1,6 @@
+from enum import Enum
+
+
+class SpackManagerEnum(Enum):
+    FROM_SCRATCH = "from_scratch"
+    FROM_BUILDCACHE = "from_buildcache"
diff --git a/esd/spack_manager/enums/__init__.py b/esd/spack_manager/enums/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/spack_manager/factory/SpackManagerBuildCache.py b/esd/spack_manager/factory/SpackManagerBuildCache.py
new file mode 100644
index 0000000000000000000000000000000000000000..38151c6d0741d16c7a06e64f4a2ee5bc9db2e376
--- /dev/null
+++ b/esd/spack_manager/factory/SpackManagerBuildCache.py
@@ -0,0 +1,19 @@
+from esd.model.SpackModel import SpackModel
+from esd.spack_manager.SpackManager import SpackManager
+from esd.logger.logger_builder import get_logger
+
+
+class SpackManagerBuildCache(SpackManager):
+    def __init__(self, env: SpackModel = None, repos=None,
+                 upstream_instance=None, system_name: str = None):
+        super().__init__(env, repos, upstream_instance, system_name, logger=get_logger(__name__))
+
+    def setup_spack_env(self):
+        super().setup_spack_env()
+        # todo add buildcache to the spack environment
+
+    def concretize_spack_env(self, force=True):
+        pass
+
+    def install_spack_packages(self, jobs=3, verbose=False, debug=False):
+        pass
diff --git a/esd/spack_manager/factory/SpackManagerCreator.py b/esd/spack_manager/factory/SpackManagerCreator.py
new file mode 100644
index 0000000000000000000000000000000000000000..9728467f39c2753c9f12ef959c80def1150f0314
--- /dev/null
+++ b/esd/spack_manager/factory/SpackManagerCreator.py
@@ -0,0 +1,14 @@
+from esd.spack_manager.enums.SpackManagerEnum import SpackManagerEnum
+from esd.spack_manager.factory.SpackManagerBuildCache import SpackManagerBuildCache
+from esd.spack_manager.factory.SpackManagerScratch import SpackManagerScratch
+
+
+class SpackManagerCreator:
+    @staticmethod
+    def get_spack_manager(spack_manager_type: SpackManagerEnum, env=None, repos=None,
+                          upstream_instance=None, system_name=None):
+        if spack_manager_type == SpackManagerEnum.FROM_SCRATCH:
+            return SpackManagerScratch(env, repos, upstream_instance, system_name)
+        elif spack_manager_type == SpackManagerEnum.FROM_BUILDCACHE:
+            return SpackManagerBuildCache(env, repos, upstream_instance, system_name)
+
diff --git a/esd/spack_manager/factory/SpackManagerScratch.py b/esd/spack_manager/factory/SpackManagerScratch.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a79797ce924af10d1eda003fd55cf78661314a0
--- /dev/null
+++ b/esd/spack_manager/factory/SpackManagerScratch.py
@@ -0,0 +1,15 @@
+from esd.model.SpackModel import SpackModel
+from esd.spack_manager.SpackManager import SpackManager
+from esd.logger.logger_builder import get_logger
+
+
+class SpackManagerScratch(SpackManager):
+    def __init__(self, env: SpackModel = None, repos=None,
+                 upstream_instance=None, system_name: str = None):
+        super().__init__(env, repos, upstream_instance, system_name, logger=get_logger(__name__))
+
+    def concretize_spack_env(self, force=True):
+        pass
+
+    def install_spack_packages(self, jobs=3, verbose=False, debug=False):
+        pass
diff --git a/esd/spack_manager/factory/__init__.py b/esd/spack_manager/factory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/esd/specfile_storage_path_source.py b/esd/specfile_storage_path_source.py
index 6e8a8889e809c0d8c5116670e19bd7fbd420904c..4d2ff65888aa6dcce2f2a9526575b85d8966d90d 100644
--- a/esd/specfile_storage_path_source.py
+++ b/esd/specfile_storage_path_source.py
@@ -49,14 +49,14 @@ for rspec in data:
         format_string = "{name}-{version}"
         pretty_name = pkg.spec.format_path(format_string)
-        cosmetic_path = os.path.join(pkg.name, pretty_name)
+        cosmetic_path = os.path.join(pkg.env_name, pretty_name)
         to_be_fetched.add(str(spack.mirror.mirror_archive_paths(pkg.fetcher, cosmetic_path).storage_path))
         for resource in pkg._get_needed_resources():
-            pretty_resource_name = fsys.polite_filename(f"{resource.name}-{pkg.version}")
+            pretty_resource_name = fsys.polite_filename(f"{resource.env_name}-{pkg.version}")
             to_be_fetched.add(str(spack.mirror.mirror_archive_paths(resource.fetcher, pretty_resource_name).storage_path))
         for patch in ss.patches:
             if isinstance(patch, spack.patch.UrlPatch):
-                to_be_fetched.add(str(spack.mirror.mirror_archive_paths(patch.stage.fetcher, patch.stage.name).storage_path))
+                to_be_fetched.add(str(spack.mirror.mirror_archive_paths(patch.stage.fetcher, patch.stage.env_name).storage_path))
 
 
 for elem in to_be_fetched:
     print(elem)
diff --git a/esd/tests/spack_from_scratch_test.py b/esd/tests/spack_from_scratch_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..4981b760312ff39c81fd15b78c66b6bd76999d41
--- /dev/null
+++ b/esd/tests/spack_from_scratch_test.py
@@ -0,0 +1,63 @@
+from pathlib import Path
+
+import pytest
+
+from esd.error_handling.exceptions import BashCommandException
+from esd.model.SpackModel import SpackModel
+from esd.spack_manager.factory.SpackManagerScratch import SpackManagerScratch
+
+
+def test_spack_repo_exists_1():
+    install_dir = Path('./install').resolve()
+    env = SpackModel('ebrains-spack-builds', install_dir)
+    spack_manager = SpackManagerScratch(env=env)
+    assert spack_manager.spack_repo_exists(env.env_name) == False
+
+
+def test_spack_repo_exists_2():
+    spack_manager = SpackManagerScratch()
+    assert spack_manager.spack_repo_exists('ebrains-spack-builds') == False
+
+
+def test_spack_from_scratch_setup_1():
+    install_dir = Path('./install').resolve()
+    env = SpackModel('ebrains-spack-builds', install_dir,
+                     'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git', )
+    spack_manager = SpackManagerScratch(env, [env], system_name='ebrainslab')
+    spack_manager.setup_spack_env()
+    assert spack_manager.spack_repo_exists(env.env_name) == True
+
+
+def test_spack_from_scratch_setup_2():
+    install_dir = Path('./install').resolve()
+    env = SpackModel('ebrains-spack-builds', install_dir,
+                     'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git', )
+    repo = env
+    spack_manager = SpackManagerScratch(env, [repo, repo], system_name='ebrainslab')
+    spack_manager.setup_spack_env()
+    assert spack_manager.spack_repo_exists(env.env_name) == True
+
+
+def test_spack_from_scratch_setup_3():
+    install_dir = Path('./install').resolve()
+    env = SpackModel('new_environment', install_dir, )
+    repo = env
+    spack_manager = SpackManagerScratch(env, [repo, repo], system_name='ebrainslab')
+    with pytest.raises(BashCommandException):
+        spack_manager.setup_spack_env()
+
+
+def test_spack_from_scratch_setup_4():
+    install_dir = Path('./install').resolve()
+    env = SpackModel('new_environment', install_dir, )
+    spack_manager = SpackManagerScratch(env, system_name='ebrainslab')
+    spack_manager.setup_spack_env()
+    assert spack_manager.spack_repo_exists(env.env_name) == True
+
+
+def test_spack_from_scratch_bash_error():
+    env = SpackModel('ebrains-spack-builds', Path(), None)
+    repo = env
+    spack_manager = SpackManagerScratch(env, [repo], system_name='ebrainslab')
+    with pytest.raises(BashCommandException):
+        spack_manager.add_spack_repo(repo.path, repo.env_name)
diff --git a/esd/tests/spack_install_test.py b/esd/tests/spack_install_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..34f683234b064d0f800bfa9376fcf6fec592d7ef
--- /dev/null
+++ b/esd/tests/spack_install_test.py
@@ -0,0 +1,21 @@
+import pytest
+
+from esd.spack_manager.factory.SpackManagerBuildCache import SpackManagerBuildCache
+from esd.spack_manager.factory.SpackManagerScratch import SpackManagerScratch
+
+
+# we need this test to run first so that spack is installed only once
+@pytest.mark.run(order=1)
+def test_spack_install_scratch():
+    spack_manager = SpackManagerScratch()
+    spack_manager.install_spack(spack_version="v0.21.1")
+    installed_spack_version = spack_manager.get_spack_installed_version()
+    required_version = "0.21.1"
+    assert required_version == installed_spack_version
+
+
+def test_spack_install_buildcache():
+    spack_manager = SpackManagerBuildCache()
+    installed_spack_version = spack_manager.get_spack_installed_version()
+    required_version = "0.21.1"
+    assert required_version == installed_spack_version
diff --git a/esd/utils/bootstrap.sh b/esd/utils/bootstrap.sh
new file mode 100644
index 0000000000000000000000000000000000000000..9b7d0131e95a3be9b0f2cfc4dc82492517fb22dc
--- /dev/null
+++ b/esd/utils/bootstrap.sh
@@ -0,0 +1,6 @@
+# Minimal prerequisites for installing the esd_library
+# pip must be installed on the OS
+echo "Bootstrapping..."
+apt update
+apt install -y bzip2 ca-certificates g++ gcc gfortran git gzip lsb-release patch python3 python3-pip tar unzip xz-utils zstd
+python3 -m pip install --upgrade pip setuptools wheel
diff --git a/esd/utils/utils.py b/esd/utils/utils.py
index c1aa4c0da97a36ed386ebd28a8ad211bef91b3df..d229e3453cb3ba4b29319ec84eeeb68998851447 100644
--- a/esd/utils/utils.py
+++ b/esd/utils/utils.py
@@ -1,8 +1,10 @@
+import logging
 import shutil
+import subprocess
 from pathlib import Path
 
 
-def clean_up(dirs: list[str], logging):
+def clean_up(dirs: list[str], logging, ignore_errors=True):
     """
     All the folders from the list dirs are removed with all the content in them
     """
@@ -10,6 +12,31 @@ def clean_up(dirs: list[str], logging):
         cleanup_dir = Path(cleanup_dir).resolve()
         if cleanup_dir.exists():
             logging.info(f"Removing {cleanup_dir}")
-            shutil.rmtree(Path(cleanup_dir))
+            try:
+                shutil.rmtree(Path(cleanup_dir))
+            except OSError as e:
+                logging.error(f"Failed to remove {cleanup_dir}: {e}")
+                if not ignore_errors:
+                    raise e
         else:
             logging.info(f"{cleanup_dir} does not exist")
+
+
+def run_command(*args, logger=None, **kwargs):
+    if logger is None:
+        logger = logging.getLogger(__name__)
+    logger.debug(f'{args}')
+    return subprocess.run(args, **kwargs)
+
+
+def git_clone_repo(repo_name: str, dir: Path, git_path: str, logger: logging.Logger):
+    if not dir.exists():
+        run_command(
+            "git", "clone", "--depth", "1",
+            "-c", "advice.detachedHead=false",
+            "-c", "feature.manyFiles=true",
+            git_path, dir,
+            check=True, logger=logger)
+        logger.debug(f'Cloned repository {repo_name}')
+    else:
+        logger.debug(f'Repository {repo_name} already cloned.')
diff --git a/pyproject.toml b/pyproject.toml
index cd4afe5443ff7233f219c8cd69341955ac672f36..abcbe05d6637b2832ab0c7b9878516380e0087b2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,15 +1,15 @@
 [build-system]
-requires = ["setuptools", "setuptools-scm"]
+requires = ["setuptools>=64", "wheel"]
 build-backend = "setuptools.build_meta"
 
 [project]
-name = "esd"
+name = "esd-tools"
+version = "0.1.0"
 authors = [
     {name = "Eric Müller", email = "mueller@kip.uni-heidelberg.de"},
     {name = "Adrian Ciu", email = "adrian.ciu@codemart.ro"},
 ]
 description = "This package provides all the necessary tools to create an Ebrains Software Distribution environment"
-version = "0.1.0"
 readme = "README.md"
 requires-python = ">=3.10"
 dependencies = [
@@ -18,4 +18,8 @@ dependencies = [
     "ruamel.yaml",
     "pytest",
     "pytest-mock",
-]
\ No newline at end of file
+    "pytest-ordering",
+]
+
+[tool.setuptools.data-files]
+"esd-tools" = ["esd/logger/logging.conf"]
\ No newline at end of file