diff --git a/.env b/.env new file mode 100644 index 0000000000000000000000000000000000000000..786f93e2274be154065ab3b3f4e9f937c8a2c102 --- /dev/null +++ b/.env @@ -0,0 +1,8 @@ +BUILDCACHE_OCI_HOST="" +BUILDCACHE_OCI_PASSWORD="" +BUILDCACHE_OCI_PROJECT="" +BUILDCACHE_OCI_USERNAME="" +CONCRETIZE_OCI_HOST="" +CONCRETIZE_OCI_PASSWORD="" +CONCRETIZE_OCI_PROJECT="" +CONCRETIZE_OCI_USERNAME="" diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 7cdc2157784919724ef7e210b7f1bf9789334397..ab31bc6b429207e32c03ee700e536ca6bc77fd5e 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,10 +1,17 @@ stages: - - build - test + - build + - coverage_report variables: BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/esd/tmp:latest +default: + before_script: + - chmod +x dedal/utils/bootstrap.sh + - ./dedal/utils/bootstrap.sh + - pip install -e .[test] + build-wheel: stage: build tags: @@ -20,19 +27,63 @@ build-wheel: - dist/*.tar.gz expire_in: 1 week +unit_tests: + stage: test + tags: + - docker-runner + image: ubuntu:22.04 + script: + - coverage run -m pytest -s --tb=short --junitxml=test-results.xml ./dedal/tests/unit_tests + - mv .coverage .coverage.unit # Rename to avoid overwriting + artifacts: + when: always + reports: + junit: test-results.xml + paths: + - test-results.xml + - .dedal.log + - .generate_cache.log + - .coverage.unit + expire_in: 1 week -testing: +integration_tests: stage: test tags: - docker-runner - image: python:latest + image: ubuntu:22.04 script: - - pip install -e . 
- - pytest ./dedal/tests/ --junitxml=test-results.xml + - coverage run -m pytest -s --tb=short --junitxml=test-results.xml ./dedal/tests/integration_tests + - mv .coverage .coverage.integration # Rename to avoid overwriting + needs: ["unit_tests"] artifacts: when: always reports: junit: test-results.xml paths: - test-results.xml - expire_in: 1 week \ No newline at end of file + - .dedal.log + - .generate_cache.log + - .coverage.integration + expire_in: 1 week + +merge_coverage: + stage: coverage_report + tags: + - docker-runner + image: ubuntu:22.04 + script: + - coverage combine .coverage.unit .coverage.integration + - coverage report + - coverage xml -o coverage.xml + - coverage html -d coverage_html + artifacts: + reports: + coverage_report: + coverage_format: cobertura + path: coverage.xml + paths: + - coverage.xml + - coverage_html + expire_in: 1 week + coverage: '/TOTAL.*?(\d+\%)$/' + diff --git a/MANIFEST.ini b/MANIFEST.ini new file mode 100644 index 0000000000000000000000000000000000000000..e62be46716825eab56feaa6d891b2f5d0bcf314d --- /dev/null +++ b/MANIFEST.ini @@ -0,0 +1,3 @@ + +include README.md +recursive-include yashchiki/dedal *.* \ No newline at end of file diff --git a/README.md b/README.md index 62e00c689da73463a22bb85cc9671f8634b25c06..299377a1782b473cb83c568160bfd4c2daa077bb 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,146 @@ -# ~~Yashchiki~~Koutakia +# Dedal + + +This repository provides functionalities to easily ```managed spack environments``` and +```helpers for the container image build flow```. + +**Setting up the needed environment variables** +The ````<checkout path>\dedal\.env```` file contains the environment variables required for OCI registry used for +caching. +Ensure that you edit the ````<checkout path>\dedal\.env```` file to match your environment. 
+The following provides an explanation of the various environment variables: + + # OCI Registry Configuration Sample for concretization caches + # ============================= + # The following variables configure the Harbor docker OCI registry (EBRAINS) used for caching. + + # The hostname of the OCI registry. e.g. docker-registry.ebrains.eu + CONCRETIZE__OCI_HOST="docker-registry.ebrains.eu" + + # The project name in the Docker registry. + CONCRETIZE__OCI_PROJECT="concretize_caches" + + # The username used for authentication with the Docker registry. + CONCRETIZE__OCI_USERNAME="robot$concretize-cache-test+user" + + # The password used for authentication with the Docker registry. + CONCRETIZE__OCI_PASSWORD="###ACCESS_TOKEN###" + + + # OCI Registry Configuration Sample for binary caches + # ============================= + # The following variables configure the Harbor docker OCI registry (EBRAINS) used for caching. + + # The hostname of the OCI registry. e.g. docker-registry.ebrains.eu + BUILDCACHE_OCI_HOST="docker-registry.ebrains.eu" + + # The project name in the Docker registry. + BUILDCACHE_OCI_PROJECT="binary-cache-test" + + # The username used for authentication with the Docker registry. + BUILDCACHE_OCI_USERNAME="robot$binary-cache-test+user" + + # The password used for authentication with the Docker registry. + BUILDCACHE_OCI_PASSWORD="###ACCESS_TOKEN###" + +For both concretization and binary caches, the cache version can be changed via the attributes +```cache_version_concretize``` and ```cache_version_build```. +The default values are ```v1```. 
+ +Before using this library, the following tool must be installed on Linux distribution: + +```` + apt install -y bzip2 ca-certificates g++ gcc gfortran git gzip lsb-release patch python3 python3-pip tar unzip xz-utils zstd +```` + +```` + python3 -m pip install --upgrade pip setuptools wheel +```` + +# Dedal library installation + +```sh + pip install dedal +``` + +# Dedal CLI Commands + +The following commands are available in this CLI tool. You can view detailed explanations by using the `--help` option +with any command. + +### 1. `dedal install-spack` + +Install spack in the install_dir folder. + +**Options:** + +- `--spack_version <TEXT>` : Specifies the Spack version to be installed (default: v0.23.0). +- `--bashrc_path <TEXT>` : Defines the path to .bashrc. + +### 2. `dedal set-config` + +Sets configuration parameters for the session. + +**Options:** + +- `--use_cache` Enables cashing +- `--use_spack_global` Uses spack installed globally on the os +- `--env_name <TEXT>` Environment name +- `--env_path <TEXT>` Environment path to download locally +- `--env_git_path <TEXT>` Git path to download the environment +- `--install_dir <TEXT>` Install directory for installing spack; + spack environments and repositories are + stored here +- `--upstream_instance <TEXT>` Upstream instance for spack environment +- `--system_name <TEXT>` System name; it is used inside the spack + environment +- `--concretization_dir <TEXT>` Directory where the concretization caching + (spack.lock) will be downloaded +- `--buildcache_dir <TEXT>` Directory where the binary caching is + downloaded for the spack packages +- `--gpg_name <TEXT>` Gpg name +- `--gpg_mail <TEXT>` Gpg mail contact address +- `--cache_version_concretize <TEXT>` + Cache version for concretizaion data +- `--cache_version_build <TEXT>` Cache version for binary caches data +- `--view <SpackViewEnum>` Spack environment view +- `--update_cache <bool>` Flag for overriding existing cache + +### 3. 
`dedal show-config` + +Show the current configuration. + +### 4. `dedal clear-config` + +Clears stored configuration + +### 5. `dedal add-spack-repo` + +Adds a spack repository to the spack environments. + +**Options:** + +- `--repo_name <TEXT>` Repository name [required] +- `--path <TEXT>` Repository path to download locally [required] +- `--git_path <TEXT>` Git path to download the repository [required] + +### 6. `dedal setup-spack-env` + +Setups a spack environment according to the given configuration. + +### 7. `dedal concretize` + +Spack concretization step. + +### 8. `dedal install-packages` + +Installs spack packages present in the spack environment defined in configuration. + +**Options:** + +- `--jobs <INTEGER>` Number of parallel jobs for spack installation + +# Dedal's UML diagram + + -For now, this repository provides helpers for the EBRAINS container image build flow. diff --git a/dedal/bll/SpackManager.py b/dedal/bll/SpackManager.py new file mode 100644 index 0000000000000000000000000000000000000000..e5fae221c7093bff86a4adf541e4664fda353dff --- /dev/null +++ b/dedal/bll/SpackManager.py @@ -0,0 +1,35 @@ +import os +from dedal.model.SpackDescriptor import SpackDescriptor +from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator +from dedal.configuration.SpackConfig import SpackConfig + + +class SpackManager: + """ + This class defines the logic used by the CLI + """ + + def __init__(self, spack_config: SpackConfig = None, use_cache=False): + self._spack_config = spack_config + self._use_cache = use_cache + + def _get_spack_operation(self): + return SpackOperationCreator.get_spack_operator(self._spack_config, self._use_cache) + + def install_spack(self, version: str, bashrc_path=os.path.expanduser("~/.bashrc")): + self._get_spack_operation().install_spack(spack_version=f'v{version}', bashrc_path=bashrc_path) + + def add_spack_repo(self, repo: SpackDescriptor): + """ + After additional repo was added, setup_spack_env must be invoked + 
""" + self._spack_config.add_repo(repo) + + def setup_spack_env(self): + self._get_spack_operation().setup_spack_env() + + def concretize_spack_env(self): + self._get_spack_operation().concretize_spack_env() + + def install_packages(self, jobs: int): + self._get_spack_operation().install_packages(jobs=jobs) diff --git a/dedal/bll/__init__.py b/dedal/bll/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/bll/cli_utils.py b/dedal/bll/cli_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..bfc74ed0262aa944c88d722a558a88fccaad0687 --- /dev/null +++ b/dedal/bll/cli_utils.py @@ -0,0 +1,23 @@ +import jsonpickle +import os + + +def save_config(spack_config_data, config_path: str): + """Save config to JSON file.""" + with open(config_path, "w") as data_file: + data_file.write(jsonpickle.encode(spack_config_data)) + + +def load_config(config_path: str): + """Load config from JSON file.""" + if os.path.exists(config_path): + with open(config_path, "r") as data_file: + data = jsonpickle.decode(data_file.read()) + return data + return {} + + +def clear_config(config_path: str): + """Delete the JSON config file.""" + if os.path.exists(config_path): + os.remove(config_path) diff --git a/dedal/build_cache/BuildCacheManager.py b/dedal/build_cache/BuildCacheManager.py index 2da39e252c8ebc3e0b9aa6d6a5612d9f1f4ee02e..98fd234de0ec5282dbde338608371a09fb49d049 100644 --- a/dedal/build_cache/BuildCacheManager.py +++ b/dedal/build_cache/BuildCacheManager.py @@ -1,10 +1,12 @@ +import glob import os -import oras.client +from os.path import join from pathlib import Path +import oras.client + from dedal.build_cache.BuildCacheManagerInterface import BuildCacheManagerInterface from dedal.logger.logger_builder import get_logger -from dedal.utils.utils import clean_up class BuildCacheManager(BuildCacheManagerInterface): @@ -12,49 +14,64 @@ class 
BuildCacheManager(BuildCacheManagerInterface): This class aims to manage the push/pull/delete of build cache files """ - def __init__(self, auth_backend='basic', insecure=False): - self.logger = get_logger(__name__, BuildCacheManager.__name__) - self.home_path = Path(os.environ.get("HOME_PATH", os.getcwd())) - self.registry_project = os.environ.get("REGISTRY_PROJECT") + def __init__(self, registry_host, registry_project, registry_username, registry_password, cache_version='cache', + auth_backend='basic', + insecure=False): + self._logger = get_logger(__name__, BuildCacheManager.__name__) + self._registry_project = registry_project - self._registry_username = str(os.environ.get("REGISTRY_USERNAME")) - self._registry_password = str(os.environ.get("REGISTRY_PASSWORD")) + self._registry_username = registry_username + self._registry_password = registry_password - self.registry_host = str(os.environ.get("REGISTRY_HOST")) + self._registry_host = registry_host # Initialize an OrasClient instance. # This method utilizes the OCI Registry for container image and artifact management. # Refer to the official OCI Registry documentation for detailed information on the available authentication methods. 
# Supported authentication types may include basic authentication (username/password), token-based authentication, - self.client = oras.client.OrasClient(hostname=self.registry_host, auth_backend=auth_backend, insecure=insecure) - self.client.login(username=self._registry_username, password=self._registry_password) - self.oci_registry_path = f'{self.registry_host}/{self.registry_project}/cache' + self._client = oras.client.OrasClient(hostname=self._registry_host, auth_backend=auth_backend, + insecure=insecure) + self._client.login(username=self._registry_username, password=self._registry_password) + self.cache_version = cache_version + self._oci_registry_path = f'{self._registry_host}/{self._registry_project}/{self.cache_version}' - def upload(self, out_dir: Path): + def upload(self, upload_dir: Path, update_cache=True): """ This method pushed all the files from the build cache folder into the OCI Registry + Args: + upload_dir (Path): directory with the local binary caches + update_cache (bool): Updates the cache from the OCI Registry with the same tag """ - build_cache_path = self.home_path / out_dir + build_cache_path = upload_dir.resolve() # build cache folder must exist before pushing all the artifacts if not build_cache_path.exists(): - self.logger.error(f"Path {build_cache_path} not found.") + self._logger.error(f"Path {build_cache_path} not found.") + + tags = self.list_tags() for sub_path in build_cache_path.rglob("*"): if sub_path.is_file(): - rel_path = str(sub_path.relative_to(build_cache_path)).replace(str(sub_path.name), "") - target = f"{self.registry_host}/{self.registry_project}/cache:{str(sub_path.name)}" - try: - self.logger.info(f"Pushing folder '{sub_path}' to ORAS target '{target}' ...") - self.client.push( - files=[str(sub_path)], - target=target, - # save in manifest the relative path for reconstruction - manifest_annotations={"path": rel_path}, - disable_path_validation=True, - ) - self.logger.info(f"Successfully pushed {sub_path.name}") - 
except Exception as e: - self.logger.error( - f"An error occurred while pushing: {e}") + tag = str(sub_path.name) + rel_path = str(sub_path.relative_to(build_cache_path)).replace(tag, "") + target = f"{self._registry_host}/{self._registry_project}/{self.cache_version}:{tag}" + upload_file = True + if update_cache is False and tag in tags: + upload_file = False + if upload_file: + try: + self._logger.info(f"Pushing file '{sub_path}' to ORAS target '{target}' ...") + self._client.push( + files=[str(sub_path)], + target=target, + # save in manifest the relative path for reconstruction + manifest_annotations={"path": rel_path}, + disable_path_validation=True, + ) + self._logger.info(f"Successfully pushed {tag}") + except Exception as e: + self._logger.error( + f"An error occurred while pushing: {e}") + else: + self._logger.info(f"File '{sub_path}' already uploaded ...") # todo to be discussed hot to delete the build cache after being pushed to the OCI Registry # clean_up([str(build_cache_path)], self.logger) @@ -63,37 +80,38 @@ class BuildCacheManager(BuildCacheManagerInterface): This method retrieves all tags from an OCI Registry """ try: - return self.client.get_tags(self.oci_registry_path) + return self._client.get_tags(self._oci_registry_path) except Exception as e: - self.logger.error(f"Failed to list tags: {e}") + self._logger.error(f"Failed to list tags: {e}") return None - def download(self, in_dir: Path): + def download(self, download_dir: Path): """ This method pulls all the files from the OCI Registry into the build cache folder """ - build_cache_path = self.home_path / in_dir + build_cache_path = download_dir.resolve() # create the buildcache dir if it does not exist os.makedirs(build_cache_path, exist_ok=True) tags = self.list_tags() if tags is not None: for tag in tags: - ref = f"{self.registry_host}/{self.registry_project}/cache:{tag}" + ref = f"{self._registry_host}/{self._registry_project}/{self.cache_version}:{tag}" # reconstruct the relative path of 
each artifact by getting it from the manifest cache_path = \ - self.client.get_manifest(f'{self.registry_host}/{self.registry_project}/cache:{tag}')[ + self._client.get_manifest( + f'{self._registry_host}/{self._registry_project}/{self.cache_version}:{tag}')[ 'annotations'][ 'path'] try: - self.client.pull( + self._client.pull( ref, # missing dirs to output dir are created automatically by OrasClient pull method outdir=str(build_cache_path / cache_path), overwrite=True ) - self.logger.info(f"Successfully pulled artifact {tag}.") + self._logger.info(f"Successfully pulled artifact {tag}.") except Exception as e: - self.logger.error( + self._logger.error( f"Failed to pull artifact {tag} : {e}") def delete(self): @@ -106,8 +124,46 @@ class BuildCacheManager(BuildCacheManagerInterface): tags = self.list_tags() if tags is not None: try: - self.client.delete_tags(self.oci_registry_path, tags) - self.logger.info(f"Successfully deleted all artifacts form OCI registry.") + self._client.delete_tags(self._oci_registry_path, tags) + self._logger.info("Successfully deleted all artifacts form OCI registry.") except RuntimeError as e: - self.logger.error( + self._logger.error( f"Failed to delete artifacts: {e}") + + def __log_warning_if_needed(self, warn_message: str, items: list[str]) -> None: + """Logs a warning message if the number of items is greater than 1. (Private function) + This method logs a warning message using the provided message and items if the list of items has more than one element. + + Args: + warn_message (str): The warning message to log. + items (list[str]): The list of items to include in the log message. + """ + if len(items) > 1: + self._logger.warning(warn_message, items, items[0]) + + def get_public_key_from_cache(self, build_cache_dir: str | None) -> str | None: + """Retrieves the public key from the build cache. + This method searches for the public key within the specified build cache directory. 
+ Args: + build_cache_dir (str | None): The path to the build cache directory. + Returns: + str | None: The path to the public key file if found, otherwise None. + """ + + if not build_cache_dir or not os.path.exists(build_cache_dir): + self._logger.warning("Build cache directory does not exist!") + return None + pgp_folders = glob.glob(f"{build_cache_dir}/**/_pgp", recursive=True) + if not pgp_folders: + self._logger.warning("No _pgp folder found in the build cache!") + return None + self.__log_warning_if_needed( + "More than one PGP folders found in the build cache: %s, using the first one in the list: %s", pgp_folders) + pgp_folder = pgp_folders[0] + key_files = glob.glob(join(pgp_folder, "**")) + if not key_files: + self._logger.warning("No PGP key files found in the build cache!") + return None + self.__log_warning_if_needed( + "More than one PGP key files found in the build cache: %s, using the first one in the list: %s", key_files) + return key_files[0] diff --git a/dedal/cli/spack_manager_api.py b/dedal/cli/spack_manager_api.py new file mode 100644 index 0000000000000000000000000000000000000000..d5f5a9f05e2098fb47878a360fe833edcc7fb862 --- /dev/null +++ b/dedal/cli/spack_manager_api.py @@ -0,0 +1,162 @@ +import os +from pathlib import Path +import click +import jsonpickle + +from dedal.bll.SpackManager import SpackManager +from dedal.bll.cli_utils import save_config, load_config +from dedal.configuration.GpgConfig import GpgConfig +from dedal.configuration.SpackConfig import SpackConfig +from dedal.enum.SpackViewEnum import SpackViewEnum +from dedal.model.SpackDescriptor import SpackDescriptor +from dedal.utils.utils import resolve_path + +SESSION_CONFIG_PATH = os.path.expanduser('/tmp/dedal/dedal_session.json') +os.makedirs(os.path.dirname(SESSION_CONFIG_PATH), exist_ok=True) + + +@click.group() +@click.pass_context +def cli(ctx: click.Context): + config = load_config(SESSION_CONFIG_PATH) + if ctx.invoked_subcommand not in ['set-config', 'install-spack'] 
and not config: + click.echo('No configuration set. Use `set-config` first.') + ctx.exit(1) + if config: + config['env_path'] = resolve_path(config['env_path']) + env = SpackDescriptor(config['env_name'], config['env_path'], config['env_git_path']) + gpg = GpgConfig(config['gpg_name'], config['gpg_mail']) if config['gpg_name'] and config['gpg_mail'] else None + spack_config = SpackConfig(env=env, repos=None, install_dir=config['install_dir'], + upstream_instance=config['upstream_instance'], + concretization_dir=config['concretization_dir'], + buildcache_dir=config['buildcache_dir'], + system_name=config['system_name'], gpg=gpg, + use_spack_global=config['use_spack_global'], + view=config['view'], + update_cache=config['update_cache']) + ctx.obj = SpackManager(spack_config, use_cache=config['use_cache']) + + +@cli.command() +@click.option('--use_cache', is_flag=True, default=False, help='Enables cashing') +@click.option('--use_spack_global', is_flag=True, default=False, help='Uses spack installed globally on the os') +@click.option('--env_name', type=str, default=None, help='Environment name') +@click.option('--env_path', type=str, default=None, help='Environment path to download locally') +@click.option('--env_git_path', type=str, default=None, help='Git path to download the environment') +@click.option('--install_dir', type=str, + help='Install directory for installing spack; spack environments and repositories are stored here') +@click.option('--upstream_instance', type=str, default=None, help='Upstream instance for spack environment') +@click.option('--system_name', type=str, default=None, help='System name; it is used inside the spack environment') +@click.option('--concretization_dir', type=str, default=None, + help='Directory where the concretization caching (spack.lock) will be downloaded') +@click.option('--buildcache_dir', type=str, default=None, + help='Directory where the binary caching is downloaded for the spack packages') +@click.option('--gpg_name', 
type=str, default=None, help='Gpg name') +@click.option('--gpg_mail', type=str, default=None, help='Gpg mail contact address') +@click.option('--cache_version_concretize', type=str, default='v1', help='Cache version for concretizaion data') +@click.option('--cache_version_build', type=str, default='v1', help='Cache version for binary caches data') +@click.option('--view', type=SpackViewEnum, default=SpackViewEnum.VIEW, help='Spack environment view') +@click.option('--update_cache', is_flag=True, default=True, help='Flag for overriding existing cache') +def set_config(use_cache, env_name, env_path, env_git_path, install_dir, upstream_instance, system_name, + concretization_dir, + buildcache_dir, gpg_name, gpg_mail, use_spack_global, cache_version_concretize, cache_version_build, + view, update_cache): + """Sets configuration parameters for the session.""" + spack_config_data = { + 'use_cache': use_cache, + 'env_name': env_name, + 'env_path': env_path, + 'env_git_path': env_git_path, + 'install_dir': install_dir, + 'upstream_instance': upstream_instance, + 'system_name': system_name, + 'concretization_dir': Path(concretization_dir) if concretization_dir else None, + 'buildcache_dir': Path(buildcache_dir) if buildcache_dir else None, + 'gpg_name': gpg_name, + 'gpg_mail': gpg_mail, + 'use_spack_global': use_spack_global, + 'repos': [], + 'cache_version_concretize': cache_version_concretize, + 'cache_version_build': cache_version_build, + 'view': view, + 'update_cache': update_cache, + } + save_config(spack_config_data, SESSION_CONFIG_PATH) + click.echo('Configuration saved.') + + +@click.command() +def show_config(): + """Show the current configuration.""" + config = load_config(SESSION_CONFIG_PATH) + if config: + click.echo(jsonpickle.encode(config, indent=2)) + else: + click.echo('No configuration set. 
Use `set-config` first.') + + +@cli.command() +@click.option('--spack_version', type=str, default='0.23.0', + help='Specifies the Spack version to be installed (default: v0.23.0).') +@click.option('--bashrc_path', type=str, default="~/.bashrc", help='Defines the path to .bashrc.') +@click.pass_context +def install_spack(ctx: click.Context, spack_version: str, bashrc_path: str): + """Install spack in the install_dir folder""" + bashrc_path = os.path.expanduser(bashrc_path) + if ctx.obj is None: + SpackManager().install_spack(spack_version, bashrc_path) + else: + ctx.obj.install_spack(spack_version, bashrc_path) + + +@cli.command() +@click.option('--repo_name', type=str, required=True, default=None, help='Repository name') +@click.option('--path', type=str, required=True, default=None, help='Repository path to download locally') +@click.option('--git_path', type=str, required=True, default=None, help='Git path to download the repository') +def add_spack_repo(repo_name: str, path: str, git_path: str = None): + """Adds a spack repository to the spack environments. 
The setup command must be rerun.""" + path = resolve_path(path) + repo = SpackDescriptor(repo_name, path, git_path) + config = load_config(SESSION_CONFIG_PATH) + config['repos'].append(repo) + save_config(config, SESSION_CONFIG_PATH) + click.echo('dedal setup_spack_env must be reran after each repo is added for the environment.') + + +@cli.command() +@click.pass_context +def setup_spack_env(ctx: click.Context): + """Setups a spack environment according to the given configuration.""" + ctx.obj.setup_spack_env() + + +@cli.command() +@click.pass_context +def concretize(ctx: click.Context): + """Spack concretization step.""" + ctx.obj.concretize_spack_env() + + +@cli.command() +@click.option('--jobs', type=int, default=2, help='Number of parallel jobs for spack installation') +@click.pass_context +def install_packages(ctx: click.Context, jobs): + """Installs spack packages present in the spack environment defined in configuration.""" + ctx.obj.install_packages(jobs=jobs) + + +@click.command() +def clear_config(): + """Clears stored configuration.""" + if os.path.exists(SESSION_CONFIG_PATH): + os.remove(SESSION_CONFIG_PATH) + click.echo('Configuration cleared!') + else: + click.echo('No configuration to clear.') + + +cli.add_command(show_config) +cli.add_command(clear_config) + +if __name__ == '__main__': + cli() diff --git a/dedal/configuration/GpgConfig.py b/dedal/configuration/GpgConfig.py new file mode 100644 index 0000000000000000000000000000000000000000..a8f0c2d3bc0f39db8c5b251d9ffc6f6fa3a577ec --- /dev/null +++ b/dedal/configuration/GpgConfig.py @@ -0,0 +1,7 @@ +class GpgConfig: + """ + Configuration for gpg key used by spack + """ + def __init__(self, gpg_name='example', gpg_mail='example@example.com'): + self.name = gpg_name + self.mail = gpg_mail diff --git a/dedal/configuration/SpackConfig.py b/dedal/configuration/SpackConfig.py new file mode 100644 index 0000000000000000000000000000000000000000..7945e848be6b6655f65118afb0e6b2bff2c5112a --- /dev/null +++ 
b/dedal/configuration/SpackConfig.py @@ -0,0 +1,36 @@ +import os +from pathlib import Path +from dedal.configuration.GpgConfig import GpgConfig +from dedal.enum.SpackViewEnum import SpackViewEnum +from dedal.model import SpackDescriptor +from dedal.utils.utils import resolve_path + + +class SpackConfig: + def __init__(self, env: SpackDescriptor = None, repos: list[SpackDescriptor] = None, + install_dir=Path(os.getcwd()).resolve(), upstream_instance=None, system_name=None, + concretization_dir: Path = None, buildcache_dir: Path = None, gpg: GpgConfig = None, + use_spack_global=False, cache_version_concretize='v1', + cache_version_build='v1', view=SpackViewEnum.VIEW, update_cache=True): + self.env = env + if repos is None: + self.repos = [] + else: + self.repos = repos + self.upstream_instance = upstream_instance + self.system_name = system_name + self.concretization_dir = concretization_dir if concretization_dir is None else resolve_path(concretization_dir) + self.buildcache_dir = buildcache_dir if buildcache_dir is None else resolve_path(buildcache_dir) + self.install_dir = resolve_path(install_dir) + self.gpg = gpg + self.use_spack_global = use_spack_global + self.cache_version_concretize = cache_version_concretize + self.cache_version_build = cache_version_build + self.view = view + self.update_cache = update_cache + + def add_repo(self, repo: SpackDescriptor): + if self.repos is None: + self.repos = [repo] + else: + self.repos.append(repo) diff --git a/dedal/configuration/__init__.py b/dedal/configuration/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/docs/resources/dedal_UML.png b/dedal/docs/resources/dedal_UML.png new file mode 100644 index 0000000000000000000000000000000000000000..6e970d08690dc6f53f95c5088ad3933b9bae6f15 Binary files /dev/null and b/dedal/docs/resources/dedal_UML.png differ diff --git a/dedal/enum/SpackConfigCommand.py b/dedal/enum/SpackConfigCommand.py 
new file mode 100644 index 0000000000000000000000000000000000000000..3e7b69292492120cec413f8cf410309f2800ad3c --- /dev/null +++ b/dedal/enum/SpackConfigCommand.py @@ -0,0 +1,13 @@ +from enum import Enum + +class SpackConfigCommand(Enum): + GET = 'get' + BLAME = 'blame' + EDIT = 'edit' + LIST = 'list' + ADD = 'add' + CHANGE = 'change' + PREFER_UPSTREAM = 'prefer-upstream' + REMOVE = 'remove' + UPDATE = 'update' + REVERT = 'revert' \ No newline at end of file diff --git a/dedal/enum/SpackViewEnum.py b/dedal/enum/SpackViewEnum.py new file mode 100644 index 0000000000000000000000000000000000000000..fc6c01c19572dfadc77de1b119eb1bce365d2e58 --- /dev/null +++ b/dedal/enum/SpackViewEnum.py @@ -0,0 +1,5 @@ +from enum import Enum + +class SpackViewEnum(Enum): + VIEW = '' + WITHOUT_VIEW = '--without-view' \ No newline at end of file diff --git a/dedal/enum/__init__.py b/dedal/enum/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/error_handling/__init__.py b/dedal/error_handling/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/error_handling/exceptions.py b/dedal/error_handling/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..85398df060dc8248372d273c2f6f35d65cf16696 --- /dev/null +++ b/dedal/error_handling/exceptions.py @@ -0,0 +1,68 @@ +class SpackException(Exception): + + def __init__(self, message): + super().__init__(message) + self.message = str(message) + + def __str__(self): + return self.message + + +class BashCommandException(SpackException): + """ + To be thrown when a bash command has failed + """ + + +class NoSpackEnvironmentException(BashCommandException): + """ + To be thrown when an operation on a spack environment is executed without the environment being activated or existent + """ + + +class SpackConcertizeException(BashCommandException): + """ 
+ To be thrown when the spack concretization step fails + """ + + +class SpackInstallPackagesException(BashCommandException): + """ + To be thrown when the spack fails to install spack packages + """ + + +class SpackMirrorException(BashCommandException): + """ + To be thrown when the spack add mirror command fails + """ + + +class SpackGpgException(BashCommandException): + """ + To be thrown when the spack fails to create gpg keys + """ + + +class SpackRepoException(BashCommandException): + """ + To be thrown when the spack fails to add a repo + """ + + +class SpackReindexException(BashCommandException): + """ + To be thrown when the spack reindex step fails + """ + + +class SpackSpecException(BashCommandException): + """ + To be thrown when the spack spec for a package fails + """ + + +class SpackConfigException(BashCommandException): + """ + To be thrown when the spack config command fails + """ diff --git a/dedal/model/SpackDescriptor.py b/dedal/model/SpackDescriptor.py new file mode 100644 index 0000000000000000000000000000000000000000..939164a0653cf8724e28b7adc6702a4b28d6bb52 --- /dev/null +++ b/dedal/model/SpackDescriptor.py @@ -0,0 +1,13 @@ +import os +from pathlib import Path + + +class SpackDescriptor: + """" + Provides details about the spack environment + """ + + def __init__(self, name: str, path: Path = Path(os.getcwd()).resolve(), git_path: str = None): + self.name = name + self.path = path.resolve() if isinstance(path, Path) else Path(path).resolve() + self.git_path = git_path diff --git a/dedal/model/__init__.py b/dedal/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/spack_factory/SpackOperation.py b/dedal/spack_factory/SpackOperation.py new file mode 100644 index 0000000000000000000000000000000000000000..b0c0a78b8fd75cca7f989eb838c71f5ed574b444 --- /dev/null +++ b/dedal/spack_factory/SpackOperation.py @@ -0,0 +1,416 @@ +import os +import re +import 
import glob
import os
import re
import subprocess
from pathlib import Path

from dedal.configuration.SpackConfig import SpackConfig
from dedal.enum.SpackConfigCommand import SpackConfigCommand
from dedal.error_handling.exceptions import BashCommandException, NoSpackEnvironmentException, \
    SpackInstallPackagesException, SpackConcertizeException, SpackMirrorException, SpackGpgException, \
    SpackRepoException, SpackReindexException, SpackSpecException, SpackConfigException
from dedal.logger.logger_builder import get_logger
from dedal.tests.testing_variables import SPACK_VERSION
from dedal.utils.utils import run_command, git_clone_repo, log_command, set_bashrc_variable, get_first_word
from dedal.wrapper.spack_wrapper import check_spack_env


class SpackOperation:
    """Installs spack, sets up an environment, concretizes and installs packages.

    Base of a small factory hierarchy: subclasses create a build cache
    (SpackOperationCreateCache) or consume one (SpackOperationUseCache).

    Attributes:
        spack_config (SpackConfig): full configuration (env, repos, cache dirs, ...).
        spack_dir (Path): location of the spack git checkout.
        env_path (Path): location of the spack environment (set only when an env is configured).
    """

    def __init__(self, spack_config: SpackConfig = None, logger=get_logger(__name__)):
        # The original default ``SpackConfig()`` was a mutable default evaluated once
        # at import time and shared between instances; ``None`` keeps calls compatible
        # while giving every instance a fresh configuration.
        self.spack_config = spack_config if spack_config is not None else SpackConfig()
        os.makedirs(self.spack_config.install_dir, exist_ok=True)
        self.spack_dir = self.spack_config.install_dir / 'spack'
        # When spack is used globally the setup script is assumed to be sourced already.
        self.spack_setup_script = "" if self.spack_config.use_spack_global \
            else f"source {self.spack_dir / 'share' / 'spack' / 'setup-env.sh'}"
        self.logger = logger
        if self.spack_config.concretization_dir:
            os.makedirs(self.spack_config.concretization_dir, exist_ok=True)
        if self.spack_config.buildcache_dir:
            os.makedirs(self.spack_config.buildcache_dir, exist_ok=True)
        if self.spack_config.env and self.spack_config.env.name:
            self.env_path: Path = self.spack_config.env.path / self.spack_config.env.name
            self.spack_command_on_env = (f'{self.spack_setup_script} && spack env activate -p '
                                         f'{self.spack_config.view.value} {self.env_path}')
        else:
            self.spack_command_on_env = self.spack_setup_script
        if self.spack_config.env and self.spack_config.env.path:
            self.spack_config.env.path.mkdir(parents=True, exist_ok=True)

    def create_fetch_spack_environment(self):
        """Fetch a spack environment if a git path is defined, otherwise create it.

        Raises:
            BashCommandException: if creating the environment fails.
        """
        if self.spack_config.env and self.spack_config.env.git_path:
            git_clone_repo(self.spack_config.env.name,
                           self.spack_config.env.path / self.spack_config.env.name,
                           self.spack_config.env.git_path,
                           logger=self.logger)
        else:
            os.makedirs(self.spack_config.env.path / self.spack_config.env.name, exist_ok=True)
            run_command("bash", "-c",
                        f'{self.spack_setup_script} && spack env create -d {self.env_path}',
                        check=True, logger=self.logger,
                        info_msg=f"Created {self.spack_config.env.name} spack environment",
                        exception_msg=f"Failed to create {self.spack_config.env.name} spack environment",
                        exception=BashCommandException)

    def setup_spack_env(self):
        """Prepare a spack environment: export variables, fetch/create the env and add repos."""
        bashrc_path = os.path.expanduser("~/.bashrc")
        if self.spack_config.system_name:
            set_bashrc_variable('SYSTEMNAME', self.spack_config.system_name, bashrc_path, logger=self.logger)
            os.environ['SYSTEMNAME'] = self.spack_config.system_name
        if self.spack_dir.exists() and self.spack_dir.is_dir():
            set_bashrc_variable('SPACK_USER_CACHE_PATH', str(self.spack_dir / ".spack"), bashrc_path,
                                logger=self.logger)
            set_bashrc_variable('SPACK_USER_CONFIG_PATH', str(self.spack_dir / ".spack"), bashrc_path,
                                logger=self.logger)
            self.logger.debug('Added env variables SPACK_USER_CACHE_PATH and SPACK_USER_CONFIG_PATH')
        else:
            self.logger.error(f'Invalid installation path: {self.spack_dir}')
        # Restart the bash after adding environment variables
        if self.spack_config.env:
            self.create_fetch_spack_environment()
        if self.spack_config.install_dir.exists():
            for repo in self.spack_config.repos:
                repo_dir = self.spack_config.install_dir / repo.path / repo.name
                git_clone_repo(repo.name, repo_dir, repo.git_path, logger=self.logger)
                if not self.spack_repo_exists(repo.name):
                    self.add_spack_repo(repo.path, repo.name)
                    self.logger.debug(f'Added spack repository {repo.name}')
                else:
                    self.logger.debug(f'Spack repository {repo.name} already added')

    def spack_repo_exists(self, repo_name: str) -> bool | None:
        """Check if the given Spack repository exists.

        Returns:
            True if the repository is listed by spack, False otherwise.
        Raises:
            NoSpackEnvironmentException: if an env is configured but does not exist.
        """
        if self.spack_config.env is None:
            completed = run_command("bash", "-c",
                                    f'{self.spack_setup_script} && spack repo list',
                                    check=True,
                                    capture_output=True, text=True, logger=self.logger,
                                    info_msg=f'Checking if {repo_name} exists')
        else:
            if not self.spack_env_exists():
                self.logger.debug('No spack environment defined')
                raise NoSpackEnvironmentException('No spack environment defined')
            completed = run_command("bash", "-c",
                                    f'{self.spack_command_on_env} && spack repo list',
                                    check=True,
                                    capture_output=True, text=True, logger=self.logger,
                                    info_msg=f'Checking if repository {repo_name} was added')
        # Bug fix: the original read ``.stdout`` before the None check in one branch
        # and iterated the CompletedProcess object itself in the other.
        if completed is None:
            return False
        return any(line.strip().endswith(repo_name) for line in completed.stdout.splitlines())

    def spack_env_exists(self):
        """Check if the configured spack environment exists.

        Returns:
            True if the environment can be activated, False otherwise.
        """
        result = run_command("bash", "-c",
                             self.spack_command_on_env,
                             check=True,
                             capture_output=True, text=True, logger=self.logger,
                             info_msg=f'Checking if environment {self.spack_config.env.name} exists')
        return result is not None

    def add_spack_repo(self, repo_path: Path, repo_name: str):
        """Add the Spack repository if it does not exist.

        Raises:
            SpackRepoException: if the repo cannot be added.
        """
        run_command("bash", "-c",
                    f'{self.spack_command_on_env} && spack repo add {repo_path}/{repo_name}',
                    check=True, logger=self.logger,
                    info_msg=f"Added {repo_name} to spack environment {self.spack_config.env.name}",
                    exception_msg=f"Failed to add {repo_name} to spack environment {self.spack_config.env.name}",
                    exception=SpackRepoException)

    @check_spack_env
    def get_compiler_version(self):
        """Return the first gcc compiler version known to the environment, or None.

        Raises:
            NoSpackEnvironmentException: If the spack environment is not set up.
        """
        result = run_command("bash", "-c",
                             f'{self.spack_command_on_env} && spack compiler list',
                             check=True, logger=self.logger,
                             capture_output=True, text=True,
                             info_msg=f"Checking spack environment compiler version for {self.spack_config.env.name}",
                             exception_msg=f"Failed to checking spack environment compiler version for {self.spack_config.env.name}",
                             exception=BashCommandException)
        if result is None or not result.stdout:
            self.logger.debug(f'No gcc found for {self.spack_config.env.name}')
            return None
        # Find the first occurrence of a GCC compiler using regex.
        match = re.search(r"gcc@([\d\.]+)", result.stdout)
        # Bug fix: the original dereferenced ``match.group(1)`` without checking for
        # a failed search, raising AttributeError when no gcc is listed.
        if match is None:
            self.logger.debug(f'No gcc found for {self.spack_config.env.name}')
            return None
        gcc_version = match.group(1)
        self.logger.debug(f'Found gcc for {self.spack_config.env.name}: {gcc_version}')
        return gcc_version

    def get_spack_installed_version(self):
        """Return the installed spack version string, or None when unavailable."""
        spack_version = run_command("bash", "-c", f'{self.spack_setup_script} && spack --version',
                                    capture_output=True, text=True, check=True,
                                    logger=self.logger,
                                    info_msg="Getting spack version",
                                    exception_msg="Error retrieving Spack version")
        if spack_version:
            return spack_version.stdout.strip().split()[0]
        return None

    @check_spack_env
    def concretize_spack_env(self, force=True, test=None):
        """Concretization step for a spack environment.

        Args:
            force (bool): Overrides an existing concretization when set to True.
            test: which test dependencies should be included ('root' or 'all').
        Raises:
            NoSpackEnvironmentException: If the spack environment is not set up.
        """
        force_flag = '--force' if force else ''
        test_flag = f'--test {test}' if test else ''
        run_command("bash", "-c",
                    f'{self.spack_command_on_env} && spack concretize {force_flag} {test_flag}',
                    check=True,
                    logger=self.logger,
                    info_msg=f'Concertization step for {self.spack_config.env.name}',
                    exception_msg=f'Failed the concertization step for {self.spack_config.env.name}',
                    exception=SpackConcertizeException)

    def reindex(self):
        """Reindex step for a spack environment.

        Raises:
            SpackReindexException: If the spack reindex command fails.
        """
        run_command("bash", "-c",
                    f'{self.spack_command_on_env} && spack reindex',
                    check=True,
                    logger=self.logger,
                    info_msg='Reindex step.',
                    exception_msg='Failed the reindex.',
                    exception=SpackReindexException)

    def spec_pacakge(self, package_name: str):
        """Return the concretized spec (``name@version``) of *package_name*, or None.

        NOTE(review): the method-name misspelling is kept for backward compatibility.
        A failed spec is converted into a None return instead of propagating.
        """
        try:
            spec_output = run_command("bash", "-c",
                                      f'{self.spack_command_on_env} && spack spec {package_name}',
                                      check=True,
                                      stdout=subprocess.PIPE,
                                      stderr=subprocess.PIPE,
                                      text=True,
                                      logger=self.logger,
                                      info_msg=f'Spack spec {package_name}.',
                                      exception_msg=f'Failed to spack spec {package_name}.',
                                      exception=SpackSpecException).stdout
            pattern = r'^\s*-\s*([\w.-]+@[\d.]+)'
            match = re.search(pattern, spec_output)
            return match.group(1) if match else None
        except SpackSpecException:
            return None

    def create_gpg_keys(self):
        """Create GPG keys (usable when creating binary caches) and add them to the trusted keyring.

        Raises:
            SpackGpgException: if no GPG configuration exists or key creation fails.
        """
        if self.spack_config.gpg:
            run_command("bash", "-c",
                        f'{self.spack_setup_script} && spack gpg init && spack gpg create '
                        f'{self.spack_config.gpg.name} {self.spack_config.gpg.mail}',
                        check=True,
                        logger=self.logger,
                        info_msg=f'Created pgp keys for {self.spack_config.env.name}',
                        exception_msg=f'Failed to create pgp keys mirror {self.spack_config.env.name}',
                        exception=SpackGpgException)
        else:
            # Message typo fixed ("defined is spack configuration").
            raise SpackGpgException('No GPG configuration was defined in the spack configuration')

    def add_mirror(self, mirror_name: str, mirror_path: Path, signed=False, autopush=False, global_mirror=False):
        """Add a Spack mirror, either globally or to the current environment.

        Args:
            mirror_name (str): The name of the mirror.
            mirror_path (Path): The path or URL of the mirror.
            signed (bool): Whether to require signed packages from the mirror.
            autopush (bool): Whether to enable autopush for the mirror.
            global_mirror (bool): Add globally (True) or to the current environment (False).
        Raises:
            SpackMirrorException: if the mirror cannot be added.
        """
        autopush_flag = '--autopush' if autopush else ''
        signed_flag = '--signed' if signed else ''
        spack_add_mirror = f'spack mirror add {autopush_flag} {signed_flag} {mirror_name} {mirror_path}'
        if global_mirror:
            run_command("bash", "-c",
                        f'{self.spack_setup_script} && {spack_add_mirror}',
                        check=True,
                        logger=self.logger,
                        info_msg=f'Added mirror {mirror_name}',
                        exception_msg=f'Failed to add mirror {mirror_name}',
                        exception=SpackMirrorException)
        else:
            # Bug fix: the original wrapped this call in ``check_spack_env(...)``, which is
            # a decorator; applying it to the command's return value was a no-op, so the
            # command is executed directly here (net behavior unchanged).
            run_command("bash", "-c",
                        f'{self.spack_command_on_env} && {spack_add_mirror}',
                        check=True,
                        logger=self.logger,
                        info_msg=f'Added mirror {mirror_name}',
                        exception_msg=f'Failed to add mirror {mirror_name}',
                        exception=SpackMirrorException)

    @check_spack_env
    def trust_gpg_key(self, public_key_path: str):
        """Add a GPG public key to the Spack trusted keyring.

        Args:
            public_key_path (str): Path to the GPG public key file.
        Raises:
            ValueError: If public_key_path is empty.
            NoSpackEnvironmentException: If the spack environment is not set up.
            SpackGpgException: if trusting the key fails.
        """
        if not public_key_path:
            raise ValueError("public_key_path is required")
        run_command("bash", "-c",
                    f'{self.spack_command_on_env} && spack gpg trust {public_key_path}',
                    check=True,
                    logger=self.logger,
                    info_msg=f'Trusted GPG key for {self.spack_config.env.name}',
                    exception_msg=f'Failed to trust GPG key for {self.spack_config.env.name}',
                    exception=SpackGpgException)

    def config(self, config_type: SpackConfigCommand, config_parameter):
        """Run ``spack config <sub-command> <parameter>`` in the environment context.

        Raises:
            SpackConfigException: if the config command fails.
        """
        run_command("bash", "-c",
                    f'{self.spack_command_on_env} && spack config {config_type.value} {config_parameter}',
                    check=True,
                    logger=self.logger,
                    info_msg='Spack config command',
                    exception_msg='Spack config command failed',
                    exception=SpackConfigException)

    def mirror_list(self):
        """Return the names of the available mirrors.

        When an environment is activated the environment-scoped mirrors are returned,
        otherwise the globally configured ones.
        """
        mirrors = run_command("bash", "-c",
                              f'{self.spack_command_on_env} && spack mirror list',
                              check=True,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              text=True,
                              logger=self.logger,
                              info_msg='Listing mirrors',
                              exception_msg='Failed list mirrors',
                              exception=SpackMirrorException).stdout
        return [get_first_word(line) for line in mirrors.strip().splitlines()]

    def remove_mirror(self, mirror_name: str):
        """Remove a mirror from the active environment, or globally when none is active.

        Raises:
            ValueError: If mirror_name is empty.
            SpackMirrorException: if removal fails.
        """
        if not mirror_name:
            raise ValueError("mirror_name is required")
        run_command("bash", "-c",
                    f'{self.spack_command_on_env} && spack mirror rm {mirror_name}',
                    check=True,
                    logger=self.logger,
                    info_msg=f'Removing mirror {mirror_name}',
                    exception_msg=f'Failed to remove mirror {mirror_name}',
                    exception=SpackMirrorException)

    @check_spack_env
    def install_packages(self, jobs: int, signed=True, fresh=False, debug=False, test=None):
        """Install all spack packages of the active environment.

        Args:
            jobs (int): number of parallel build jobs.
            signed (bool): verify package signatures when True.
            fresh (bool): use fresh concretization of the dependencies when True.
            debug (bool): run spack in debug mode.
            test: which test dependencies should be included ('root' or 'all').
        Returns:
            The completed install command (its output is also logged to .generate_cache.log).
        Raises:
            NoSpackEnvironmentException: If the spack environment is not set up.
        """
        signed_flag = '' if signed else '--no-check-signature'
        fresh_flag = '--fresh' if fresh else ''
        debug_flag = '--debug' if debug else ''
        test_flag = f'--test {test}' if test else ''
        install_result = run_command(
            "bash", "-c",
            f'{self.spack_command_on_env} && spack {debug_flag} install -v {signed_flag} -j {jobs} {fresh_flag} {test_flag}',
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            text=True,
            logger=self.logger,
            info_msg=f"Installing spack packages for {self.spack_config.env.name}",
            exception_msg=f"Error installing spack packages for {self.spack_config.env.name}",
            exception=SpackInstallPackagesException)
        log_command(install_result, str(Path(os.getcwd()).resolve() / ".generate_cache.log"))
        return install_result

    def install_spack(self, spack_version=f'v{SPACK_VERSION}', spack_repo='https://github.com/spack/spack',
                      bashrc_path=os.path.expanduser("~/.bashrc")):
        """Install spack itself: clone the repo, wire it into .bashrc, configure upstreams.

        Args:
            spack_version (str): spack git tag/branch to check out (e.g. ``v0.23.0``).
            spack_repo (str): Git path to the Spack repository.
            bashrc_path (str): Path to the .bashrc file.
        """
        try:
            user = os.getlogin()
        except OSError:
            # No controlling terminal (e.g. CI containers): skip the chown step below.
            user = None

        self.logger.info(f"Starting to install Spack into {self.spack_dir} from branch {spack_version}")
        if not self.spack_dir.exists():
            run_command(
                "git", "clone", "--depth", "1",
                "-c", "advice.detachedHead=false",
                "-c", "feature.manyFiles=true",
                "--branch", spack_version, spack_repo, self.spack_dir,
                check=True, logger=self.logger)
            self.logger.debug("Cloned spack")
        else:
            self.logger.debug("Spack already cloned.")

        # Ensure the file exists before opening it for append.
        if not os.path.exists(bashrc_path):
            open(bashrc_path, "w").close()
        # Add spack setup commands to .bashrc so future shells find spack.
        with open(bashrc_path, "a") as bashrc:
            bashrc.write(f'export PATH="{self.spack_dir}/bin:$PATH"\n')
            bashrc.write(f"source {self.spack_dir / 'share' / 'spack' / 'setup-env.sh'}\n")
        self.logger.info("Added Spack PATH to .bashrc")
        if user:
            run_command("chown", "-R", f"{user}:{user}", self.spack_dir, check=True, logger=self.logger,
                        info_msg='Adding permissions to the logged in user')
        self.logger.info("Spack install completed")
        if self.spack_config.use_spack_global is True:
            # Restart the bash only if spack is used globally.
            self.logger.info('Restarting bash')
            run_command("bash", "-c", f"source {bashrc_path}", check=True, logger=self.logger,
                        info_msg='Restart bash')
            os.system("exec bash")
        # Configure upstream Spack instance if specified.
        if self.spack_config.upstream_instance:
            search_path = os.path.join(self.spack_config.upstream_instance, 'spack', 'opt', 'spack', '**',
                                       '.spack-db')
            spack_db_dirs = glob.glob(search_path, recursive=True)
            upstream_prefixes = [os.path.dirname(db_dir) for db_dir in spack_db_dirs]
            for prefix in upstream_prefixes:
                self.config(SpackConfigCommand.ADD, f':upstream-spack-instance:install_tree:{prefix}')
            self.logger.info("Added upstream spack instance")
import os

from dedal.build_cache.BuildCacheManager import BuildCacheManager
from dedal.configuration.SpackConfig import SpackConfig
from dedal.logger.logger_builder import get_logger
from dedal.spack_factory.SpackOperation import SpackOperation
from dedal.utils.utils import copy_file
from dedal.wrapper.spack_wrapper import check_spack_env


class SpackOperationCreateCache(SpackOperation):
    """Spack operation that populates the concretization cache and the binary build cache."""

    def __init__(self, spack_config: SpackConfig = SpackConfig()):
        super().__init__(spack_config, logger=get_logger(__name__))
        getenv = os.environ.get
        # Two OCI-backed caches: one for spack.lock files, one for built binaries.
        self.cache_dependency = BuildCacheManager(getenv('CONCRETIZE_OCI_HOST'),
                                                  getenv('CONCRETIZE_OCI_PROJECT'),
                                                  getenv('CONCRETIZE_OCI_USERNAME'),
                                                  getenv('CONCRETIZE_OCI_PASSWORD'),
                                                  cache_version=spack_config.cache_version_concretize)
        self.build_cache = BuildCacheManager(getenv('BUILDCACHE_OCI_HOST'),
                                             getenv('BUILDCACHE_OCI_PROJECT'),
                                             getenv('BUILDCACHE_OCI_USERNAME'),
                                             getenv('BUILDCACHE_OCI_PASSWORD'),
                                             cache_version=spack_config.cache_version_build)

    @check_spack_env
    def concretize_spack_env(self, test=None):
        """Concretize the environment, then upload the resulting spack.lock to the OCI cache.

        Raises:
            NoSpackEnvironmentException: If the spack environment is not set up.
        """
        super().concretize_spack_env(force=True, test=test)
        lock_file = self.spack_config.env.path / self.spack_config.env.name / 'spack.lock'
        copy_file(lock_file, self.spack_config.concretization_dir, logger=self.logger)
        self.cache_dependency.upload(self.spack_config.concretization_dir,
                                     update_cache=self.spack_config.update_cache)
        self.logger.info(f'Created new spack concretization for create cache: {self.spack_config.env.name}')

    @check_spack_env
    def install_packages(self, jobs: int = 2, debug=False, test=None):
        """Install all packages and push the produced binaries to the OCI build cache.

        Signing is enabled exactly when a GPG configuration is present.

        Raises:
            NoSpackEnvironmentException: If the spack environment is not set up.
        """
        signed = self.spack_config.gpg is not None and bool(self.spack_config.gpg)
        if signed:
            self.create_gpg_keys()
        self.add_mirror('local_cache',
                        str(self.spack_config.buildcache_dir),
                        signed=signed,
                        autopush=signed,
                        global_mirror=False)
        self.logger.info(f'Added mirror for {self.spack_config.env.name}')
        super().install_packages(jobs=jobs, signed=signed, debug=debug, fresh=True, test=test)
        self.logger.info(f'Installed spack packages for {self.spack_config.env.name}')
        self.build_cache.upload(self.spack_config.buildcache_dir,
                                update_cache=self.spack_config.update_cache)
        self.logger.info(f'Pushed spack packages for {self.spack_config.env.name}')
class SpackOperationCreator:
    """Factory selecting the SpackOperation flavour that matches a configuration."""

    @staticmethod
    def get_spack_operator(spack_config: SpackConfig = None, use_cache: bool = False) -> SpackOperation:
        """Return the operation matching *spack_config*.

        No config -> plain operation with defaults; no cache dirs -> plain operation;
        both cache dirs -> cache creator (or cache consumer when *use_cache* is True);
        otherwise fall back to a plain operation with a default configuration.
        """
        if spack_config is None:
            return SpackOperation()
        if spack_config.concretization_dir is None and spack_config.buildcache_dir is None:
            return SpackOperation(spack_config)
        if spack_config.concretization_dir and spack_config.buildcache_dir:
            return SpackOperationUseCache(spack_config) if use_cache else SpackOperationCreateCache(spack_config)
        # Exactly one cache dir is set: mirrors the original fallback behaviour.
        return SpackOperation(SpackConfig())
+ """ + + def __init__(self, spack_config: SpackConfig = SpackConfig()): + super().__init__(spack_config, logger=get_logger(__name__)) + self.cache_dependency = BuildCacheManager(os.environ.get('CONCRETIZE_OCI_HOST'), + os.environ.get('CONCRETIZE_OCI_PROJECT'), + os.environ.get('CONCRETIZE_OCI_USERNAME'), + os.environ.get('CONCRETIZE_OCI_PASSWORD'), + cache_version=spack_config.cache_version_concretize) + self.build_cache = BuildCacheManager(os.environ.get('BUILDCACHE_OCI_HOST'), + os.environ.get('BUILDCACHE_OCI_PROJECT'), + os.environ.get('BUILDCACHE_OCI_USERNAME'), + os.environ.get('BUILDCACHE_OCI_PASSWORD'), + cache_version=spack_config.cache_version_build) + + def setup_spack_env(self) -> None: + """Set up the spack environment for using the cache. + Downloads the build cache, adds the public key to trusted keys, + and adds the build cache mirror. + Raises: + NoSpackEnvironmentException: If the spack environment is not set up. + """ + super().setup_spack_env() + # Download concretization cache from OCI Registry + self.cache_dependency.download(self.spack_config.concretization_dir) + # Download build cache from OCI Registry and add public key to trusted keys + self.build_cache.download(self.spack_config.buildcache_dir) + cached_public_key = self.build_cache.get_public_key_from_cache(str(self.spack_config.buildcache_dir)) + signed = cached_public_key is not None + if signed: + self.trust_gpg_key(cached_public_key) + # Add build cache mirror + self.add_mirror('local_cache', + str(self.spack_config.buildcache_dir), + signed=signed, + autopush=True, + global_mirror=False) + + @check_spack_env + def concretize_spack_env(self, test=None): + """Concretization step for spack environment for using the concretization cache (spack.lock file). + Downloads the concretization cache and moves it to the spack environment's folder + Raises: + NoSpackEnvironmentException: If the spack environment is not set up. 
+ """ + concretization_redo = False + self.cache_dependency.download(self.spack_config.concretization_dir) + if file_exists_and_not_empty(self.spack_config.concretization_dir / 'spack.lock'): + concretization_file_path = self.env_path / 'spack.lock' + copy_file(self.spack_config.concretization_dir / 'spack.lock', self.env_path) + # redo the concretization step if spack.lock file was not downloaded from the cache + if not file_exists_and_not_empty(concretization_file_path): + super().concretize_spack_env(force=True, test=test) + concretization_redo = True + else: + # redo the concretization step if spack.lock file was not downloaded from the cache + super().concretize_spack_env(force=True, test=test) + concretization_redo = True + return concretization_redo + + @check_spack_env + def install_packages(self, jobs: int, signed=True, debug=False, test=None): + """Installation step for spack environment for using the binary caches. + + Raises: + NoSpackEnvironmentException: If the spack environment is not set up. 
+ """ + signed = '' if signed else '--no-check-signature' + debug = '--debug' if debug else '' + test = f'--test {test}' if test else '' + install_result = run_command("bash", "-c", + f'{self.spack_command_on_env} && spack {debug} install -v --reuse {signed} -j {jobs} {test}', + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + logger=self.logger, + info_msg=f"Installing spack packages for {self.spack_config.env.name}", + exception_msg=f"Error installing spack packages for {self.spack_config.env.name}", + exception=SpackInstallPackagesException) + log_command(install_result, str(Path(os.getcwd()).resolve() / ".generate_cache.log")) + return install_result diff --git a/dedal/spack_factory/__init__.py b/dedal/spack_factory/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/tests/integration_tests/__init__.py b/dedal/tests/integration_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/tests/integration_tests/spack_create_cache_test.py b/dedal/tests/integration_tests/spack_create_cache_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2ae70502e0db3548ba08ded83645bec7957ea773 --- /dev/null +++ b/dedal/tests/integration_tests/spack_create_cache_test.py @@ -0,0 +1,45 @@ +from pathlib import Path + +import pytest + +from dedal.configuration.GpgConfig import GpgConfig +from dedal.configuration.SpackConfig import SpackConfig + +from dedal.model.SpackDescriptor import SpackDescriptor +from dedal.spack_factory.SpackOperationCreateCache import SpackOperationCreateCache +from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator +from dedal.tests.testing_variables import test_spack_env_git, ebrains_spack_builds_git + +""" +Before running those tests, the repositories where the caching is stored must be cleared after each run. 
@pytest.mark.skip(
    reason="Skipping until an OCI registry which supports via API deletion; Clean up for OCI registry repo must be added before this test.")
def test_spack_create_cache_concretization(tmp_path):
    # Both cache dirs are set, so the factory must yield the cache-creating operation.
    base_dir = tmp_path
    env = SpackDescriptor('test-spack-env', base_dir, test_spack_env_git)
    repo = SpackDescriptor('ebrains-spack-builds', base_dir, ebrains_spack_builds_git)
    gpg = GpgConfig(gpg_name='test-gpg', gpg_mail='test@test.com')
    spack_config = SpackConfig(env=env, install_dir=base_dir,
                               concretization_dir=base_dir / 'concretization',
                               buildcache_dir=base_dir / 'buildcache',
                               gpg=gpg)
    spack_config.add_repo(repo)
    operation = SpackOperationCreator.get_spack_operator(spack_config)
    assert isinstance(operation, SpackOperationCreateCache)
    operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc')))
    operation.setup_spack_env()
    operation.concretize_spack_env()
    # A successful upload leaves at least one tag in the concretization cache.
    assert len(operation.cache_dependency.list_tags()) > 0
    return operation


@pytest.mark.skip(
    reason="Skipping until an OCI registry which supports via API deletion; Clean up for OCI registry repo must be added before this test.")
def test_spack_create_cache_installation(tmp_path):
    # Reuses the concretization test as a fixture, then pushes binaries.
    operation = test_spack_create_cache_concretization(tmp_path)
    operation.install_packages()
    assert len(operation.build_cache.list_tags()) > 0
from pathlib import Path

from dedal.configuration.SpackConfig import SpackConfig
from dedal.model.SpackDescriptor import SpackDescriptor
from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator
from dedal.spack_factory.SpackOperationUseCache import SpackOperationUseCache
from dedal.utils.utils import file_exists_and_not_empty, count_files_in_folder
# NOTE(review): sibling test modules import these URLs from dedal.tests.testing_variables;
# confirm that dedal.utils.variables is the intended module here.
from dedal.utils.variables import test_spack_env_git, ebrains_spack_builds_git


def test_spack_from_cache_setup(tmp_path):
    # A config with both cache dirs plus use_cache=True must yield the cache consumer.
    base_dir = tmp_path
    env = SpackDescriptor('test-spack-env', base_dir, test_spack_env_git)
    repo = SpackDescriptor('ebrains-spack-builds', base_dir, ebrains_spack_builds_git)
    concretization_dir = base_dir / 'concretize'
    buildcache_dir = base_dir / 'buildcache'
    config = SpackConfig(env, install_dir=base_dir,
                         concretization_dir=concretization_dir,
                         buildcache_dir=buildcache_dir)
    config.add_repo(repo)
    operation = SpackOperationCreator.get_spack_operator(config, use_cache=True)
    assert isinstance(operation, SpackOperationUseCache)
    operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc')))
    operation.setup_spack_env()
    num_tags = len(operation.build_cache.list_tags())
    # The concretization cache must have produced a non-empty spack.lock ...
    assert file_exists_and_not_empty(concretization_dir / 'spack.lock') == True
    # ... and every build-cache tag must have been downloaded.
    assert count_files_in_folder(buildcache_dir) == num_tags
    assert 'local_cache' in operation.mirror_list()
    return operation


def test_spack_from_cache_concretize(tmp_path):
    operation = test_spack_from_cache_setup(tmp_path)
    # A cache hit means concretization is NOT redone.
    assert operation.concretize_spack_env() == False
    assert file_exists_and_not_empty(operation.env_path / 'spack.lock') == True
    return operation


def test_spack_from_cache_install_1(tmp_path):
    operation = test_spack_from_cache_concretize(tmp_path)
    result = operation.install_packages(jobs=2, signed=True, debug=False)
    assert result.returncode == 0


def test_spack_from_cache_install_2(tmp_path):
    operation = test_spack_from_cache_concretize(tmp_path)
    result = operation.install_packages(jobs=2, signed=True, debug=False, test='root')
    assert result.returncode == 0
spack_operation.setup_spack_env() + assert spack_operation.spack_repo_exists(env.name) == False + + +def test_spack_from_scratch_setup_1(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + assert spack_operation.spack_repo_exists(env.name) == False + + +def test_spack_reindex(tmp_path): + install_dir = tmp_path + config = SpackConfig(install_dir=install_dir) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.reindex() + +@pytest.mark.skip(reason="It does nopt work on bare metal operating systems") +def test_spack_spec(tmp_path): + install_dir = tmp_path + config = SpackConfig(install_dir=install_dir) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + assert spack_operation.spec_pacakge('aida') == 'aida@3.2.1' + + +def test_spack_from_scratch_setup_2(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + repo = env + config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir) + config.add_repo(repo) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + assert 
spack_operation.spack_repo_exists(env.name) == True + + +def test_spack_from_scratch_setup_3(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('new_env1', install_dir) + repo = env + config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir) + config.add_repo(repo) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + with pytest.raises(BashCommandException): + spack_operation.setup_spack_env() + + +def test_spack_from_scratch_setup_4(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('new_env2', install_dir) + config = SpackConfig(env=env, install_dir=install_dir) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + assert spack_operation.spack_env_exists() == True + + +def test_spack_not_a_valid_repo(): + env = SpackDescriptor('ebrains-spack-builds', Path(), None) + repo = env + config = SpackConfig(env=env, system_name='ebrainslab') + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + with pytest.raises(BashCommandException): + spack_operation.add_spack_repo(repo.path, repo.name) + + +@pytest.mark.skip( + reason="Skipping the concretization step because it may freeze when numerous Spack packages are added to the environment.") +def test_spack_from_scratch_concretize_1(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + repo = env + config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir) + config.add_repo(repo) + config.add_repo(repo) + spack_operation = 
SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=True) + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + + +@pytest.mark.skip( + reason="Skipping the concretization step because it may freeze when numerous Spack packages are added to the environment.") +def test_spack_from_scratch_concretize_2(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + repo = env + config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir) + config.add_repo(repo) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=False) + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + + +def test_spack_from_scratch_concretize_3(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + repo = env + config = SpackConfig(env=env, system_name='ebrainslab', install_dir=install_dir) + config.add_repo(repo) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert 
file_exists_and_not_empty(concretization_file_path) == False + + +def test_spack_from_scratch_concretize_4(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + config = SpackConfig(env=env, install_dir=install_dir) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=False) + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + + +def test_spack_from_scratch_concretize_5(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + config = SpackConfig(env=env, install_dir=install_dir) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=True) + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + + +def test_spack_from_scratch_concretize_6(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + config = SpackConfig(env=env, install_dir=install_dir) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=False) + concretization_file_path = 
spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + + +def test_spack_from_scratch_concretize_7(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + config = SpackConfig(env=env, install_dir=install_dir) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=True) + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + + def test_spack_from_scratch_concretize_8(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + config = SpackConfig(env=env, install_dir=install_dir) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=True, test='root') + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + + +def test_spack_from_scratch_install(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + config = SpackConfig(env=env, install_dir=install_dir) + config.add_repo(repo) + spack_operation = 
SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=True) + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + install_result = spack_operation.install_packages(jobs=2, signed=False, fresh=True, debug=False) + assert install_result.returncode == 0 + + +def test_spack_from_scratch_install_2(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + config = SpackConfig(env=env, install_dir=install_dir) + config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + spack_operation.concretize_spack_env(force=True, test='root') + concretization_file_path = spack_operation.env_path / 'spack.lock' + assert file_exists_and_not_empty(concretization_file_path) == True + install_result = spack_operation.install_packages(jobs=2, signed=False, fresh=True, debug=False, test='root') + assert install_result.returncode == 0 + + +def test_spack_mirror_env(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + spack_config = SpackConfig(env, install_dir=install_dir) + spack_config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(spack_config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + 
spack_operation.setup_spack_env() + mirror_dir = tmp_path / Path('./mirror_dir') + mirror_name = 'mirror_tests' + spack_operation.add_mirror(mirror_name=mirror_name, mirror_path=mirror_dir) + assert mirror_name in spack_operation.mirror_list() + spack_operation.remove_mirror(mirror_name=mirror_name) + assert mirror_name not in spack_operation.mirror_list() + + +def test_spack_mirror_global(tmp_path): + install_dir = tmp_path + spack_config = SpackConfig(install_dir=install_dir) + spack_operation = SpackOperationCreator.get_spack_operator(spack_config) + assert isinstance(spack_operation, SpackOperation) + spack_operation.install_spack(bashrc_path=str(tmp_path / Path('.bashrc'))) + spack_operation.setup_spack_env() + mirror_dir = tmp_path / Path('./mirror_dir') + mirror_name = 'mirror_test' + spack_operation.add_mirror(mirror_name=mirror_name, mirror_path=mirror_dir) + assert mirror_name in spack_operation.mirror_list() + spack_operation.remove_mirror(mirror_name=mirror_name) + assert mirror_name not in spack_operation.mirror_list() diff --git a/dedal/tests/integration_tests/spack_install_test.py b/dedal/tests/integration_tests/spack_install_test.py new file mode 100644 index 0000000000000000000000000000000000000000..0c6cf1273cf57b5ccb82e4c4cd79fc913aebeaaf --- /dev/null +++ b/dedal/tests/integration_tests/spack_install_test.py @@ -0,0 +1,12 @@ +from dedal.configuration.SpackConfig import SpackConfig +from dedal.spack_factory.SpackOperation import SpackOperation +from dedal.tests.testing_variables import SPACK_VERSION + + +def test_spack_install_scratch(tmp_path): + install_dir = tmp_path + spack_config = SpackConfig(install_dir=install_dir) + spack_operation = SpackOperation(spack_config) + spack_operation.install_spack(spack_version=f'v{SPACK_VERSION}') + installed_spack_version = spack_operation.get_spack_installed_version() + assert SPACK_VERSION == installed_spack_version diff --git a/dedal/tests/integration_tests/spack_operation_creator_test.py 
b/dedal/tests/integration_tests/spack_operation_creator_test.py new file mode 100644 index 0000000000000000000000000000000000000000..226184b00a5c7136c97f5ef12761ac44c71286a1 --- /dev/null +++ b/dedal/tests/integration_tests/spack_operation_creator_test.py @@ -0,0 +1,50 @@ +from dedal.spack_factory.SpackOperationCreateCache import SpackOperationCreateCache + +from dedal.configuration.SpackConfig import SpackConfig +from dedal.model.SpackDescriptor import SpackDescriptor +from dedal.spack_factory.SpackOperation import SpackOperation +from dedal.spack_factory.SpackOperationCreator import SpackOperationCreator +from dedal.spack_factory.SpackOperationUseCache import SpackOperationUseCache +from dedal.tests.testing_variables import ebrains_spack_builds_git, test_spack_env_git + + +def test_spack_creator_scratch_1(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + spack_config = SpackConfig(env, install_dir=install_dir) + spack_config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(spack_config) + assert isinstance(spack_operation, SpackOperation) + + +def test_spack_creator_scratch_2(tmp_path): + spack_config = None + spack_operation = SpackOperationCreator.get_spack_operator(spack_config) + assert isinstance(spack_operation, SpackOperation) + + +def test_spack_creator_scratch_3(): + spack_config = SpackConfig() + spack_operation = SpackOperationCreator.get_spack_operator(spack_config) + assert isinstance(spack_operation, SpackOperation) + + +def test_spack_creator_create_cache(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + spack_config = SpackConfig(env, install_dir=install_dir, concretization_dir=install_dir, buildcache_dir=install_dir) + 
spack_config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(spack_config) + assert isinstance(spack_operation, SpackOperationCreateCache) + + +def test_spack_creator_use_cache(tmp_path): + install_dir = tmp_path + env = SpackDescriptor('test-spack-env', install_dir, test_spack_env_git) + repo = SpackDescriptor('ebrains-spack-builds', install_dir, ebrains_spack_builds_git) + spack_config = SpackConfig(env, install_dir=install_dir, concretization_dir=install_dir, buildcache_dir=install_dir) + spack_config.add_repo(repo) + spack_operation = SpackOperationCreator.get_spack_operator(spack_config, use_cache=True) + assert isinstance(spack_operation, SpackOperationUseCache) diff --git a/dedal/tests/integration_tests/utils_test.py b/dedal/tests/integration_tests/utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7923e175d0a37b54ac0d0edfca0495c63ebdcb55 --- /dev/null +++ b/dedal/tests/integration_tests/utils_test.py @@ -0,0 +1,50 @@ +from dedal.utils.utils import set_bashrc_variable + + +def test_add_new_variable(tmp_path): + var_name = 'TEST_VAR' + value = 'test_value' + bashrc_path = tmp_path / ".bashrc" + bashrc_path.touch() + set_bashrc_variable(var_name, value, bashrc_path=str(bashrc_path)) + content = bashrc_path.read_text() + assert f'export {var_name}={value}' in content + + +def test_update_existing_variable(tmp_path): + var_name = 'TEST_VAR' + value = 'test_value' + updated_value = 'new_value' + bashrc_path = tmp_path / ".bashrc" + bashrc_path.write_text(f'export {var_name}={value}\n') + set_bashrc_variable(var_name, updated_value, bashrc_path=str(bashrc_path), update_variable=True) + content = bashrc_path.read_text() + assert f'export {var_name}={updated_value}' in content + assert f'export {var_name}={value}' not in content + + +def test_do_not_update_existing_variable(tmp_path): + var_name = 'TEST_VAR' + value = 'test_value' + new_value = 'new_value' + bashrc_path = tmp_path / ".bashrc" + 
bashrc_path.write_text(f'export {var_name}={value}\n') + + set_bashrc_variable(var_name, new_value, bashrc_path=str(bashrc_path), update_variable=False) + + content = bashrc_path.read_text() + assert f'export {var_name}={value}' in content + assert f'export {var_name}={new_value}' not in content + + +def test_add_variable_with_special_characters(tmp_path): + var_name = 'TEST_VAR' + value = 'value_with_$pecial_chars' + escaped_value = 'value_with_\\$pecial_chars' + bashrc_path = tmp_path / ".bashrc" + bashrc_path.touch() + + set_bashrc_variable(var_name, value, bashrc_path=str(bashrc_path)) + + content = bashrc_path.read_text() + assert f'export {var_name}={escaped_value}' in content diff --git a/dedal/tests/testing_variables.py b/dedal/tests/testing_variables.py new file mode 100644 index 0000000000000000000000000000000000000000..e441a2864497edbca5e1f50619befe0c1a90863e --- /dev/null +++ b/dedal/tests/testing_variables.py @@ -0,0 +1,6 @@ +import os + +ebrains_spack_builds_git = 'https://gitlab.ebrains.eu/ri/tech-hub/platform/esd/ebrains-spack-builds.git' +SPACK_VERSION = "0.23.0" +SPACK_ENV_ACCESS_TOKEN = os.getenv("SPACK_ENV_ACCESS_TOKEN") +test_spack_env_git = f'https://oauth2:{SPACK_ENV_ACCESS_TOKEN}@gitlab.ebrains.eu/ri/projects-and-initiatives/virtualbraintwin/tools/test-spack-env.git' diff --git a/dedal/tests/unit_tests/__init__.py b/dedal/tests/unit_tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/dedal/tests/unit_tests/build_cache_manager_test.py b/dedal/tests/unit_tests/build_cache_manager_test.py new file mode 100644 index 0000000000000000000000000000000000000000..6bce09480c4c70f41f6435291bdcbede8bd5bd6b --- /dev/null +++ b/dedal/tests/unit_tests/build_cache_manager_test.py @@ -0,0 +1,154 @@ +import pytest +from _pytest.fixtures import fixture + +from dedal.build_cache.BuildCacheManager import BuildCacheManager + + +class TestBuildCacheManager: + + 
@fixture(scope="function") + def mock_build_cache_manager(self, mocker): + mocker.patch("dedal.build_cache.BuildCacheManager.get_logger") + return BuildCacheManager("TEST_HOST", "TEST_PROJECT", "TEST_USERNAME", "TEST_PASSWORD", "TEST_VERSION") + + def test_get_public_key_from_cache_success_path(self, mock_build_cache_manager, tmp_path): + + # Arrange + build_cache_dir = tmp_path / "build_cache" + pgp_folder = build_cache_dir / "project" / "_pgp" + pgp_folder.mkdir(parents=True) + key_file = pgp_folder / "key.pub" + key_file.write_text("public key content") + + # Act + result = mock_build_cache_manager.get_public_key_from_cache(str(build_cache_dir)) + + # Assert + assert result == str(key_file) + + @pytest.mark.parametrize("test_id, num_pgp_folders, num_key_files, expected_log_message", [ + ("more_than_one_gpg_folder", 2, 1, + "More than one PGP folders found in the build cache: %s, using the first one in the list: %s"), + ("more_than_one_key_file", 1, 2, + "More than one PGP key files found in the build cache: %s, using the first one in the list: %s"), + ]) + def test_get_public_key_from_cache_multiple_files_or_folders(self, mock_build_cache_manager, test_id, + tmp_path, num_pgp_folders, + num_key_files, expected_log_message): + + # Arrange + pgp_folders = [] + key_files = [] + build_cache_dir = tmp_path / "build_cache" + for i in range(num_pgp_folders): + pgp_folder = build_cache_dir / f"project{i}" / "_pgp" + pgp_folders.append(str(pgp_folder)) + pgp_folder.mkdir(parents=True) + for j in range(num_key_files): + key_file = pgp_folder / f"key{j}.pub" + key_files.append(str(key_file)) + key_file.write_text(f"public key {j} content") + + # Act + result = mock_build_cache_manager.get_public_key_from_cache(str(build_cache_dir)) + + # Assert + # Cannot assure the order in which the OS returns the files, + # hence check if the result is in the expected list + assert result in [str(build_cache_dir / "project0" / "_pgp" / "key0.pub"), + str(build_cache_dir / "project0" / 
"_pgp" / "key1.pub"), + str(build_cache_dir / "project1" / "_pgp" / "key0.pub")] + assert mock_build_cache_manager._logger.warning.call_args[0][0] == expected_log_message + assert set(mock_build_cache_manager._logger.warning.call_args[0][1]) == set( + pgp_folders) if test_id == "more_than_one_gpg_folder" else set(key_files) + assert mock_build_cache_manager._logger.warning.call_args[0][ + 2] in pgp_folders if test_id == "more_than_one_gpg_folder" else key_files + + @pytest.mark.parametrize("build_cache_dir, expected_log_message", [ + (None, 'Build cache directory does not exist!'), + ("non_existent_dir", 'Build cache directory does not exist!'), + ]) + def test_get_public_key_from_cache_no_build_cache(self, mock_build_cache_manager, build_cache_dir, + expected_log_message, tmp_path): + + # Arrange + build_cache_dir = str(tmp_path / build_cache_dir) if build_cache_dir else None + + # Act + result = mock_build_cache_manager.get_public_key_from_cache(build_cache_dir) + + # Assert + assert result is None + mock_build_cache_manager._logger.warning.assert_called_once_with(expected_log_message) + + # Assert + assert result is None + mock_build_cache_manager._logger.warning.assert_called_once_with(expected_log_message) + + @pytest.mark.parametrize("build_cache_dir, expected_log_message", [ + ("non_existent_dir", "No _pgp folder found in the build cache!"), + ]) + def test_get_public_key_from_cache_no_pgp_folder(self, mock_build_cache_manager, build_cache_dir, + expected_log_message, tmp_path): + + # Arrange + if build_cache_dir == "non_existent_dir": + build_cache_dir = tmp_path / build_cache_dir + build_cache_dir.mkdir(parents=True) + + # Act + result = mock_build_cache_manager.get_public_key_from_cache(build_cache_dir) + + # Assert + assert result is None + mock_build_cache_manager._logger.warning.assert_called_once_with(expected_log_message) + + # Assert + assert result is None + mock_build_cache_manager._logger.warning.assert_called_once_with(expected_log_message) + + 
def test_get_public_key_from_cache_empty_pgp_folder(self, mock_build_cache_manager, tmp_path): + + # Arrange + build_cache_dir = tmp_path / "build_cache" + pgp_folder = build_cache_dir / "project" / "_pgp" + pgp_folder.mkdir(parents=True) + + # Act + result = mock_build_cache_manager.get_public_key_from_cache(str(build_cache_dir)) + + # Assert + assert result is None + mock_build_cache_manager._logger.warning.assert_called_once_with("No PGP key files found in the build cache!") + + @pytest.mark.parametrize("items, expected_log_message", [ + (["item1", "item2"], "test message item1 item2 item1"), + (["item1", "item2", "item3"], "test message item1 item2 item3 item1"), + ]) + def test_log_warning_if_needed_multiple_items(self, mock_build_cache_manager, items, expected_log_message): + # Test ID: multiple_items + + # Arrange + warn_message = "test message" + + # Act + mock_build_cache_manager._BuildCacheManager__log_warning_if_needed(warn_message, items) + + # Assert + mock_build_cache_manager._logger.warning.assert_called_once_with(warn_message, items, items[0]) + + @pytest.mark.parametrize("items", [ + [], + ["item1"], + ]) + def test_log_warning_if_needed_no_warning(self, mock_build_cache_manager, items): + # Test ID: no_warning + + # Arrange + warn_message = "test message" + + # Act + mock_build_cache_manager._BuildCacheManager__log_warning_if_needed(warn_message, items) + + # Assert + mock_build_cache_manager._logger.warning.assert_not_called() diff --git a/dedal/tests/unit_tests/spack_manager_api_test.py b/dedal/tests/unit_tests/spack_manager_api_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2037a502565d47ea19498aee7802e0ea6c30e73f --- /dev/null +++ b/dedal/tests/unit_tests/spack_manager_api_test.py @@ -0,0 +1,186 @@ +import os + +import pytest +from unittest.mock import patch, MagicMock +from click.testing import CliRunner +from dedal.cli.spack_manager_api import show_config, clear_config, install_spack, add_spack_repo, 
install_packages, \ + setup_spack_env, concretize, set_config +from dedal.enum.SpackViewEnum import SpackViewEnum +from dedal.model.SpackDescriptor import SpackDescriptor + + +@pytest.fixture +def runner(): + return CliRunner() + + +@pytest.fixture +def mocked_session_path(): + return '/mocked/tmp/session.json' + + +@pytest.fixture +def mock_spack_manager(): + mock_spack_manager = MagicMock() + mock_spack_manager.install_spack = MagicMock() + mock_spack_manager.add_spack_repo = MagicMock() + mock_spack_manager.setup_spack_env = MagicMock() + mock_spack_manager.concretize_spack_env = MagicMock() + mock_spack_manager.install_packages = MagicMock() + return mock_spack_manager + + +@pytest.fixture +def mock_load_config(): + with patch('dedal.cli.spack_manager_api.load_config') as mock_load: + yield mock_load + + +@pytest.fixture +def mock_save_config(): + with patch('dedal.cli.spack_manager_api.save_config') as mock_save: + yield mock_save + + +@pytest.fixture +def mock_clear_config(): + with patch('dedal.cli.spack_manager_api.clear_config') as mock_clear: + yield mock_clear + + +def test_show_config_no_config(runner, mock_load_config): + mock_load_config.return_value = None + result = runner.invoke(show_config) + assert 'No configuration set. Use `set-config` first.' in result.output + + +def test_show_config_with_config(runner, mock_load_config): + """Test the show_config command when config is present.""" + mock_load_config.return_value = {"key": "value"} + result = runner.invoke(show_config) + assert result.exit_code == 0 + assert '"key": "value"' in result.output + + +def test_clear_config(runner, mock_clear_config): + """Test the clear_config command.""" + with patch('os.path.exists', return_value=True), patch('os.remove') as mock_remove: + result = runner.invoke(clear_config) + assert 'Configuration cleared!' 
in result.output + mock_remove.assert_called_once() + + +def test_install_spack_no_context_1(runner, mock_spack_manager): + """Test install_spack with no context, using SpackManager.""" + with patch('dedal.cli.spack_manager_api.SpackManager', return_value=mock_spack_manager): + result = runner.invoke(install_spack, ['--spack_version', '0.24.0']) + mock_spack_manager.install_spack.assert_called_once_with('0.24.0', os.path.expanduser("~/.bashrc")) + assert result.exit_code == 0 + + +def test_install_spack_no_context_2(runner, mock_spack_manager): + """Test install_spack with no context, using SpackManager and the default value for spack_version.""" + with patch('dedal.cli.spack_manager_api.SpackManager', return_value=mock_spack_manager): + result = runner.invoke(install_spack) + mock_spack_manager.install_spack.assert_called_once_with('0.23.0', os.path.expanduser("~/.bashrc")) + assert result.exit_code == 0 + + +def test_install_spack_with_mocked_context_1(runner, mock_spack_manager): + """Test install_spack with a mocked context, using ctx.obj as SpackManager.""" + result = runner.invoke(install_spack, ['--spack_version', '0.24.0', '--bashrc_path', '/home/.bahsrc'], obj=mock_spack_manager) + mock_spack_manager.install_spack.assert_called_once_with('0.24.0', '/home/.bahsrc') + assert result.exit_code == 0 + + +def test_install_spack_with_mocked_context_2(runner, mock_spack_manager): + """Test install_spack with a mocked context, using ctx.obj as SpackManager and the default value for spack_version.""" + result = runner.invoke(install_spack, obj=mock_spack_manager) + mock_spack_manager.install_spack.assert_called_once_with('0.23.0', os.path.expanduser("~/.bashrc")) + assert result.exit_code == 0 + + +def test_setup_spack_env(runner, mock_spack_manager): + """Test setup_spack_env with a mocked context, using ctx.obj as SpackManager.""" + result = runner.invoke(setup_spack_env, obj=mock_spack_manager) + mock_spack_manager.setup_spack_env.assert_called_once_with() + 
assert result.exit_code == 0 + + +def test_concretize(runner, mock_spack_manager): + """Test install_spack with a mocked context, using ctx.obj as SpackManager.""" + result = runner.invoke(concretize, obj=mock_spack_manager) + mock_spack_manager.concretize_spack_env.assert_called_once_with() + assert result.exit_code == 0 + + +def test_install_packages_1(runner, mock_spack_manager): + """Test install_packages with a mocked context, using ctx.obj as SpackManager.""" + result = runner.invoke(install_packages, obj=mock_spack_manager) + mock_spack_manager.install_packages.assert_called_once_with(jobs=2) + assert result.exit_code == 0 + + +def test_install_packages(runner, mock_spack_manager): + """Test install_packages with a mocked context, using ctx.obj as SpackManager.""" + result = runner.invoke(install_packages, ['--jobs', 3], obj=mock_spack_manager) + mock_spack_manager.install_packages.assert_called_once_with(jobs=3) + assert result.exit_code == 0 + + +@patch('dedal.cli.spack_manager_api.resolve_path') +@patch('dedal.cli.spack_manager_api.SpackDescriptor') +def test_add_spack_repo(mock_spack_descriptor, mock_resolve_path, mock_load_config, mock_save_config, + mocked_session_path, runner): + """Test adding a spack repository with mocks.""" + expected_config = {'repos': [SpackDescriptor(name='test-repo')]} + repo_name = 'test-repo' + path = '/path' + git_path = 'https://example.com/repo.git' + mock_resolve_path.return_value = '/resolved/path' + mock_load_config.return_value = expected_config + mock_repo_instance = MagicMock() + mock_spack_descriptor.return_value = mock_repo_instance + + with patch('dedal.cli.spack_manager_api.SESSION_CONFIG_PATH', mocked_session_path): + result = runner.invoke(add_spack_repo, ['--repo_name', repo_name, '--path', path, '--git_path', git_path]) + + assert result.exit_code == 0 + assert 'dedal setup_spack_env must be reran after each repo is added' in result.output + mock_resolve_path.assert_called_once_with(path) + 
mock_spack_descriptor.assert_called_once_with(repo_name, '/resolved/path', git_path) + assert mock_repo_instance in expected_config['repos'] + mock_save_config.assert_called_once_with(expected_config, mocked_session_path) + + +def test_set_config(runner, mock_save_config, mocked_session_path): + """Test set_config.""" + with patch('dedal.cli.spack_manager_api.SESSION_CONFIG_PATH', mocked_session_path): + result = runner.invoke(set_config, ['--env_name', 'test', '--system_name', 'sys']) + + expected_config = { + 'use_cache': False, + 'env_name': 'test', + 'env_path': None, + 'env_git_path': None, + 'install_dir': None, + 'upstream_instance': None, + 'system_name': 'sys', + 'concretization_dir': None, + 'buildcache_dir': None, + 'gpg_name': None, + 'gpg_mail': None, + 'use_spack_global': False, + 'repos': [], + 'cache_version_concretize': 'v1', + 'cache_version_build': 'v1', + 'view': SpackViewEnum.VIEW, + 'update_cache': True, + } + + mock_save_config.assert_called_once() + saved_config, saved_path = mock_save_config.call_args[0] + assert saved_path == mocked_session_path + assert saved_config == expected_config + assert result.exit_code == 0 + assert 'Configuration saved.' 
from pathlib import Path

import pytest

from dedal.error_handling.exceptions import NoSpackEnvironmentException
from dedal.spack_factory.SpackOperationUseCache import SpackOperationUseCache


@pytest.fixture
def spack_operation_use_cache_mock(mocker):
    """Return a SpackOperationUseCache whose collaborators are all mocked.

    Patches super() so the parent class setup never runs, and replaces the
    BuildCacheManager, build_cache, spack_config and logger with MagicMocks.
    """
    super_mock = mocker.patch("dedal.spack_factory.SpackOperationUseCache.super")
    super_mock.return_value.setup_spack_env = mocker.MagicMock()
    mocker.patch("dedal.spack_factory.SpackOperationUseCache.BuildCacheManager")
    mock_spack_operation_use_cache = SpackOperationUseCache()
    mock_spack_operation_use_cache.build_cache = mocker.MagicMock()
    mock_spack_operation_use_cache.spack_config = mocker.MagicMock()
    mock_spack_operation_use_cache.spack_config.buildcache_dir = Path("path/to/buildcache")
    mock_spack_operation_use_cache.logger = mocker.MagicMock()
    return mock_spack_operation_use_cache


class TestSpackOperationUseCache:

    @pytest.mark.parametrize("test_id, signed, key_path", [
        ("key_path_exists", True, "path/to/key.gpg"),
        ("key_path_does_not_exist", False, None)])
    def test_setup_spack_env(self, mocker, spack_operation_use_cache_mock, test_id, signed, key_path):
        # Parametrized over both cache states: a GPG key present in the build
        # cache (mirror added signed) and absent (mirror added unsigned).
        super_mock = mocker.patch("dedal.spack_factory.SpackOperationUseCache.super")
        spack_operation_use_cache_mock.trust_gpg_key = mocker.MagicMock()
        spack_operation_use_cache_mock.add_mirror = mocker.MagicMock()

        # Arrange
        spack_operation_use_cache_mock.build_cache.get_public_key_from_cache.return_value = key_path
        spack_operation_use_cache_mock.trust_gpg_key.return_value = signed
        spack_operation_use_cache_mock.add_mirror.return_value = None

        # Act
        spack_operation_use_cache_mock.setup_spack_env()

        # Assert
        spack_operation_use_cache_mock.build_cache.download.assert_called_once_with(
            spack_operation_use_cache_mock.spack_config.buildcache_dir)
        spack_operation_use_cache_mock.build_cache.get_public_key_from_cache.assert_called_once_with(
            str(spack_operation_use_cache_mock.spack_config.buildcache_dir))

        # The key is trusted only when the cache actually provided one.
        if key_path:
            spack_operation_use_cache_mock.trust_gpg_key.assert_called_once_with(key_path)
        else:
            spack_operation_use_cache_mock.trust_gpg_key.assert_not_called()

        spack_operation_use_cache_mock.add_mirror.assert_called_once_with(
            'local_cache',
            str(spack_operation_use_cache_mock.spack_config.buildcache_dir),
            signed=signed,
            autopush=True,
            global_mirror=False
        )
        super_mock.return_value.setup_spack_env.assert_called_once()  # call original method

    @pytest.mark.parametrize("exception_type", [NoSpackEnvironmentException])
    def test_setup_spack_env_exceptions(self, mocker, spack_operation_use_cache_mock, exception_type):
        # Errors raised while adding the mirror must propagate to the caller.
        spack_operation_use_cache_mock.trust_gpg_key = mocker.MagicMock()
        spack_operation_use_cache_mock.add_mirror = mocker.MagicMock()

        # Arrange
        spack_operation_use_cache_mock.build_cache.get_public_key_from_cache.return_value = "path/to/key.gpg"
        spack_operation_use_cache_mock.trust_gpg_key.return_value = True
        exception = exception_type("test exception")
        spack_operation_use_cache_mock.add_mirror.side_effect = exception

        # Act & Assert
        with pytest.raises(exception_type):
            spack_operation_use_cache_mock.setup_spack_env()
import logging
import os
import subprocess

import pytest
from pathlib import Path
from unittest.mock import mock_open, patch, MagicMock
from dedal.utils.utils import clean_up, file_exists_and_not_empty, log_command, run_command, get_first_word, \
    count_files_in_folder, resolve_path, delete_file


@pytest.fixture
def temp_directories(tmp_path):
    """
    Create temporary directories with files and subdirectories for testing.
    """
    test_dirs = []

    for i in range(3):
        dir_path = tmp_path / f"test_dir_{i}"
        dir_path.mkdir()
        test_dirs.append(str(dir_path))

        # Add a file to the directory
        file_path = dir_path / f"file_{i}.txt"
        file_path.write_text(f"This is a test file in {dir_path}")

        # Add a subdirectory with a file
        sub_dir = dir_path / f"subdir_{i}"
        sub_dir.mkdir()
        sub_file = sub_dir / f"sub_file_{i}.txt"
        sub_file.write_text(f"This is a sub file in {sub_dir}")

    return test_dirs


def test_clean_up(temp_directories, mocker):
    """
    Test the clean_up function to ensure directories and contents are removed.
    """
    # Mock the logger using pytest-mock's mocker fixture
    mock_logger = mocker.MagicMock()

    # Ensure directories exist before calling clean_up
    for dir_path in temp_directories:
        assert Path(dir_path).exists()

    clean_up(temp_directories, mock_logger)

    for dir_path in temp_directories:
        assert not Path(dir_path).exists()

    for dir_path in temp_directories:
        mock_logger.info.assert_any_call(f"Removing {Path(dir_path).resolve()}")


def test_clean_up_nonexistent_dirs(mocker):
    """
    Test the clean_up function with nonexistent directories.
    """
    # Mock the logger using pytest-mock's mocker fixture
    mock_logger = mocker.MagicMock()
    nonexistent_dirs = ["nonexistent_dir_1", "nonexistent_dir_2"]

    clean_up(nonexistent_dirs, mock_logger)

    for dir_path in nonexistent_dirs:
        mock_logger.info.assert_any_call(f"{Path(dir_path).resolve()} does not exist")


# --- file_exists_and_not_empty ---------------------------------------------

def test_file_does_not_exist(tmp_path: Path):
    """A missing path must be reported as not-existing/empty."""
    non_existent_file = tmp_path / "non_existent.txt"
    assert not file_exists_and_not_empty(non_existent_file)


def test_file_exists_but_empty(tmp_path: Path):
    """A zero-byte file counts as empty."""
    empty_file = tmp_path / "empty.txt"
    # Create an empty file
    empty_file.touch()
    assert not file_exists_and_not_empty(empty_file)


def test_file_exists_and_not_empty(tmp_path: Path):
    """A file with content is reported as existing and non-empty."""
    non_empty_file = tmp_path / "non_empty.txt"
    non_empty_file.write_text("Some content")
    assert file_exists_and_not_empty(non_empty_file)


def test_log_command():
    """log_command writes stdout, a separator, then stderr to the log file."""
    results = MagicMock()
    results.stdout = "Test output"
    results.stderr = "Test error"
    mock_file = mock_open()

    with patch("builtins.open", mock_file):
        log_command(results, "logfile.log")

    mock_file.assert_called_once_with("logfile.log", "w")
    handle = mock_file()
    handle.write.assert_any_call("Test output")
    handle.write.assert_any_call("\n--- STDERR ---\n")
    handle.write.assert_any_call("Test error")


# --- run_command error handling --------------------------------------------

def test_run_command_success(mocker):
    """run_command logs the info message and forwards args to subprocess.run."""
    mock_subprocess = mocker.patch("subprocess.run", return_value=MagicMock(returncode=0))
    mock_logger = MagicMock()
    result = run_command('bash', '-c', 'echo hello', logger=mock_logger, info_msg="Running echo")
    mock_logger.info.assert_called_with("Running echo: args: ('bash', '-c', 'echo hello')")
    mock_subprocess.assert_called_once_with(('bash', '-c', 'echo hello'))
    assert result.returncode == 0


def test_run_command_not_found(mocker):
    """FileNotFoundError is swallowed and logged, not raised."""
    mocker.patch("subprocess.run", side_effect=FileNotFoundError)
    mock_logger = MagicMock()
    run_command("invalid_command", logger=mock_logger)
    mock_logger.error.assert_called_with("Command not found. Please check the command syntax.")


def test_run_command_permission_error(mocker):
    """PermissionError is swallowed and logged, not raised."""
    mocker.patch("subprocess.run", side_effect=PermissionError)
    mock_logger = MagicMock()
    run_command("restricted_command", logger=mock_logger)
    mock_logger.error.assert_called_with("Permission denied. Try running with appropriate permissions.")


def test_run_command_timeout(mocker):
    """TimeoutExpired is swallowed and logged, not raised."""
    mocker.patch("subprocess.run", side_effect=subprocess.TimeoutExpired(cmd="test", timeout=5))
    mock_logger = MagicMock()
    run_command("test", logger=mock_logger)
    mock_logger.error.assert_called_with("Command timed out. Try increasing the timeout duration.")


def test_run_command_os_error(mocker):
    """OSError is swallowed and logged with its message."""
    mocker.patch("subprocess.run", side_effect=OSError("OS Error"))
    mock_logger = MagicMock()
    run_command("test", logger=mock_logger)
    mock_logger.error.assert_called_with("OS error occurred: OS Error")


def test_run_command_unexpected_exception(mocker):
    """Any other exception is swallowed and logged as unexpected."""
    mocker.patch("subprocess.run", side_effect=Exception("Unexpected Error"))
    mock_logger = MagicMock()
    run_command("test", logger=mock_logger)
    mock_logger.error.assert_called_with("An unexpected error occurred: Unexpected Error")


def test_run_command_called_process_error(mocker):
    """CalledProcessError logs the caller-supplied exception_msg."""
    mocker.patch("subprocess.run", side_effect=subprocess.CalledProcessError(1, "test"))
    mock_logger = MagicMock()
    run_command("test", logger=mock_logger, exception_msg="Process failed")
    mock_logger.error.assert_called_with("Process failed: Command 'test' returned non-zero exit status 1.")


# --- get_first_word ----------------------------------------------------------

def test_get_first_word_basic():
    assert get_first_word("Hello world") == "Hello"


def test_get_first_word_single_word():
    assert get_first_word("word") == "word"


def test_get_first_word_leading_whitespace():
    assert get_first_word("   leading spaces") == "leading"


def test_get_first_word_empty_string():
    assert get_first_word("") == ""


def test_get_first_word_whitespace_only():
    assert get_first_word(" \t  ") == ""


def test_get_first_word_with_punctuation():
    # Punctuation is part of the word; only whitespace delimits.
    assert get_first_word("Hello, world!") == "Hello,"


def test_get_first_word_newline_delimiter():
    assert get_first_word("First line\nSecond line") == "First"


# --- count_files_in_folder ---------------------------------------------------

def test_count_files_in_folder_counts_files_only(tmp_path):
    """Counting is recursive and includes only files, not directories."""
    # create files and subdirectories
    file1 = tmp_path / "a.txt"
    file2 = tmp_path / "b.txt"
    file3 = tmp_path / "c.txt"
    subdir = tmp_path / "subfolder"
    subdir_file = subdir / "d.txt"
    file1.write_text("data1")
    file2.write_text("data2")
    file3.write_text("data3")
    subdir.mkdir()
    subdir_file.write_text("data4")
    count = count_files_in_folder(tmp_path)
    assert count == 4


def test_count_files_in_folder_empty(tmp_path):
    count = count_files_in_folder(tmp_path)
    assert count == 0


def test_count_files_in_folder_only_dirs(tmp_path):
    (tmp_path / "dir1").mkdir()
    (tmp_path / "dir2").mkdir()
    count = count_files_in_folder(tmp_path)
    assert count == 0


def test_count_files_in_folder_path_is_file(tmp_path):
    """Passing a file instead of a directory raises ValueError."""
    file_path = tmp_path / "single.txt"
    file_path.write_text("content")
    with pytest.raises(ValueError):
        count_files_in_folder(file_path)


# --- delete_file -------------------------------------------------------------

def test_delete_file_success(tmp_path, caplog):
    """Deleting an existing file returns True and logs at DEBUG level."""
    target = tmp_path / "temp.txt"
    target.write_text("to be deleted")
    logger = logging.getLogger("delete_success_test")
    caplog.set_level(logging.DEBUG, logger=logger.name)
    result = delete_file(str(target), logger)
    assert result is True
    assert not target.exists()
    assert any(rec.levelno == logging.DEBUG for rec in caplog.records)
    assert "deleted" in " ".join(rec.getMessage() for rec in caplog.records).lower()


def test_delete_file_not_found(tmp_path, caplog):
    """A missing file yields False plus a warning/error log entry."""
    missing = tmp_path / "no_such_file.txt"
    logger = logging.getLogger("delete_notfound_test")
    caplog.set_level(logging.ERROR, logger=logger.name)
    result = delete_file(str(missing), logger)
    assert result is False
    assert any(rec.levelno >= logging.WARNING for rec in caplog.records)
    combined_logs = " ".join(rec.getMessage() for rec in caplog.records).lower()
    assert "not found" in combined_logs or "no such file" in combined_logs


def test_delete_file_directory_input(tmp_path, caplog):
    """Passing a directory yields False and an error mentioning the directory."""
    dir_path = tmp_path / "dir_to_delete"
    dir_path.mkdir()
    logger = logging.getLogger("delete_dir_test")
    caplog.set_level(logging.ERROR, logger=logger.name)
    result = delete_file(str(dir_path), logger)
    assert result is False
    assert any(rec.levelno == logging.ERROR for rec in caplog.records)
    combined_logs = " ".join(rec.getMessage() for rec in caplog.records).lower()
    assert "directory" in combined_logs or "is a directory" in combined_logs


def test_delete_file_empty_path(caplog):
    """An empty path yields False and an error log entry."""
    logger = logging.getLogger("delete_empty_test")
    caplog.set_level(logging.ERROR, logger=logger.name)
    result = delete_file("", logger)
    assert result is False
    assert any(rec.levelno == logging.ERROR for rec in caplog.records)
    combined_logs = " ".join(rec.getMessage() for rec in caplog.records).lower()
    assert "invalid" in combined_logs or "no such file" in combined_logs or "not found" in combined_logs


# --- resolve_path ------------------------------------------------------------

def test_resolve_path_relative(tmp_path, monkeypatch):
    """Relative paths resolve against the current working directory."""
    monkeypatch.chdir(tmp_path)
    relative_path = "subfolder/test.txt"
    (tmp_path / "subfolder").mkdir()
    result = resolve_path(relative_path)
    expected_path = tmp_path / "subfolder" / "test.txt"
    assert result == expected_path


def test_resolve_path_absolute_identity(tmp_path):
    """Absolute paths resolve to themselves."""
    absolute = tmp_path / "file.txt"
    result = resolve_path(str(absolute))
    assert isinstance(result, Path)
    assert str(result) == str(absolute)


def test_resolve_path_nonexistent():
    """Resolution works even for paths that do not exist on disk."""
    fake_path = "/some/path/that/does/not/exist.txt"
    result = resolve_path(fake_path)
    assert isinstance(result, Path)
    assert str(result) == fake_path or str(result) == os.path.abspath(fake_path)
file mode 100644 index 14795726ce81896eb03d9862a9c096d78123815c..0000000000000000000000000000000000000000 --- a/dedal/tests/utils_test.py +++ /dev/null @@ -1,63 +0,0 @@ -import pytest -from pathlib import Path - -from dedal.utils.utils import clean_up - - -@pytest.fixture -def temp_directories(tmp_path): - """ - Create temporary directories with files and subdirectories for testing. - """ - test_dirs = [] - - for i in range(3): - dir_path = tmp_path / f"test_dir_{i}" - dir_path.mkdir() - test_dirs.append(str(dir_path)) - - # Add a file to the directory - file_path = dir_path / f"file_{i}.txt" - file_path.write_text(f"This is a test file in {dir_path}") - - # Add a subdirectory with a file - sub_dir = dir_path / f"subdir_{i}" - sub_dir.mkdir() - sub_file = sub_dir / f"sub_file_{i}.txt" - sub_file.write_text(f"This is a sub file in {sub_dir}") - - return test_dirs - - -def test_clean_up(temp_directories, mocker): - """ - Test the clean_up function to ensure directories and contents are removed. - """ - # Mock the logger using pytest-mock's mocker fixture - mock_logger = mocker.MagicMock() - - # Ensure directories exist before calling clean_up - for dir_path in temp_directories: - assert Path(dir_path).exists() - - clean_up(temp_directories, mock_logger) - - for dir_path in temp_directories: - assert not Path(dir_path).exists() - - for dir_path in temp_directories: - mock_logger.info.assert_any_call(f"Removing {Path(dir_path).resolve()}") - - -def test_clean_up_nonexistent_dirs(mocker): - """ - Test the clean_up function with nonexistent directories. 
- """ - # Mock the logger using pytest-mock's mocker fixture - mock_logger = mocker.MagicMock() - nonexistent_dirs = ["nonexistent_dir_1", "nonexistent_dir_2"] - - clean_up(nonexistent_dirs, mock_logger) - - for dir_path in nonexistent_dirs: - mock_logger.info.assert_any_call(f"{Path(dir_path).resolve()} does not exist") diff --git a/dedal/utils/bootstrap.sh b/dedal/utils/bootstrap.sh new file mode 100644 index 0000000000000000000000000000000000000000..9cd2e1e11b4ab82fd301cc86179de7ba373f5d03 --- /dev/null +++ b/dedal/utils/bootstrap.sh @@ -0,0 +1,11 @@ +# Minimal prerequisites for installing the esd_library +# pip must be installed on the OS +echo "Bootstrapping..." +set -euo pipefail +shopt -s inherit_errexit 2>/dev/null +export DEBIAN_FRONTEND=noninteractive +apt update +apt install -o DPkg::Options::=--force-confold -y -q --reinstall \ + bzip2 ca-certificates g++ gcc make gfortran git gzip lsb-release \ + patch python3 python3-pip tar unzip xz-utils zstd gnupg2 vim curl rsync +python3 -m pip install --upgrade pip setuptools wheel diff --git a/dedal/utils/utils.py b/dedal/utils/utils.py index 811d258e7e5856f4b666bc3196996f3b24571112..29c6a2a622b9d2e6503da697b8d91be948102aee 100644 --- a/dedal/utils/utils.py +++ b/dedal/utils/utils.py @@ -1,20 +1,168 @@ +import logging +import os import shutil +import subprocess +import tempfile from pathlib import Path +from dedal.error_handling.exceptions import BashCommandException +import re -def clean_up(dirs: list[str], logging, ignore_errors=True): + +def clean_up(dirs: list[str], logger: logging = logging.getLogger(__name__), ignore_errors=True): """ All the folders from the list dirs are removed with all the content in them """ for cleanup_dir in dirs: cleanup_dir = Path(cleanup_dir).resolve() if cleanup_dir.exists(): - logging.info(f"Removing {cleanup_dir}") + logger.info(f"Removing {cleanup_dir}") try: shutil.rmtree(Path(cleanup_dir)) except OSError as e: - logging.error(f"Failed to remove {cleanup_dir}: {e}") + 
def clean_up(dirs: list[str], logger: logging.Logger = logging.getLogger(__name__), ignore_errors=True):
    """
    Remove every directory in *dirs* together with all of its contents.

    :param dirs: directory paths (as strings) to delete recursively.
    :param logger: logger used for progress and error reporting.
    :param ignore_errors: when False, re-raise the OSError from shutil.rmtree.
    """
    for cleanup_dir in dirs:
        cleanup_dir = Path(cleanup_dir).resolve()
        if cleanup_dir.exists():
            logger.info(f"Removing {cleanup_dir}")
            try:
                shutil.rmtree(Path(cleanup_dir))
            except OSError as e:
                logger.error(f"Failed to remove {cleanup_dir}: {e}")
                if not ignore_errors:
                    raise e
        else:
            logger.info(f"{cleanup_dir} does not exist")


def run_command(*args, logger=logging.getLogger(__name__), info_msg: str = '', exception_msg: str = None,
                exception=None, **kwargs):
    """
    Run *args* through subprocess.run, logging failures instead of raising most of them.

    :param args: the command and its arguments, passed positionally.
    :param logger: logger for info/error messages.
    :param info_msg: message logged (with the args) before running the command.
    :param exception_msg: message logged when the command exits non-zero.
    :param exception: optional exception factory; when given, a non-zero exit
        (requires check=True in kwargs) re-raises as this exception type.
    :param kwargs: forwarded verbatim to subprocess.run (check, capture_output, ...).
    :return: the CompletedProcess on success; None when the command failed and
        no *exception* was supplied, or when another handled error occurred.
    """
    try:
        logger.info(f'{info_msg}: args: {args}')
        return subprocess.run(args, **kwargs)
    except subprocess.CalledProcessError as e:
        if exception_msg is not None:
            logger.error(f"{exception_msg}: {e}")
        if exception is not None:
            raise exception(f'{exception_msg} : {e}')
        else:
            return None
    except FileNotFoundError:
        logger.error("Command not found. Please check the command syntax.")
    except PermissionError:
        logger.error("Permission denied. Try running with appropriate permissions.")
    except subprocess.TimeoutExpired:
        logger.error("Command timed out. Try increasing the timeout duration.")
    except ValueError:
        logger.error("Invalid argument passed to subprocess. Check function parameters.")
    except OSError as e:
        logger.error(f"OS error occurred: {e}")
    except Exception as e:
        logger.error(f"An unexpected error occurred: {e}")


def git_clone_repo(repo_name: str, dir: Path, git_path: str, logger: logging.Logger = logging.getLogger(__name__)):
    """
    Shallow-clone *git_path* into *dir* unless that directory already exists.

    :raises BashCommandException: when the clone command fails (via run_command).
    """
    if not dir.exists():
        run_command(
            "git", "clone", "--depth", "1",
            "-c", "advice.detachedHead=false",
            "-c", "feature.manyFiles=true",
            git_path, dir,
            check=True, logger=logger,
            info_msg=f'Cloned repository {repo_name}',
            exception_msg=f'Failed to clone repository: {repo_name}',
            exception=BashCommandException)
    else:
        logger.info(f'Repository {repo_name} already cloned.')


def file_exists_and_not_empty(file: Path) -> bool:
    """Return True when *file* is a regular file containing at least one byte."""
    return file.is_file() and file.stat().st_size > 0


def log_command(results, log_file: str):
    """Write a CompletedProcess's stdout and stderr to *log_file* (overwriting it)."""
    # Use a distinct name for the handle so it does not shadow the parameter.
    with open(log_file, "w") as handle:
        handle.write(results.stdout)
        handle.write("\n--- STDERR ---\n")
        handle.write(results.stderr)


def copy_to_tmp(file_path: Path) -> Path:
    """
    Creates a temporary directory and copies the given file into it.

    :param file_path: Path to the file that needs to be copied.
    :return: Path to the copied file inside the temporary directory.
    :raises FileNotFoundError: when *file_path* is not an existing file.
    """
    if not file_path.is_file():
        raise FileNotFoundError(f"File not found: {file_path}")
    tmp_dir = Path(tempfile.mkdtemp())
    tmp_file_path = tmp_dir / file_path.name
    shutil.copy(file_path, tmp_file_path)
    return tmp_file_path


def set_bashrc_variable(var_name: str, value: str, bashrc_path: str = os.path.expanduser("~/.bashrc"),
                        logger: logging.Logger = logging.getLogger(__name__), update_variable=False):
    """Update or add an environment variable in ~/.bashrc.

    :param var_name: name of the exported variable.
    :param value: value to assign; '$' is escaped so it survives shell sourcing.
    :param bashrc_path: file to edit (defaults to the user's ~/.bashrc).
    :param update_variable: when True, overwrite an existing export in place;
        when False, an existing export is left untouched.
    """
    value = value.replace("$", r"\$")
    with open(bashrc_path, "r") as file:
        lines = file.readlines()
    pattern = re.compile(rf'^\s*export\s+{var_name}=.*$')
    found_variable = False
    # Modify the existing variable if found
    for i, line in enumerate(lines):
        if pattern.match(line):
            if update_variable:
                lines[i] = f'export {var_name}={value}\n'
            found_variable = True
            break
    if not found_variable:
        lines.append(f'\nexport {var_name}={value}\n')
        logger.info(f"Added in {bashrc_path} with: export {var_name}={value}")
    else:
        logger.info(f"Updated {bashrc_path} with: export {var_name}={value}")
    with open(bashrc_path, "w") as file:
        file.writelines(lines)


def copy_file(src: Path, dst: Path, logger: logging.Logger = logging.getLogger(__name__)) -> None:
    """
    Copy a file from src to dst, creating dst's parent directory when needed.

    :raises FileNotFoundError: when the source file does not exist.
    """
    if not os.path.exists(src):
        raise FileNotFoundError(f"Source file '{src}' does not exist.")
    # Only create a parent directory when dst actually has one; dirname('') fails.
    parent = os.path.dirname(dst)
    if parent:
        os.makedirs(parent, exist_ok=True)
    shutil.copy2(src, dst)
    logger.debug(f"File copied from '{src}' to '{dst}'")


def delete_file(file_path: str, logger: logging.Logger = logging.getLogger(__name__)) -> bool:
    """
    Deletes a file at the given path. Returns True if successful, False if the file doesn't exist.
    """
    try:
        os.remove(file_path)
        logger.debug(f"File '{file_path}' deleted.")
        return True
    except FileNotFoundError:
        logger.error(f"File not found: {file_path}")
        return False
    except PermissionError:
        logger.error(f"Permission denied: {file_path}")
        return False
    except Exception as e:
        # e.g. IsADirectoryError when a directory path is passed.
        logger.error(f"Error deleting file {file_path}: {e}")
        return False


def resolve_path(path: str) -> Path:
    """Resolve *path* to an absolute Path; None resolves to the current working directory."""
    if path is None:
        return Path(os.getcwd()).resolve()
    return Path(path).resolve()


def count_files_in_folder(folder_path: Path) -> int:
    """Recursively count regular files under *folder_path*.

    :raises ValueError: when *folder_path* is not a directory.
    """
    if not folder_path.is_dir():
        raise ValueError(f"{folder_path} is not a valid directory")
    return sum(1 for sub_path in folder_path.rglob("*") if sub_path.is_file())


def get_first_word(s: str) -> str:
    """Return the first whitespace-delimited word of *s*, or '' for blank input."""
    return s.split()[0] if s.strip() else ''
def check_spack_env(method):
    """Decorator for SpackOperation-style methods: invoke *method* only when
    the instance reports an existing spack environment, otherwise log a debug
    message and raise NoSpackEnvironmentException."""

    @functools.wraps(method)
    def guarded(self, *args, **kwargs):
        # Fail fast when no environment has been set up yet.
        if not self.spack_env_exists():
            self.logger.debug('No spack environment defined')
            raise NoSpackEnvironmentException('No spack environment defined')
        return method(self, *args, **kwargs)

    return guarded