Commit 39be4262 authored by Eleni Mathioulaki

feat: update packages from Spack v0.22.1 upstream

parent d7d26402
Showing with 420 additions and 867 deletions
--- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java
@@ -150,6 +150,13 @@
builder.put("PATH", null);
builder.put("LD_LIBRARY_PATH", null);
}
+
+ Map<String, String> spackEnv = System.getenv();
+ for (String envName : spackEnv.keySet()) {
+ if (envName.startsWith("SPACK_")) {
+ builder.put(envName, spackEnv.get(envName));
+ }
+ }
}
private static PathFragment determineShellExecutable(OS os, PathFragment fromOption) {
--- a/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java
+++ b/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelRuleClassProvider.java
@@ -168,6 +168,13 @@ public class BazelRuleClassProvider {
env.put("PATH", null);
}
+ Map<String, String> spackEnv = System.getenv();
+ for (String envName : spackEnv.keySet()) {
+ if (envName.startsWith("SPACK_")) {
+ env.put(envName, spackEnv.get(envName));
+ }
+ }
+
// Shell environment variables specified via options take precedence over the
// ones inherited from the fragments. In the long run, these fragments will
// be replaced by appropriate default rc files anyway.
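For reference, the two hunks above make Bazel inherit every SPACK_*-prefixed variable from the surrounding build environment into its action environment, instead of an otherwise clean environment. A minimal Python sketch of the same prefix filter (not taken from Bazel or Spack, purely illustrative):

import os

def spack_action_env(prefix="SPACK_"):
    # Collect the SPACK_* variables that the patched Bazel forwards to build actions.
    return {name: value for name, value in os.environ.items() if name.startswith(prefix)}

# Usage: merge the forwarded variables into an otherwise minimal action environment.
action_env = {"PATH": "/usr/bin"}
action_env.update(spack_action_env())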
From 9c9d27561780bc56d9f0867e325c7421a94ee1cb Mon Sep 17 00:00:00 2001
From: Harsh Bhatia <bhatia4@llnl.gov>
Date: Tue, 15 Dec 2020 15:56:10 -0800
Subject: [PATCH] https://github.com/bazelbuild/bazel/commit/ab62a6e097590dac5ec946ad7a796ea0e8593ae0
---
src/conditions/BUILD | 6 ++++++
third_party/BUILD | 8 ++++++--
2 files changed, 12 insertions(+), 2 deletions(-)
diff --git a/src/conditions/BUILD b/src/conditions/BUILD
index 2b28e28057..faa41a439d 100644
--- a/src/conditions/BUILD
+++ b/src/conditions/BUILD
@@ -10,6 +10,12 @@ filegroup(
visibility = ["//src:__pkg__"],
)
+config_setting(
+ name = "linux_ppc",
+ values = {"cpu": "ppc"},
+ visibility = ["//visibility:public"],
+)
+
config_setting(
name = "linux_x86_64",
values = {"cpu": "k8"},
diff --git a/third_party/BUILD b/third_party/BUILD
index 159006d741..4fcae54c00 100644
--- a/third_party/BUILD
+++ b/third_party/BUILD
@@ -523,12 +523,13 @@ UNNECESSARY_DYNAMIC_LIBRARIES = select({
"//src/conditions:darwin": "*.so *.dll",
"//src/conditions:darwin_x86_64": "*.so *.dll",
"//src/conditions:linux_x86_64": "*.jnilib *.dll",
+ "//src/conditions:linux_ppc": "*.so *.jnilib *.dll",
# The .so file is an x86 one, so we can just remove it if the CPU is not x86
"//src/conditions:arm": "*.so *.jnilib *.dll",
"//src/conditions:linux_aarch64": "*.so *.jnilib *.dll",
# Play it safe -- better have a big binary than a slow binary
# zip -d does require an argument. Supply something bogus.
- "//conditions:default": "*.bogusextension",
+ "//conditions:default": "",
})
# Remove native libraries that are for a platform different from the one we are
@@ -537,7 +538,10 @@ genrule(
name = "filter_netty_dynamic_libs",
srcs = ["netty_tcnative/netty-tcnative-boringssl-static-2.0.24.Final.jar"],
outs = ["netty_tcnative/netty-tcnative-filtered.jar"],
- cmd = "cp $< $@ && zip -qd $@ " + UNNECESSARY_DYNAMIC_LIBRARIES,
+ cmd = "cp $< $@ && " +
+ # End successfully if there is nothing to be deleted from the archive
+ "if [ -n '" + UNNECESSARY_DYNAMIC_LIBRARIES + "' ]; then " +
+ "zip -qd $@ " + UNNECESSARY_DYNAMIC_LIBRARIES + "; fi",
)
java_import(
--
2.21.0 (Apple Git-122.2)
--- a/tools/cpp/cc_configure.bzl
+++ b/tools/cpp/cc_configure.bzl
@@ -173,8 +173,19 @@
else:
inc_dirs = result.stderr[index1 + 1:index2].strip()
- return [repository_ctx.path(_cxx_inc_convert(p))
- for p in inc_dirs.split("\n")]
+ default_inc_directories = [
+ repository_ctx.path(_cxx_inc_convert(p))
+ for p in inc_dirs.split("\n")
+ ]
+
+ env = repository_ctx.os.environ
+ if "SPACK_INCLUDE_DIRS" in env:
+ for path in env["SPACK_INCLUDE_DIRS"].split(":"):
+ default_inc_directories.append(
+ repository_ctx.path(_cxx_inc_convert(path))
+ )
+
+ return default_inc_directories
def _add_option_if_supported(repository_ctx, cc, option):
"""Checks that `option` is supported by the C compiler."""
--- a/tools/cpp/cc_configure.bzl
+++ b/tools/cpp/cc_configure.bzl
@@ -200,8 +200,19 @@
else:
inc_dirs = result.stderr[index1 + 1:index2].strip()
- return [_escape_string(repository_ctx.path(_cxx_inc_convert(p)))
- for p in inc_dirs.split("\n")]
+ default_inc_directories = [
+ _escape_string(repository_ctx.path(_cxx_inc_convert(p)))
+ for p in inc_dirs.split("\n")
+ ]
+
+ env = repository_ctx.os.environ
+ if "SPACK_INCLUDE_DIRS" in env:
+ for path in env["SPACK_INCLUDE_DIRS"].split(":"):
+ default_inc_directories.append(
+ repository_ctx.path(_cxx_inc_convert(path))
+ )
+
+ return default_inc_directories
def _add_option_if_supported(repository_ctx, cc, option):
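Both cc_configure.bzl hunks append the colon-separated paths from SPACK_INCLUDE_DIRS to the compiler-detected include directories. A small Python sketch of that append step, assuming only the environment variable name from the patch (the helper and sample path are illustrative):

import os

def extend_include_dirs(detected_dirs):
    # Append Spack-provided include directories to the compiler-detected ones.
    dirs = list(detected_dirs)
    for path in os.environ.get("SPACK_INCLUDE_DIRS", "").split(":"):
        if path:  # skip empty entries from leading/trailing colons
            dirs.append(path)
    return dirs

print(extend_include_dirs(["/usr/include"]))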
--- a/compile.sh
+++ b/compile.sh
@@ -92,7 +92,7 @@
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" \
--host_platform=@bazel_tools//platforms:host_platform \
--platforms=@bazel_tools//platforms:target_platform \
|| fail "Could not build Bazel"
--- a/compile.sh
+++ b/compile.sh
@@ -92,7 +92,7 @@ display "."
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel_nojdk${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel_nojdk${EXE_EXT}" \
--host_platform=@bazel_tools//platforms:host_platform \
--platforms=@bazel_tools//platforms:target_platform \
|| fail "Could not build Bazel"
--- a/compile.sh
+++ b/compile.sh
@@ -92,7 +92,7 @@ display "."
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel_nojdk${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel_nojdk${EXE_EXT}" \
--action_env=PATH \
--host_platform=@bazel_tools//platforms:host_platform \
--platforms=@bazel_tools//platforms:target_platform \
--- a/compile.sh
+++ b/compile.sh
@@ -99,7 +99,7 @@
new_step 'Building Bazel with Bazel'
display "."
log "Building output/bazel"
- bazel_build "src:bazel${EXE_EXT}"
+ CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}"
cp -f "bazel-bin/src/bazel${EXE_EXT}" "output/bazel${EXE_EXT}"
chmod 0755 "output/bazel${EXE_EXT}"
BAZEL="$(pwd)/output/bazel${EXE_EXT}"
--- a/compile.sh
+++ b/compile.sh
@@ -124,7 +124,7 @@
new_step 'Building Bazel with Bazel'
display "."
log "Building output/bazel"
- bazel_build "src:bazel${EXE_EXT}" \
+ CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" \
|| fail "Could not build Bazel"
bazel_bin_path="$(get_bazel_bin_path)/src/bazel${EXE_EXT}"
[ -e "$bazel_bin_path" ] \
--- a/compile.sh
+++ b/compile.sh
@@ -85,7 +85,7 @@
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" \
--experimental_host_platform=//tools/platforms:host_platform \
--experimental_platforms=//tools/platforms:target_platform \
|| fail "Could not build Bazel"
--- a/compile.sh
+++ b/compile.sh
@@ -92,7 +92,7 @@
log "Building output/bazel"
# We set host and target platform directly since the defaults in @bazel_tools
# have not yet been generated.
-bazel_build "src:bazel${EXE_EXT}" \
+CC=$SPACK_CC CXX=$SPACK_CXX bazel_build "src:bazel${EXE_EXT}" \
--host_platform=//tools/platforms:host_platform \
--platforms=//tools/platforms:target_platform \
|| fail "Could not build Bazel"
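Each compile.sh hunk prefixes the bootstrap command with CC=$SPACK_CC CXX=$SPACK_CXX so that Bazel is bootstrapped with Spack's compiler wrappers. The same per-invocation override, sketched in Python under the assumption that SPACK_CC and SPACK_CXX are set by Spack's build environment (the script name is just an example):

import os
import subprocess

env = dict(os.environ)
# Point the bootstrap at Spack's compiler wrappers, as the patched compile.sh does.
env["CC"] = env.get("SPACK_CC", "cc")
env["CXX"] = env.get("SPACK_CXX", "c++")
subprocess.run(["./compile.sh"], env=env, check=True)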
--- a/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java.orig 2020-06-08 13:42:14.035342560 -0400
+++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/CppCompileAction.java 2020-06-08 13:42:25.149375458 -0400
@@ -963,7 +963,7 @@
// are, it's probably due to a non-hermetic #include, & we should stop
// the build with an error.
if (execPath.startsWith(execRoot)) {
- execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path
+ // execPathFragment = execPath.relativeTo(execRoot); // funky but tolerable path
} else {
problems.add(execPathFragment.getPathString());
continue;
diff --color=auto --color=auto -Naur a/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java b/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java
--- a/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java 1980-01-01 00:00:00
+++ b/src/main/java/com/google/devtools/build/lib/rules/cpp/HeaderDiscovery.java 2024-02-15 13:36:37
@@ -143,7 +143,7 @@
LabelConstants.EXPERIMENTAL_EXTERNAL_PATH_PREFIX.getRelative(
execPath.relativeTo(execRoot.getParentDirectory()));
} else {
- absolutePathProblems.add(execPathFragment.getPathString());
+ // absolutePathProblems.add(execPathFragment.getPathString());
continue;
}
}
--- a/tools/cpp/unix_cc_configure.bzl
+++ b/tools/cpp/unix_cc_configure.bzl
@@ -147,9 +147,18 @@ def get_escaped_cxx_inc_directories(repository_ctx, cc, additional_flags = []):
else:
inc_dirs = result.stderr[index1 + 1:index2].strip()
- return [_prepare_include_path(repository_ctx, _cxx_inc_convert(p))
- for p in inc_dirs.split("\n")]
+ default_inc_directories = [
+ _prepare_include_path(repository_ctx, _cxx_inc_convert(p))
+ for p in inc_dirs.split("\n")
+ ]
+
+ env = repository_ctx.os.environ
+ if "SPACK_INCLUDE_DIRS" in env:
+ for path in env["SPACK_INCLUDE_DIRS"].split(":"):
+ default_inc_directories.append(path)
+
+ return default_inc_directories
def _is_option_supported(repository_ctx, cc, option):
--- a/tools/cpp/unix_cc_configure.bzl
+++ b/tools/cpp/unix_cc_configure.bzl
@@ -117,9 +117,19 @@ def get_escaped_cxx_inc_directories(repository_ctx, cc):
else:
inc_dirs = result.stderr[index1 + 1:index2].strip()
- return [escape_string(repository_ctx.path(_cxx_inc_convert(p)))
- for p in inc_dirs.split("\n")]
+ default_inc_directories = [
+ escape_string(repository_ctx.path(_cxx_inc_convert(p)))
+ for p in inc_dirs.split("\n")
+ ]
+ env = repository_ctx.os.environ
+ if "SPACK_INCLUDE_DIRS" in env:
+ for path in env["SPACK_INCLUDE_DIRS"].split(":"):
+ default_inc_directories.append(
+ repository_ctx.path(_cxx_inc_convert(path))
+ )
+
+ return default_inc_directories
def _add_option_if_supported(repository_ctx, cc, option):
"""Checks that `option` is supported by the C compiler. Doesn't %-escape the option."""
- paths:
- # Ubuntu 18.04, system compilers without Fortran
- - layout:
- - executables:
- - "bin/gcc"
- - "bin/g++"
- script: "echo 7.5.0"
- results:
- - spec: "gcc@7.5.0 languages=c,c++"
- # Mock a version < 7 of GCC that requires -dumpversion and
- # errors with -dumpfullversion
- - layout:
- - executables:
- - "bin/gcc-5"
- - "bin/g++-5"
- - "bin/gfortran-5"
- script: |
- if [[ "$1" == "-dumpversion" ]] ; then
- echo "5.5.0"
- else
- echo "gcc-5: fatal error: no input files"
- echo "compilation terminated."
- exit 1
- fi
- results:
- - spec: "gcc@5.5.0 languages=c,c++,fortran"
- # Multiple compilers present at the same time
- - layout:
- - executables:
- - "bin/x86_64-linux-gnu-gcc-6"
- script: 'echo 6.5.0'
- - executables:
- - "bin/x86_64-linux-gnu-gcc-10"
- - "bin/x86_64-linux-gnu-g++-10"
- script: "echo 10.1.0"
- results:
- - spec: "gcc@6.5.0 languages=c"
- - spec: "gcc@10.1.0 languages=c,c++"
\ No newline at end of file
+ paths:
+ # Ubuntu 20.04, system compilers without Fortran. This
+ # test also covers which flags are expected to be used
+ # during the detection of gcc.
+ - layout:
+ - executables:
+ - "bin/gcc"
+ - "bin/g++"
+ script: |
+ if [ "$1" = "-dumpversion" ] ; then
+ echo "9"
+ elif [ "$1" = "-dumpfullversion" ] ; then
+ echo "9.4.0"
+ elif [ "$1" = "--version" ] ; then
+ echo "gcc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0"
+ echo "Copyright (C) 2019 Free Software Foundation, Inc."
+ echo "This is free software; see the source for copying conditions. There is NO"
+ echo "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+ else
+ echo "mock executable got an unexpected flag: $1"
+ exit 1
+ fi
+ platforms: ["darwin", "linux"]
+ results:
+ - spec: "gcc@9.4.0 languages=c,c++"
+ extra_attributes:
+ compilers:
+ c: ".*/bin/gcc"
+ cxx: ".*/bin/g++"
+ # Mock a version < 7 of GCC that requires -dumpversion and
+ # errors with -dumpfullversion
+ - layout:
+ - executables:
+ - "bin/gcc-5"
+ - "bin/g++-5"
+ - "bin/gfortran-5"
+ script: |
+ if [ "$1" = "-dumpversion" ] ; then
+ echo "5.5.0"
+ else
+ echo "gcc-5: fatal error: no input files"
+ echo "compilation terminated."
+ exit 1
+ fi
+ platforms: ["darwin", "linux"]
+ results:
+ - spec: "gcc@5.5.0 languages=c,c++,fortran"
+ extra_attributes:
+ compilers:
+ c: ".*/bin/gcc-5$"
+ cxx: ".*/bin/g[+][+]-5$"
+ fortran: ".*/bin/gfortran-5$"
+ # Multiple compilers present at the same time
+ - layout:
+ - executables:
+ - "bin/x86_64-linux-gnu-gcc-6"
+ script: 'echo 6.5.0'
+ - executables:
+ - "bin/x86_64-linux-gnu-gcc-10"
+ - "bin/x86_64-linux-gnu-g++-10"
+ script: "echo 10.1.0"
+ platforms: [darwin, linux]
+ results:
+ - spec: "gcc@6.5.0 languages=c"
+ extra_attributes:
+ compilers:
+ c: ".*/bin/x86_64-linux-gnu-gcc-6$"
+ - spec: "gcc@10.1.0 languages=c,c++"
+ extra_attributes:
+ compilers:
+ c: ".*/bin/x86_64-linux-gnu-gcc-10$"
+ cxx: ".*/bin/x86_64-linux-gnu-g[+][+]-10$"
+ # Apple clang under disguise as gcc should not be detected
+ - layout:
+ - executables:
+ - "bin/gcc"
+ script: |
+ if [ "$1" = "-dumpversion" ] ; then
+ echo "15.0.0"
+ elif [ "$1" = "-dumpfullversion" ] ; then
+ echo "clang: error: no input files" >&2
+ exit 1
+ elif [ "$1" = "--version" ] ; then
+ echo "Apple clang version 15.0.0 (clang-1500.3.9.4)"
+ echo "Target: x86_64-apple-darwin23.4.0"
+ echo "Thread model: posix"
+ echo "InstalledDir: /Library/Developer/CommandLineTools/usr/bin"
+ else
+ echo "mock executable got an unexpected flag: $1"
+ exit 1
+ fi
+ platforms: ["darwin"]
+ results: []
@@ -2,26 +2,24 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import glob
import itertools
import os
- import re
import sys
from archspec.cpu import UnsupportedMicroarchitecture
import llnl.util.tty as tty
- from llnl.util.lang import classproperty
+ from llnl.util.symlink import readlink
import spack.platforms
import spack.util.executable
- from spack.build_environment import dso_suffix
+ import spack.util.libc
from spack.operating_systems.mac_os import macos_sdk_path, macos_version
from spack.package import *
- class Gcc(AutotoolsPackage, GNUMirrorPackage):
+ class Gcc(AutotoolsPackage, GNUMirrorPackage, CompilerPackage):
"""The GNU Compiler Collection includes front ends for C, C++, Objective-C,
Fortran, Ada, and Go, as well as libraries for these languages."""
@@ -34,6 +32,8 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
maintainers("michaelkuhn", "alalazo")
+ license("GPL-2.0-or-later AND LGPL-2.1-or-later")
+
version("master", branch="master")
version("13.2.0", sha256="e275e76442a6067341a27f04c5c6b83d8613144004c0413528863dc6b5c743da")
@@ -308,14 +308,10 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
destination="newlibsource",
fetch_options=timeout,
)
- # nvptx-tools does not seem to work as a dependency,
- # but does fine when the source is inside the gcc build directory
- # nvptx-tools doesn't have any releases, so grabbing the last commit
- resource(
- name="nvptx-tools",
- git="https://github.com/MentorEmbedded/nvptx-tools",
- commit="d0524fbdc86dfca068db5a21cc78ac255b335be5",
- )
+ nvptx_tools_ver = "2023-09-13"
+ depends_on("nvptx-tools@" + nvptx_tools_ver, type="build")
# NVPTX offloading supported in 7 and later by limited languages
conflicts("@:6", msg="NVPTX only supported in gcc 7 and above")
conflicts("languages=ada")
@@ -455,8 +451,8 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
# Backport libsanitizer patch for glibc >= 2.36
# https://reviews.llvm.org/D129471
- patch("glibc-2.36-libsanitizer-gcc-5-9.patch", when="@5.1:5.5,6.1:6.5,7.1:7.5,8.1:8.5,9.1:9.5")
- patch("glibc-2.36-libsanitizer-gcc-10-12.patch", when="@10.1:10.4,11.1:11.3,12.1.0")
+ patch("glibc-2.36-libsanitizer-gcc-5-9.patch", when="@5:9")
+ patch("glibc-2.36-libsanitizer-gcc-10-12.patch", when="@10:10.4,11:11.3,12.1.0")
# Older versions do not compile with newer versions of glibc
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=81712
@@ -491,13 +487,52 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
)
# end EBRAINS
+ # patch ICE on aarch64 in tree-vect-slp, cf: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=111478
+ # patch taken from releases/gcc-12 branch
+ patch(
+ "https://github.com/gcc-mirror/gcc/commit/9d033155254ac6df5f47ab32896dbf336f991589.patch?full_index=1",
+ sha256="8b76fe575ef095b48ac45e8b56544c331663f840ce4b63abdb61510bf3647597",
+ when="@12.3.0 target=aarch64:",
+ )
+ # patch taken from releases/gcc-13 branch
+ patch(
+ "https://github.com/gcc-mirror/gcc/commit/7c67939ec384425a3d7383dfb4fb39aa7e9ad20a.patch?full_index=1",
+ sha256="f0826d7a9c9808af40f3434918f24ad942f1c6a6daec73f11cf52c544cf5fc01",
+ when="@13.2.0 target=aarch64:",
+ )
build_directory = "spack-build"
- @classproperty
- def executables(cls):
- names = [r"gcc", r"[^\w]?g\+\+", r"gfortran", r"gdc", r"gccgo"]
- suffixes = [r"", r"-mp-\d+\.\d", r"-\d+\.\d", r"-\d+", r"\d\d"]
- return [r"".join(x) for x in itertools.product(names, suffixes)]
+ compiler_languages = ["c", "cxx", "fortran", "d", "go"]
+
+ @property
+ def supported_languages(self):
+ # This weirdness is because it could be called on an abstract spec
+ if "languages" not in self.spec.variants:
+ return self.compiler_languages
+ return [x for x in self.compiler_languages if x in self.spec.variants["languages"].value]
+
+ c_names = ["gcc"]
+ cxx_names = ["g++"]
+ fortran_names = ["gfortran"]
+ d_names = ["gdc"]
+ go_names = ["gccgo"]
+ compiler_prefixes = [r"\w+-\w+-\w+-"]
+ compiler_suffixes = [r"-mp-\d+(?:\.\d+)?", r"-\d+(?:\.\d+)?", r"\d\d"]
+ compiler_version_regex = r"(?<!clang version)\s?([0-9.]+)"
+ compiler_version_argument = ("-dumpfullversion", "-dumpversion")
+
+ @classmethod
+ def determine_version(cls, exe):
+ try:
+ output = spack.compiler.get_compiler_version_output(exe, "--version")
+ except Exception:
+ output = ""
+ # Apple's gcc is actually apple clang, so skip it.
+ if "Apple" in output:
+ return None
+ return super().determine_version(exe)
@classmethod
def filter_detected_exes(cls, prefix, exes_in_prefix):
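The class attributes added above delegate version detection to CompilerPackage, which (roughly) queries the executable with the compiler_version_argument flags and matches the output against compiler_version_regex, while determine_version() first screens out Apple clang posing as gcc. A standalone sketch of that matching, reusing only the regex and sample outputs that appear elsewhere in this commit (the helper itself is not Spack code):

import re

COMPILER_VERSION_REGEX = r"(?<!clang version)\s?([0-9.]+)"

def mock_determine_version(version_output, dump_output):
    # Return a GCC version, or None when the "gcc" is really Apple clang.
    if "Apple" in version_output:
        return None
    match = re.search(COMPILER_VERSION_REGEX, dump_output)
    return match.group(1) if match else None

print(mock_determine_version("gcc (Ubuntu 9.4.0-1ubuntu1~20.04.2) 9.4.0", "9.4.0"))       # 9.4.0
print(mock_determine_version("Apple clang version 15.0.0 (clang-1500.3.9.4)", "15.0.0"))  # None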
@@ -527,58 +562,14 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
return result
- @classmethod
- def determine_version(cls, exe):
- try:
- output = spack.compiler.get_compiler_version_output(exe, "--version")
- except Exception:
- output = ""
- # Apple's gcc is actually apple clang, so skip it.
- # Users can add it manually to compilers.yaml at their own risk.
- if "Apple" in output:
- return None
- version_regex = re.compile(r"([\d\.]+)")
- for vargs in ("-dumpfullversion", "-dumpversion"):
- try:
- output = spack.compiler.get_compiler_version_output(exe, vargs)
- match = version_regex.search(output)
- if match:
- return match.group(1)
- except spack.util.executable.ProcessError:
- pass
- except Exception as e:
- tty.debug(e)
- return None
@classmethod
def determine_variants(cls, exes, version_str):
- languages, compilers = set(), {}
- # There are often at least two copies (not symlinks) of each compiler executable in the
- # same directory: one with a canonical name, e.g. "gfortran", and another one with the
- # target prefix, e.g. "x86_64-pc-linux-gnu-gfortran". There also might be a copy of "gcc"
- # with the version suffix, e.g. "x86_64-pc-linux-gnu-gcc-6.3.0". To ensure the consistency
- # of values in the "compilers" dictionary (i.e. we prefer all of them to reference copies
- # with canonical names if possible), we iterate over the executables in the reversed sorted
- # order:
- for exe in sorted(exes, reverse=True):
- basename = os.path.basename(exe)
- if "g++" in basename:
- languages.add("c++")
- compilers["cxx"] = exe
- elif "gfortran" in basename:
- languages.add("fortran")
- compilers["fortran"] = exe
- elif "gcc" in basename:
- languages.add("c")
- compilers["c"] = exe
- elif "gccgo" in basename:
- languages.add("go")
- compilers["go"] = exe
- elif "gdc" in basename:
- languages.add("d")
- compilers["d"] = exe
+ compilers = cls.determine_compiler_paths(exes=exes)
+
+ languages = set()
+ translation = {"cxx": "c++"}
+ for lang, compiler in compilers.items():
+ languages.add(translation.get(lang, lang))
variant_str = "languages={0}".format(",".join(languages))
return variant_str, {"compilers": compilers}
@@ -694,10 +685,10 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
"""Get the right (but pessimistic) architecture specific flags supported by
both host gcc and to-be-built gcc. For example: gcc@7 %gcc@12 target=znver3
should pick -march=znver1, since that's what gcc@7 supports."""
- archs = [spec.target] + spec.target.ancestors
- for arch in archs:
+ microarchitectures = [spec.target] + spec.target.ancestors
+ for uarch in microarchitectures:
try:
- return arch.optimization_flags("gcc", spec.version)
+ return uarch.optimization_flags("gcc", str(spec.version))
except UnsupportedMicroarchitecture:
pass
# no arch specific flags in common, unlikely to happen.
@@ -861,6 +852,28 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
return options
+ # Copy nvptx-tools into the GCC install prefix
+ def copy_nvptx_tools(self):
+ nvptx_tools_bin_path = self.spec["nvptx-tools"].prefix.bin
+ gcc_bin_path = self.prefix.bin
+ mkdirp(gcc_bin_path)
+ copy_list = ["as", "ld", "nm", "run", "run-single"]
+ for file in copy_list:
+ fullname = f"nvptx-none-{file}"
+ copy(join_path(nvptx_tools_bin_path, fullname), join_path(gcc_bin_path, fullname))
+ link_list = ["ar", "ranlib"]
+ for file in link_list:
+ fullname = f"nvptx-none-{file}"
+ orig_target = readlink(join_path(nvptx_tools_bin_path, fullname))
+ symlink(orig_target, join_path(gcc_bin_path, fullname))
+ util_dir_path = join_path(self.prefix, "nvptx-none", "bin")
+ mkdirp(util_dir_path)
+ util_list = ["ar", "as", "ld", "nm", "ranlib"]
+ for file in util_list:
+ rel_target = join_path("..", "..", "bin", f"nvptx-none-{file}")
+ dest_link = join_path(util_dir_path, file)
+ symlink(rel_target, dest_link)
# run configure/make/make(install) for the nvptx-none target
# before running the host compiler phases
@run_before("configure")
@@ -883,11 +896,7 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
"--with-cuda-driver-lib={0}".format(spec["cuda"].libs.directories[0]),
]
- with working_dir("nvptx-tools"):
- configure = Executable("./configure")
- configure(*options)
- make()
- make("install")
+ self.copy_nvptx_tools()
pattern = join_path(self.stage.source_path, "newlibsource", "*")
files = glob.glob(pattern)
@@ -929,76 +938,43 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
@property
def spec_dir(self):
# e.g. lib/gcc/x86_64-unknown-linux-gnu/4.9.2
- spec_dir = glob.glob("{0}/gcc/*/*".format(self.prefix.lib))
+ spec_dir = glob.glob(f"{self.prefix.lib}/gcc/*/*")
return spec_dir[0] if spec_dir else None
@run_after("install")
- def write_rpath_specs(self):
- """Generate a spec file so the linker adds a rpath to the libs
- the compiler used to build the executable.
- .. caution::
- The custom spec file by default with *always* pass ``-Wl,-rpath
- ...`` to the linker, which will cause the linker to *ignore* the
- value of ``LD_RUN_PATH``, which otherwise would be saved to the
- binary as the default rpath. See the mitigation below for how to
- temporarily disable this behavior.
- Structure the specs file so that users can define a custom spec file
- to suppress the spack-linked rpaths to facilitate rpath adjustment
- for relocatable binaries. The custom spec file
- :file:`{norpath}.spec` will have a single
- line followed by two blanks lines::
- *link_libgcc_rpath:
- It can be passed to the GCC linker using the argument
- ``--specs=norpath.spec`` to disable the automatic rpath and restore
- the behavior of ``LD_RUN_PATH``."""
+ def write_specs_file(self):
+ """(1) inject an rpath to its runtime library dir, (2) add a default programs search path
+ to <binutils>/bin."""
if not self.spec_dir:
- tty.warn(
- "Could not install specs for {0}.".format(self.spec.format("{name}{@version}"))
- )
+ tty.warn(f"Could not install specs for {self.spec.format('{name}{@version}')}.")
return
- gcc = self.spec["gcc"].command
- lines = gcc("-dumpspecs", output=str).splitlines(True)
- specs_file = join_path(self.spec_dir, "specs")
- # Save a backup
- with open(specs_file + ".orig", "w") as out:
- out.writelines(lines)
# Find which directories have shared libraries
- rpath_libdirs = []
- for dir in ["lib", "lib64"]:
+ for dir in ["lib64", "lib"]:
libdir = join_path(self.prefix, dir)
- if glob.glob(join_path(libdir, "*." + dso_suffix)):
- rpath_libdirs.append(libdir)
+ if glob.glob(join_path(libdir, "libgcc_s.*")):
+ rpath_dir = libdir
+ break
- if not rpath_libdirs:
- # No shared libraries
+ else:
tty.warn("No dynamic libraries found in lib/lib64")
- return
+ rpath_dir = None
- # Overwrite the specs file
- with open(specs_file, "w") as out:
- for line in lines:
- out.write(line)
- if line.startswith("*link_libgcc:"):
- # Insert at start of line following link_libgcc, which gets
- # inserted into every call to the linker
- out.write("%(link_libgcc_rpath) ")
- # Add easily-overridable rpath string at the end
- out.write("*link_libgcc_rpath:\n")
- out.write(" ".join("-rpath " + lib for lib in rpath_libdirs))
- out.write("\n")
+ specs_file = join_path(self.spec_dir, "specs")
+ with open(specs_file, "w") as f:
+ # can't extend the builtins without dumping them first
+ f.write(self.spec["gcc"].command("-dumpspecs", output=str, error=os.devnull).strip())
+ f.write("\n\n# Generated by Spack\n\n")
+ # rpath
+ if rpath_dir:
+ f.write(f"*link_libgcc:\n+ -rpath {rpath_dir}\n\n")
+ # programs search path
+ if self.spec.satisfies("+binutils"):
+ f.write(f"*self_spec:\n+ -B{self.spec['binutils'].prefix.bin}\n\n")
set_install_permissions(specs_file)
- tty.info("Wrote new spec file to {0}".format(specs_file))
+ tty.info(f"Wrote new spec file to {specs_file}")
def setup_run_environment(self, env):
# Search prefix directory for possibly modified compiler names
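The rewritten write_specs_file no longer patches the dumped builtin specs line by line; it appends plain spec-file directives for the rpath and, with +binutils, a default programs search path. A sketch of the fragment it generates for a hypothetical prefix (both paths below are made up for illustration):

rpath_dir = "/opt/spack/gcc-13.2.0/lib64"        # hypothetical lib dir containing libgcc_s
binutils_bin = "/opt/spack/binutils-2.41/bin"    # hypothetical +binutils prefix

fragment = "\n\n# Generated by Spack\n\n"
fragment += f"*link_libgcc:\n+ -rpath {rpath_dir}\n\n"   # rpath injected into every link line
fragment += f"*self_spec:\n+ -B{binutils_bin}\n\n"       # default programs search path
print(fragment)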
@@ -1019,7 +995,9 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
continue
abspath = os.path.join(bin_path, filename)
- if os.path.islink(abspath):
+ # Skip broken symlinks (https://github.com/spack/spack/issues/41327)
+ if not os.path.exists(abspath):
continue
# Set the proper environment variable
@@ -1112,3 +1090,110 @@ class Gcc(AutotoolsPackage, GNUMirrorPackage):
),
),
)
@classmethod
def runtime_constraints(cls, *, spec, pkg):
"""Callback function to inject runtime-related rules into the solver.
Rule-injection is obtained through method calls of the ``pkg`` argument.
Documentation for this function is temporary. When the API will be in its final state,
we'll document the behavior at https://spack.readthedocs.io/en/latest/
Args:
spec: spec that will inject runtime dependencies
pkg: object used to forward information to the solver
"""
pkg("*").depends_on(
"gcc-runtime",
when="%gcc",
type="link",
description="If any package uses %gcc, it depends on gcc-runtime",
)
pkg("*").depends_on(
f"gcc-runtime@{str(spec.version)}:",
when=f"%{str(spec)}",
type="link",
description=f"If any package uses %{str(spec)}, "
f"it depends on gcc-runtime@{str(spec.version)}:",
)
gfortran_str = "libgfortran@5"
if spec.satisfies("gcc@:6"):
gfortran_str = "libgfortran@3"
elif spec.satisfies("gcc@7"):
gfortran_str = "libgfortran@4"
for fortran_virtual in ("fortran-rt", gfortran_str):
pkg("*").depends_on(
fortran_virtual,
when=f"%{str(spec)}",
languages=["fortran"],
type="link",
description=f"Add a dependency on '{gfortran_str}' for nodes compiled with "
f"{str(spec)} and using the 'fortran' language",
)
# The version of gcc-runtime is the same as the %gcc used to "compile" it
pkg("gcc-runtime").requires(f"@={str(spec.version)}", when=f"%{str(spec)}")
# If a node used %gcc@X.Y its dependencies must use gcc-runtime@:X.Y
# (technically @:X is broader than ... <= @=X but this should work in practice)
pkg("*").propagate(f"%gcc@:{str(spec.version)}", when=f"%{str(spec)}")
def _post_buildcache_install_hook(self):
if not self.spec.satisfies("platform=linux"):
return
# Setting up the runtime environment shouldn't be necessary here.
relocation_args = []
gcc = self.spec["gcc"].command
specs_file = os.path.join(self.spec_dir, "specs")
dryrun = gcc("test.c", "-###", output=os.devnull, error=str).strip()
if not dryrun:
tty.warn(f"Cannot relocate {specs_file}, compiler might not be working properly")
return
dynamic_linker = spack.util.libc.parse_dynamic_linker(dryrun)
if not dynamic_linker:
tty.warn(f"Cannot relocate {specs_file}, compiler might not be working properly")
return
libc = spack.util.libc.libc_from_dynamic_linker(dynamic_linker)
# We search for crt1.o ourselves because `gcc -print-file-name=crt1.o` can give a rather
# convoluted relative path from a different prefix.
startfile_prefix = spack.util.libc.startfile_prefix(libc.external_path, dynamic_linker)
gcc_can_locate = lambda p: os.path.isabs(
gcc(f"-print-file-name={p}", output=str, error=os.devnull).strip()
)
if not gcc_can_locate("crt1.o"):
relocation_args.append(f"-B{startfile_prefix}")
# libc headers may also be in a multiarch subdir.
header_dir = spack.util.libc.libc_include_dir_from_startfile_prefix(
libc.external_path, startfile_prefix
)
if header_dir and all(
os.path.exists(os.path.join(header_dir, h))
for h in libc.package_class.representative_headers
):
relocation_args.append(f"-idirafter {header_dir}")
else:
tty.warn(
f"Cannot relocate {specs_file} include directories, "
f"compiler might not be working properly"
)
# Delete current spec files.
try:
os.unlink(specs_file)
except OSError:
pass
# Write a new one and append flags for libc
self.write_specs_file()
if relocation_args:
with open(specs_file, "a") as f:
f.write(f"*self_spec:\n+ {' '.join(relocation_args)}\n\n")
@@ -15,6 +15,8 @@ class Libvips(AutotoolsPackage):
url = "https://github.com/libvips/libvips/releases/download/v8.9.0/vips-8.9.0.tar.gz"
git = "https://github.com/libvips/libvips.git"
+ license("LGPL-2.1-or-later")
+
version("8.13.3", sha256="4eff5cdc8dbe1a05a926290a99014e20ba386f5dcca38d9774bef61413435d4c")
version("8.10.5", sha256="a4eef2f5334ab6dbf133cd3c6d6394d5bdb3e76d5ea4d578b02e1bc3d9e1cfd8")
version("8.9.1", sha256="45633798877839005016c9d3494e98dee065f5cb9e20f4552d3b315b8e8bce91")