From 836be2364ca62e01e89ac07c8e18b19949fc43b4 Mon Sep 17 00:00:00 2001
From: Massimiliano Culpo <massimiliano.culpo@gmail.com>
Date: Thu, 22 Aug 2024 12:13:08 +0200
Subject: Make spack compiler find use external find (#45784)

so that compiler detection logic is not duplicated: "spack compiler find"
now reuses the generic external detection machinery (spack.detection.by_path)
instead of maintaining its own version-probing code.
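
An illustrative sketch of the consolidated Python entry point (not part of
the patch; the extra search path below is hypothetical), based on the new
find_compilers() signature introduced here:

    import spack.compilers

    # Search PATH plus any extra hints, write newly detected compilers to the
    # "site" scope, and get back the list of new Compiler objects.
    new = spack.compilers.find_compilers(
        path_hints=["/opt/gcc/12.3.0/bin"],  # hypothetical extra search path
        scope="site",                        # configuration scope to update
        mixed_toolchain=False,               # optionally borrow gfortran for clang
        max_workers=4,                       # processes used for detection
    )
    for compiler in new:
        print(compiler.spec)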
---
 lib/spack/llnl/util/tty/log.py                     |   1 +
 lib/spack/spack/bootstrap/config.py                |   6 +-
 lib/spack/spack/cmd/compiler.py                    |  23 +-
 lib/spack/spack/compilers/__init__.py              | 642 ++++++++-------------
 lib/spack/spack/detection/common.py                |   2 +-
 lib/spack/spack/detection/path.py                  |   4 +-
 lib/spack/spack/environment/environment.py         |   2 +-
 lib/spack/spack/installer.py                       |   4 +-
 lib/spack/spack/solver/asp.py                      |  14 +-
 lib/spack/spack/spec.py                            |   4 +-
 lib/spack/spack/test/bindist.py                    | 114 ++--
 lib/spack/spack/test/cmd/compiler.py               |  39 +-
 lib/spack/spack/test/compilers/basics.py           |  89 +--
 lib/spack/spack/test/compilers/detection.py        | 471 ---------------
 lib/spack/spack/test/cray_manifest.py              |   1 +
 lib/spack/spack/test/installer.py                  |  27 +-
 share/spack/spack-completion.bash                  |   4 +-
 share/spack/spack-completion.fish                  |   8 +-
 .../repos/builtin.mock/packages/gcc/package.py     |  12 +
 .../packages/intel-oneapi-compilers/package.py     |   9 +-
 20 files changed, 363 insertions(+), 1113 deletions(-)
 delete mode 100644 lib/spack/spack/test/compilers/detection.py

diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index aeb1114c5a..5613984340 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -10,6 +10,7 @@ import ctypes
 import errno
 import io
 import multiprocessing
+import multiprocessing.connection
 import os
 import re
 import select
diff --git a/lib/spack/spack/bootstrap/config.py b/lib/spack/spack/bootstrap/config.py
index 067e884b50..3feaef1d37 100644
--- a/lib/spack/spack/bootstrap/config.py
+++ b/lib/spack/spack/bootstrap/config.py
@@ -143,11 +143,7 @@ def _bootstrap_config_scopes() -> Sequence["spack.config.ConfigScope"]:
 def _add_compilers_if_missing() -> None:
     arch = spack.spec.ArchSpec.frontend_arch()
     if not spack.compilers.compilers_for_arch(arch):
-        new_compilers = spack.compilers.find_new_compilers(
-            mixed_toolchain=sys.platform == "darwin"
-        )
-        if new_compilers:
-            spack.compilers.add_compilers_to_config(new_compilers)
+        spack.compilers.find_compilers()
 
 
 @contextlib.contextmanager
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index 860f0a9ee0..2eb23bf714 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -50,6 +50,7 @@ def setup_parser(subparser):
         default=lambda: spack.config.default_modify_scope("compilers"),
         help="configuration scope to modify",
     )
+    arguments.add_common_arguments(find_parser, ["jobs"])
 
     # Remove
     remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
@@ -78,25 +79,21 @@ def setup_parser(subparser):
 def compiler_find(args):
     """Search either $PATH or a list of paths OR MODULES for compilers and
     add them to Spack's configuration.
-
     """
-    # None signals spack.compiler.find_compilers to use its default logic
     paths = args.add_paths or None
-
-    # Below scope=None because we want new compilers that don't appear
-    # in any other configuration.
-    new_compilers = spack.compilers.find_new_compilers(
-        paths, scope=None, mixed_toolchain=args.mixed_toolchain
+    new_compilers = spack.compilers.find_compilers(
+        path_hints=paths,
+        scope=args.scope,
+        mixed_toolchain=args.mixed_toolchain,
+        max_workers=args.jobs,
     )
     if new_compilers:
-        spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope)
         n = len(new_compilers)
         s = "s" if n > 1 else ""
-
-        config = spack.config.CONFIG
-        filename = config.get_config_filename(args.scope, "compilers")
-        tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
-        colify(reversed(sorted(c.spec.display_str for c in new_compilers)), indent=4)
+        filename = spack.config.CONFIG.get_config_filename(args.scope, "compilers")
+        tty.msg(f"Added {n:d} new compiler{s} to {filename}")
+        compiler_strs = sorted(f"{c.spec.name}@{c.spec.version}" for c in new_compilers)
+        colify(reversed(compiler_strs), indent=4)
     else:
         tty.msg("Found no new compilers")
     tty.msg("Compilers are defined in the following files:")
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 9712b63077..a42cd82d9d 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -7,11 +7,10 @@
 system and configuring Spack to use multiple compilers.
 """
 import collections
-import itertools
-import multiprocessing.pool
 import os
+import sys
 import warnings
-from typing import Dict, List, Optional, Tuple
+from typing import Dict, List, Optional
 
 import archspec.cpu
 
@@ -22,11 +21,12 @@ import llnl.util.tty as tty
 import spack.compiler
 import spack.config
 import spack.error
-import spack.operating_systems
 import spack.paths
 import spack.platforms
+import spack.repo
 import spack.spec
 import spack.version
+from spack.operating_systems import windows_os
 from spack.util.environment import get_path
 from spack.util.naming import mod_to_class
 
@@ -63,6 +63,10 @@ package_name_to_compiler_name = {
 }
 
 
+#: Tag used to identify packages providing a compiler
+COMPILER_TAG = "compiler"
+
+
 def pkg_spec_for_compiler(cspec):
     """Return the spec of the package that provides the compiler."""
     for spec, package in _compiler_to_pkg.items():
@@ -127,7 +131,7 @@ def get_compiler_config(
         # Do not init config because there is a non-empty scope
         return config
 
-    _init_compiler_config(configuration, scope=scope)
+    find_compilers(scope=scope)
     config = configuration.get("compilers", scope=scope)
     return config
 
@@ -136,125 +140,8 @@ def get_compiler_config_from_packages(
     configuration: "spack.config.Configuration", *, scope: Optional[str] = None
 ) -> List[Dict]:
     """Return the compiler configuration from packages.yaml"""
-    config = configuration.get("packages", scope=scope)
-    if not config:
-        return []
-
-    packages = []
-    compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
-    for name, entry in config.items():
-        if name not in compiler_package_names:
-            continue
-        externals_config = entry.get("externals", None)
-        if not externals_config:
-            continue
-        packages.extend(_compiler_config_from_package_config(externals_config))
-
-    return packages
-
-
-def _compiler_config_from_package_config(config):
-    compilers = []
-    for entry in config:
-        compiler = _compiler_config_from_external(entry)
-        if compiler:
-            compilers.append(compiler)
-
-    return compilers
-
-
-def _compiler_config_from_external(config):
-    extra_attributes_key = "extra_attributes"
-    compilers_key = "compilers"
-    c_key, cxx_key, fortran_key = "c", "cxx", "fortran"
-
-    # Allow `@x.y.z` instead of `@=x.y.z`
-    spec = spack.spec.parse_with_version_concrete(config["spec"])
-
-    compiler_spec = spack.spec.CompilerSpec(
-        package_name_to_compiler_name.get(spec.name, spec.name), spec.version
-    )
-
-    err_header = f"The external spec '{spec}' cannot be used as a compiler"
-
-    # If extra_attributes is not there I might not want to use this entry as a compiler,
-    # therefore just leave a debug message, but don't be loud with a warning.
-    if extra_attributes_key not in config:
-        tty.debug(f"[{__file__}] {err_header}: missing the '{extra_attributes_key}' key")
-        return None
-    extra_attributes = config[extra_attributes_key]
-
-    # If I have 'extra_attributes' warn if 'compilers' is missing, or we don't have a C compiler
-    if compilers_key not in extra_attributes:
-        warnings.warn(
-            f"{err_header}: missing the '{compilers_key}' key under '{extra_attributes_key}'"
-        )
-        return None
-    attribute_compilers = extra_attributes[compilers_key]
-
-    if c_key not in attribute_compilers:
-        warnings.warn(
-            f"{err_header}: missing the C compiler path under "
-            f"'{extra_attributes_key}:{compilers_key}'"
-        )
-        return None
-    c_compiler = attribute_compilers[c_key]
-
-    # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
-    if cxx_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
-
-    if fortran_key not in attribute_compilers:
-        tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
-
-    # compilers format has cc/fc/f77, externals format has "c/fortran"
-    paths = {
-        "cc": c_compiler,
-        "cxx": attribute_compilers.get(cxx_key, None),
-        "fc": attribute_compilers.get(fortran_key, None),
-        "f77": attribute_compilers.get(fortran_key, None),
-    }
-
-    if not spec.architecture:
-        host_platform = spack.platforms.host()
-        operating_system = host_platform.operating_system("default_os")
-        target = host_platform.target("default_target").microarchitecture
-    else:
-        target = spec.architecture.target
-        if not target:
-            target = spack.platforms.host().target("default_target")
-        target = target.microarchitecture
-
-        operating_system = spec.os
-        if not operating_system:
-            host_platform = spack.platforms.host()
-            operating_system = host_platform.operating_system("default_os")
-
-    compiler_entry = {
-        "compiler": {
-            "spec": str(compiler_spec),
-            "paths": paths,
-            "flags": extra_attributes.get("flags", {}),
-            "operating_system": str(operating_system),
-            "target": str(target.family),
-            "modules": config.get("modules", []),
-            "environment": extra_attributes.get("environment", {}),
-            "extra_rpaths": extra_attributes.get("extra_rpaths", []),
-            "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
-        }
-    }
-    return compiler_entry
-
-
-def _init_compiler_config(
-    configuration: "spack.config.Configuration", *, scope: Optional[str]
-) -> None:
-    """Compiler search used when Spack has no compilers."""
-    compilers = find_compilers()
-    compilers_dict = []
-    for compiler in compilers:
-        compilers_dict.append(_to_dict(compiler))
-    configuration.set("compilers", compilers_dict, scope=scope)
+    packages_yaml = configuration.get("packages", scope=scope)
+    return CompilerConfigFactory.from_packages_yaml(packages_yaml)
 
 
 def compiler_config_files():
@@ -278,9 +165,7 @@ def add_compilers_to_config(compilers, scope=None):
         compilers: a list of Compiler objects.
         scope: configuration scope to modify.
     """
-    compiler_config = get_compiler_config(
-        configuration=spack.config.CONFIG, scope=scope, init_config=False
-    )
+    compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
     for compiler in compilers:
         if not compiler.cc:
             tty.debug(f"{compiler.spec} does not have a C compiler")
@@ -329,9 +214,7 @@ def _remove_compiler_from_scope(compiler_spec, scope):
          True if one or more compiler entries were actually removed, False otherwise
     """
     assert scope is not None, "a specific scope is needed when calling this function"
-    compiler_config = get_compiler_config(
-        configuration=spack.config.CONFIG, scope=scope, init_config=False
-    )
+    compiler_config = get_compiler_config(configuration=spack.config.CONFIG, scope=scope)
     filtered_compiler_config = [
         compiler_entry
         for compiler_entry in compiler_config
@@ -380,79 +263,77 @@ def all_compiler_specs(scope=None, init_config=True):
 
 
 def find_compilers(
-    path_hints: Optional[List[str]] = None, *, mixed_toolchain=False
+    path_hints: Optional[List[str]] = None,
+    *,
+    scope: Optional[str] = None,
+    mixed_toolchain: bool = False,
+    max_workers: Optional[int] = None,
 ) -> List["spack.compiler.Compiler"]:
-    """Return the list of compilers found in the paths given as arguments.
+    """Searches for compiler in the paths given as argument. If any new compiler is found, the
+    configuration is updated, and the list of new compiler objects is returned.
 
     Args:
         path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
             environment variable will be used if the value is None
+        scope: configuration scope to modify
         mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
             a certain language
+        max_workers: number of processes used to search for compilers
     """
+    import spack.detection
+
+    known_compilers = set(all_compilers(init_config=False))
+
     if path_hints is None:
         path_hints = get_path("PATH")
     default_paths = fs.search_paths_for_executables(*path_hints)
+    if sys.platform == "win32":
+        default_paths.extend(windows_os.WindowsOs().compiler_search_paths)
+    compiler_pkgs = spack.repo.PATH.packages_with_tags(COMPILER_TAG, full=True)
 
-    # To detect the version of the compilers, we dispatch a certain number
-    # of function calls to different workers. Here we construct the list
-    # of arguments for each call.
-    arguments = []
-    for o in all_os_classes():
-        search_paths = getattr(o, "compiler_search_paths", default_paths)
-        arguments.extend(arguments_to_detect_version_fn(o, search_paths))
-
-    # Here we map the function arguments to the corresponding calls
-    tp = multiprocessing.pool.ThreadPool()
-    try:
-        detected_versions = tp.map(detect_version, arguments)
-    finally:
-        tp.close()
-
-    def valid_version(item: Tuple[Optional[DetectVersionArgs], Optional[str]]) -> bool:
-        value, error = item
-        if error is None:
-            return True
-        try:
-            # This will fail on Python 2.6 if a non ascii
-            # character is in the error
-            tty.debug(error)
-        except UnicodeEncodeError:
-            pass
-        return False
-
-    def remove_errors(
-        item: Tuple[Optional[DetectVersionArgs], Optional[str]]
-    ) -> DetectVersionArgs:
-        value, _ = item
-        assert value is not None
-        return value
-
-    return make_compiler_list(
-        [remove_errors(detected) for detected in detected_versions if valid_version(detected)],
-        mixed_toolchain=mixed_toolchain,
+    detected_packages = spack.detection.by_path(
+        compiler_pkgs, path_hints=default_paths, max_workers=max_workers
     )
 
+    valid_compilers = {}
+    for name, detected in detected_packages.items():
+        compilers = [x for x in detected if CompilerConfigFactory.from_external_spec(x.spec)]
+        if not compilers:
+            continue
+        valid_compilers[name] = compilers
 
-def find_new_compilers(
-    path_hints: Optional[List[str]] = None,
-    scope: Optional[str] = None,
-    *,
-    mixed_toolchain: bool = False,
-):
-    """Same as ``find_compilers`` but return only the compilers that are not
-    already in compilers.yaml.
+    def _has_fortran_compilers(x):
+        if "compilers" not in x.spec.extra_attributes:
+            return False
 
-    Args:
-        path_hints: list of path hints where to look for. A sensible default based on the ``PATH``
-            environment variable will be used if the value is None
-        scope: scope to look for a compiler. If None consider the merged configuration.
-        mixed_toolchain: allow mixing compilers from different toolchains if otherwise missing for
-            a certain language
-    """
-    compilers = find_compilers(path_hints, mixed_toolchain=mixed_toolchain)
+        return "fortran" in x.spec.extra_attributes["compilers"]
 
-    return select_new_compilers(compilers, scope)
+    if mixed_toolchain:
+        gccs = [x for x in valid_compilers.get("gcc", []) if _has_fortran_compilers(x)]
+        if gccs:
+            best_gcc = sorted(
+                gccs, key=lambda x: spack.spec.parse_with_version_concrete(x.spec).version
+            )[-1]
+            gfortran = best_gcc.spec.extra_attributes["compilers"]["fortran"]
+            for name in ("llvm", "apple-clang"):
+                if name not in valid_compilers:
+                    continue
+                candidates = valid_compilers[name]
+                for candidate in candidates:
+                    if _has_fortran_compilers(candidate):
+                        continue
+                    candidate.spec.extra_attributes["compilers"]["fortran"] = gfortran
+
+    new_compilers = []
+    for name, detected in valid_compilers.items():
+        for config in CompilerConfigFactory.from_specs([x.spec for x in detected]):
+            c = _compiler_from_config_entry(config["compiler"])
+            if c in known_compilers:
+                continue
+            new_compilers.append(c)
+
+    add_compilers_to_config(new_compilers, scope=scope)
+    return new_compilers
 
 
 def select_new_compilers(compilers, scope=None):
@@ -462,7 +343,9 @@ def select_new_compilers(compilers, scope=None):
     compilers_not_in_config = []
     for c in compilers:
         arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
-        same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
+        same_specs = compilers_for_spec(
+            c.spec, arch_spec=arch_spec, scope=scope, init_config=False
+        )
         if not same_specs:
             compilers_not_in_config.append(c)
 
@@ -531,7 +414,12 @@ def find(compiler_spec, scope=None, init_config=True):
 def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
     """Return specs of available compilers that match the supplied
     compiler spec.  Return an empty list if nothing found."""
-    return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]
+    return [
+        c.spec
+        for c in compilers_for_spec(
+            compiler_spec, arch_spec=arch_spec, scope=scope, init_config=init_config
+        )
+    ]
 
 
 def all_compilers(scope=None, init_config=True):
@@ -553,14 +441,11 @@ def all_compilers_from(configuration, scope=None, init_config=True):
 
 
 @_auto_compiler_spec
-def compilers_for_spec(
-    compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
-):
+def compilers_for_spec(compiler_spec, *, arch_spec=None, scope=None, init_config=True):
     """This gets all compilers that satisfy the supplied CompilerSpec.
     Returns an empty list if none are found.
     """
     config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=init_config)
-
     matches = set(find(compiler_spec, scope, init_config))
     compilers = []
     for cspec in matches:
@@ -569,7 +454,7 @@ def compilers_for_spec(
 
 
 def compilers_for_arch(arch_spec, scope=None):
-    config = all_compilers_config(spack.config.CONFIG, scope=scope)
+    config = all_compilers_config(spack.config.CONFIG, scope=scope, init_config=False)
     return list(get_compilers(config, arch_spec=arch_spec))
 
 
@@ -819,228 +704,6 @@ DetectVersionArgs = collections.namedtuple(
 )
 
 
-def arguments_to_detect_version_fn(
-    operating_system: spack.operating_systems.OperatingSystem, paths: List[str]
-) -> List[DetectVersionArgs]:
-    """Returns a list of DetectVersionArgs tuples to be used in a
-    corresponding function to detect compiler versions.
-
-    The ``operating_system`` instance can customize the behavior of this
-    function by providing a method called with the same name.
-
-    Args:
-        operating_system: the operating system on which we are looking for compilers
-        paths: paths to search for compilers
-
-    Returns:
-        List of DetectVersionArgs tuples. Each item in the list will be later
-        mapped to the corresponding function call to detect the version of the
-        compilers in this OS.
-    """
-
-    def _default(search_paths: List[str]) -> List[DetectVersionArgs]:
-        command_arguments: List[DetectVersionArgs] = []
-        files_to_be_tested = fs.files_in(*search_paths)
-        for compiler_name in supported_compilers_for_host_platform():
-            compiler_cls = class_for_compiler_name(compiler_name)
-
-            for language in ("cc", "cxx", "f77", "fc"):
-                # Select only the files matching a regexp
-                for (file, full_path), regexp in itertools.product(
-                    files_to_be_tested, compiler_cls.search_regexps(language)
-                ):
-                    match = regexp.match(file)
-                    if match:
-                        compiler_id = CompilerID(operating_system, compiler_name, None)
-                        detect_version_args = DetectVersionArgs(
-                            id=compiler_id,
-                            variation=NameVariation(*match.groups()),
-                            language=language,
-                            path=full_path,
-                        )
-                        command_arguments.append(detect_version_args)
-
-        return command_arguments
-
-    fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
-    return fn(paths)
-
-
-def detect_version(
-    detect_version_args: DetectVersionArgs,
-) -> Tuple[Optional[DetectVersionArgs], Optional[str]]:
-    """Computes the version of a compiler and adds it to the information
-    passed as input.
-
-    As this function is meant to be executed by worker processes it won't
-    raise any exception but instead will return a (value, error) tuple that
-    needs to be checked by the code dispatching the calls.
-
-    Args:
-        detect_version_args: information on the compiler for which we should detect the version.
-
-    Returns:
-        A ``(DetectVersionArgs, error)`` tuple. If ``error`` is ``None`` the
-        version of the compiler was computed correctly and the first argument
-        of the tuple will contain it. Otherwise ``error`` is a string
-        containing an explanation on why the version couldn't be computed.
-    """
-
-    def _default(fn_args):
-        compiler_id = fn_args.id
-        language = fn_args.language
-        compiler_cls = class_for_compiler_name(compiler_id.compiler_name)
-        path = fn_args.path
-
-        # Get compiler names and the callback to detect their versions
-        callback = getattr(compiler_cls, f"{language}_version")
-
-        try:
-            version = callback(path)
-            if version and str(version).strip() and version != "unknown":
-                value = fn_args._replace(id=compiler_id._replace(version=version))
-                return value, None
-
-            error = f"Couldn't get version for compiler {path}".format(path)
-        except spack.util.executable.ProcessError as e:
-            error = f"Couldn't get version for compiler {path}\n" + str(e)
-        except spack.util.executable.ProcessTimeoutError as e:
-            error = f"Couldn't get version for compiler {path}\n" + str(e)
-        except Exception as e:
-            # Catching "Exception" here is fine because it just
-            # means something went wrong running a candidate executable.
-            error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
-                path, e.__class__.__name__, str(e)
-            )
-        return None, error
-
-    operating_system = detect_version_args.id.os
-    fn = getattr(operating_system, "detect_version", _default)
-    return fn(detect_version_args)
-
-
-def make_compiler_list(
-    detected_versions: List[DetectVersionArgs], mixed_toolchain: bool = False
-) -> List["spack.compiler.Compiler"]:
-    """Process a list of detected versions and turn them into a list of
-    compiler specs.
-
-    Args:
-        detected_versions: list of DetectVersionArgs containing a valid version
-        mixed_toolchain: allow mixing compilers from different toolchains if langauge is missing
-
-    Returns:
-        list: list of Compiler objects
-    """
-    group_fn = lambda x: (x.id, x.variation, x.language)
-    sorted_compilers = sorted(detected_versions, key=group_fn)
-
-    # Gather items in a dictionary by the id, name variation and language
-    compilers_d: Dict[CompilerID, Dict[NameVariation, dict]] = {}
-    for sort_key, group in itertools.groupby(sorted_compilers, key=group_fn):
-        compiler_id, name_variation, language = sort_key
-        by_compiler_id = compilers_d.setdefault(compiler_id, {})
-        by_name_variation = by_compiler_id.setdefault(name_variation, {})
-        by_name_variation[language] = next(x.path for x in group)
-
-    def _default_make_compilers(cmp_id, paths):
-        operating_system, compiler_name, version = cmp_id
-        compiler_cls = class_for_compiler_name(compiler_name)
-        spec = spack.spec.CompilerSpec(compiler_cls.name, f"={version}")
-        paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
-        # TODO: johnwparent - revist the following line as per discussion at:
-        # https://github.com/spack/spack/pull/33385/files#r1040036318
-        target = archspec.cpu.host()
-        compiler = compiler_cls(spec, operating_system, str(target.family), paths)
-        return [compiler]
-
-    # For compilers with the same compiler id:
-    #
-    # - Prefer with C compiler to without
-    # - Prefer with C++ compiler to without
-    # - Prefer no variations to variations (e.g., clang to clang-gpu)
-    #
-    sort_fn = lambda variation: (
-        "cc" not in by_compiler_id[variation],  # None last
-        "cxx" not in by_compiler_id[variation],  # None last
-        getattr(variation, "prefix", None),
-        getattr(variation, "suffix", None),
-    )
-
-    # Flatten to a list of compiler id, primary variation and compiler dictionary
-    flat_compilers: List[Tuple[CompilerID, NameVariation, dict]] = []
-    for compiler_id, by_compiler_id in compilers_d.items():
-        ordered = sorted(by_compiler_id, key=sort_fn)
-        selected_variation = ordered[0]
-        selected = by_compiler_id[selected_variation]
-
-        # Fill any missing parts from subsequent entries (without mixing toolchains)
-        for lang in ["cxx", "f77", "fc"]:
-            if lang not in selected:
-                next_lang = next(
-                    (by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
-                )
-                if next_lang:
-                    selected[lang] = next_lang
-
-        flat_compilers.append((compiler_id, selected_variation, selected))
-
-    # Next, fill out the blanks of missing compilers by creating a mixed toolchain (if requested)
-    if mixed_toolchain:
-        make_mixed_toolchain(flat_compilers)
-
-    # Finally, create the compiler list
-    compilers: List["spack.compiler.Compiler"] = []
-    for compiler_id, _, compiler in flat_compilers:
-        make_compilers = getattr(compiler_id.os, "make_compilers", _default_make_compilers)
-        candidates = make_compilers(compiler_id, compiler)
-        compilers.extend(x for x in candidates if x.cc is not None)
-
-    return compilers
-
-
-def make_mixed_toolchain(compilers: List[Tuple[CompilerID, NameVariation, dict]]) -> None:
-    """Add missing compilers across toolchains when they are missing for a particular language.
-    This currently only adds the most sensible gfortran to (apple)-clang if it doesn't have a
-    fortran compiler (no flang)."""
-
-    # First collect the clangs that are missing a fortran compiler
-    clangs_without_flang = [
-        (id, variation, compiler)
-        for id, variation, compiler in compilers
-        if id.compiler_name in ("clang", "apple-clang")
-        and "f77" not in compiler
-        and "fc" not in compiler
-    ]
-    if not clangs_without_flang:
-        return
-
-    # Filter on GCCs with fortran compiler
-    gccs_with_fortran = [
-        (id, variation, compiler)
-        for id, variation, compiler in compilers
-        if id.compiler_name == "gcc" and "f77" in compiler and "fc" in compiler
-    ]
-
-    # Sort these GCCs by "best variation" (no prefix / suffix first)
-    gccs_with_fortran.sort(
-        key=lambda x: (getattr(x[1], "prefix", None), getattr(x[1], "suffix", None))
-    )
-
-    # Attach the optimal GCC fortran compiler to the clangs that don't have one
-    for clang_id, _, clang_compiler in clangs_without_flang:
-        gcc_compiler = next(
-            (gcc[2] for gcc in gccs_with_fortran if gcc[0].os == clang_id.os), None
-        )
-
-        if not gcc_compiler:
-            continue
-
-        # Update the fc / f77 entries
-        clang_compiler["f77"] = gcc_compiler["f77"]
-        clang_compiler["fc"] = gcc_compiler["fc"]
-
-
 def is_mixed_toolchain(compiler):
     """Returns True if the current compiler is a mixed toolchain,
     False otherwise.
@@ -1087,6 +750,155 @@ def is_mixed_toolchain(compiler):
     return False
 
 
+_EXTRA_ATTRIBUTES_KEY = "extra_attributes"
+_COMPILERS_KEY = "compilers"
+_C_KEY = "c"
+_CXX_KEY, _FORTRAN_KEY = "cxx", "fortran"
+
+
+class CompilerConfigFactory:
+    """Class aggregating all ways of constructing a list of compiler config entries."""
+
+    @staticmethod
+    def from_specs(specs: List["spack.spec.Spec"]) -> List[dict]:
+        result = []
+        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
+        for s in specs:
+            if s.name not in compiler_package_names:
+                continue
+
+            candidate = CompilerConfigFactory.from_external_spec(s)
+            if candidate is None:
+                continue
+
+            result.append(candidate)
+        return result
+
+    @staticmethod
+    def from_packages_yaml(packages_yaml) -> List[dict]:
+        compiler_specs = []
+        compiler_package_names = supported_compilers() + list(package_name_to_compiler_name.keys())
+        for name, entry in packages_yaml.items():
+            if name not in compiler_package_names:
+                continue
+
+            externals_config = entry.get("externals", None)
+            if not externals_config:
+                continue
+
+            current_specs = []
+            for current_external in externals_config:
+                compiler = CompilerConfigFactory._spec_from_external_config(current_external)
+                if compiler:
+                    current_specs.append(compiler)
+            compiler_specs.extend(current_specs)
+
+        return CompilerConfigFactory.from_specs(compiler_specs)
+
+    @staticmethod
+    def _spec_from_external_config(config):
+        # Allow `@x.y.z` instead of `@=x.y.z`
+        err_header = f"The external spec '{config['spec']}' cannot be used as a compiler"
+        # If 'extra_attributes' is missing, this entry may not be meant to be used as a
+        # compiler, so just leave a debug message rather than a warning.
+        if _EXTRA_ATTRIBUTES_KEY not in config:
+            tty.debug(f"[{__file__}] {err_header}: missing the '{_EXTRA_ATTRIBUTES_KEY}' key")
+            return None
+        extra_attributes = config[_EXTRA_ATTRIBUTES_KEY]
+        result = spack.spec.Spec(
+            str(spack.spec.parse_with_version_concrete(config["spec"])),
+            external_modules=config.get("modules"),
+        )
+        result.extra_attributes = extra_attributes
+        return result
+
+    @staticmethod
+    def from_external_spec(spec: "spack.spec.Spec") -> Optional[dict]:
+        spec = spack.spec.parse_with_version_concrete(spec)
+        extra_attributes = getattr(spec, _EXTRA_ATTRIBUTES_KEY, None)
+        if extra_attributes is None:
+            return None
+
+        paths = CompilerConfigFactory._extract_compiler_paths(spec)
+        if paths is None:
+            return None
+
+        compiler_spec = spack.spec.CompilerSpec(
+            package_name_to_compiler_name.get(spec.name, spec.name), spec.version
+        )
+
+        operating_system, target = CompilerConfigFactory._extract_os_and_target(spec)
+
+        compiler_entry = {
+            "compiler": {
+                "spec": str(compiler_spec),
+                "paths": paths,
+                "flags": extra_attributes.get("flags", {}),
+                "operating_system": str(operating_system),
+                "target": str(target.family),
+                "modules": getattr(spec, "external_modules", []),
+                "environment": extra_attributes.get("environment", {}),
+                "extra_rpaths": extra_attributes.get("extra_rpaths", []),
+                "implicit_rpaths": extra_attributes.get("implicit_rpaths", None),
+            }
+        }
+        return compiler_entry
+
+    @staticmethod
+    def _extract_compiler_paths(spec: "spack.spec.Spec") -> Optional[Dict[str, str]]:
+        err_header = f"The external spec '{spec}' cannot be used as a compiler"
+        extra_attributes = spec.extra_attributes
+        # 'extra_attributes' is present: warn if 'compilers' is missing
+        # or if there is no C compiler
+        if _COMPILERS_KEY not in extra_attributes:
+            warnings.warn(
+                f"{err_header}: missing the '{_COMPILERS_KEY}' key under '{_EXTRA_ATTRIBUTES_KEY}'"
+            )
+            return None
+        attribute_compilers = extra_attributes[_COMPILERS_KEY]
+
+        if _C_KEY not in attribute_compilers:
+            warnings.warn(
+                f"{err_header}: missing the C compiler path under "
+                f"'{_EXTRA_ATTRIBUTES_KEY}:{_COMPILERS_KEY}'"
+            )
+            return None
+        c_compiler = attribute_compilers[_C_KEY]
+
+        # C++ and Fortran compilers are not mandatory, so let's just leave a debug trace
+        if _CXX_KEY not in attribute_compilers:
+            tty.debug(f"[{__file__}] The external spec {spec} does not have a C++ compiler")
+
+        if _FORTRAN_KEY not in attribute_compilers:
+            tty.debug(f"[{__file__}] The external spec {spec} does not have a Fortran compiler")
+
+        # compilers format has cc/fc/f77, externals format has "c/fortran"
+        return {
+            "cc": c_compiler,
+            "cxx": attribute_compilers.get(_CXX_KEY, None),
+            "fc": attribute_compilers.get(_FORTRAN_KEY, None),
+            "f77": attribute_compilers.get(_FORTRAN_KEY, None),
+        }
+
+    @staticmethod
+    def _extract_os_and_target(spec: "spack.spec.Spec"):
+        if not spec.architecture:
+            host_platform = spack.platforms.host()
+            operating_system = host_platform.operating_system("default_os")
+            target = host_platform.target("default_target").microarchitecture
+        else:
+            target = spec.architecture.target
+            if not target:
+                target = spack.platforms.host().target("default_target")
+            target = target.microarchitecture
+
+            operating_system = spec.os
+            if not operating_system:
+                host_platform = spack.platforms.host()
+                operating_system = host_platform.operating_system("default_os")
+        return operating_system, target
+
+
 class InvalidCompilerConfigurationError(spack.error.SpackError):
     def __init__(self, compiler_spec):
         super().__init__(
diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py
index e043c6fb8a..596d2ccd7e 100644
--- a/lib/spack/spack/detection/common.py
+++ b/lib/spack/spack/detection/common.py
@@ -239,7 +239,7 @@ def update_configuration(
         external_entries = pkg_config.get("externals", [])
         assert not isinstance(external_entries, bool), "unexpected value for external entry"
 
-        all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in external_entries])
+        all_new_specs.extend([x.spec for x in new_entries])
         if buildable is False:
             pkg_config["buildable"] = False
         pkg_to_cfg[package_name] = pkg_config
diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py
index 96535e7e48..75d29fe1db 100644
--- a/lib/spack/spack/detection/path.py
+++ b/lib/spack/spack/detection/path.py
@@ -62,7 +62,7 @@ def common_windows_package_paths(pkg_cls=None) -> List[str]:
 
 def file_identifier(path):
     s = os.stat(path)
-    return (s.st_dev, s.st_ino)
+    return s.st_dev, s.st_ino
 
 
 def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
@@ -80,6 +80,8 @@ def executables_in_path(path_hints: List[str]) -> Dict[str, str]:
             constructed based on the PATH environment variable.
     """
     search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
+    # Make sure we don't doubly list /usr/lib and /lib, etc.
+    search_paths = list(llnl.util.lang.dedupe(search_paths, key=file_identifier))
     return path_to_dict(search_paths)
 
 
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index 4114a1cdb9..877e294b74 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -1644,7 +1644,7 @@ class Environment:
 
         # Ensure we have compilers in compilers.yaml to avoid that
         # processes try to write the config file in parallel
-        _ = spack.compilers.get_compiler_config(spack.config.CONFIG, init_config=True)
+        _ = spack.compilers.all_compilers_config(spack.config.CONFIG)
 
         # Early return if there is nothing to do
         if len(args) == 0:
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index dc94c95926..9039a3738f 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -1611,9 +1611,7 @@ class PackageInstaller:
 
     def _add_compiler_package_to_config(self, pkg: "spack.package_base.PackageBase") -> None:
         compiler_search_prefix = getattr(pkg, "compiler_search_prefix", pkg.spec.prefix)
-        spack.compilers.add_compilers_to_config(
-            spack.compilers.find_compilers([compiler_search_prefix])
-        )
+        spack.compilers.find_compilers([compiler_search_prefix])
 
     def _install_task(self, task: BuildTask, install_status: InstallStatus) -> None:
         """
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index e12789055c..b3f1248ce7 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -285,16 +285,14 @@ def _create_counter(specs: List[spack.spec.Spec], tests: bool):
     return NoDuplicatesCounter(specs, tests=tests)
 
 
-def all_compilers_in_config(configuration):
-    return spack.compilers.all_compilers_from(configuration)
-
-
 def all_libcs() -> Set[spack.spec.Spec]:
     """Return a set of all libc specs targeted by any configured compiler. If none, fall back to
     libc determined from the current Python process if dynamically linked."""
 
     libcs = {
-        c.default_libc for c in all_compilers_in_config(spack.config.CONFIG) if c.default_libc
+        c.default_libc
+        for c in spack.compilers.all_compilers_from(spack.config.CONFIG)
+        if c.default_libc
     }
 
     if libcs:
@@ -613,7 +611,7 @@ def _external_config_with_implicit_externals(configuration):
     if not using_libc_compatibility():
         return packages_yaml
 
-    for compiler in all_compilers_in_config(configuration):
+    for compiler in spack.compilers.all_compilers_from(configuration):
         libc = compiler.default_libc
         if libc:
             entry = {"spec": f"{libc} %{compiler.spec}", "prefix": libc.external_path}
@@ -3002,7 +3000,7 @@ class CompilerParser:
 
     def __init__(self, configuration) -> None:
         self.compilers: Set[KnownCompiler] = set()
-        for c in all_compilers_in_config(configuration):
+        for c in spack.compilers.all_compilers_from(configuration):
             if using_libc_compatibility() and not c_compiler_runs(c):
                 tty.debug(
                     f"the C compiler {c.cc} does not exist, or does not run correctly."
@@ -3466,7 +3464,7 @@ class SpecBuilder:
         """
         # reverse compilers so we get highest priority compilers that share a spec
         compilers = dict(
-            (c.spec, c) for c in reversed(all_compilers_in_config(spack.config.CONFIG))
+            (c.spec, c) for c in reversed(spack.compilers.all_compilers_from(spack.config.CONFIG))
         )
         cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
 
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index ccdc498214..eddbbf934c 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -4341,9 +4341,9 @@ class Spec:
                 v.attach_lookup(spack.version.git_ref_lookup.GitRefLookup(self.fullname))
 
 
-def parse_with_version_concrete(string: str, compiler: bool = False):
+def parse_with_version_concrete(spec_like: Union[str, Spec], compiler: bool = False):
     """Same as Spec(string), but interprets @x as @=x"""
-    s: Union[CompilerSpec, Spec] = CompilerSpec(string) if compiler else Spec(string)
+    s: Union[CompilerSpec, Spec] = CompilerSpec(spec_like) if compiler else Spec(spec_like)
     interpreted_version = s.versions.concrete_range_as_version
     if interpreted_version:
         s.versions = vn.VersionList([interpreted_version])
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index 996428ce59..fcc781b142 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -8,7 +8,9 @@ import gzip
 import io
 import json
 import os
+import pathlib
 import platform
+import shutil
 import sys
 import tarfile
 import urllib.error
@@ -16,12 +18,11 @@ import urllib.request
 import urllib.response
 from pathlib import Path, PurePath
 
-import py
 import pytest
 
 import archspec.cpu
 
-from llnl.util.filesystem import join_path, visit_directory_tree
+from llnl.util.filesystem import copy_tree, join_path, visit_directory_tree
 from llnl.util.symlink import readlink
 
 import spack.binary_distribution as bindist
@@ -81,72 +82,67 @@ def test_mirror(mirror_dir):
 
 
 @pytest.fixture(scope="module")
-def config_directory(tmpdir_factory):
-    tmpdir = tmpdir_factory.mktemp("test_configs")
-    # restore some sane defaults for packages and config
-    config_path = py.path.local(spack.paths.etc_path)
-    modules_yaml = config_path.join("defaults", "modules.yaml")
-    os_modules_yaml = config_path.join(
-        "defaults", "%s" % platform.system().lower(), "modules.yaml"
-    )
-    packages_yaml = config_path.join("defaults", "packages.yaml")
-    config_yaml = config_path.join("defaults", "config.yaml")
-    repos_yaml = config_path.join("defaults", "repos.yaml")
-    tmpdir.ensure("site", dir=True)
-    tmpdir.ensure("user", dir=True)
-    tmpdir.ensure("site/%s" % platform.system().lower(), dir=True)
-    modules_yaml.copy(tmpdir.join("site", "modules.yaml"))
-    os_modules_yaml.copy(tmpdir.join("site/%s" % platform.system().lower(), "modules.yaml"))
-    packages_yaml.copy(tmpdir.join("site", "packages.yaml"))
-    config_yaml.copy(tmpdir.join("site", "config.yaml"))
-    repos_yaml.copy(tmpdir.join("site", "repos.yaml"))
-    yield tmpdir
-    tmpdir.remove()
+def config_directory(tmp_path_factory):
+    # Copy defaults to a temporary "site" scope
+    defaults_dir = tmp_path_factory.mktemp("test_configs")
+    config_path = pathlib.Path(spack.paths.etc_path)
+    copy_tree(str(config_path / "defaults"), str(defaults_dir / "site"))
+
+    # Create a "user" scope
+    (defaults_dir / "user").mkdir()
+
+    # Detect compilers
+    cfg_scopes = [
+        spack.config.DirectoryConfigScope(name, str(defaults_dir / name))
+        for name in [f"site/{platform.system().lower()}", "site", "user"]
+    ]
+    with spack.config.use_configuration(*cfg_scopes):
+        _ = spack.compilers.find_compilers(scope="site")
+
+    yield defaults_dir
+
+    shutil.rmtree(str(defaults_dir))
 
 
 @pytest.fixture(scope="function")
-def default_config(tmpdir, config_directory, monkeypatch, install_mockery):
+def default_config(tmp_path, config_directory, monkeypatch, install_mockery):
     # This fixture depends on install_mockery to ensure
     # there is a clear order of initialization. The substitution of the
     # config scopes here is done on top of the substitution that comes with
     # install_mockery
-    mutable_dir = tmpdir.mkdir("mutable_config").join("tmp")
-    config_directory.copy(mutable_dir)
-
-    cfg = spack.config.Configuration(
-        *[
-            spack.config.DirectoryConfigScope(name, str(mutable_dir))
-            for name in [f"site/{platform.system().lower()}", "site", "user"]
-        ]
-    )
-
-    spack.config.CONFIG, old_config = cfg, spack.config.CONFIG
-    spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
-    njobs = spack.config.get("config:build_jobs")
-    if not njobs:
-        spack.config.set("config:build_jobs", 4, scope="user")
-    extensions = spack.config.get("config:template_dirs")
-    if not extensions:
-        spack.config.set(
-            "config:template_dirs",
-            [os.path.join(spack.paths.share_path, "templates")],
-            scope="user",
-        )
-
-    mutable_dir.ensure("build_stage", dir=True)
-    build_stage = spack.config.get("config:build_stage")
-    if not build_stage:
-        spack.config.set(
-            "config:build_stage", [str(mutable_dir.join("build_stage"))], scope="user"
-        )
-    timeout = spack.config.get("config:connect_timeout")
-    if not timeout:
-        spack.config.set("config:connect_timeout", 10, scope="user")
+    mutable_dir = tmp_path / "mutable_config" / "tmp"
+    mutable_dir.mkdir(parents=True)
+    copy_tree(str(config_directory), str(mutable_dir))
+
+    scopes = [
+        spack.config.DirectoryConfigScope(name, str(mutable_dir / name))
+        for name in [f"site/{platform.system().lower()}", "site", "user"]
+    ]
+
+    with spack.config.use_configuration(*scopes):
+        spack.config.CONFIG.set("repos", [spack.paths.mock_packages_path])
+        njobs = spack.config.get("config:build_jobs")
+        if not njobs:
+            spack.config.set("config:build_jobs", 4, scope="user")
+        extensions = spack.config.get("config:template_dirs")
+        if not extensions:
+            spack.config.set(
+                "config:template_dirs",
+                [os.path.join(spack.paths.share_path, "templates")],
+                scope="user",
+            )
 
-    yield spack.config.CONFIG
+        (mutable_dir / "build_stage").mkdir()
+        build_stage = spack.config.get("config:build_stage")
+        if not build_stage:
+            spack.config.set(
+                "config:build_stage", [str(mutable_dir / "build_stage")], scope="user"
+            )
+        timeout = spack.config.get("config:connect_timeout")
+        if not timeout:
+            spack.config.set("config:connect_timeout", 10, scope="user")
 
-    spack.config.CONFIG = old_config
-    mutable_dir.remove()
+        yield spack.config.CONFIG
 
 
 @pytest.fixture(scope="function")
diff --git a/lib/spack/spack/test/cmd/compiler.py b/lib/spack/spack/test/cmd/compiler.py
index 2fde7fbc92..2638aa7926 100644
--- a/lib/spack/spack/test/cmd/compiler.py
+++ b/lib/spack/spack/test/cmd/compiler.py
@@ -81,34 +81,6 @@ def test_compiler_find_without_paths(no_compilers_yaml, working_env, mock_execut
     assert "gcc" in output
 
 
-@pytest.mark.regression("17589")
-def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, mock_executable):
-    """Tests that Spack won't mistake Apple's GCC as a "real" GCC, since it's really
-    Clang with a few tweaks.
-    """
-    gcc_path = mock_executable(
-        "gcc",
-        output="""
-if [ "$1" = "-dumpversion" ]; then
-    echo "4.2.1"
-elif [ "$1" = "--version" ]; then
-    echo "Configured with: --prefix=/dummy"
-    echo "Apple clang version 11.0.0 (clang-1100.0.33.16)"
-    echo "Target: x86_64-apple-darwin18.7.0"
-    echo "Thread model: posix"
-    echo "InstalledDir: /dummy"
-else
-    echo "clang: error: no input files"
-fi
-""",
-    )
-
-    os.environ["PATH"] = str(gcc_path.parent)
-    output = compiler("find", "--scope=site")
-
-    assert "gcc" not in output
-
-
 @pytest.mark.regression("37996")
 def test_compiler_remove(mutable_config, mock_packages):
     """Tests that we can remove a compiler from configuration."""
@@ -131,7 +103,7 @@ def test_removing_compilers_from_multiple_scopes(mutable_config, mock_packages):
 
 
 @pytest.mark.not_on_windows("Cannot execute bash script on Windows")
-def test_compiler_add(mutable_config, mock_packages, mock_executable):
+def test_compiler_add(mutable_config, mock_executable):
     """Tests that we can add a compiler to configuration."""
     expected_version = "4.5.3"
     gcc_path = mock_executable(
@@ -149,7 +121,12 @@ done
 
     compilers_before_find = set(spack.compilers.all_compiler_specs())
     args = spack.util.pattern.Bunch(
-        all=None, compiler_spec=None, add_paths=[str(root_dir)], scope=None, mixed_toolchain=False
+        all=None,
+        compiler_spec=None,
+        add_paths=[str(root_dir)],
+        scope=None,
+        mixed_toolchain=False,
+        jobs=1,
     )
     spack.cmd.compiler.compiler_find(args)
     compilers_after_find = set(spack.compilers.all_compiler_specs())
@@ -229,7 +206,7 @@ def test_compiler_find_path_order(no_compilers_yaml, working_env, compilers_dir)
     for name in ("gcc-8", "g++-8", "gfortran-8"):
         shutil.copy(compilers_dir / name, new_dir / name)
     # Set PATH to have the new folder searched first
-    os.environ["PATH"] = "{}:{}".format(str(new_dir), str(compilers_dir))
+    os.environ["PATH"] = f"{str(new_dir)}:{str(compilers_dir)}"
 
     compiler("find", "--scope=site")
 
diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py
index 0884f9b1a4..db9be42745 100644
--- a/lib/spack/spack/test/compilers/basics.py
+++ b/lib/spack/spack/test/compilers/basics.py
@@ -19,27 +19,6 @@ from spack.compiler import Compiler
 from spack.util.executable import Executable, ProcessError
 
 
-@pytest.fixture()
-def make_args_for_version(monkeypatch):
-    def _factory(version, path="/usr/bin/gcc"):
-        class MockOs:
-            pass
-
-        compiler_name = "gcc"
-        compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
-        monkeypatch.setattr(compiler_cls, "cc_version", lambda x: version)
-
-        compiler_id = spack.compilers.CompilerID(
-            os=MockOs, compiler_name=compiler_name, version=None
-        )
-        variation = spack.compilers.NameVariation(prefix="", suffix="")
-        return spack.compilers.DetectVersionArgs(
-            id=compiler_id, variation=variation, language="cc", path=path
-        )
-
-    return _factory
-
-
 def test_multiple_conflicting_compiler_definitions(mutable_config):
     compiler_def = {
         "compiler": {
@@ -82,21 +61,6 @@ def test_get_compiler_duplicates(mutable_config, compiler_factory):
     assert len(duplicates) == 1
 
 
-@pytest.mark.parametrize(
-    "input_version,expected_version,expected_error",
-    [(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
-)
-def test_version_detection_is_empty(
-    make_args_for_version, input_version, expected_version, expected_error
-):
-    args = make_args_for_version(version=input_version)
-    result, error = spack.compilers.detect_version(args)
-    if not error:
-        assert result.id.version == expected_version
-
-    assert error == expected_error
-
-
 def test_compiler_flags_from_config_are_grouped():
     compiler_entry = {
         "spec": "intel@17.0.2",
@@ -906,51 +870,30 @@ def test_compiler_executable_verification_success(tmpdir):
 
 
 @pytest.mark.parametrize(
-    "detected_versions,expected_length",
+    "compilers_extra_attributes,expected_length",
     [
         # If we detect a C compiler we expect the result to be valid
-        (
-            [
-                spack.compilers.DetectVersionArgs(
-                    id=spack.compilers.CompilerID(
-                        os="ubuntu20.04", compiler_name="clang", version="12.0.0"
-                    ),
-                    variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
-                    language="cc",
-                    path="/usr/bin/clang-12",
-                ),
-                spack.compilers.DetectVersionArgs(
-                    id=spack.compilers.CompilerID(
-                        os="ubuntu20.04", compiler_name="clang", version="12.0.0"
-                    ),
-                    variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
-                    language="cxx",
-                    path="/usr/bin/clang++-12",
-                ),
-            ],
-            1,
-        ),
+        ({"c": "/usr/bin/clang-12", "cxx": "/usr/bin/clang-12"}, 1),
         # If we detect only a C++ compiler we expect the result to be discarded
-        (
-            [
-                spack.compilers.DetectVersionArgs(
-                    id=spack.compilers.CompilerID(
-                        os="ubuntu20.04", compiler_name="clang", version="12.0.0"
-                    ),
-                    variation=spack.compilers.NameVariation(prefix="", suffix="-12"),
-                    language="cxx",
-                    path="/usr/bin/clang++-12",
-                )
-            ],
-            0,
-        ),
+        ({"cxx": "/usr/bin/clang-12"}, 0),
     ],
 )
-def test_detection_requires_c_compiler(detected_versions, expected_length):
+def test_detection_requires_c_compiler(compilers_extra_attributes, expected_length):
     """Tests that compilers automatically added to the configuration have
     at least a C compiler.
     """
-    result = spack.compilers.make_compiler_list(detected_versions)
+    packages_yaml = {
+        "llvm": {
+            "externals": [
+                {
+                    "spec": "clang@12.0.0",
+                    "prefix": "/usr",
+                    "extra_attributes": {"compilers": compilers_extra_attributes},
+                }
+            ]
+        }
+    }
+    result = spack.compilers.CompilerConfigFactory.from_packages_yaml(packages_yaml)
     assert len(result) == expected_length
 
 
diff --git a/lib/spack/spack/test/compilers/detection.py b/lib/spack/spack/test/compilers/detection.py
deleted file mode 100644
index 4103b20985..0000000000
--- a/lib/spack/spack/test/compilers/detection.py
+++ /dev/null
@@ -1,471 +0,0 @@
-# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-"""Test detection of compiler version"""
-import pytest
-
-import spack.compilers.aocc
-import spack.compilers.arm
-import spack.compilers.cce
-import spack.compilers.clang
-import spack.compilers.fj
-import spack.compilers.gcc
-import spack.compilers.intel
-import spack.compilers.nag
-import spack.compilers.nvhpc
-import spack.compilers.oneapi
-import spack.compilers.pgi
-import spack.compilers.xl
-import spack.compilers.xl_r
-import spack.util.module_cmd
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        (
-            "Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n"
-            "Target: aarch64--linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir:\n"
-            "/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
-            "19.0",
-        ),
-        (
-            "Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n"
-            "Target: aarch64--linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir:\n"
-            "/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
-            "19.3.1",
-        ),
-    ],
-)
-def test_arm_version_detection(version_str, expected_version):
-    version = spack.compilers.arm.Arm.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        ("Cray C : Version 8.4.6  Mon Apr 15, 2019  12:13:39\n", "8.4.6"),
-        ("Cray C++ : Version 8.4.6  Mon Apr 15, 2019  12:13:45\n", "8.4.6"),
-        ("Cray clang Version 8.4.6  Mon Apr 15, 2019  12:13:45\n", "8.4.6"),
-        ("Cray Fortran : Version 8.4.6  Mon Apr 15, 2019  12:13:55\n", "8.4.6"),
-    ],
-)
-def test_cce_version_detection(version_str, expected_version):
-    version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.regression("10191")
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        # macOS clang
-        (
-            "Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
-            "Target: x86_64-apple-darwin18.7.0\n"
-            "Thread model: posix\n"
-            "InstalledDir: "
-            "/Applications/Xcode.app/Contents/Developer/Toolchains/"
-            "XcodeDefault.xctoolchain/usr/bin\n",
-            "11.0.0",
-        ),
-        (
-            "Apple LLVM version 7.0.2 (clang-700.1.81)\n"
-            "Target: x86_64-apple-darwin15.2.0\n"
-            "Thread model: posix\n",
-            "7.0.2",
-        ),
-    ],
-)
-def test_apple_clang_version_detection(version_str, expected_version):
-    cls = spack.compilers.class_for_compiler_name("apple-clang")
-    version = cls.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.regression("10191")
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        # LLVM Clang
-        (
-            "clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n"
-            "Target: x86_64-pc-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /usr/bin\n",
-            "6.0.1",
-        ),
-        (
-            "clang version 3.1 (trunk 149096)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n",
-            "3.1",
-        ),
-        (
-            "clang version 8.0.0-3~ubuntu18.04.1 (tags/RELEASE_800/final)\n"
-            "Target: x86_64-pc-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /usr/bin\n",
-            "8.0.0",
-        ),
-        (
-            "clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n"
-            "Target: x86_64-pc-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /usr/bin\n",
-            "9.0.1",
-        ),
-        (
-            "clang version 8.0.0-3 (tags/RELEASE_800/final)\n"
-            "Target: aarch64-unknown-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /usr/bin\n",
-            "8.0.0",
-        ),
-        (
-            "clang version 11.0.0\n"
-            "Target: aarch64-unknown-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /usr/bin\n",
-            "11.0.0",
-        ),
-    ],
-)
-def test_clang_version_detection(version_str, expected_version):
-    version = spack.compilers.clang.Clang.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        # C compiler
-        (
-            "fcc (FCC) 4.0.0a 20190314\n"
-            "simulating gcc version 6.1\n"
-            "Copyright FUJITSU LIMITED 2019",
-            "4.0.0a",
-        ),
-        # C++ compiler
-        (
-            "FCC (FCC) 4.0.0a 20190314\n"
-            "simulating gcc version 6.1\n"
-            "Copyright FUJITSU LIMITED 2019",
-            "4.0.0a",
-        ),
-        # Fortran compiler
-        ("frt (FRT) 4.0.0a 20190314\n" "Copyright FUJITSU LIMITED 2019", "4.0.0a"),
-    ],
-)
-def test_fj_version_detection(version_str, expected_version):
-    version = spack.compilers.fj.Fj.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        # Output of -dumpversion changed to return only major from GCC 7
-        ("4.4.7\n", "4.4.7"),
-        ("7\n", "7"),
-    ],
-)
-def test_gcc_version_detection(version_str, expected_version):
-    version = spack.compilers.gcc.Gcc.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        (
-            "icpc (ICC) 12.1.5 20120612\n"
-            "Copyright (C) 1985-2012 Intel Corporation.  All rights reserved.\n",
-            "12.1.5",
-        ),
-        (
-            "ifort (IFORT) 12.1.5 20120612\n"
-            "Copyright (C) 1985-2012 Intel Corporation.  All rights reserved.\n",
-            "12.1.5",
-        ),
-    ],
-)
-def test_intel_version_detection(version_str, expected_version):
-    version = spack.compilers.intel.Intel.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        (  # ICX/ICPX
-            "Intel(R) oneAPI DPC++ Compiler 2021.1.2 (2020.10.0.1214)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /made/up/path",
-            "2021.1.2",
-        ),
-        (  # ICX/ICPX
-            "Intel(R) oneAPI DPC++ Compiler 2021.2.0 (2021.2.0.20210317)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /made/up/path",
-            "2021.2.0",
-        ),
-        (  # ICX/ICPX
-            "Intel(R) oneAPI DPC++/C++ Compiler 2021.3.0 (2021.3.0.20210619)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /made/up/path",
-            "2021.3.0",
-        ),
-        (  # ICX/ICPX
-            "Intel(R) oneAPI DPC++/C++ Compiler 2021.4.0 (2021.4.0.20210924)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n"
-            "InstalledDir: /made/up/path",
-            "2021.4.0",
-        ),
-        (  # IFX
-            "ifx (IFORT) 2021.1.2 Beta 20201214\n"
-            "Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
-            "2021.1.2",
-        ),
-        (  # IFX
-            "ifx (IFORT) 2021.2.0 Beta 20210317\n"
-            "Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
-            "2021.2.0",
-        ),
-        (  # IFX
-            "ifx (IFORT) 2021.3.0 Beta 20210619\n"
-            "Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
-            "2021.3.0",
-        ),
-        (  # IFX
-            "ifx (IFORT) 2021.4.0 Beta 20210924\n"
-            "Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
-            "2021.4.0",
-        ),
-        (  # IFX
-            "ifx (IFORT) 2022.0.0 20211123\n"
-            "Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
-            "2022.0.0",
-        ),
-        (  # IFX
-            "ifx (IFX) 2023.1.0 20230320\n"
-            "Copyright (C) 1985-2023 Intel Corporation. All rights reserved.",
-            "2023.1.0",
-        ),
-    ],
-)
-def test_oneapi_version_detection(version_str, expected_version):
-    version = spack.compilers.oneapi.Oneapi.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        (
-            "NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n"
-            "Product NPL6A60NA for x86-64 Linux\n",
-            "6.0.1037",
-        )
-    ],
-)
-def test_nag_version_detection(version_str, expected_version):
-    version = spack.compilers.nag.Nag.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        # C compiler on x86-64
-        (
-            "nvc 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # C++ compiler on x86-64
-        (
-            "nvc++ 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # Fortran compiler on x86-64
-        (
-            "nvfortran 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # C compiler on Power
-        (
-            "nvc 20.9-0 linuxpower target on Linuxpower\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # C++ compiler on Power
-        (
-            "nvc++ 20.9-0 linuxpower target on Linuxpower\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # Fortran compiler on Power
-        (
-            "nvfortran 20.9-0 linuxpower target on Linuxpower\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # C compiler on Arm
-        (
-            "nvc 20.9-0 linuxarm64 target on aarch64 Linux\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # C++ compiler on Arm
-        (
-            "nvc++ 20.9-0 linuxarm64 target on aarch64 Linux\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-        # Fortran compiler on Arm
-        (
-            "nvfortran 20.9-0 linuxarm64 target on aarch64 Linux\n"
-            "NVIDIA Compilers and Tools\n"
-            "Copyright (c) 2020, NVIDIA CORPORATION.  All rights reserved.",
-            "20.9",
-        ),
-    ],
-)
-def test_nvhpc_version_detection(version_str, expected_version):
-    version = spack.compilers.nvhpc.Nvhpc.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        # Output on x86-64
-        (
-            "pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge\n"
-            "The Portland Group - PGI Compilers and Tools\n"
-            "Copyright (c) 2015, NVIDIA CORPORATION.  All rights reserved.\n",
-            "15.10",
-        ),
-        # Output on PowerPC
-        (
-            "pgcc 17.4-0 linuxpower target on Linuxpower\n"
-            "PGI Compilers and Tools\n"
-            "Copyright (c) 2017, NVIDIA CORPORATION.  All rights reserved.\n",
-            "17.4",
-        ),
-        # Output when LLVM-enabled
-        (
-            "pgcc-llvm 18.4-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
-            "PGI Compilers and Tools\n"
-            "Copyright (c) 2018, NVIDIA CORPORATION.  All rights reserved.\n",
-            "18.4",
-        ),
-    ],
-)
-def test_pgi_version_detection(version_str, expected_version):
-    version = spack.compilers.pgi.Pgi.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        ("IBM XL C/C++ for Linux, V11.1 (5724-X14)\n" "Version: 11.01.0000.0000\n", "11.1"),
-        ("IBM XL Fortran for Linux, V13.1 (5724-X16)\n" "Version: 13.01.0000.0000\n", "13.1"),
-        ("IBM XL C/C++ for AIX, V11.1 (5724-X13)\n" "Version: 11.01.0000.0009\n", "11.1"),
-        (
-            "IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0\n" "Version: 09.00.0000.0017\n",
-            "9.0",
-        ),
-    ],
-)
-def test_xl_version_detection(version_str, expected_version):
-    version = spack.compilers.xl.Xl.extract_version_from_output(version_str)
-    assert version == expected_version
-
-    version = spack.compilers.xl_r.XlR.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.parametrize(
-    "version_str,expected_version",
-    [
-        # This applies to C,C++ and FORTRAN compiler
-        (
-            "AMD clang version 12.0.0 (CLANG: AOCC_3_1_0-Build#126 2021_06_07)"
-            "(based on LLVM Mirror.Version.12.0.0)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n",
-            "3.1.0",
-        ),
-        (
-            "AMD clang version 12.0.0 (CLANG: AOCC_3.0.0-Build#78 2020_12_10)"
-            "(based on LLVM Mirror.Version.12.0.0)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n",
-            "3.0.0",
-        ),
-        (
-            "AMD clang version 11.0.0 (CLANG: AOCC_2.3.0-Build#85 2020_11_10)"
-            "(based on LLVM Mirror.Version.11.0.0)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n",
-            "2.3.0",
-        ),
-        (
-            "AMD clang version 10.0.0 (CLANG: AOCC_2.2.0-Build#93 2020_06_25)"
-            "(based on LLVM Mirror.Version.10.0.0)\n"
-            "Target: x86_64-unknown-linux-gnu\n"
-            "Thread model: posix\n",
-            "2.2.0",
-        ),
-    ],
-)
-def test_aocc_version_detection(version_str, expected_version):
-    version = spack.compilers.aocc.Aocc.extract_version_from_output(version_str)
-    assert version == expected_version
-
-
-@pytest.mark.regression("33901")
-@pytest.mark.parametrize(
-    "version_str",
-    [
-        (
-            "Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
-            "Target: x86_64-apple-darwin18.7.0\n"
-            "Thread model: posix\n"
-            "InstalledDir: "
-            "/Applications/Xcode.app/Contents/Developer/Toolchains/"
-            "XcodeDefault.xctoolchain/usr/bin\n"
-        ),
-        (
-            "Apple LLVM version 7.0.2 (clang-700.1.81)\n"
-            "Target: x86_64-apple-darwin15.2.0\n"
-            "Thread model: posix\n"
-        ),
-    ],
-)
-def test_apple_clang_not_detected_as_cce(version_str):
-    version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
-    assert version == "unknown"
diff --git a/lib/spack/spack/test/cray_manifest.py b/lib/spack/spack/test/cray_manifest.py
index 3a00180da8..532261b3ee 100644
--- a/lib/spack/spack/test/cray_manifest.py
+++ b/lib/spack/spack/test/cray_manifest.py
@@ -16,6 +16,7 @@ import pytest
 
 import spack
 import spack.cmd
+import spack.cmd.external
 import spack.compilers
 import spack.config
 import spack.cray_manifest as cray_manifest
diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py
index 1af1fb1d90..16d50221ea 100644
--- a/lib/spack/spack/test/installer.py
+++ b/lib/spack/spack/test/installer.py
@@ -493,11 +493,13 @@ def test_update_tasks_for_compiler_packages_as_compiler(mock_packages, config, m
 def test_bootstrapping_compilers_with_different_names_from_spec(
     install_mockery, mutable_config, mock_fetch, archspec_host_is_spack_test_host
 ):
+    """Tests that, when we bootstrap '%oneapi' we can translate it to the
+    'intel-oneapi-compilers' package.
+    """
     with spack.config.override("config:install_missing_compilers", True):
         with spack.concretize.disable_compiler_existence_check():
             spec = spack.spec.Spec("trivial-install-test-package%oneapi@=22.2.0").concretized()
             spec.package.do_install()
-
             assert (
                 spack.spec.CompilerSpec("oneapi@=22.2.0") in spack.compilers.all_compiler_specs()
             )
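The docstring above describes translating a '%oneapi' compiler request into the 'intel-oneapi-compilers' package. A hypothetical sketch of that kind of name translation follows; the dictionary below is illustrative only and is not Spack's internal mapping table.

# Hypothetical illustration of the compiler-name -> package-name translation
# described in the docstring above; this dict is NOT Spack's internal table.
_COMPILER_TO_PACKAGE = {"oneapi": "intel-oneapi-compilers", "clang": "llvm"}


def bootstrap_package_for(compiler_name: str) -> str:
    # Fall back to the compiler name itself when no translation is known.
    return _COMPILER_TO_PACKAGE.get(compiler_name, compiler_name)


assert bootstrap_package_for("oneapi") == "intel-oneapi-compilers"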
@@ -749,29 +751,6 @@ def test_install_task_use_cache(install_mockery, monkeypatch):
     assert request.pkg_id in installer.installed
 
 
-def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
-    config_msg = "mock add_compilers_to_config"
-
-    def _add(_compilers):
-        tty.msg(config_msg)
-
-    installer = create_installer(["pkg-a"], {})
-    task = create_build_task(installer.build_requests[0].pkg)
-    task.compiler = True
-
-    # Preclude any meaningful side-effects
-    monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _true)
-    monkeypatch.setattr(inst.PackageInstaller, "_setup_install_dir", _noop)
-    monkeypatch.setattr(spack.build_environment, "start_build_process", _noop)
-    monkeypatch.setattr(spack.database.Database, "add", _noop)
-    monkeypatch.setattr(spack.compilers, "add_compilers_to_config", _add)
-
-    installer._install_task(task, None)
-
-    out = capfd.readouterr()[0]
-    assert config_msg in out
-
-
 def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
     """Test _release_lock for supposed write lock with exception."""
     installer = create_installer(["trivial-install-test-package"], {})
diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash
index eba6b5a96d..079f021706 100644
--- a/share/spack/spack-completion.bash
+++ b/share/spack/spack-completion.bash
@@ -752,7 +752,7 @@ _spack_compiler() {
 _spack_compiler_find() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope"
+        SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope -j --jobs"
     else
         SPACK_COMPREPLY=""
     fi
@@ -761,7 +761,7 @@ _spack_compiler_find() {
 _spack_compiler_add() {
     if $list_options
     then
-        SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope"
+        SPACK_COMPREPLY="-h --help --mixed-toolchain --no-mixed-toolchain --scope -j --jobs"
     else
         SPACK_COMPREPLY=""
     fi
diff --git a/share/spack/spack-completion.fish b/share/spack/spack-completion.fish
index 4a905d2ddd..6d891757c8 100644
--- a/share/spack/spack-completion.fish
+++ b/share/spack/spack-completion.fish
@@ -1059,7 +1059,7 @@ complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -f -a he
 complete -c spack -n '__fish_spack_using_command compiler' -s h -l help -d 'show this help message and exit'
 
 # spack compiler find
-set -g __fish_spack_optspecs_spack_compiler_find h/help mixed-toolchain no-mixed-toolchain scope=
+set -g __fish_spack_optspecs_spack_compiler_find h/help mixed-toolchain no-mixed-toolchain scope= j/jobs=
 
 complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command compiler find' -s h -l help -d 'show this help message and exit'
@@ -1069,9 +1069,11 @@ complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-tool
 complete -c spack -n '__fish_spack_using_command compiler find' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)'
 complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -f -a '_builtin defaults system site user command_line'
 complete -c spack -n '__fish_spack_using_command compiler find' -l scope -r -d 'configuration scope to modify'
+complete -c spack -n '__fish_spack_using_command compiler find' -s j -l jobs -r -f -a jobs
+complete -c spack -n '__fish_spack_using_command compiler find' -s j -l jobs -r -d 'explicitly set number of parallel jobs'
 
 # spack compiler add
-set -g __fish_spack_optspecs_spack_compiler_add h/help mixed-toolchain no-mixed-toolchain scope=
+set -g __fish_spack_optspecs_spack_compiler_add h/help mixed-toolchain no-mixed-toolchain scope= j/jobs=
 
 complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -f -a help
 complete -c spack -n '__fish_spack_using_command compiler add' -s h -l help -d 'show this help message and exit'
@@ -1081,6 +1083,8 @@ complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolc
 complete -c spack -n '__fish_spack_using_command compiler add' -l no-mixed-toolchain -d 'Do not allow mixed toolchains (for example: clang, clang++, gfortran)'
 complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -f -a '_builtin defaults system site user command_line'
 complete -c spack -n '__fish_spack_using_command compiler add' -l scope -r -d 'configuration scope to modify'
+complete -c spack -n '__fish_spack_using_command compiler add' -s j -l jobs -r -f -a jobs
+complete -c spack -n '__fish_spack_using_command compiler add' -s j -l jobs -r -d 'explicitly set number of parallel jobs'
 
 # spack compiler remove
 set -g __fish_spack_optspecs_spack_compiler_remove h/help a/all scope=
diff --git a/var/spack/repos/builtin.mock/packages/gcc/package.py b/var/spack/repos/builtin.mock/packages/gcc/package.py
index 05518419dd..fa2b0309ff 100644
--- a/var/spack/repos/builtin.mock/packages/gcc/package.py
+++ b/var/spack/repos/builtin.mock/packages/gcc/package.py
@@ -16,11 +16,23 @@ class Gcc(CompilerPackage, Package):
     version("2.0", md5="abcdef0123456789abcdef0123456789")
     version("3.0", md5="def0123456789abcdef0123456789abc")
 
+    variant(
+        "languages",
+        default="c,c++,fortran",
+        values=("c", "c++", "fortran"),
+        multi=True,
+        description="Compilers and runtime libraries to build",
+    )
+
     depends_on("conflict", when="@3.0")
 
     c_names = ["gcc"]
     cxx_names = ["g++"]
     fortran_names = ["gfortran"]
+    compiler_prefixes = [r"\w+-\w+-\w+-"]
+    compiler_suffixes = [r"-mp-\d+(?:\.\d+)?", r"-\d+(?:\.\d+)?", r"\d\d"]
+    compiler_version_regex = r"(?<!clang version)\s?([0-9.]+)"
+    compiler_version_argument = ("-dumpfullversion", "-dumpversion")
 
     def install(self, spec, prefix):
         # Create the minimal compiler that will fool `spack compiler find`
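A short sketch of how the detection attributes added to the mock gcc package above could behave: the compiler_version_regex applied to -dumpversion/-dumpfullversion style output. The sample output strings are illustrative, not taken from a real run.

# Sketch: apply the mock gcc package's compiler_version_regex to sample
# -dumpversion/-dumpfullversion style output. Sample strings are made up.
import re

compiler_version_regex = r"(?<!clang version)\s?([0-9.]+)"

for output in ("10.2.1\n", "7\n"):
    match = re.search(compiler_version_regex, output)
    print(match.group(1) if match else None)  # "10.2.1", then "7"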
diff --git a/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py b/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py
index 78fdbe056c..892a09f7dc 100644
--- a/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py
+++ b/var/spack/repos/builtin.mock/packages/intel-oneapi-compilers/package.py
@@ -18,9 +18,14 @@ class IntelOneapiCompilers(Package, CompilerPackage):
     version("2.0", md5="abcdef0123456789abcdef0123456789")
     version("3.0", md5="def0123456789abcdef0123456789abc")
 
+    compiler_languages = ["c", "cxx", "fortran"]
     c_names = ["icx"]
     cxx_names = ["icpx"]
     fortran_names = ["ifx"]
+    compiler_version_argument = "--version"
+    compiler_version_regex = (
+        r"(?:(?:oneAPI DPC\+\+(?:\/C\+\+)? Compiler)|(?:\(IFORT\))|(?:\(IFX\))) (\S+)"
+    )
 
     @property
     def compiler_search_prefix(self):
@@ -32,9 +37,9 @@ class IntelOneapiCompilers(Package, CompilerPackage):
         comp = self.compiler_search_prefix.icx
         if sys.platform == "win32":
             comp = comp + ".bat"
-            comp_string = "@echo off\necho oneAPI DPC++ Compiler %s" % str(spec.version)
+            comp_string = f"@echo off\necho oneAPI DPC++ Compiler {str(spec.version)}"
         else:
-            comp_string = '#!/bin/bash\necho "oneAPI DPC++ Compiler %s"' % str(spec.version)
+            comp_string = f'#!/bin/bash\necho "oneAPI DPC++ Compiler {str(spec.version)}"'
         with open(comp, "w") as f:
             f.write(comp_string)
         set_executable(comp)
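Similarly, a sketch of the oneAPI compiler_version_regex defined above, matched against the kind of banner the fake icx script echoes and an IFX-style version line. The sample strings are illustrative.

# Sketch: the compiler_version_regex above against an icx-style banner (the
# string the mock install script echoes) and an IFX-style banner.
import re

compiler_version_regex = (
    r"(?:(?:oneAPI DPC\+\+(?:\/C\+\+)? Compiler)|(?:\(IFORT\))|(?:\(IFX\))) (\S+)"
)

for text in ("oneAPI DPC++ Compiler 3.0", "ifx (IFX) 2023.1.0 20230320"):
    match = re.search(compiler_version_regex, text)
    print(match.group(1) if match else None)  # "3.0", then "2023.1.0"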
-- 
cgit v1.2.3-70-g09d2