summary refs log tree commit diff
path: root/lib
diff options
context:
space:
mode:
authorHarmen Stoppels <me@harmenstoppels.nl>2024-09-17 17:06:00 +0200
committerGitHub <noreply@github.com>2024-09-17 17:06:00 +0200
commit3ded2fc9c53cc5dc684e49da25f4517bb687f3f5 (patch)
tree25cd9d66d99a7c71c78701aeeae5e3057bdd6bd3 /lib
parent623c5a4d2491a0d6781154abc6d8886e771f7f4e (diff)
downloadspack-3ded2fc9c53cc5dc684e49da25f4517bb687f3f5.tar.gz
spack-3ded2fc9c53cc5dc684e49da25f4517bb687f3f5.tar.bz2
spack-3ded2fc9c53cc5dc684e49da25f4517bb687f3f5.tar.xz
spack-3ded2fc9c53cc5dc684e49da25f4517bb687f3f5.zip
untangle spack.config / spack.util.cpus & spack.spec (#46427)
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/spack/build_environment.py3
-rw-r--r--lib/spack/spack/build_systems/racket.py2
-rw-r--r--lib/spack/spack/config.py46
-rw-r--r--lib/spack/spack/environment/environment.py2
-rw-r--r--lib/spack/spack/package.py2
-rw-r--r--lib/spack/spack/solver/asp.py31
-rw-r--r--lib/spack/spack/test/build_environment.py13
-rw-r--r--lib/spack/spack/util/cpus.py36
-rw-r--r--lib/spack/spack/util/parallel.py2
9 files changed, 65 insertions, 72 deletions
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 296fdd4aff..bf8ac6c1ea 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -77,7 +77,6 @@ from spack import traverse
from spack.context import Context
from spack.error import InstallError, NoHeadersError, NoLibrariesError
from spack.install_test import spack_install_test_log
-from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import (
SYSTEM_DIR_CASE_ENTRY,
EnvironmentModifications,
@@ -559,7 +558,7 @@ def set_package_py_globals(pkg, context: Context = Context.BUILD):
module.std_meson_args = spack.build_systems.meson.MesonBuilder.std_args(pkg)
module.std_pip_args = spack.build_systems.python.PythonPipBuilder.std_args(pkg)
- jobs = determine_number_of_jobs(parallel=pkg.parallel)
+ jobs = spack.config.determine_number_of_jobs(parallel=pkg.parallel)
module.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
diff --git a/lib/spack/spack/build_systems/racket.py b/lib/spack/spack/build_systems/racket.py
index facac62032..7e48f8b370 100644
--- a/lib/spack/spack/build_systems/racket.py
+++ b/lib/spack/spack/build_systems/racket.py
@@ -11,9 +11,9 @@ import llnl.util.tty as tty
import spack.builder
from spack.build_environment import SPACK_NO_PARALLEL_MAKE
+from spack.config import determine_number_of_jobs
from spack.directives import build_system, extends, maintainers
from spack.package_base import PackageBase
-from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import env_flag
from spack.util.executable import Executable, ProcessError
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 34dd133d97..94f8e4ff04 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -58,12 +58,10 @@ import spack.schema.packages
import spack.schema.repos
import spack.schema.upstreams
import spack.schema.view
-import spack.spec
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
import spack.util.web as web_util
-from spack.error import SpecSyntaxError
from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section
@@ -1710,25 +1708,37 @@ def get_mark_from_yaml_data(obj):
return mark
-def parse_spec_from_yaml_string(string: str) -> "spack.spec.Spec":
- """Parse a spec from YAML and add file/line info to errors, if it's available.
-
- Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
- add file/line information for debugging using file/line annotations from the string.
+def determine_number_of_jobs(
+ *,
+ parallel: bool = False,
+ max_cpus: int = cpus_available(),
+ config: Optional[Configuration] = None,
+) -> int:
+ """
+ Packages that require sequential builds need 1 job. Otherwise we use the
+ number of jobs set on the command line. If not set, then we use the config
+ defaults (which is usually set through the builtin config scope), but we
+ cap to the number of CPUs available to avoid oversubscription.
+
+ Parameters:
+ parallel: true when package supports parallel builds
+ max_cpus: maximum number of CPUs to use (defaults to cpus_available())
+ config: configuration object (defaults to global config)
+ """
+ if not parallel:
+ return 1
- Arguments:
- string: a string representing a ``Spec`` from config YAML.
+ cfg = config or CONFIG
- """
+ # Command line overrides all
try:
- spec = spack.spec.Spec(string)
- return spec
- except SpecSyntaxError as e:
- mark = get_mark_from_yaml_data(string)
- if mark:
- msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
- raise SpecSyntaxError(msg) from e
- raise e
+ command_line = cfg.get("config:build_jobs", default=None, scope="command_line")
+ if command_line is not None:
+ return command_line
+ except ValueError:
+ pass
+
+ return min(max_cpus, cfg.get("config:build_jobs", 16))
class ConfigSectionError(spack.error.ConfigError):
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index 1d2ea86c9d..2a22a76abe 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -1648,7 +1648,7 @@ class Environment:
# Solve the environment in parallel on Linux
start = time.time()
- num_procs = min(len(args), spack.util.cpus.determine_number_of_jobs(parallel=True))
+ num_procs = min(len(args), spack.config.determine_number_of_jobs(parallel=True))
# TODO: support parallel concretization on macOS and Windows
msg = "Starting concretization"
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index d2a00f9941..c8de3c2158 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -75,6 +75,7 @@ from spack.build_systems.sourceware import SourcewarePackage
from spack.build_systems.waf import WafPackage
from spack.build_systems.xorg import XorgPackage
from spack.builder import run_after, run_before
+from spack.config import determine_number_of_jobs
from spack.deptypes import ALL_TYPES as all_deptypes
from spack.directives import *
from spack.install_test import (
@@ -99,7 +100,6 @@ from spack.package_base import (
on_package_attributes,
)
from spack.spec import InvalidSpecDetected, Spec
-from spack.util.cpus import determine_number_of_jobs
from spack.util.executable import *
from spack.util.filesystem import file_command, fix_darwin_install_name, mime_type
from spack.variant import (
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index 3b98117c1c..62557d311a 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -31,7 +31,6 @@ import spack.bootstrap.core
import spack.compilers
import spack.concretize
import spack.config
-import spack.config as sc
import spack.deptypes as dt
import spack.environment as ev
import spack.error
@@ -49,6 +48,8 @@ import spack.variant
import spack.version as vn
import spack.version.git_ref_lookup
from spack import traverse
+from spack.config import get_mark_from_yaml_data
+from spack.error import SpecSyntaxError
from .core import (
AspFunction,
@@ -2923,6 +2924,26 @@ class ProblemInstanceBuilder:
return "".join(self.asp_problem)
+def parse_spec_from_yaml_string(string: str) -> "spack.spec.Spec":
+ """Parse a spec from YAML and add file/line info to errors, if it's available.
+
+ Parse a ``Spec`` from the supplied string, but also intercept any syntax errors and
+ add file/line information for debugging using file/line annotations from the string.
+
+ Arguments:
+ string: a string representing a ``Spec`` from config YAML.
+
+ """
+ try:
+ return spack.spec.Spec(string)
+ except SpecSyntaxError as e:
+ mark = get_mark_from_yaml_data(string)
+ if mark:
+ msg = f"{mark.name}:{mark.line + 1}: {str(e)}"
+ raise SpecSyntaxError(msg) from e
+ raise e
+
+
class RequirementParser:
"""Parses requirements from package.py files and configuration, and returns rules."""
@@ -3008,11 +3029,11 @@ class RequirementParser:
def _parse_prefer_conflict_item(self, item):
# The item is either a string or an object with at least a "spec" attribute
if isinstance(item, str):
- spec = sc.parse_spec_from_yaml_string(item)
+ spec = parse_spec_from_yaml_string(item)
condition = spack.spec.Spec()
message = None
else:
- spec = sc.parse_spec_from_yaml_string(item["spec"])
+ spec = parse_spec_from_yaml_string(item["spec"])
condition = spack.spec.Spec(item.get("when"))
message = item.get("message")
return spec, condition, message
@@ -3053,10 +3074,10 @@ class RequirementParser:
# validate specs from YAML first, and fail with line numbers if parsing fails.
constraints = [
- sc.parse_spec_from_yaml_string(constraint) for constraint in constraints
+ parse_spec_from_yaml_string(constraint) for constraint in constraints
]
when_str = requirement.get("when")
- when = sc.parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
+ when = parse_spec_from_yaml_string(when_str) if when_str else spack.spec.Spec()
constraints = [
x
diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py
index 38d34fc1e8..8d7a09ab7e 100644
--- a/lib/spack/spack/test/build_environment.py
+++ b/lib/spack/spack/test/build_environment.py
@@ -22,7 +22,6 @@ import spack.util.spack_yaml as syaml
from spack.build_environment import UseMode, _static_to_shared_library, dso_suffix
from spack.context import Context
from spack.paths import build_env_path
-from spack.util.cpus import determine_number_of_jobs
from spack.util.environment import EnvironmentModifications
from spack.util.executable import Executable
@@ -483,7 +482,7 @@ def test_parallel_false_is_not_propagating(default_mock_concretization):
assert s["pkg-a"].package.module.make_jobs == 1
spack.build_environment.set_package_py_globals(s["pkg-b"].package, context=Context.BUILD)
- assert s["pkg-b"].package.module.make_jobs == spack.build_environment.determine_number_of_jobs(
+ assert s["pkg-b"].package.module.make_jobs == spack.config.determine_number_of_jobs(
parallel=s["pkg-b"].package.parallel
)
@@ -516,7 +515,7 @@ def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mo
def test_build_jobs_sequential_is_sequential():
assert (
- determine_number_of_jobs(
+ spack.config.determine_number_of_jobs(
parallel=False,
max_cpus=8,
config=spack.config.Configuration(
@@ -530,7 +529,7 @@ def test_build_jobs_sequential_is_sequential():
def test_build_jobs_command_line_overrides():
assert (
- determine_number_of_jobs(
+ spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=1,
config=spack.config.Configuration(
@@ -541,7 +540,7 @@ def test_build_jobs_command_line_overrides():
== 10
)
assert (
- determine_number_of_jobs(
+ spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=100,
config=spack.config.Configuration(
@@ -555,7 +554,7 @@ def test_build_jobs_command_line_overrides():
def test_build_jobs_defaults():
assert (
- determine_number_of_jobs(
+ spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(
@@ -565,7 +564,7 @@ def test_build_jobs_defaults():
== 1
)
assert (
- determine_number_of_jobs(
+ spack.config.determine_number_of_jobs(
parallel=True,
max_cpus=10,
config=spack.config.Configuration(
diff --git a/lib/spack/spack/util/cpus.py b/lib/spack/spack/util/cpus.py
index 9c98656830..5cf09c3e82 100644
--- a/lib/spack/spack/util/cpus.py
+++ b/lib/spack/spack/util/cpus.py
@@ -5,9 +5,6 @@
import multiprocessing
import os
-from typing import Optional
-
-import spack.config
def cpus_available():
@@ -21,36 +18,3 @@ def cpus_available():
return len(os.sched_getaffinity(0)) # novermin
except Exception:
return multiprocessing.cpu_count()
-
-
-def determine_number_of_jobs(
- *,
- parallel: bool = False,
- max_cpus: int = cpus_available(),
- config: Optional["spack.config.Configuration"] = None,
-) -> int:
- """
- Packages that require sequential builds need 1 job. Otherwise we use the
- number of jobs set on the command line. If not set, then we use the config
- defaults (which is usually set through the builtin config scope), but we
- cap to the number of CPUs available to avoid oversubscription.
-
- Parameters:
- parallel: true when package supports parallel builds
- max_cpus: maximum number of CPUs to use (defaults to cpus_available())
- config: configuration object (defaults to global config)
- """
- if not parallel:
- return 1
-
- cfg = config or spack.config.CONFIG
-
- # Command line overrides all
- try:
- command_line = cfg.get("config:build_jobs", default=None, scope="command_line")
- if command_line is not None:
- return command_line
- except ValueError:
- pass
-
- return min(max_cpus, cfg.get("config:build_jobs", 16))
diff --git a/lib/spack/spack/util/parallel.py b/lib/spack/spack/util/parallel.py
index 28c55b7d1e..9bbdf5dd7a 100644
--- a/lib/spack/spack/util/parallel.py
+++ b/lib/spack/spack/util/parallel.py
@@ -9,7 +9,7 @@ import sys
import traceback
from typing import Optional
-from spack.util.cpus import determine_number_of_jobs
+from spack.config import determine_number_of_jobs
class ErrorFromWorker: