Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/docs/conf.py | 237
-rw-r--r--  lib/spack/llnl/util/argparsewriter.py | 86
-rw-r--r--  lib/spack/llnl/util/compat.py | 28
-rw-r--r--  lib/spack/llnl/util/filesystem.py | 444
-rw-r--r--  lib/spack/llnl/util/lang.py | 198
-rw-r--r--  lib/spack/llnl/util/link_tree.py | 136
-rw-r--r--  lib/spack/llnl/util/lock.py | 164
-rw-r--r--  lib/spack/llnl/util/multiproc.py | 4
-rw-r--r--  lib/spack/llnl/util/symlink.py | 16
-rw-r--r--  lib/spack/llnl/util/tty/__init__.py | 126
-rw-r--r--  lib/spack/llnl/util/tty/colify.py | 74
-rw-r--r--  lib/spack/llnl/util/tty/color.py | 90
-rw-r--r--  lib/spack/llnl/util/tty/log.py | 145
-rw-r--r--  lib/spack/llnl/util/tty/pty.py | 54
-rw-r--r--  lib/spack/spack/__init__.py | 6
-rw-r--r--  lib/spack/spack/abi.py | 45
-rw-r--r--  lib/spack/spack/audit.py | 204
-rw-r--r--  lib/spack/spack/binary_distribution.py | 941
-rw-r--r--  lib/spack/spack/bootstrap.py | 404
-rw-r--r--  lib/spack/spack/build_environment.py | 575
-rw-r--r--  lib/spack/spack/build_systems/aspell_dict.py | 29
-rw-r--r--  lib/spack/spack/build_systems/autotools.py | 262
-rw-r--r--  lib/spack/spack/build_systems/cached_cmake.py | 119
-rw-r--r--  lib/spack/spack/build_systems/cmake.py | 158
-rw-r--r--  lib/spack/spack/build_systems/cuda.py | 226
-rw-r--r--  lib/spack/spack/build_systems/gnu.py | 10
-rw-r--r--  lib/spack/spack/build_systems/intel.py | 837
-rw-r--r--  lib/spack/spack/build_systems/lua.py | 50
-rw-r--r--  lib/spack/spack/build_systems/makefile.py | 29
-rw-r--r--  lib/spack/spack/build_systems/maven.py | 19
-rw-r--r--  lib/spack/spack/build_systems/meson.py | 83
-rw-r--r--  lib/spack/spack/build_systems/octave.py | 28
-rw-r--r--  lib/spack/spack/build_systems/oneapi.py | 65
-rw-r--r--  lib/spack/spack/build_systems/perl.py | 48
-rw-r--r--  lib/spack/spack/build_systems/python.py | 135
-rw-r--r--  lib/spack/spack/build_systems/qmake.py | 20
-rw-r--r--  lib/spack/spack/build_systems/r.py | 44
-rw-r--r--  lib/spack/spack/build_systems/racket.py | 40
-rw-r--r--  lib/spack/spack/build_systems/rocm.py | 46
-rw-r--r--  lib/spack/spack/build_systems/ruby.py | 24
-rw-r--r--  lib/spack/spack/build_systems/scons.py | 15
-rw-r--r--  lib/spack/spack/build_systems/sip.py | 93
-rw-r--r--  lib/spack/spack/build_systems/sourceforge.py | 21
-rw-r--r--  lib/spack/spack/build_systems/sourceware.py | 15
-rw-r--r--  lib/spack/spack/build_systems/waf.py | 27
-rw-r--r--  lib/spack/spack/build_systems/xorg.py | 19
-rw-r--r--  lib/spack/spack/caches.py | 8
-rw-r--r--  lib/spack/spack/ci.py | 1299
-rw-r--r--  lib/spack/spack/ci_needs_workaround.py | 25
-rw-r--r--  lib/spack/spack/ci_optimization.py | 148
-rw-r--r--  lib/spack/spack/cmd/__init__.py | 177
-rw-r--r--  lib/spack/spack/cmd/activate.py | 15
-rw-r--r--  lib/spack/spack/cmd/add.py | 21
-rw-r--r--  lib/spack/spack/cmd/arch.py | 52
-rw-r--r--  lib/spack/spack/cmd/audit.py | 48
-rw-r--r--  lib/spack/spack/cmd/blame.py | 116
-rw-r--r--  lib/spack/spack/cmd/bootstrap.py | 333
-rw-r--r--  lib/spack/spack/cmd/build_env.py | 7
-rw-r--r--  lib/spack/spack/cmd/buildcache.py | 593
-rw-r--r--  lib/spack/spack/cmd/cd.py | 7
-rw-r--r--  lib/spack/spack/cmd/checksum.py | 57
-rw-r--r--  lib/spack/spack/cmd/ci.py | 476
-rw-r--r--  lib/spack/spack/cmd/clean.py | 88
-rw-r--r--  lib/spack/spack/cmd/clone.py | 74
-rw-r--r--  lib/spack/spack/cmd/commands.py | 145
-rw-r--r--  lib/spack/spack/cmd/common/__init__.py | 9
-rw-r--r--  lib/spack/spack/cmd/common/arguments.py | 275
-rw-r--r--  lib/spack/spack/cmd/common/env_utility.py | 34
-rw-r--r--  lib/spack/spack/cmd/compiler.py | 100
-rw-r--r--  lib/spack/spack/cmd/compilers.py | 7
-rw-r--r--  lib/spack/spack/cmd/concretize.py | 24
-rw-r--r--  lib/spack/spack/cmd/config.py | 285
-rw-r--r--  lib/spack/spack/cmd/containerize.py | 25
-rw-r--r--  lib/spack/spack/cmd/create.py | 356
-rw-r--r--  lib/spack/spack/cmd/deactivate.py | 46
-rw-r--r--  lib/spack/spack/cmd/debug.py | 61
-rw-r--r--  lib/spack/spack/cmd/dependencies.py | 39
-rw-r--r--  lib/spack/spack/cmd/dependents.py | 31
-rw-r--r--  lib/spack/spack/cmd/deprecate.py | 103
-rw-r--r--  lib/spack/spack/cmd/dev_build.py | 98
-rw-r--r--  lib/spack/spack/cmd/develop.py | 36
-rw-r--r--  lib/spack/spack/cmd/diff.py | 60
-rw-r--r--  lib/spack/spack/cmd/docs.py | 8
-rw-r--r--  lib/spack/spack/cmd/edit.py | 75
-rw-r--r--  lib/spack/spack/cmd/env.py | 425
-rw-r--r--  lib/spack/spack/cmd/extensions.py | 43
-rw-r--r--  lib/spack/spack/cmd/external.py | 141
-rw-r--r--  lib/spack/spack/cmd/fetch.py | 7
-rw-r--r--  lib/spack/spack/cmd/find.py | 199
-rw-r--r--  lib/spack/spack/cmd/gc.py | 5
-rw-r--r--  lib/spack/spack/cmd/gpg.py | 182
-rw-r--r--  lib/spack/spack/cmd/graph.py | 26
-rw-r--r--  lib/spack/spack/cmd/help.py | 27
-rw-r--r--  lib/spack/spack/cmd/info.py | 218
-rw-r--r--  lib/spack/spack/cmd/install.py | 338
-rw-r--r--  lib/spack/spack/cmd/license.py | 133
-rw-r--r--  lib/spack/spack/cmd/list.py | 200
-rw-r--r--  lib/spack/spack/cmd/load.py | 78
-rw-r--r--  lib/spack/spack/cmd/location.py | 78
-rw-r--r--  lib/spack/spack/cmd/log_parse.py | 63
-rw-r--r--  lib/spack/spack/cmd/maintainers.py | 41
-rw-r--r--  lib/spack/spack/cmd/make_installer.py | 74
-rw-r--r--  lib/spack/spack/cmd/mark.py | 52
-rw-r--r--  lib/spack/spack/cmd/mirror.py | 250
-rw-r--r--  lib/spack/spack/cmd/module.py | 2
-rw-r--r--  lib/spack/spack/cmd/modules/__init__.py | 234
-rw-r--r--  lib/spack/spack/cmd/modules/lmod.py | 30
-rw-r--r--  lib/spack/spack/cmd/modules/tcl.py | 30
-rw-r--r--  lib/spack/spack/cmd/patch.py | 7
-rw-r--r--  lib/spack/spack/cmd/pkg.py | 97
-rw-r--r--  lib/spack/spack/cmd/providers.py | 21
-rw-r--r--  lib/spack/spack/cmd/pydoc.py | 2
-rw-r--r--  lib/spack/spack/cmd/python.py | 77
-rw-r--r--  lib/spack/spack/cmd/remove.py | 26
-rw-r--r--  lib/spack/spack/cmd/repo.py | 85
-rw-r--r--  lib/spack/spack/cmd/resource.py | 34
-rw-r--r--  lib/spack/spack/cmd/restage.py | 2
-rw-r--r--  lib/spack/spack/cmd/solve.py | 133
-rw-r--r--  lib/spack/spack/cmd/spec.py | 77
-rw-r--r--  lib/spack/spack/cmd/stage.py | 14
-rw-r--r--  lib/spack/spack/cmd/tags.py | 24
-rw-r--r--  lib/spack/spack/cmd/test.py | 243
-rw-r--r--  lib/spack/spack/cmd/test_env.py | 7
-rw-r--r--  lib/spack/spack/cmd/tutorial.py | 39
-rw-r--r--  lib/spack/spack/cmd/undevelop.py | 13
-rw-r--r--  lib/spack/spack/cmd/uninstall.py | 116
-rw-r--r--  lib/spack/spack/cmd/unit_test.py | 111
-rw-r--r--  lib/spack/spack/cmd/unload.py | 59
-rw-r--r--  lib/spack/spack/cmd/url.py | 261
-rw-r--r--  lib/spack/spack/cmd/verify.py | 48
-rw-r--r--  lib/spack/spack/cmd/versions.py | 62
-rw-r--r--  lib/spack/spack/cmd/view.py | 146
-rw-r--r--  lib/spack/spack/compiler.py | 273
-rw-r--r--  lib/spack/spack/compilers/__init__.py | 385
-rw-r--r--  lib/spack/spack/compilers/aocc.py | 62
-rw-r--r--  lib/spack/spack/compilers/apple_clang.py | 92
-rw-r--r--  lib/spack/spack/compilers/arm.py | 31
-rw-r--r--  lib/spack/spack/compilers/cce.py | 67
-rw-r--r--  lib/spack/spack/compilers/clang.py | 108
-rw-r--r--  lib/spack/spack/compilers/dpcpp.py | 13
-rw-r--r--  lib/spack/spack/compilers/fj.py | 26
-rw-r--r--  lib/spack/spack/compilers/gcc.py | 87
-rw-r--r--  lib/spack/spack/compilers/intel.py | 80
-rw-r--r--  lib/spack/spack/compilers/msvc.py | 75
-rw-r--r--  lib/spack/spack/compilers/nag.py | 31
-rw-r--r--  lib/spack/spack/compilers/nvhpc.py | 44
-rw-r--r--  lib/spack/spack/compilers/oneapi.py | 49
-rw-r--r--  lib/spack/spack/compilers/pgi.py | 52
-rw-r--r--  lib/spack/spack/compilers/rocmcc.py | 33
-rw-r--r--  lib/spack/spack/compilers/xl.py | 59
-rw-r--r--  lib/spack/spack/compilers/xl_r.py | 18
-rw-r--r--  lib/spack/spack/concretize.py | 375
-rw-r--r--  lib/spack/spack/config.py | 281
-rw-r--r--  lib/spack/spack/container/__init__.py | 52
-rw-r--r--  lib/spack/spack/container/images.py | 29
-rw-r--r--  lib/spack/spack/container/writers/__init__.py | 147
-rw-r--r--  lib/spack/spack/container/writers/docker.py | 11
-rw-r--r--  lib/spack/spack/container/writers/singularity.py | 15
-rw-r--r--  lib/spack/spack/cray_manifest.py | 119
-rw-r--r--  lib/spack/spack/database.py | 457
-rw-r--r--  lib/spack/spack/dependency.py | 23
-rw-r--r--  lib/spack/spack/detection/__init__.py | 12
-rw-r--r--  lib/spack/spack/detection/common.py | 83
-rw-r--r--  lib/spack/spack/detection/path.py | 102
-rw-r--r--  lib/spack/spack/directives.py | 221
-rw-r--r--  lib/spack/spack/directory_layout.py | 279
-rw-r--r--  lib/spack/spack/environment/__init__.py | 52
-rw-r--r--  lib/spack/spack/environment/environment.py | 685
-rw-r--r--  lib/spack/spack/environment/shell.py | 106
-rw-r--r--  lib/spack/spack/error.py | 21
-rw-r--r--  lib/spack/spack/extensions.py | 40
-rw-r--r--  lib/spack/spack/fetch_strategy.py | 674
-rw-r--r--  lib/spack/spack/filesystem_view.py | 339
-rw-r--r--  lib/spack/spack/gcs_handler.py | 6
-rw-r--r--  lib/spack/spack/graph.py | 138
-rw-r--r--  lib/spack/spack/hash_types.py | 19
-rw-r--r--  lib/spack/spack/hooks/__init__.py | 34
-rw-r--r--  lib/spack/spack/hooks/licensing.py | 56
-rw-r--r--  lib/spack/spack/hooks/module_file_generation.py | 25
-rw-r--r--  lib/spack/spack/hooks/sbang.py | 65
-rw-r--r--  lib/spack/spack/install_test.py | 101
-rw-r--r--  lib/spack/spack/installer.py | 723
-rw-r--r--  lib/spack/spack/main.py | 442
-rw-r--r--  lib/spack/spack/mirror.py | 166
-rw-r--r--  lib/spack/spack/mixins.py | 54
-rw-r--r--  lib/spack/spack/modules/__init__.py | 11
-rw-r--r--  lib/spack/spack/modules/common.py | 295
-rw-r--r--  lib/spack/spack/modules/lmod.py | 126
-rw-r--r--  lib/spack/spack/modules/tcl.py | 36
-rw-r--r--  lib/spack/spack/multimethod.py | 67
-rw-r--r--  lib/spack/spack/operating_systems/__init__.py | 9
-rw-r--r--  lib/spack/spack/operating_systems/_operating_system.py | 10
-rw-r--r--  lib/spack/spack/operating_systems/cray_backend.py | 41
-rw-r--r--  lib/spack/spack/operating_systems/cray_frontend.py | 37
-rw-r--r--  lib/spack/spack/operating_systems/linux_distro.py | 47
-rw-r--r--  lib/spack/spack/operating_systems/mac_os.py | 71
-rwxr-xr-x  lib/spack/spack/operating_systems/windows_os.py | 51
-rw-r--r--  lib/spack/spack/package_base.py | 824
-rw-r--r--  lib/spack/spack/package_prefs.py | 130
-rw-r--r--  lib/spack/spack/package_test.py | 22
-rw-r--r--  lib/spack/spack/parse.py | 27
-rw-r--r--  lib/spack/spack/patch.py | 147
-rw-r--r--  lib/spack/spack/paths.py | 61
-rw-r--r--  lib/spack/spack/platforms/__init__.py | 23
-rw-r--r--  lib/spack/spack/platforms/_functions.py | 2
-rw-r--r--  lib/spack/spack/platforms/_platform.py | 34
-rw-r--r--  lib/spack/spack/platforms/cray.py | 96
-rw-r--r--  lib/spack/spack/platforms/darwin.py | 16
-rw-r--r--  lib/spack/spack/platforms/linux.py | 6
-rw-r--r--  lib/spack/spack/platforms/test.py | 26
-rwxr-xr-x  lib/spack/spack/platforms/windows.py | 10
-rw-r--r--  lib/spack/spack/projections.py | 3
-rw-r--r--  lib/spack/spack/provider_index.py | 40
-rw-r--r--  lib/spack/spack/relocate.py | 341
-rw-r--r--  lib/spack/spack/repo.py | 318
-rw-r--r--  lib/spack/spack/report.py | 160
-rw-r--r--  lib/spack/spack/reporter.py | 2
-rw-r--r--  lib/spack/spack/reporters/cdash.py | 293
-rw-r--r--  lib/spack/spack/reporters/junit.py | 6
-rw-r--r--  lib/spack/spack/rewiring.py | 88
-rw-r--r--  lib/spack/spack/s3_handler.py | 17
-rw-r--r--  lib/spack/spack/schema/__init__.py | 19
-rw-r--r--  lib/spack/spack/schema/bootstrap.py | 45
-rw-r--r--  lib/spack/spack/schema/buildcache_spec.py | 44
-rw-r--r--  lib/spack/spack/schema/cdash.py | 28
-rw-r--r--  lib/spack/spack/schema/compilers.py | 129
-rw-r--r--  lib/spack/spack/schema/concretizer.py | 46
-rw-r--r--  lib/spack/spack/schema/config.py | 159
-rw-r--r--  lib/spack/spack/schema/container.py | 137
-rw-r--r--  lib/spack/spack/schema/cray_manifest.py | 58
-rw-r--r--  lib/spack/spack/schema/database_index.py | 64
-rw-r--r--  lib/spack/spack/schema/env.py | 254
-rw-r--r--  lib/spack/spack/schema/environment.py | 29
-rw-r--r--  lib/spack/spack/schema/gitlab_ci.py | 160
-rw-r--r--  lib/spack/spack/schema/merged.py | 12
-rw-r--r--  lib/spack/spack/schema/mirrors.py | 40
-rw-r--r--  lib/spack/spack/schema/modules.py | 294
-rw-r--r--  lib/spack/spack/schema/packages.py | 143
-rw-r--r--  lib/spack/spack/schema/projections.py | 20
-rw-r--r--  lib/spack/spack/schema/repos.py | 18
-rw-r--r--  lib/spack/spack/schema/spec.py | 262
-rw-r--r--  lib/spack/spack/schema/upstreams.py | 45
-rw-r--r--  lib/spack/spack/solver/asp.py | 628
-rw-r--r--  lib/spack/spack/spec.py | 1815
-rw-r--r--  lib/spack/spack/spec_list.py | 58
-rw-r--r--  lib/spack/spack/stage.py | 332
-rw-r--r--  lib/spack/spack/store.py | 82
-rw-r--r--  lib/spack/spack/subprocess_context.py | 17
-rw-r--r--  lib/spack/spack/tag.py | 15
-rw-r--r--  lib/spack/spack/target.py | 44
-rw-r--r--  lib/spack/spack/tengine.py | 22
-rw-r--r--  lib/spack/spack/test/abi.py | 86
-rw-r--r--  lib/spack/spack/test/architecture.py | 195
-rw-r--r--  lib/spack/spack/test/audit.py | 140
-rw-r--r--  lib/spack/spack/test/bindist.py | 430
-rw-r--r--  lib/spack/spack/test/bootstrap.py | 103
-rw-r--r--  lib/spack/spack/test/build_distribution.py | 31
-rw-r--r--  lib/spack/spack/test/build_environment.py | 479
-rw-r--r--  lib/spack/spack/test/build_system_guess.py | 57
-rw-r--r--  lib/spack/spack/test/build_systems.py | 318
-rw-r--r--  lib/spack/spack/test/buildrequest.py | 19
-rw-r--r--  lib/spack/spack/test/buildtask.py | 25
-rw-r--r--  lib/spack/spack/test/cache_fetch.py | 24
-rw-r--r--  lib/spack/spack/test/cc.py | 902
-rw-r--r--  lib/spack/spack/test/ci.py | 328
-rw-r--r--  lib/spack/spack/test/cmd/activate.py | 69
-rw-r--r--  lib/spack/spack/test/cmd/arch.py | 36
-rw-r--r--  lib/spack/spack/test/cmd/audit.py | 43
-rw-r--r--  lib/spack/spack/test/cmd/blame.py | 47
-rw-r--r--  lib/spack/spack/test/cmd/bootstrap.py | 169
-rw-r--r--  lib/spack/spack/test/cmd/build_env.py | 54
-rw-r--r--  lib/spack/spack/test/cmd/buildcache.py | 229
-rw-r--r--  lib/spack/spack/test/cmd/cd.py | 2
-rw-r--r--  lib/spack/spack/test/cmd/checksum.py | 56
-rw-r--r--  lib/spack/spack/test/cmd/ci.py | 1678
-rw-r--r--  lib/spack/spack/test/cmd/clean.py | 64
-rw-r--r--  lib/spack/spack/test/cmd/commands.py | 200
-rw-r--r--  lib/spack/spack/test/cmd/common/arguments.py | 76
-rw-r--r--  lib/spack/spack/test/cmd/compiler.py | 256
-rw-r--r--  lib/spack/spack/test/cmd/concretize.py | 49
-rw-r--r--  lib/spack/spack/test/cmd/config.py | 466
-rw-r--r--  lib/spack/spack/test/cmd/create.py | 207
-rw-r--r--  lib/spack/spack/test/cmd/debug.py | 29
-rw-r--r--  lib/spack/spack/test/cmd/dependencies.py | 72
-rw-r--r--  lib/spack/spack/test/cmd/dependents.py | 81
-rw-r--r--  lib/spack/spack/test/cmd/deprecate.py | 137
-rw-r--r--  lib/spack/spack/test/cmd/dev_build.py | 255
-rw-r--r--  lib/spack/spack/test/cmd/develop.py | 93
-rw-r--r--  lib/spack/spack/test/cmd/diff.py | 78
-rw-r--r--  lib/spack/spack/test/cmd/env.py | 2223
-rw-r--r--  lib/spack/spack/test/cmd/extensions.py | 21
-rw-r--r--  lib/spack/spack/test/cmd/external.py | 368
-rw-r--r--  lib/spack/spack/test/cmd/fetch.py | 16
-rw-r--r--  lib/spack/spack/test/cmd/find.py | 220
-rw-r--r--  lib/spack/spack/test/cmd/gc.py | 31
-rw-r--r--  lib/spack/spack/test/cmd/gpg.py | 167
-rw-r--r--  lib/spack/spack/test/cmd/graph.py | 27
-rw-r--r--  lib/spack/spack/test/cmd/help.py | 24
-rw-r--r--  lib/spack/spack/test/cmd/info.py | 78
-rw-r--r--  lib/spack/spack/test/cmd/install.py | 825
-rw-r--r--  lib/spack/spack/test/cmd/is_git_repo.py | 7
-rw-r--r--  lib/spack/spack/test/cmd/license.py | 70
-rw-r--r--  lib/spack/spack/test/cmd/list.py | 69
-rw-r--r--  lib/spack/spack/test/cmd/load.py | 146
-rw-r--r--  lib/spack/spack/test/cmd/location.py | 82
-rw-r--r--  lib/spack/spack/test/cmd/maintainers.py | 85
-rw-r--r--  lib/spack/spack/test/cmd/mark.py | 39
-rw-r--r--  lib/spack/spack/test/cmd/mirror.py | 276
-rw-r--r--  lib/spack/spack/test/cmd/module.py | 126
-rw-r--r--  lib/spack/spack/test/cmd/pkg.py | 208
-rw-r--r--  lib/spack/spack/test/cmd/print_shell_vars.py | 8
-rw-r--r--  lib/spack/spack/test/cmd/providers.py | 62
-rw-r--r--  lib/spack/spack/test/cmd/python.py | 12
-rw-r--r--  lib/spack/spack/test/cmd/reindex.py | 34
-rw-r--r--  lib/spack/spack/test/cmd/repo.py | 20
-rw-r--r--  lib/spack/spack/test/cmd/resource.py | 96
-rw-r--r--  lib/spack/spack/test/cmd/spec.py | 116
-rw-r--r--  lib/spack/spack/test/cmd/stage.py | 62
-rw-r--r--  lib/spack/spack/test/cmd/tags.py | 36
-rw-r--r--  lib/spack/spack/test/cmd/test.py | 256
-rw-r--r--  lib/spack/spack/test/cmd/undevelop.py | 51
-rw-r--r--  lib/spack/spack/test/cmd/uninstall.py | 76
-rw-r--r--  lib/spack/spack/test/cmd/unit_test.py | 32
-rw-r--r--  lib/spack/spack/test/cmd/url.py | 156
-rw-r--r--  lib/spack/spack/test/cmd/verify.py | 57
-rw-r--r--  lib/spack/spack/test/cmd/versions.py | 46
-rw-r--r--  lib/spack/spack/test/cmd/view.py | 375
-rw-r--r--  lib/spack/spack/test/cmd_extensions.py | 161
-rw-r--r--  lib/spack/spack/test/compilers/basics.py | 720
-rw-r--r--  lib/spack/spack/test/compilers/detection.py | 672
-rw-r--r--  lib/spack/spack/test/concretize.py | 1681
-rw-r--r--  lib/spack/spack/test/concretize_preferences.py | 324
-rw-r--r--  lib/spack/spack/test/config.py | 1146
-rw-r--r--  lib/spack/spack/test/config_values.py | 31
-rw-r--r--  lib/spack/spack/test/conftest.py | 779
-rw-r--r--  lib/spack/spack/test/container/cli.py | 21
-rw-r--r--  lib/spack/spack/test/container/conftest.py | 23
-rw-r--r--  lib/spack/spack/test/container/docker.py | 72
-rw-r--r--  lib/spack/spack/test/container/images.py | 42
-rw-r--r--  lib/spack/spack/test/container/singularity.py | 30
-rw-r--r--  lib/spack/spack/test/cray_manifest.py | 242
-rw-r--r--  lib/spack/spack/test/cvs_fetch.py | 26
-rw-r--r--  lib/spack/spack/test/database.py | 523
-rw-r--r--  lib/spack/spack/test/directives.py | 30
-rw-r--r--  lib/spack/spack/test/directory_layout.py | 56
-rw-r--r--  lib/spack/spack/test/env.py | 13
-rw-r--r--  lib/spack/spack/test/environment_modifications.py | 498
-rw-r--r--  lib/spack/spack/test/fetch_strategy.py | 3
-rw-r--r--  lib/spack/spack/test/flag_handlers.py | 94
-rw-r--r--  lib/spack/spack/test/gcs_fetch.py | 19
-rw-r--r--  lib/spack/spack/test/git_fetch.py | 235
-rw-r--r--  lib/spack/spack/test/graph.py | 73
-rw-r--r--  lib/spack/spack/test/hg_fetch.py | 29
-rw-r--r--  lib/spack/spack/test/install.py | 273
-rw-r--r--  lib/spack/spack/test/installer.py | 542
-rw-r--r--  lib/spack/spack/test/link_paths.py | 55
-rw-r--r--  lib/spack/spack/test/llnl/util/argparsewriter.py | 10
-rw-r--r--  lib/spack/spack/test/llnl/util/file_list.py | 265
-rw-r--r--  lib/spack/spack/test/llnl/util/filesystem.py | 667
-rw-r--r--  lib/spack/spack/test/llnl/util/lang.py | 128
-rw-r--r--  lib/spack/spack/test/llnl/util/link_tree.py | 284
-rw-r--r--  lib/spack/spack/test/llnl/util/lock.py | 457
-rw-r--r--  lib/spack/spack/test/llnl/util/tty/log.py | 219
-rw-r--r--  lib/spack/spack/test/llnl/util/tty/tty.py | 74
-rw-r--r--  lib/spack/spack/test/main.py | 32
-rw-r--r--  lib/spack/spack/test/make_executable.py | 112
-rw-r--r--  lib/spack/spack/test/mirror.py | 196
-rw-r--r--  lib/spack/spack/test/module_parsing.py | 109
-rw-r--r--  lib/spack/spack/test/modules/common.py | 175
-rw-r--r--  lib/spack/spack/test/modules/conftest.py | 24
-rw-r--r--  lib/spack/spack/test/modules/lmod.py | 218
-rw-r--r--  lib/spack/spack/test/modules/tcl.py | 314
-rw-r--r--  lib/spack/spack/test/multimethod.py | 121
-rw-r--r--  lib/spack/spack/test/namespace_trie.py | 94
-rw-r--r--  lib/spack/spack/test/operating_system.py | 58
-rw-r--r--  lib/spack/spack/test/optional_deps.py | 106
-rw-r--r--  lib/spack/spack/test/package_class.py | 146
-rw-r--r--  lib/spack/spack/test/package_sanity.py | 106
-rw-r--r--  lib/spack/spack/test/packages.py | 274
-rw-r--r--  lib/spack/spack/test/packaging.py | 631
-rw-r--r--  lib/spack/spack/test/patch.py | 345
-rw-r--r--  lib/spack/spack/test/pattern.py | 19
-rw-r--r--  lib/spack/spack/test/permissions.py | 11
-rw-r--r--  lib/spack/spack/test/provider_index.py | 28
-rw-r--r--  lib/spack/spack/test/relocate.py | 291
-rw-r--r--  lib/spack/spack/test/repo.py | 69
-rw-r--r--  lib/spack/spack/test/rewiring.py | 78
-rw-r--r--  lib/spack/spack/test/s3_fetch.py | 19
-rw-r--r--  lib/spack/spack/test/sbang.py | 239
-rw-r--r--  lib/spack/spack/test/schema.py | 113
-rw-r--r--  lib/spack/spack/test/spack_yaml.py | 71
-rw-r--r--  lib/spack/spack/test/spec_dag.py | 824
-rw-r--r--  lib/spack/spack/test/spec_list.py | 222
-rw-r--r--  lib/spack/spack/test/spec_semantics.py | 1009
-rw-r--r--  lib/spack/spack/test/spec_syntax.py | 620
-rw-r--r--  lib/spack/spack/test/spec_yaml.py | 249
-rw-r--r--  lib/spack/spack/test/stage.py | 270
-rw-r--r--  lib/spack/spack/test/svn_fetch.py | 32
-rw-r--r--  lib/spack/spack/test/tag.py | 42
-rw-r--r--  lib/spack/spack/test/tengine.py | 34
-rw-r--r--  lib/spack/spack/test/test_activations.py | 289
-rw-r--r--  lib/spack/spack/test/test_suite.py | 81
-rw-r--r--  lib/spack/spack/test/url_fetch.py | 225
-rw-r--r--  lib/spack/spack/test/url_parse.py | 1260
-rw-r--r--  lib/spack/spack/test/url_substitution.py | 89
-rw-r--r--  lib/spack/spack/test/util/compression.py | 48
-rw-r--r--  lib/spack/spack/test/util/editor.py | 109
-rw-r--r--  lib/spack/spack/test/util/environment.py | 162
-rw-r--r--  lib/spack/spack/test/util/executable.py | 48
-rw-r--r--  lib/spack/spack/test/util/file_cache.py | 35
-rw-r--r--  lib/spack/spack/test/util/log_parser.py | 14
-rw-r--r--  lib/spack/spack/test/util/mock_package.py | 37
-rw-r--r--  lib/spack/spack/test/util/package_hash.py | 69
-rw-r--r--  lib/spack/spack/test/util/path.py | 23
-rw-r--r--  lib/spack/spack/test/util/prefix.py | 60
-rw-r--r--  lib/spack/spack/test/util/spack_lock_wrapper.py | 8
-rw-r--r--  lib/spack/spack/test/util/spack_yaml.py | 42
-rw-r--r--  lib/spack/spack/test/util/unparse/unparse.py | 43
-rw-r--r--  lib/spack/spack/test/util/util_gpg.py | 23
-rw-r--r--  lib/spack/spack/test/util/util_string.py | 10
-rw-r--r--  lib/spack/spack/test/util/util_url.py | 457
-rw-r--r--  lib/spack/spack/test/variant.py | 410
-rw-r--r--  lib/spack/spack/test/verification.py | 132
-rw-r--r--  lib/spack/spack/test/versions.py | 705
-rw-r--r--  lib/spack/spack/test/views.py | 28
-rw-r--r--  lib/spack/spack/test/web.py | 280
-rw-r--r--  lib/spack/spack/url.py | 463
-rw-r--r--  lib/spack/spack/user_environment.py | 38
-rw-r--r--  lib/spack/spack/util/classes.py | 10
-rw-r--r--  lib/spack/spack/util/compression.py | 99
-rw-r--r--  lib/spack/spack/util/crypto.py | 79
-rw-r--r--  lib/spack/spack/util/debug.py | 8
-rw-r--r--  lib/spack/spack/util/editor.py | 21
-rw-r--r--  lib/spack/spack/util/environment.py | 370
-rw-r--r--  lib/spack/spack/util/executable.py | 95
-rw-r--r--  lib/spack/spack/util/file_cache.py | 23
-rw-r--r--  lib/spack/spack/util/file_permissions.py | 9
-rw-r--r--  lib/spack/spack/util/gcs.py | 70
-rw-r--r--  lib/spack/spack/util/gpg.py | 119
-rw-r--r--  lib/spack/spack/util/hash.py | 7
-rw-r--r--  lib/spack/spack/util/lock.py | 17
-rw-r--r--  lib/spack/spack/util/log_parse.py | 23
-rw-r--r--  lib/spack/spack/util/mock_package.py | 38
-rw-r--r--  lib/spack/spack/util/module_cmd.py | 83
-rw-r--r--  lib/spack/spack/util/naming.py | 105
-rw-r--r--  lib/spack/spack/util/package_hash.py | 44
-rw-r--r--  lib/spack/spack/util/parallel.py | 16
-rw-r--r--  lib/spack/spack/util/path.py | 78
-rw-r--r--  lib/spack/spack/util/pattern.py | 27
-rw-r--r--  lib/spack/spack/util/prefix.py | 1
-rw-r--r--  lib/spack/spack/util/s3.py | 18
-rw-r--r--  lib/spack/spack/util/spack_json.py | 17
-rw-r--r--  lib/spack/spack/util/spack_yaml.py | 96
-rw-r--r--  lib/spack/spack/util/string.py | 18
-rw-r--r--  lib/spack/spack/util/timer.py | 4
-rw-r--r--  lib/spack/spack/util/unparse/__init__.py | 2
-rw-r--r--  lib/spack/spack/util/unparse/unparser.py | 119
-rw-r--r--  lib/spack/spack/util/url.py | 133
-rw-r--r--  lib/spack/spack/util/web.py | 231
-rw-r--r--  lib/spack/spack/variant.py | 226
-rw-r--r--  lib/spack/spack/verify.py | 110
-rw-r--r--  lib/spack/spack/version.py | 273
462 files changed, 36530 insertions, 35680 deletions
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 48746d149e..c29951154f 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -32,37 +32,39 @@ from sphinx.parsers import RSTParser
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
-sys.path.insert(0, os.path.abspath('_spack_root/lib/spack/external'))
-sys.path.insert(0, os.path.abspath('_spack_root/lib/spack/external/pytest-fallback'))
+sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external"))
+sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/pytest-fallback"))
if sys.version_info[0] < 3:
- sys.path.insert(
- 0, os.path.abspath('_spack_root/lib/spack/external/yaml/lib'))
+ sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib"))
else:
- sys.path.insert(
- 0, os.path.abspath('_spack_root/lib/spack/external/yaml/lib3'))
+ sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/external/yaml/lib3"))
-sys.path.append(os.path.abspath('_spack_root/lib/spack/'))
+sys.path.append(os.path.abspath("_spack_root/lib/spack/"))
# Add the Spack bin directory to the path so that we can use its output in docs.
-os.environ['SPACK_ROOT'] = os.path.abspath('_spack_root')
-os.environ['PATH'] += "%s%s" % (os.pathsep, os.path.abspath('_spack_root/bin'))
+os.environ["SPACK_ROOT"] = os.path.abspath("_spack_root")
+os.environ["PATH"] += "%s%s" % (os.pathsep, os.path.abspath("_spack_root/bin"))
# Set an environment variable so that colify will print output like it would to
# a terminal.
-os.environ['COLIFY_SIZE'] = '25x120'
-os.environ['COLUMNS'] = '120'
+os.environ["COLIFY_SIZE"] = "25x120"
+os.environ["COLUMNS"] = "120"
# Generate full package list if needed
-subprocess.call([
- 'spack', 'list', '--format=html', '--update=package_list.html'])
+subprocess.call(["spack", "list", "--format=html", "--update=package_list.html"])
# Generate a command index if an update is needed
-subprocess.call([
- 'spack', 'commands',
- '--format=rst',
- '--header=command_index.in',
- '--update=command_index.rst'] + glob('*rst'))
+subprocess.call(
+ [
+ "spack",
+ "commands",
+ "--format=rst",
+ "--header=command_index.in",
+ "--update=command_index.rst",
+ ]
+ + glob("*rst")
+)
#
# Run sphinx-apidoc
@@ -72,12 +74,12 @@ subprocess.call([
# Without this, the API Docs will never actually update
#
apidoc_args = [
- '--force', # Overwrite existing files
- '--no-toc', # Don't create a table of contents file
- '--output-dir=.', # Directory to place all output
+ "--force", # Overwrite existing files
+ "--no-toc", # Don't create a table of contents file
+ "--output-dir=.", # Directory to place all output
]
-sphinx_apidoc(apidoc_args + ['_spack_root/lib/spack/spack'])
-sphinx_apidoc(apidoc_args + ['_spack_root/lib/spack/llnl'])
+sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/spack"])
+sphinx_apidoc(apidoc_args + ["_spack_root/lib/spack/llnl"])
# Enable todo items
todo_include_todos = True
@@ -87,10 +89,12 @@ todo_include_todos = True
#
class PatchedPythonDomain(PythonDomain):
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
- if 'refspecific' in node:
- del node['refspecific']
+ if "refspecific" in node:
+ del node["refspecific"]
return super(PatchedPythonDomain, self).resolve_xref(
- env, fromdocname, builder, typ, target, node, contnode)
+ env, fromdocname, builder, typ, target, node, contnode
+ )
+
#
# Disable tabs to space expansion in code blocks
@@ -103,51 +107,57 @@ class NoTabExpansionRSTParser(RSTParser):
inputstring = StringList(lines, document.current_source)
super().parse(inputstring, document)
+
def setup(sphinx):
sphinx.add_domain(PatchedPythonDomain, override=True)
sphinx.add_source_parser(NoTabExpansionRSTParser, override=True)
+
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = '3.4'
+needs_sphinx = "3.4"
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
- 'sphinx.ext.autodoc',
- 'sphinx.ext.graphviz',
- 'sphinx.ext.intersphinx',
- 'sphinx.ext.napoleon',
- 'sphinx.ext.todo',
- 'sphinx.ext.viewcode',
- 'sphinxcontrib.programoutput',
+ "sphinx.ext.autodoc",
+ "sphinx.ext.graphviz",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.todo",
+ "sphinx.ext.viewcode",
+ "sphinxcontrib.programoutput",
]
# Set default graphviz options
graphviz_dot_args = [
- '-Grankdir=LR', '-Gbgcolor=transparent',
- '-Nshape=box', '-Nfontname=monaco', '-Nfontsize=10']
+ "-Grankdir=LR",
+ "-Gbgcolor=transparent",
+ "-Nshape=box",
+ "-Nfontname=monaco",
+ "-Nfontsize=10",
+]
# Get nice vector graphics
graphviz_output_format = "svg"
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
# The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
# The encoding of source files.
-source_encoding = 'utf-8-sig'
+source_encoding = "utf-8-sig"
# The master toctree document.
-master_doc = 'index'
+master_doc = "index"
# General information about the project.
-project = u'Spack'
-copyright = u'2013-2021, Lawrence Livermore National Laboratory.'
+project = u"Spack"
+copyright = u"2013-2021, Lawrence Livermore National Laboratory."
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -156,16 +166,16 @@ copyright = u'2013-2021, Lawrence Livermore National Laboratory.'
# The short X.Y version.
import spack
-version = '.'.join(str(s) for s in spack.spack_version_info[:2])
+version = ".".join(str(s) for s in spack.spack_version_info[:2])
# The full version, including alpha/beta/rc tags.
release = spack.spack_version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
-#language = None
+# language = None
# Places to look for .po/.mo files for doc translations
-#locale_dirs = []
+# locale_dirs = []
# Sphinx gettext settings
gettext_compact = True
@@ -173,41 +183,41 @@ gettext_uuid = False
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
-#today = ''
+# today = ''
# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ['_build', '_spack_root', '.spack-env']
+exclude_patterns = ["_build", "_spack_root", ".spack-env"]
nitpicky = True
nitpick_ignore = [
# Python classes that intersphinx is unable to resolve
- ('py:class', 'argparse.HelpFormatter'),
- ('py:class', 'contextlib.contextmanager'),
- ('py:class', 'module'),
- ('py:class', '_io.BufferedReader'),
- ('py:class', 'unittest.case.TestCase'),
- ('py:class', '_frozen_importlib_external.SourceFileLoader'),
+ ("py:class", "argparse.HelpFormatter"),
+ ("py:class", "contextlib.contextmanager"),
+ ("py:class", "module"),
+ ("py:class", "_io.BufferedReader"),
+ ("py:class", "unittest.case.TestCase"),
+ ("py:class", "_frozen_importlib_external.SourceFileLoader"),
# Spack classes that are private and we don't want to expose
- ('py:class', 'spack.provider_index._IndexBase'),
- ('py:class', 'spack.repo._PrependFileLoader'),
+ ("py:class", "spack.provider_index._IndexBase"),
+ ("py:class", "spack.repo._PrependFileLoader"),
]
# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
+# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
+# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
-#add_module_names = True
+# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
-#show_authors = False
+# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
# We use our own extension of the default style with a few modifications
@@ -218,156 +228,151 @@ from pygments.token import Comment, Generic, Text
class SpackStyle(DefaultStyle):
styles = DefaultStyle.styles.copy()
- background_color = "#f4f4f8"
+ background_color = "#f4f4f8"
styles[Generic.Output] = "#355"
styles[Generic.Prompt] = "bold #346ec9"
+
import pkg_resources
dist = pkg_resources.Distribution(__file__)
-sys.path.append('.') # make 'conf' module findable
-ep = pkg_resources.EntryPoint.parse('spack = conf:SpackStyle', dist=dist)
-dist._ep_map = {'pygments.styles': {'plugin1': ep}}
+sys.path.append(".") # make 'conf' module findable
+ep = pkg_resources.EntryPoint.parse("spack = conf:SpackStyle", dist=dist)
+dist._ep_map = {"pygments.styles": {"plugin1": ep}}
pkg_resources.working_set.add(dist)
-pygments_style = 'spack'
+pygments_style = "spack"
# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
+# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'sphinx_rtd_theme'
+html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
-html_theme_options = { 'logo_only' : True }
+html_theme_options = {"logo_only": True}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ["_themes"]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
-#html_title = None
+# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
+# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-html_logo = '_spack_root/share/spack/logo/spack-logo-white-text.svg'
+html_logo = "_spack_root/share/spack/logo/spack-logo-white-text.svg"
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-html_favicon = '_spack_root/share/spack/logo/favicon.ico'
+html_favicon = "_spack_root/share/spack/logo/favicon.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
-html_last_updated_fmt = '%b %d, %Y'
+html_last_updated_fmt = "%b %d, %Y"
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
-#html_use_smartypants = True
+# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
+# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
-#html_additional_pages = {}
+# html_additional_pages = {}
# If false, no module index is generated.
-#html_domain_indices = True
+# html_domain_indices = True
# If false, no index is generated.
-#html_use_index = True
+# html_use_index = True
# If true, the index is split into individual pages for each letter.
-#html_split_index = False
+# html_split_index = False
# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
+# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = False
+# html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
+# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
+# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
+# html_file_suffix = None
# Output file base name for HTML help builder.
-htmlhelp_basename = 'Spackdoc'
+htmlhelp_basename = "Spackdoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
+ # The paper size ('letterpaper' or 'a4paper').
+ #'papersize': 'letterpaper',
+ # The font size ('10pt', '11pt' or '12pt').
+ #'pointsize': '10pt',
+ # Additional stuff for the LaTeX preamble.
+ #'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'Spack.tex', u'Spack Documentation',
- u'Todd Gamblin', 'manual'),
+ ("index", "Spack.tex", u"Spack Documentation", u"Todd Gamblin", "manual"),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
-#latex_logo = None
+# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
-#latex_use_parts = False
+# latex_use_parts = False
# If true, show page references after internal links.
-#latex_show_pagerefs = False
+# latex_show_pagerefs = False
# If true, show URL addresses after external links.
-#latex_show_urls = False
+# latex_show_urls = False
# Documents to append as an appendix to all manuals.
-#latex_appendices = []
+# latex_appendices = []
# If false, no module index is generated.
-#latex_domain_indices = True
+# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [
- ('index', 'spack', u'Spack Documentation',
- [u'Todd Gamblin'], 1)
-]
+man_pages = [("index", "spack", u"Spack Documentation", [u"Todd Gamblin"], 1)]
# If true, show URL addresses after external links.
-#man_show_urls = False
+# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
@@ -376,19 +381,25 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- ('index', 'Spack', u'Spack Documentation',
- u'Todd Gamblin', 'Spack', 'One line description of project.',
- 'Miscellaneous'),
+ (
+ "index",
+ "Spack",
+ u"Spack Documentation",
+ u"Todd Gamblin",
+ "Spack",
+ "One line description of project.",
+ "Miscellaneous",
+ ),
]
# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
+# texinfo_appendices = []
# If false, no module index is generated.
-#texinfo_domain_indices = True
+# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'
# -- Extension configuration -------------------------------------------------
diff --git a/lib/spack/llnl/util/argparsewriter.py b/lib/spack/llnl/util/argparsewriter.py
index eb35d26aa8..a8db508c2f 100644
--- a/lib/spack/llnl/util/argparsewriter.py
+++ b/lib/spack/llnl/util/argparsewriter.py
@@ -29,8 +29,8 @@ class Command(object):
- optionals: list of optional arguments (list)
- subcommands: list of subcommand parsers (list)
"""
- def __init__(self, prog, description, usage,
- positionals, optionals, subcommands):
+
+ def __init__(self, prog, description, usage, positionals, optionals, subcommands):
self.prog = prog
self.description = description
self.usage = usage
@@ -71,15 +71,15 @@ class ArgparseWriter(argparse.HelpFormatter):
"""
self.parser = parser
- split_prog = parser.prog.split(' ')
+ split_prog = parser.prog.split(" ")
split_prog[-1] = prog
- prog = ' '.join(split_prog)
+ prog = " ".join(split_prog)
description = parser.description
fmt = parser._get_formatter()
actions = parser._actions
groups = parser._mutually_exclusive_groups
- usage = fmt._format_usage(None, actions, groups, '').strip()
+ usage = fmt._format_usage(None, actions, groups, "").strip()
# Go through actions and split them into optionals, positionals,
# and subcommands
@@ -90,8 +90,8 @@ class ArgparseWriter(argparse.HelpFormatter):
if action.option_strings:
flags = action.option_strings
dest_flags = fmt._format_action_invocation(action)
- help = self._expand_help(action) if action.help else ''
- help = help.replace('\n', ' ')
+ help = self._expand_help(action) if action.help else ""
+ help = help.replace("\n", " ")
optionals.append((flags, dest_flags, help))
elif isinstance(action, argparse._SubParsersAction):
for subaction in action._choices_actions:
@@ -100,20 +100,19 @@ class ArgparseWriter(argparse.HelpFormatter):
# Look for aliases of the form 'name (alias, ...)'
if self.aliases:
- match = re.match(r'(.*) \((.*)\)', subaction.metavar)
+ match = re.match(r"(.*) \((.*)\)", subaction.metavar)
if match:
- aliases = match.group(2).split(', ')
+ aliases = match.group(2).split(", ")
for alias in aliases:
subparser = action._name_parser_map[alias]
subcommands.append((subparser, alias))
else:
args = fmt._format_action_invocation(action)
- help = self._expand_help(action) if action.help else ''
- help = help.replace('\n', ' ')
+ help = self._expand_help(action) if action.help else ""
+ help = help.replace("\n", " ")
positionals.append((args, help))
- return Command(
- prog, description, usage, positionals, optionals, subcommands)
+ return Command(prog, description, usage, positionals, optionals, subcommands)
def format(self, cmd):
"""Returns the string representation of a single node in the
@@ -161,14 +160,13 @@ class ArgparseWriter(argparse.HelpFormatter):
raise
-_rst_levels = ['=', '-', '^', '~', ':', '`']
+_rst_levels = ["=", "-", "^", "~", ":", "`"]
class ArgparseRstWriter(ArgparseWriter):
"""Write argparse output as rst sections."""
- def __init__(self, prog, out=None, aliases=False,
- rst_levels=_rst_levels):
+ def __init__(self, prog, out=None, aliases=False, rst_levels=_rst_levels):
"""Create a new ArgparseRstWriter.
Parameters:
@@ -217,11 +215,12 @@ class ArgparseRstWriter(ArgparseWriter):
{1}
{2}
-""".format(prog.replace(' ', '-'), prog,
- self.rst_levels[self.level] * len(prog))
+""".format(
+ prog.replace(" ", "-"), prog, self.rst_levels[self.level] * len(prog)
+ )
def description(self, description):
- return description + '\n\n'
+ return description + "\n\n"
def usage(self, usage):
return """\
@@ -229,33 +228,39 @@ class ArgparseRstWriter(ArgparseWriter):
{0}
-""".format(usage)
+""".format(
+ usage
+ )
def begin_positionals(self):
- return '\n**Positional arguments**\n\n'
+ return "\n**Positional arguments**\n\n"
def positional(self, name, help):
return """\
{0}
{1}
-""".format(name, help)
+""".format(
+ name, help
+ )
def end_positionals(self):
- return ''
+ return ""
def begin_optionals(self):
- return '\n**Optional arguments**\n\n'
+ return "\n**Optional arguments**\n\n"
def optional(self, opts, help):
return """\
``{0}``
{1}
-""".format(opts, help)
+""".format(
+ opts, help
+ )
def end_optionals(self):
- return ''
+ return ""
def begin_subcommands(self, subcommands):
string = """
@@ -267,11 +272,10 @@ class ArgparseRstWriter(ArgparseWriter):
"""
for cmd, _ in subcommands:
- prog = re.sub(r'^[^ ]* ', '', cmd.prog)
- string += ' * :ref:`{0} <{1}>`\n'.format(
- prog, cmd.prog.replace(' ', '-'))
+ prog = re.sub(r"^[^ ]* ", "", cmd.prog)
+ string += " * :ref:`{0} <{1}>`\n".format(prog, cmd.prog.replace(" ", "-"))
- return string + '\n'
+ return string + "\n"
class ArgparseCompletionWriter(ArgparseWriter):
@@ -306,9 +310,11 @@ class ArgparseCompletionWriter(ArgparseWriter):
# Flatten lists of lists
optionals = [x for xx in optionals for x in xx]
- return (self.start_function(cmd.prog) +
- self.body(positionals, optionals, subcommands) +
- self.end_function(cmd.prog))
+ return (
+ self.start_function(cmd.prog)
+ + self.body(positionals, optionals, subcommands)
+ + self.end_function(cmd.prog)
+ )
def start_function(self, prog):
"""Returns the syntax needed to begin a function definition.
@@ -319,8 +325,8 @@ class ArgparseCompletionWriter(ArgparseWriter):
Returns:
str: the function definition beginning
"""
- name = prog.replace('-', '_').replace(' ', '_')
- return '\n_{0}() {{'.format(name)
+ name = prog.replace("-", "_").replace(" ", "_")
+ return "\n_{0}() {{".format(name)
def end_function(self, prog=None):
"""Returns the syntax needed to end a function definition.
@@ -331,7 +337,7 @@ class ArgparseCompletionWriter(ArgparseWriter):
Returns:
str: the function definition ending
"""
- return '}\n'
+ return "}\n"
def body(self, positionals, optionals, subcommands):
"""Returns the body of the function.
@@ -344,7 +350,7 @@ class ArgparseCompletionWriter(ArgparseWriter):
Returns:
str: the function body
"""
- return ''
+ return ""
def positionals(self, positionals):
"""Returns the syntax for reporting positional arguments.
@@ -355,7 +361,7 @@ class ArgparseCompletionWriter(ArgparseWriter):
Returns:
str: the syntax for positional arguments
"""
- return ''
+ return ""
def optionals(self, optionals):
"""Returns the syntax for reporting optional flags.
@@ -366,7 +372,7 @@ class ArgparseCompletionWriter(ArgparseWriter):
Returns:
str: the syntax for optional flags
"""
- return ''
+ return ""
def subcommands(self, subcommands):
"""Returns the syntax for reporting subcommands.
@@ -377,4 +383,4 @@ class ArgparseCompletionWriter(ArgparseWriter):
Returns:
str: the syntax for subcommand parsers
"""
- return ''
+ return ""
diff --git a/lib/spack/llnl/util/compat.py b/lib/spack/llnl/util/compat.py
index ca914d0fb6..ebe509f3a7 100644
--- a/lib/spack/llnl/util/compat.py
+++ b/lib/spack/llnl/util/compat.py
@@ -18,22 +18,22 @@ else:
map = map
zip = zip
from itertools import zip_longest as zip_longest # novm # noqa: F401
- from urllib.parse import urlencode as urlencode # novm # noqa: F401
- from urllib.request import urlopen as urlopen # novm # noqa: F401
+ from urllib.parse import urlencode as urlencode # novm # noqa: F401
+ from urllib.request import urlopen as urlopen # novm # noqa: F401
if sys.version_info >= (3, 3):
- from collections.abc import Hashable as Hashable # novm
- from collections.abc import Iterable as Iterable # novm
- from collections.abc import Mapping as Mapping # novm
- from collections.abc import MutableMapping as MutableMapping # novm
+ from collections.abc import Hashable as Hashable # novm
+ from collections.abc import Iterable as Iterable # novm
+ from collections.abc import Mapping as Mapping # novm
+ from collections.abc import MutableMapping as MutableMapping # novm
from collections.abc import MutableSequence as MutableSequence # novm
- from collections.abc import MutableSet as MutableSet # novm
- from collections.abc import Sequence as Sequence # novm
+ from collections.abc import MutableSet as MutableSet # novm
+ from collections.abc import Sequence as Sequence # novm
else:
- from collections import Hashable as Hashable # noqa: F401
- from collections import Iterable as Iterable # noqa: F401
- from collections import Mapping as Mapping # noqa: F401
- from collections import MutableMapping as MutableMapping # noqa: F401
+ from collections import Hashable as Hashable # noqa: F401
+ from collections import Iterable as Iterable # noqa: F401
+ from collections import Mapping as Mapping # noqa: F401
+ from collections import MutableMapping as MutableMapping # noqa: F401
from collections import MutableSequence as MutableSequence # noqa: F401
- from collections import MutableSet as MutableSet # noqa: F401
- from collections import Sequence as Sequence # noqa: F401
+ from collections import MutableSet as MutableSet # noqa: F401
+ from collections import Sequence as Sequence # noqa: F401
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 185f45d131..1740fb71c0 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -27,7 +27,7 @@ from llnl.util.symlink import symlink
from spack.util.executable import Executable
from spack.util.path import path_to_os_path, system_path_filter
-is_windows = _platform == 'win32'
+is_windows = _platform == "win32"
if not is_windows:
import grp
@@ -37,56 +37,57 @@ else:
__all__ = [
- 'FileFilter',
- 'FileList',
- 'HeaderList',
- 'LibraryList',
- 'ancestor',
- 'can_access',
- 'change_sed_delimiter',
- 'copy_mode',
- 'filter_file',
- 'find',
- 'find_headers',
- 'find_all_headers',
- 'find_libraries',
- 'find_system_libraries',
- 'fix_darwin_install_name',
- 'force_remove',
- 'force_symlink',
- 'getuid',
- 'chgrp',
- 'chmod_x',
- 'copy',
- 'install',
- 'copy_tree',
- 'install_tree',
- 'is_exe',
- 'join_path',
- 'last_modification_time_recursive',
- 'library_extensions',
- 'mkdirp',
- 'partition_path',
- 'prefixes',
- 'remove_dead_links',
- 'remove_directory_contents',
- 'remove_if_dead_link',
- 'remove_linked_tree',
- 'rename',
- 'set_executable',
- 'set_install_permissions',
- 'touch',
- 'touchp',
- 'traverse_tree',
- 'unset_executable_mode',
- 'working_dir',
- 'keep_modification_time'
+ "FileFilter",
+ "FileList",
+ "HeaderList",
+ "LibraryList",
+ "ancestor",
+ "can_access",
+ "change_sed_delimiter",
+ "copy_mode",
+ "filter_file",
+ "find",
+ "find_headers",
+ "find_all_headers",
+ "find_libraries",
+ "find_system_libraries",
+ "fix_darwin_install_name",
+ "force_remove",
+ "force_symlink",
+ "getuid",
+ "chgrp",
+ "chmod_x",
+ "copy",
+ "install",
+ "copy_tree",
+ "install_tree",
+ "is_exe",
+ "join_path",
+ "last_modification_time_recursive",
+ "library_extensions",
+ "mkdirp",
+ "partition_path",
+ "prefixes",
+ "remove_dead_links",
+ "remove_directory_contents",
+ "remove_if_dead_link",
+ "remove_linked_tree",
+ "rename",
+ "set_executable",
+ "set_install_permissions",
+ "touch",
+ "touchp",
+ "traverse_tree",
+ "unset_executable_mode",
+ "working_dir",
+ "keep_modification_time",
]
def getuid():
if is_windows:
import ctypes
+
if ctypes.windll.shell32.IsUserAnAdmin() == 0:
return 1
return 0
@@ -111,7 +112,7 @@ def path_contains_subdirectory(path, root):
#: This generates the library filenames that may appear on any OS.
-library_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
+library_extensions = ["a", "la", "so", "tbd", "dylib"]
def possible_library_filenames(library_names):
@@ -120,8 +121,9 @@ def possible_library_filenames(library_names):
"""
lib_extensions = library_extensions
return set(
- '.'.join((lib, extension)) for lib, extension in
- itertools.product(library_names, lib_extensions))
+ ".".join((lib, extension))
+ for lib, extension in itertools.product(library_names, lib_extensions)
+ )
def paths_containing_libs(paths, library_names):
@@ -174,19 +176,21 @@ def filter_file(regex, repl, *filenames, **kwargs):
file is copied verbatim. Default is to filter until the end of the
file.
"""
- string = kwargs.get('string', False)
- backup = kwargs.get('backup', False)
- ignore_absent = kwargs.get('ignore_absent', False)
- stop_at = kwargs.get('stop_at', None)
+ string = kwargs.get("string", False)
+ backup = kwargs.get("backup", False)
+ ignore_absent = kwargs.get("ignore_absent", False)
+ stop_at = kwargs.get("stop_at", None)
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
- unescaped = repl.replace(r'\\', '\\')
+ unescaped = repl.replace(r"\\", "\\")
def replace_groups_with_groupid(m):
def groupid_to_group(x):
return m.group(int(x.group(1)))
- return re.sub(r'\\([1-9])', groupid_to_group, unescaped)
+
+ return re.sub(r"\\([1-9])", groupid_to_group, unescaped)
+
repl = replace_groups_with_groupid
if string:
@@ -217,16 +221,16 @@ def filter_file(regex, repl, *filenames, **kwargs):
try:
extra_kwargs = {}
if sys.version_info > (3, 0):
- extra_kwargs = {'errors': 'surrogateescape'}
+ extra_kwargs = {"errors": "surrogateescape"}
# Open as a text file and filter until the end of the file is
# reached or we found a marker in the line if it was specified
- with open(tmp_filename, mode='r', **extra_kwargs) as input_file:
- with open(filename, mode='w', **extra_kwargs) as output_file:
+ with open(tmp_filename, mode="r", **extra_kwargs) as input_file:
+ with open(filename, mode="w", **extra_kwargs) as output_file:
# Using iter and readline is a workaround needed not to
# disable input_file.tell(), which will happen if we call
# input_file.next() implicitly via the for loop
- for line in iter(input_file.readline, ''):
+ for line in iter(input_file.readline, ""):
if stop_at is not None:
current_position = input_file.tell()
if stop_at == line.strip():
@@ -240,9 +244,9 @@ def filter_file(regex, repl, *filenames, **kwargs):
# If we stopped filtering at some point, reopen the file in
# binary mode and copy verbatim the remaining part
if current_position and stop_at:
- with open(tmp_filename, mode='rb') as input_file:
+ with open(tmp_filename, mode="rb") as input_file:
input_file.seek(current_position)
- with open(filename, mode='ab') as output_file:
+ with open(filename, mode="ab") as output_file:
output_file.writelines(input_file.readlines())
except BaseException:
@@ -281,26 +285,26 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
new_delim (str): The delimiter to replace with
*filenames: One or more files to search and replace
"""
- assert(len(old_delim) == 1)
- assert(len(new_delim) == 1)
+ assert len(old_delim) == 1
+ assert len(new_delim) == 1
# TODO: handle these cases one day?
- assert(old_delim != '"')
- assert(old_delim != "'")
- assert(new_delim != '"')
- assert(new_delim != "'")
+ assert old_delim != '"'
+ assert old_delim != "'"
+ assert new_delim != '"'
+ assert new_delim != "'"
whole_lines = "^s@([^@]*)@(.*)@[gIp]$"
- whole_lines = whole_lines.replace('@', old_delim)
+ whole_lines = whole_lines.replace("@", old_delim)
single_quoted = r"'s@((?:\\'|[^@'])*)@((?:\\'|[^'])*)@[gIp]?'"
- single_quoted = single_quoted.replace('@', old_delim)
+ single_quoted = single_quoted.replace("@", old_delim)
double_quoted = r'"s@((?:\\"|[^@"])*)@((?:\\"|[^"])*)@[gIp]?"'
- double_quoted = double_quoted.replace('@', old_delim)
+ double_quoted = double_quoted.replace("@", old_delim)
- repl = r's@\1@\2@g'
- repl = repl.replace('@', new_delim)
+ repl = r"s@\1@\2@g"
+ repl = repl.replace("@", new_delim)
filenames = path_to_os_path(*filenames)
for f in filenames:
filter_file(whole_lines, repl, f)
@@ -324,8 +328,7 @@ def exploding_archive_catch(stage):
# Expand all tarballs in their own directory to contain
# exploding tarballs.
- tarball_container = os.path.join(stage.path,
- "spack-expanded-archive")
+ tarball_container = os.path.join(stage.path, "spack-expanded-archive")
mkdirp(tarball_container)
orig_dir = os.getcwd()
os.chdir(tarball_container)
@@ -349,7 +352,7 @@ def exploding_archive_handler(tarball_container, stage):
where archive is being expanded
"""
files = os.listdir(tarball_container)
- non_hidden = [f for f in files if not f.startswith('.')]
+ non_hidden = [f for f in files if not f.startswith(".")]
if len(non_hidden) == 1:
src = os.path.join(tarball_container, non_hidden[0])
if os.path.isdir(src):
@@ -377,11 +380,13 @@ def get_owner_uid(path, err_msg=None):
p_stat = os.stat(path)
if p_stat.st_mode & stat.S_IRWXU != stat.S_IRWXU:
- tty.error("Expected {0} to support mode {1}, but it is {2}"
- .format(path, stat.S_IRWXU, p_stat.st_mode))
+ tty.error(
+ "Expected {0} to support mode {1}, but it is {2}".format(
+ path, stat.S_IRWXU, p_stat.st_mode
+ )
+ )
- raise OSError(errno.EACCES,
- err_msg.format(path, path) if err_msg else "")
+ raise OSError(errno.EACCES, err_msg.format(path, path) if err_msg else "")
else:
p_stat = os.stat(path)
@@ -389,8 +394,8 @@ def get_owner_uid(path, err_msg=None):
owner_uid = p_stat.st_uid
else:
sid = win32security.GetFileSecurity(
- path, win32security.OWNER_SECURITY_INFORMATION) \
- .GetSecurityDescriptorOwner()
+ path, win32security.OWNER_SECURITY_INFORMATION
+ ).GetSecurityDescriptorOwner()
owner_uid = win32security.LookupAccountSid(None, sid)[0]
return owner_uid
@@ -460,8 +465,7 @@ def chmod_x(entry, perms):
@system_path_filter
def copy_mode(src, dest):
- """Set the mode of dest to that of src unless it is a link.
- """
+ """Set the mode of dest to that of src unless it is a link."""
if os.path.islink(dest):
return
src_mode = os.stat(src).st_mode
@@ -504,17 +508,17 @@ def copy(src, dest, _permissions=False):
not a directory
"""
if _permissions:
- tty.debug('Installing {0} to {1}'.format(src, dest))
+ tty.debug("Installing {0} to {1}".format(src, dest))
else:
- tty.debug('Copying {0} to {1}'.format(src, dest))
+ tty.debug("Copying {0} to {1}".format(src, dest))
files = glob.glob(src)
if not files:
raise IOError("No such file or directory: '{0}'".format(src))
if len(files) > 1 and not os.path.isdir(dest):
raise ValueError(
- "'{0}' matches multiple files but '{1}' is not a directory".format(
- src, dest))
+ "'{0}' matches multiple files but '{1}' is not a directory".format(src, dest)
+ )
for src in files:
# Expand dest to its eventual full path if it is a directory.
@@ -592,9 +596,9 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
ValueError: if *src* is a parent directory of *dest*
"""
if _permissions:
- tty.debug('Installing {0} to {1}'.format(src, dest))
+ tty.debug("Installing {0} to {1}".format(src, dest))
else:
- tty.debug('Copying {0} to {1}'.format(src, dest))
+ tty.debug("Copying {0} to {1}".format(src, dest))
abs_dest = os.path.abspath(dest)
if not abs_dest.endswith(os.path.sep):
@@ -612,15 +616,20 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
# Stop early to avoid unnecessary recursion if being asked to copy
# from a parent directory.
if abs_dest.startswith(abs_src):
- raise ValueError('Cannot copy ancestor directory {0} into {1}'.
- format(abs_src, abs_dest))
+ raise ValueError(
+ "Cannot copy ancestor directory {0} into {1}".format(abs_src, abs_dest)
+ )
mkdirp(abs_dest)
- for s, d in traverse_tree(abs_src, abs_dest, order='pre',
- follow_symlinks=not symlinks,
- ignore=ignore,
- follow_nonexisting=True):
+ for s, d in traverse_tree(
+ abs_src,
+ abs_dest,
+ order="pre",
+ follow_symlinks=not symlinks,
+ ignore=ignore,
+ follow_nonexisting=True,
+ ):
if os.path.islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
@@ -628,8 +637,7 @@ def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
if os.path.isabs(target):
new_target = re.sub(abs_src, abs_dest, target)
if new_target != target:
- tty.debug("Redirecting link {0} to {1}"
- .format(target, new_target))
+ tty.debug("Redirecting link {0} to {1}".format(target, new_target))
target = new_target
symlink(target, d)
@@ -679,10 +687,9 @@ def get_filetype(path_name):
"""
Return the output of file path_name as a string to identify file type.
"""
- file = Executable('file')
- file.add_default_env('LC_ALL', 'C')
- output = file('-b', '-h', '%s' % path_name,
- output=str, error=str)
+ file = Executable("file")
+ file.add_default_env("LC_ALL", "C")
+ output = file("-b", "-h", "%s" % path_name, output=str, error=str)
return output.strip()
@@ -703,8 +710,8 @@ def is_nonsymlink_exe_with_shebang(path):
return False
# Should start with a shebang
- with open(path, 'rb') as f:
- return f.read(2) == b'#!'
+ with open(path, "rb") as f:
+ return f.read(2) == b"#!"
except (IOError, OSError):
return False
@@ -736,16 +743,16 @@ def mkdirp(*paths, **kwargs):
intermediate get the same permissions specified in the arguments to
mkdirp -- default value is 'args'
"""
- mode = kwargs.get('mode', None)
- group = kwargs.get('group', None)
- default_perms = kwargs.get('default_perms', 'args')
+ mode = kwargs.get("mode", None)
+ group = kwargs.get("group", None)
+ default_perms = kwargs.get("default_perms", "args")
paths = path_to_os_path(*paths)
for path in paths:
if not os.path.exists(path):
try:
# detect missing intermediate folders
intermediate_folders = []
- last_parent = ''
+ last_parent = ""
intermediate_path = os.path.dirname(path)
@@ -772,10 +779,10 @@ def mkdirp(*paths, **kwargs):
# ones and if mode_intermediate has been specified, otherwise
# intermediate folders list is not populated at all and default
# OS mode will be used
- if default_perms == 'args':
+ if default_perms == "args":
intermediate_mode = mode
intermediate_group = group
- elif default_perms == 'parents':
+ elif default_perms == "parents":
stat_info = os.stat(last_parent)
intermediate_mode = stat_info.st_mode
intermediate_group = stat_info.st_gid
@@ -788,10 +795,8 @@ def mkdirp(*paths, **kwargs):
if intermediate_mode is not None:
os.chmod(intermediate_path, intermediate_mode)
if intermediate_group is not None:
- chgrp_if_not_world_writable(intermediate_path,
- intermediate_group)
- os.chmod(intermediate_path,
- intermediate_mode) # reset sticky bit after
+ chgrp_if_not_world_writable(intermediate_path, intermediate_group)
+ os.chmod(intermediate_path, intermediate_mode) # reset sticky bit after
except OSError as e:
if e.errno != errno.EEXIST or not os.path.isdir(path):
@@ -803,7 +808,7 @@ def mkdirp(*paths, **kwargs):
@system_path_filter
def force_remove(*paths):
"""Remove files without printing errors. Like ``rm -f``, does NOT
- remove directories."""
+ remove directories."""
for path in paths:
try:
os.remove(path)
@@ -814,7 +819,7 @@ def force_remove(*paths):
@contextmanager
@system_path_filter
def working_dir(dirname, **kwargs):
- if kwargs.get('create', False):
+ if kwargs.get("create", False):
mkdirp(dirname)
orig_dir = os.getcwd()
@@ -847,19 +852,17 @@ def replace_directory_transaction(directory_name):
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
directory_name = os.path.abspath(directory_name)
- assert os.path.isdir(directory_name), 'Not a directory: ' + directory_name
+ assert os.path.isdir(directory_name), "Not a directory: " + directory_name
# Note: directory_name is normalized here, meaning the trailing slash is dropped,
# so dirname is the directory's parent not the directory itself.
- tmpdir = tempfile.mkdtemp(
- dir=os.path.dirname(directory_name),
- prefix='.backup')
+ tmpdir = tempfile.mkdtemp(dir=os.path.dirname(directory_name), prefix=".backup")
# We have to jump through hoops to support Windows, since
# os.rename(directory_name, tmpdir) errors there.
- backup_dir = os.path.join(tmpdir, 'backup')
+ backup_dir = os.path.join(tmpdir, "backup")
os.rename(directory_name, backup_dir)
- tty.debug('Directory moved [src={0}, dest={1}]'.format(directory_name, backup_dir))
+ tty.debug("Directory moved [src={0}, dest={1}]".format(directory_name, backup_dir))
try:
yield backup_dir
@@ -874,12 +877,12 @@ def replace_directory_transaction(directory_name):
except Exception as outer_exception:
raise CouldNotRestoreDirectoryBackup(inner_exception, outer_exception)
- tty.debug('Directory recovered [{0}]'.format(directory_name))
+ tty.debug("Directory recovered [{0}]".format(directory_name))
raise
else:
# Otherwise delete the temporary directory
shutil.rmtree(tmpdir, ignore_errors=True)
- tty.debug('Temporary directory deleted [{0}]'.format(tmpdir))
+ tty.debug("Temporary directory deleted [{0}]".format(tmpdir))
@system_path_filter
@@ -904,7 +907,7 @@ def hash_directory(directory, ignore=[]):
# TODO: if caching big files becomes an issue, convert this to
# TODO: read in chunks. Currently it's used only for testing
# TODO: purposes.
- with open(filename, 'rb') as f:
+ with open(filename, "rb") as f:
md5_hash.update(f.read())
return md5_hash.hexdigest()
@@ -916,15 +919,15 @@ def write_tmp_and_move(filename):
"""Write to a temporary file, then move into place."""
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
- tmp = os.path.join(dirname, '.%s.tmp' % basename)
- with open(tmp, 'w') as f:
+ tmp = os.path.join(dirname, ".%s.tmp" % basename)
+ with open(tmp, "w") as f:
yield f
shutil.move(tmp, filename)
@contextmanager
@system_path_filter
-def open_if_filename(str_or_file, mode='r'):
+def open_if_filename(str_or_file, mode="r"):
"""Takes either a path or a file object, and opens it if it is a path.
If it's a file object, just yields the file object.
@@ -940,9 +943,9 @@ def open_if_filename(str_or_file, mode='r'):
def touch(path):
"""Creates an empty file at the specified path."""
if is_windows:
- perms = (os.O_WRONLY | os.O_CREAT)
+ perms = os.O_WRONLY | os.O_CREAT
else:
- perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
+ perms = os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY
fd = None
try:
fd = os.open(path, perms)
@@ -954,8 +957,7 @@ def touch(path):
@system_path_filter
def touchp(path):
- """Like ``touch``, but creates any parent directories needed for the file.
- """
+ """Like ``touch``, but creates any parent directories needed for the file."""
mkdirp(os.path.dirname(path))
touch(path)
@@ -990,8 +992,7 @@ def ancestor(dir, n=1):
def get_single_file(directory):
fnames = os.listdir(directory)
if len(fnames) != 1:
- raise ValueError("Expected exactly 1 file, got {0}"
- .format(str(len(fnames))))
+ raise ValueError("Expected exactly 1 file, got {0}".format(str(len(fnames))))
return fnames[0]
@@ -1025,7 +1026,7 @@ def can_access(file_name):
@system_path_filter
-def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
+def traverse_tree(source_root, dest_root, rel_path="", **kwargs):
"""Traverse two filesystem trees simultaneously.
Walks the LinkTree directory in pre or post order. Yields each
@@ -1057,16 +1058,16 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
``src`` that do not exit in ``dest``. Default is True
follow_links (bool): Whether to descend into symlinks in ``src``
"""
- follow_nonexisting = kwargs.get('follow_nonexisting', True)
- follow_links = kwargs.get('follow_link', False)
+ follow_nonexisting = kwargs.get("follow_nonexisting", True)
+ follow_links = kwargs.get("follow_link", False)
# Yield in pre or post order?
- order = kwargs.get('order', 'pre')
- if order not in ('pre', 'post'):
+ order = kwargs.get("order", "pre")
+ if order not in ("pre", "post"):
raise ValueError("Order must be 'pre' or 'post'.")
# List of relative paths to ignore under the src root.
- ignore = kwargs.get('ignore', None) or (lambda filename: False)
+ ignore = kwargs.get("ignore", None) or (lambda filename: False)
# Don't descend into ignored directories
if ignore(rel_path):
@@ -1076,7 +1077,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
dest_path = os.path.join(dest_root, rel_path)
# preorder yields directories before children
- if order == 'pre':
+ if order == "pre":
yield (source_path, dest_path)
for f in os.listdir(source_path):
@@ -1088,14 +1089,12 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
# TODO: for symlinks, os.path.isdir looks for the link target. If the
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
- if os.path.isdir(source_child) and (
- follow_links or not os.path.islink(source_child)):
+ if os.path.isdir(source_child) and (follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
- tuples = traverse_tree(
- source_root, dest_root, rel_child, **kwargs)
+ tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
for t in tuples:
yield t
@@ -1103,7 +1102,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
elif not ignore(os.path.join(rel_path, f)):
yield (source_child, dest_child)
- if order == 'post':
+ if order == "post":
yield (source_path, dest_path)
@@ -1134,7 +1133,7 @@ def lexists_islink_isdir(path):
return True, is_link, is_dir
-def visit_directory_tree(root, visitor, rel_path='', depth=0):
+def visit_directory_tree(root, visitor, rel_path="", depth=0):
"""
Recurses the directory root depth-first through a visitor pattern
@@ -1172,8 +1171,7 @@ def visit_directory_tree(root, visitor, rel_path='', depth=0):
try:
isdir = f.is_dir()
except OSError as e:
- if is_windows and hasattr(e, 'winerror')\
- and e.winerror == 5 and islink:
+ if is_windows and hasattr(e, "winerror") and e.winerror == 5 and islink:
# if path is a symlink, determine destination and
# evaluate file vs directory
link_target = resolve_link_target_relative_to_the_link(f)
@@ -1221,9 +1219,11 @@ def set_executable(path):
def last_modification_time_recursive(path):
path = os.path.abspath(path)
times = [os.stat(path).st_mtime]
- times.extend(os.stat(os.path.join(root, name)).st_mtime
- for root, dirs, files in os.walk(path)
- for name in dirs + files)
+ times.extend(
+ os.stat(os.path.join(root, name)).st_mtime
+ for root, dirs, files in os.walk(path)
+ for name in dirs + files
+ )
return max(times)
@@ -1282,18 +1282,23 @@ def readonly_file_handler(ignore_errors=False):
and will raise a separate error if it is ever invoked (by accident)
on a non-Windows system.
"""
+
def error_remove_readonly(func, path, exc):
if not is_windows:
raise RuntimeError("This method should only be invoked on Windows")
excvalue = exc[1]
- if is_windows and func in (os.rmdir, os.remove, os.unlink) and\
- excvalue.errno == errno.EACCES:
+ if (
+ is_windows
+ and func in (os.rmdir, os.remove, os.unlink)
+ and excvalue.errno == errno.EACCES
+ ):
# change the file to be readable,writable,executable: 0777
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
# retry
func(path)
elif not ignore_errors:
raise
+
return error_remove_readonly
@@ -1309,13 +1314,13 @@ def remove_linked_tree(path):
Parameters:
path (str): Directory to be removed
"""
- kwargs = {'ignore_errors': True}
+ kwargs = {"ignore_errors": True}
# Windows readonly files cannot be removed by Python
# directly.
if is_windows:
- kwargs['ignore_errors'] = False
- kwargs['onerror'] = readonly_file_handler(ignore_errors=True)
+ kwargs["ignore_errors"] = False
+ kwargs["onerror"] = readonly_file_handler(ignore_errors=True)
if os.path.exists(path):
if os.path.islink(path):
@@ -1344,9 +1349,7 @@ def safe_remove(*files_or_dirs):
# Sort them so that shorter paths like "/foo/bar" come before
# nested paths like "/foo/bar/baz.yaml". This simplifies the
# handling of temporary copies below
- sorted_matches = sorted([
- os.path.abspath(x) for x in itertools.chain(*glob_matches)
- ], key=len)
+ sorted_matches = sorted([os.path.abspath(x) for x in itertools.chain(*glob_matches)], key=len)
# Copy files and directories in a temporary location
removed, dst_root = {}, tempfile.mkdtemp()
@@ -1361,7 +1364,7 @@ def safe_remove(*files_or_dirs):
continue
# The monotonic ID is a simple way to make the filename
# or directory name unique in the temporary folder
- basename = os.path.basename(file_or_dir) + '-{0}'.format(id)
+ basename = os.path.basename(file_or_dir) + "-{0}".format(id)
temporary_path = os.path.join(dst_root, basename)
shutil.move(file_or_dir, temporary_path)
removed[file_or_dir] = temporary_path
@@ -1390,11 +1393,11 @@ def fix_darwin_install_name(path):
libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
- install_name_tool = Executable('install_name_tool')
- install_name_tool('-id', lib, lib)
- otool = Executable('otool')
- long_deps = otool('-L', lib, output=str).split('\n')
- deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
+ install_name_tool = Executable("install_name_tool")
+ install_name_tool("-id", lib, lib)
+ otool = Executable("otool")
+ long_deps = otool("-L", lib, output=str).split("\n")
+ deps = [dep.partition(" ")[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
@@ -1404,7 +1407,7 @@ def fix_darwin_install_name(path):
# but we don't know builddir (nor how symbolic links look
# in builddir). We thus only compare the basenames.
if os.path.basename(dep) == os.path.basename(loc):
- install_name_tool('-change', dep, loc, lib)
+ install_name_tool("-change", dep, loc, lib)
break
@@ -1534,9 +1537,7 @@ class FileList(Sequence):
Returns:
list: A list of directories
"""
- return list(dedupe(
- os.path.dirname(x) for x in self.files if os.path.dirname(x)
- ))
+ return list(dedupe(os.path.dirname(x) for x in self.files if os.path.dirname(x)))
@property
def basenames(self):
@@ -1572,11 +1573,11 @@ class FileList(Sequence):
def __len__(self):
return len(self.files)
- def joined(self, separator=' '):
+ def joined(self, separator=" "):
return separator.join(self.files)
def __repr__(self):
- return self.__class__.__name__ + '(' + repr(self.files) + ')'
+ return self.__class__.__name__ + "(" + repr(self.files) + ")"
def __str__(self):
return self.joined()
@@ -1593,7 +1594,7 @@ class HeaderList(FileList):
# as "xinclude" will cause false matches.
# Avoid matching paths such as <prefix>/include/something/detail/include,
# e.g. in the CUDA Toolkit which ships internal libc++ headers.
- include_regex = re.compile(r'(.*?)(\binclude\b)(.*)')
+ include_regex = re.compile(r"(.*?)(\binclude\b)(.*)")
def __init__(self, files):
super(HeaderList, self).__init__(files)
@@ -1658,7 +1659,7 @@ class HeaderList(FileList):
name = x
# Valid extensions include: ['.cuh', '.hpp', '.hh', '.h']
- for ext in ['.cuh', '.hpp', '.hh', '.h']:
+ for ext in [".cuh", ".hpp", ".hh", ".h"]:
i = name.rfind(ext)
if i != -1:
names.append(name[:i])
@@ -1680,7 +1681,7 @@ class HeaderList(FileList):
Returns:
str: A joined list of include flags
"""
- return ' '.join(['-I' + x for x in self.directories])
+ return " ".join(["-I" + x for x in self.directories])
@property
def macro_definitions(self):
@@ -1695,7 +1696,7 @@ class HeaderList(FileList):
Returns:
str: A joined list of macro definitions
"""
- return ' '.join(self._macro_definitions)
+ return " ".join(self._macro_definitions)
@property
def cpp_flags(self):
@@ -1713,7 +1714,7 @@ class HeaderList(FileList):
"""
cpp_flags = self.include_flags
if self.macro_definitions:
- cpp_flags += ' ' + self.macro_definitions
+ cpp_flags += " " + self.macro_definitions
return cpp_flags
def add_macro(self, macro):
@@ -1752,24 +1753,30 @@ def find_headers(headers, root, recursive=False):
if isinstance(headers, six.string_types):
headers = [headers]
elif not isinstance(headers, Sequence):
- message = '{0} expects a string or sequence of strings as the '
- message += 'first argument [got {1} instead]'
+ message = "{0} expects a string or sequence of strings as the "
+ message += "first argument [got {1} instead]"
message = message.format(find_headers.__name__, type(headers))
raise TypeError(message)
# Construct the right suffix for the headers
suffixes = [
# C
- 'h',
+ "h",
# C++
- 'hpp', 'hxx', 'hh', 'H', 'txx', 'tcc', 'icc',
+ "hpp",
+ "hxx",
+ "hh",
+ "H",
+ "txx",
+ "tcc",
+ "icc",
# Fortran
- 'mod', 'inc',
+ "mod",
+ "inc",
]
# List of headers we are searching with suffixes
- headers = ['{0}.{1}'.format(header, suffix) for header in headers
- for suffix in suffixes]
+ headers = ["{0}.{1}".format(header, suffix) for header in headers for suffix in suffixes]
return HeaderList(find(root, headers, recursive))
@@ -1785,7 +1792,7 @@ def find_all_headers(root):
Returns:
List of all headers found in ``root`` and subdirectories.
"""
- return find_headers('*', root=root, recursive=True)
+ return find_headers("*", root=root, recursive=True)
class LibraryList(FileList):
@@ -1819,11 +1826,11 @@ class LibraryList(FileList):
for x in self.basenames:
name = x
- if x.startswith('lib'):
+ if x.startswith("lib"):
name = x[3:]
# Valid extensions include: ['.dylib', '.so', '.a']
- for ext in ['.dylib', '.so', '.a']:
+ for ext in [".dylib", ".so", ".a"]:
i = name.rfind(ext)
if i != -1:
names.append(name[:i])
@@ -1845,7 +1852,7 @@ class LibraryList(FileList):
Returns:
str: A joined list of search flags
"""
- return ' '.join(['-L' + x for x in self.directories])
+ return " ".join(["-L" + x for x in self.directories])
@property
def link_flags(self):
@@ -1858,7 +1865,7 @@ class LibraryList(FileList):
Returns:
str: A joined list of link flags
"""
- return ' '.join(['-l' + name for name in self.names])
+ return " ".join(["-l" + name for name in self.names])
@property
def ld_flags(self):
@@ -1871,7 +1878,7 @@ class LibraryList(FileList):
Returns:
str: A joined list of search flags and link flags
"""
- return self.search_flags + ' ' + self.link_flags
+ return self.search_flags + " " + self.link_flags
def find_system_libraries(libraries, shared=True):
@@ -1908,20 +1915,19 @@ def find_system_libraries(libraries, shared=True):
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, Sequence):
- message = '{0} expects a string or sequence of strings as the '
- message += 'first argument [got {1} instead]'
- message = message.format(find_system_libraries.__name__,
- type(libraries))
+ message = "{0} expects a string or sequence of strings as the "
+ message += "first argument [got {1} instead]"
+ message = message.format(find_system_libraries.__name__, type(libraries))
raise TypeError(message)
libraries_found = []
search_locations = [
- '/lib64',
- '/lib',
- '/usr/lib64',
- '/usr/lib',
- '/usr/local/lib64',
- '/usr/local/lib',
+ "/lib64",
+ "/lib",
+ "/usr/lib64",
+ "/usr/lib",
+ "/usr/local/lib64",
+ "/usr/local/lib",
]
for library in libraries:
@@ -1962,24 +1968,23 @@ def find_libraries(libraries, root, shared=True, recursive=False):
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, Sequence):
- message = '{0} expects a string or sequence of strings as the '
- message += 'first argument [got {1} instead]'
+ message = "{0} expects a string or sequence of strings as the "
+ message += "first argument [got {1} instead]"
message = message.format(find_libraries.__name__, type(libraries))
raise TypeError(message)
# Construct the right suffix for the library
if shared:
# Used on both Linux and macOS
- suffixes = ['so']
- if sys.platform == 'darwin':
+ suffixes = ["so"]
+ if sys.platform == "darwin":
# Only used on macOS
- suffixes.append('dylib')
+ suffixes.append("dylib")
else:
- suffixes = ['a']
+ suffixes = ["a"]
# List of libraries we are searching with suffixes
- libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries
- for suffix in suffixes]
+ libraries = ["{0}.{1}".format(lib, suffix) for lib in libraries for suffix in suffixes]
if not recursive:
# If not recursive, look for the libraries directly in root
@@ -1989,7 +1994,7 @@ def find_libraries(libraries, root, shared=True, recursive=False):
# perform first non-recursive search in root/lib then in root/lib64 and
# finally search all of root recursively. The search stops when the first
# match is found.
- for subdir in ('lib', 'lib64'):
+ for subdir in ("lib", "lib64"):
dirname = join_path(root, subdir)
if not os.path.isdir(dirname):
continue
@@ -2045,10 +2050,11 @@ def files_in(*search_paths):
"""
files = []
for d in filter(can_access_dir, search_paths):
- files.extend(filter(
- lambda x: os.path.isfile(x[1]),
- [(f, os.path.join(d, f)) for f in os.listdir(d)]
- ))
+ files.extend(
+ filter(
+ lambda x: os.path.isfile(x[1]), [(f, os.path.join(d, f)) for f in os.listdir(d)]
+ )
+ )
return files
@@ -2078,7 +2084,7 @@ def search_paths_for_executables(*path_hints):
path = os.path.abspath(path)
executable_paths.append(path)
- bin_dir = os.path.join(path, 'bin')
+ bin_dir = os.path.join(path, "bin")
if os.path.isdir(bin_dir):
executable_paths.append(bin_dir)
@@ -2106,11 +2112,11 @@ def search_paths_for_libraries(*path_hints):
path = os.path.abspath(path)
library_paths.append(path)
- lib_dir = os.path.join(path, 'lib')
+ lib_dir = os.path.join(path, "lib")
if os.path.isdir(lib_dir):
library_paths.append(lib_dir)
- lib64_dir = os.path.join(path, 'lib64')
+ lib64_dir = os.path.join(path, "lib64")
if os.path.isdir(lib64_dir):
library_paths.append(lib64_dir)
@@ -2140,13 +2146,13 @@ def partition_path(path, entry=None):
# Handle drive letters e.g. C:/ on Windows
entries[0] = entries[0] + sep
i = entries.index(entry)
- if '' in entries:
+ if "" in entries:
i -= 1
- return paths[:i], paths[i], paths[i + 1:]
+ return paths[:i], paths[i], paths[i + 1 :]
except ValueError:
pass
- return paths, '', []
+ return paths, "", []
@system_path_filter
@@ -2181,7 +2187,7 @@ def prefixes(path):
elif parts[0].endswith(":"):
# Handle drive letters e.g. C:/ on Windows
parts[0] = parts[0] + sep
- paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]
+ paths = [os.path.join(*parts[: i + 1]) for i in range(len(parts))]
try:
paths.remove(sep)
@@ -2189,7 +2195,7 @@ def prefixes(path):
pass
try:
- paths.remove('.')
+ paths.remove(".")
except ValueError:
pass
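
For orientation, a minimal sketch of how the llnl.util.filesystem helpers touched above are typically driven; the install prefix and the library/header names below are hypothetical, while the function signatures and the LibraryList/HeaderList flag properties are the ones shown in the hunks:

    import llnl.util.filesystem as fs

    prefix = "/opt/example-1.0"  # hypothetical install prefix

    # find_libraries(libraries, root, shared=True, recursive=False) returns a
    # LibraryList; search_flags / link_flags / ld_flags join into linker flags.
    libs = fs.find_libraries("libexample", root=prefix, shared=True, recursive=True)
    print(libs.ld_flags)  # e.g. "-L<prefix>/lib -lexample"

    # find_headers() returns a HeaderList with analogous -I / -D properties.
    headers = fs.find_headers("example", root=prefix, recursive=True)
    headers.add_macro("-DEXAMPLE_ENABLED")
    print(headers.cpp_flags)

    # copy_tree() and traverse_tree() accept an ignore callable over relative
    # paths, the same callable threaded through the hunks above.
    fs.copy_tree(prefix, "/tmp/example-copy", ignore=lambda f: f.endswith("~"))
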
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 463310b7a2..314566e97a 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -21,7 +21,7 @@ from six import string_types
from llnl.util.compat import MutableMapping, MutableSequence, zip_longest
# Ignore emacs backups when listing modules
-ignore_modules = [r'^\.#', '~$']
+ignore_modules = [r"^\.#", "~$"]
def index_by(objects, *funcs):
@@ -91,9 +91,9 @@ def index_by(objects, *funcs):
def caller_locals():
"""This will return the locals of the *parent* of the caller.
- This allows a function to insert variables into its caller's
- scope. Yes, this is some black magic, and yes it's useful
- for implementing things like depends_on and provides.
+ This allows a function to insert variables into its caller's
+ scope. Yes, this is some black magic, and yes it's useful
+ for implementing things like depends_on and provides.
"""
# Passing zero here skips line context for speed.
stack = inspect.stack(0)
@@ -105,7 +105,7 @@ def caller_locals():
def get_calling_module_name():
"""Make sure that the caller is a class definition, and return the
- enclosing module's name.
+ enclosing module's name.
"""
# Passing zero here skips line context for speed.
stack = inspect.stack(0)
@@ -115,12 +115,13 @@ def get_calling_module_name():
finally:
del stack
- if '__module__' not in caller_locals:
- raise RuntimeError("Must invoke get_calling_module_name() "
- "from inside a class definition!")
+ if "__module__" not in caller_locals:
+ raise RuntimeError(
+ "Must invoke get_calling_module_name() " "from inside a class definition!"
+ )
- module_name = caller_locals['__module__']
- base_name = module_name.split('.')[-1]
+ module_name = caller_locals["__module__"]
+ base_name = module_name.split(".")[-1]
return base_name
@@ -128,8 +129,8 @@ def attr_required(obj, attr_name):
"""Ensure that a class has a required attribute."""
if not hasattr(obj, attr_name):
raise RequiredAttributeError(
- "No required attribute '%s' in class '%s'"
- % (attr_name, obj.__class__.__name__))
+ "No required attribute '%s' in class '%s'" % (attr_name, obj.__class__.__name__)
+ )
def attr_setdefault(obj, name, value):
@@ -201,33 +202,35 @@ def memoized(func):
# TypeError is raised when indexing into a dict if the key is unhashable.
raise six.raise_from(
UnhashableArguments(
- "args + kwargs '{}' was not hashable for function '{}'"
- .format(key, func.__name__),
+ "args + kwargs '{}' was not hashable for function '{}'".format(
+ key, func.__name__
+ ),
),
- e)
+ e,
+ )
return _memoized_function
def list_modules(directory, **kwargs):
"""Lists all of the modules, excluding ``__init__.py``, in a
- particular directory. Listed packages have no particular
- order."""
- list_directories = kwargs.setdefault('directories', True)
+ particular directory. Listed packages have no particular
+ order."""
+ list_directories = kwargs.setdefault("directories", True)
for name in os.listdir(directory):
- if name == '__init__.py':
+ if name == "__init__.py":
continue
path = os.path.join(directory, name)
if list_directories and os.path.isdir(path):
- init_py = os.path.join(path, '__init__.py')
+ init_py = os.path.join(path, "__init__.py")
if os.path.isfile(init_py):
yield name
- elif name.endswith('.py'):
+ elif name.endswith(".py"):
if not any(re.search(pattern, name) for pattern in ignore_modules):
- yield re.sub('.py$', '', name)
+ yield re.sub(".py$", "", name)
def decorator_with_or_without_args(decorator):
@@ -257,41 +260,34 @@ def decorator_with_or_without_args(decorator):
def key_ordering(cls):
"""Decorates a class with extra methods that implement rich comparison
- operations and ``__hash__``. The decorator assumes that the class
- implements a function called ``_cmp_key()``. The rich comparison
- operations will compare objects using this key, and the ``__hash__``
- function will return the hash of this key.
+ operations and ``__hash__``. The decorator assumes that the class
+ implements a function called ``_cmp_key()``. The rich comparison
+ operations will compare objects using this key, and the ``__hash__``
+ function will return the hash of this key.
- If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
- ``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.
+ If a class already has ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
+ ``__gt__``, or ``__ge__`` defined, this decorator will overwrite them.
- Raises:
- TypeError: If the class does not have a ``_cmp_key`` method
+ Raises:
+ TypeError: If the class does not have a ``_cmp_key`` method
"""
+
def setter(name, value):
value.__name__ = name
setattr(cls, name, value)
- if not has_method(cls, '_cmp_key'):
+ if not has_method(cls, "_cmp_key"):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
- setter('__eq__',
- lambda s, o:
- (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
- setter('__lt__',
- lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
- setter('__le__',
- lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
+ setter("__eq__", lambda s, o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
+ setter("__lt__", lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
+ setter("__le__", lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
- setter('__ne__',
- lambda s, o:
- (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
- setter('__gt__',
- lambda s, o: o is None or s._cmp_key() > o._cmp_key())
- setter('__ge__',
- lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
+ setter("__ne__", lambda s, o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
+ setter("__gt__", lambda s, o: o is None or s._cmp_key() > o._cmp_key())
+ setter("__ge__", lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
- setter('__hash__', lambda self: hash(self._cmp_key()))
+ setter("__hash__", lambda self: hash(self._cmp_key()))
return cls
@@ -458,8 +454,7 @@ def lazy_lexicographic_ordering(cls, set_hash=True):
def le(self, other):
if self is other:
return True
- return (other is not None) and not lazy_lt(other._cmp_iter,
- self._cmp_iter)
+ return (other is not None) and not lazy_lt(other._cmp_iter, self._cmp_iter)
def ge(self, other):
if self is other:
@@ -489,7 +484,7 @@ def lazy_lexicographic_ordering(cls, set_hash=True):
@lazy_lexicographic_ordering
class HashableMap(MutableMapping):
"""This is a hashable, comparable dictionary. Hash is performed on
- a tuple of the values in the dictionary."""
+ a tuple of the values in the dictionary."""
def __init__(self):
self.dict = {}
@@ -527,7 +522,7 @@ class HashableMap(MutableMapping):
def in_function(function_name):
"""True if the caller was called from some function with
- the supplied Name, False otherwise."""
+ the supplied Name, False otherwise."""
stack = inspect.stack()
try:
for elt in stack[2:]:
@@ -540,24 +535,25 @@ def in_function(function_name):
def check_kwargs(kwargs, fun):
"""Helper for making functions with kwargs. Checks whether the kwargs
- are empty after all of them have been popped off. If they're
- not, raises an error describing which kwargs are invalid.
+ are empty after all of them have been popped off. If they're
+ not, raises an error describing which kwargs are invalid.
- Example::
+ Example::
- def foo(self, **kwargs):
- x = kwargs.pop('x', None)
- y = kwargs.pop('y', None)
- z = kwargs.pop('z', None)
- check_kwargs(kwargs, self.foo)
+ def foo(self, **kwargs):
+ x = kwargs.pop('x', None)
+ y = kwargs.pop('y', None)
+ z = kwargs.pop('z', None)
+ check_kwargs(kwargs, self.foo)
- # This raises a TypeError:
- foo(w='bad kwarg')
+ # This raises a TypeError:
+ foo(w='bad kwarg')
"""
if kwargs:
raise TypeError(
"'%s' is an invalid keyword argument for function %s()."
- % (next(iter(kwargs)), fun.__name__))
+ % (next(iter(kwargs)), fun.__name__)
+ )
def match_predicate(*args):
@@ -573,6 +569,7 @@ def match_predicate(*args):
* any regex in a list or tuple of regexes matches.
* any predicate in args matches.
"""
+
def match(string):
for arg in args:
if isinstance(arg, string_types):
@@ -585,9 +582,11 @@ def match_predicate(*args):
if arg(string):
return True
else:
- raise ValueError("args to match_predicate must be regex, "
- "list of regexes, or callable.")
+ raise ValueError(
+ "args to match_predicate must be regex, " "list of regexes, or callable."
+ )
return False
+
return match
@@ -647,7 +646,7 @@ def pretty_date(time, now=None):
day_diff = diff.days
if day_diff < 0:
- return ''
+ return ""
if day_diff == 0:
if second_diff < 10:
@@ -705,43 +704,40 @@ def pretty_string_to_date(date_str, now=None):
now = now or datetime.now()
# datetime formats
- pattern[re.compile(r'^\d{4}$')] = lambda x: datetime.strptime(x, '%Y')
- pattern[re.compile(r'^\d{4}-\d{2}$')] = lambda x: datetime.strptime(
- x, '%Y-%m'
+ pattern[re.compile(r"^\d{4}$")] = lambda x: datetime.strptime(x, "%Y")
+ pattern[re.compile(r"^\d{4}-\d{2}$")] = lambda x: datetime.strptime(x, "%Y-%m")
+ pattern[re.compile(r"^\d{4}-\d{2}-\d{2}$")] = lambda x: datetime.strptime(x, "%Y-%m-%d")
+ pattern[re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}$")] = lambda x: datetime.strptime(
+ x, "%Y-%m-%d %H:%M"
)
- pattern[re.compile(r'^\d{4}-\d{2}-\d{2}$')] = lambda x: datetime.strptime(
- x, '%Y-%m-%d'
+ pattern[re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$")] = lambda x: datetime.strptime(
+ x, "%Y-%m-%d %H:%M:%S"
)
- pattern[re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}$')] = \
- lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M')
- pattern[re.compile(r'^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$')] = \
- lambda x: datetime.strptime(x, '%Y-%m-%d %H:%M:%S')
- pretty_regex = re.compile(
- r'(a|\d+)\s*(year|month|week|day|hour|minute|second)s?\s*ago')
+ pretty_regex = re.compile(r"(a|\d+)\s*(year|month|week|day|hour|minute|second)s?\s*ago")
def _n_xxx_ago(x):
how_many, time_period = pretty_regex.search(x).groups()
- how_many = 1 if how_many == 'a' else int(how_many)
+ how_many = 1 if how_many == "a" else int(how_many)
# timedelta natively supports time periods up to 'weeks'.
# To apply month or year we convert to 30 and 365 days
- if time_period == 'month':
+ if time_period == "month":
how_many *= 30
- time_period = 'day'
- elif time_period == 'year':
+ time_period = "day"
+ elif time_period == "year":
how_many *= 365
- time_period = 'day'
+ time_period = "day"
- kwargs = {(time_period + 's'): how_many}
+ kwargs = {(time_period + "s"): how_many}
return now - timedelta(**kwargs)
pattern[pretty_regex] = _n_xxx_ago
# yesterday
callback = lambda x: now - timedelta(days=1)
- pattern[re.compile('^yesterday$')] = callback
+ pattern[re.compile("^yesterday$")] = callback
for regexp, parser in pattern.items():
if bool(regexp.match(date_str)):
@@ -752,7 +748,6 @@ def pretty_string_to_date(date_str, now=None):
class RequiredAttributeError(ValueError):
-
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)
@@ -764,6 +759,7 @@ class ObjectWrapper(object):
This class is modeled after the stackoverflow answer:
* http://stackoverflow.com/a/1445289/771663
"""
+
def __init__(self, wrapped_object):
wrapped_cls = type(wrapped_object)
wrapped_name = wrapped_cls.__name__
@@ -807,7 +803,7 @@ class Singleton(object):
# requested but not yet set. The final 'getattr' line here requires
# 'instance'/'_instance' to be defined or it will enter an infinite
# loop, so protect against that here.
- if name in ['_instance', 'instance']:
+ if name in ["_instance", "instance"]:
raise AttributeError()
return getattr(self.instance, name)
@@ -837,7 +833,7 @@ class LazyReference(object):
self.ref_function = ref_function
def __getattr__(self, name):
- if name == 'ref_function':
+ if name == "ref_function":
raise AttributeError()
return getattr(self.ref_function(), name)
@@ -875,8 +871,8 @@ def load_module_from_file(module_name, module_path):
# This recipe is adapted from https://stackoverflow.com/a/67692/771663
if sys.version_info[0] == 3 and sys.version_info[1] >= 5:
import importlib.util
- spec = importlib.util.spec_from_file_location( # novm
- module_name, module_path)
+
+ spec = importlib.util.spec_from_file_location(module_name, module_path) # novm
module = importlib.util.module_from_spec(spec) # novm
# The module object needs to exist in sys.modules before the
# loader executes the module code.
@@ -893,6 +889,7 @@ def load_module_from_file(module_name, module_path):
raise
elif sys.version_info[0] == 2:
import imp
+
module = imp.load_source(module_name, module_path)
return module
@@ -924,8 +921,10 @@ def uniq(sequence):
def star(func):
"""Unpacks arguments for use with Multiprocessing mapping functions"""
+
def _wrapper(args):
return func(*args)
+
return _wrapper
@@ -934,22 +933,23 @@ class Devnull(object):
See https://stackoverflow.com/a/2929954.
"""
+
def write(self, *_):
pass
def elide_list(line_list, max_num=10):
"""Takes a long list and limits it to a smaller number of elements,
- replacing intervening elements with '...'. For example::
+ replacing intervening elements with '...'. For example::
- elide_list([1,2,3,4,5,6], 4)
+ elide_list([1,2,3,4,5,6], 4)
- gives::
+ gives::
- [1, 2, 3, '...', 6]
+ [1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
- return line_list[:max_num - 1] + ['...'] + line_list[-1:]
+ return line_list[: max_num - 1] + ["..."] + line_list[-1:]
else:
return line_list
@@ -972,7 +972,7 @@ def enum(**kwargs):
Args:
**kwargs: explicit dictionary of enums
"""
- return type('Enum', (object,), kwargs)
+ return type("Enum", (object,), kwargs)
class TypedMutableSequence(MutableSequence):
@@ -988,6 +988,7 @@ class TypedMutableSequence(MutableSequence):
if isinstance(l, Foo):
# do something
"""
+
def __init__(self, iterable):
self.data = list(iterable)
@@ -1017,7 +1018,7 @@ class GroupedExceptionHandler(object):
"""A generic mechanism to coalesce multiple exceptions and preserve tracebacks."""
def __init__(self):
- self.exceptions = [] # type: List[Tuple[str, Exception, List[str]]]
+ self.exceptions = [] # type: List[Tuple[str, Exception, List[str]]]
def __bool__(self):
"""Whether any exceptions were handled."""
@@ -1036,17 +1037,15 @@ class GroupedExceptionHandler(object):
# type: (bool) -> str
"""Print out an error message coalescing all the forwarded errors."""
each_exception_message = [
- '{0} raised {1}: {2}{3}'.format(
+ "{0} raised {1}: {2}{3}".format(
context,
exc.__class__.__name__,
exc,
- '\n{0}'.format(''.join(tb)) if with_tracebacks else '',
+ "\n{0}".format("".join(tb)) if with_tracebacks else "",
)
for context, exc, tb in self.exceptions
]
- return 'due to the following failures:\n{0}'.format(
- '\n'.join(each_exception_message)
- )
+ return "due to the following failures:\n{0}".format("\n".join(each_exception_message))
class GroupedExceptionForwarder(object):
@@ -1079,6 +1078,7 @@ class classproperty(object):
the evaluation is injected at creation time and take an instance (could be None) and
an owner (i.e. the class that originated the instance)
"""
+
def __init__(self, callback):
self.callback = callback
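
A similar sketch for two of the llnl.util.lang pieces reformatted above, @key_ordering and pretty_string_to_date(); the Rev class is a hypothetical stand-in, while the _cmp_key() contract and the accepted date strings come from the hunks:

    from llnl.util.lang import key_ordering, pretty_string_to_date

    @key_ordering
    class Rev(object):
        # hypothetical example type; @key_ordering supplies __eq__, __lt__,
        # __le__, __ne__, __gt__, __ge__ and __hash__ from _cmp_key()
        def __init__(self, parts):
            self.parts = tuple(parts)

        def _cmp_key(self):  # required, otherwise key_ordering raises TypeError
            return self.parts

    assert Rev((1, 2)) < Rev((1, 10))
    assert Rev((1, 2)) == Rev((1, 2))

    # pretty_string_to_date() accepts "%Y", "%Y-%m", "%Y-%m-%d", timestamps,
    # and relative strings such as "2 days ago", "a month ago", or "yesterday".
    when = pretty_string_to_date("2 days ago")
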
diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py
index 34cce1247c..947ca9c541 100644
--- a/lib/spack/llnl/util/link_tree.py
+++ b/lib/spack/llnl/util/link_tree.py
@@ -16,9 +16,9 @@ import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp, touch, traverse_tree
from llnl.util.symlink import islink, symlink
-__all__ = ['LinkTree']
+__all__ = ["LinkTree"]
-empty_file_name = '.spack-empty'
+empty_file_name = ".spack-empty"
def remove_link(src, dest):
@@ -38,6 +38,7 @@ class MergeConflict:
project(src_a) == project(src_b) == dst
"""
+
def __init__(self, dst, src_a=None, src_b=None):
self.dst = dst
self.src_a = src_a
@@ -51,13 +52,14 @@ class SourceMergeVisitor(object):
- A list of files to link in dst
- A list of merge conflicts in dst/
"""
+
def __init__(self, ignore=None):
self.ignore = ignore if ignore is not None else lambda f: False
# When mapping <src root> to <dst root>/<projection>, we need
# to prepend the <projection> bit to the relative path in the
# destination dir.
- self.projection = ''
+ self.projection = ""
# When a file blocks another file, the conflict can sometimes
# be resolved / ignored (e.g. <prefix>/LICENSE or
@@ -88,10 +90,13 @@ class SourceMergeVisitor(object):
elif proj_rel_path in self.files:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[proj_rel_path]
- self.fatal_conflicts.append(MergeConflict(
- dst=proj_rel_path,
- src_a=os.path.join(src_a_root, src_a_relpath),
- src_b=os.path.join(root, rel_path)))
+ self.fatal_conflicts.append(
+ MergeConflict(
+ dst=proj_rel_path,
+ src_a=os.path.join(src_a_root, src_a_relpath),
+ src_b=os.path.join(root, rel_path),
+ )
+ )
return False
elif proj_rel_path in self.directories:
# No new directory, carry on.
@@ -147,17 +152,23 @@ class SourceMergeVisitor(object):
elif proj_rel_path in self.directories:
# Can't create a file where a dir is; fatal error
src_a_root, src_a_relpath = self.directories[proj_rel_path]
- self.fatal_conflicts.append(MergeConflict(
- dst=proj_rel_path,
- src_a=os.path.join(src_a_root, src_a_relpath),
- src_b=os.path.join(root, rel_path)))
+ self.fatal_conflicts.append(
+ MergeConflict(
+ dst=proj_rel_path,
+ src_a=os.path.join(src_a_root, src_a_relpath),
+ src_b=os.path.join(root, rel_path),
+ )
+ )
elif proj_rel_path in self.files:
# In some cases we can resolve file-file conflicts
src_a_root, src_a_relpath = self.files[proj_rel_path]
- self.file_conflicts.append(MergeConflict(
- dst=proj_rel_path,
- src_a=os.path.join(src_a_root, src_a_relpath),
- src_b=os.path.join(root, rel_path)))
+ self.file_conflicts.append(
+ MergeConflict(
+ dst=proj_rel_path,
+ src_a=os.path.join(src_a_root, src_a_relpath),
+ src_b=os.path.join(root, rel_path),
+ )
+ )
else:
# Otherwise register this file to be linked.
self.files[proj_rel_path] = (root, rel_path)
@@ -166,24 +177,27 @@ class SourceMergeVisitor(object):
self.projection = os.path.normpath(projection)
# Todo, is this how to check in general for empty projection?
- if self.projection == '.':
- self.projection = ''
+ if self.projection == ".":
+ self.projection = ""
return
# If there is a projection, we'll also create the directories
# it consists of, and check whether that's causing conflicts.
- path = ''
+ path = ""
for part in self.projection.split(os.sep):
path = os.path.join(path, part)
if path not in self.files:
- self.directories[path] = ('<projection>', path)
+ self.directories[path] = ("<projection>", path)
else:
# Can't create a dir where a file is.
src_a_root, src_a_relpath = self.files[path]
- self.fatal_conflicts.append(MergeConflict(
- dst=path,
- src_a=os.path.join(src_a_root, src_a_relpath),
- src_b=os.path.join('<projection>', path)))
+ self.fatal_conflicts.append(
+ MergeConflict(
+ dst=path,
+ src_a=os.path.join(src_a_root, src_a_relpath),
+ src_b=os.path.join("<projection>", path),
+ )
+ )
class DestinationMergeVisitor(object):
@@ -200,6 +214,7 @@ class DestinationMergeVisitor(object):
in the target prefix will never be merged with
directories in the sources directories.
"""
+
def __init__(self, source_merge_visitor):
self.src = source_merge_visitor
@@ -208,10 +223,11 @@ class DestinationMergeVisitor(object):
# and don't traverse deeper
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
- self.src.fatal_conflicts.append(MergeConflict(
- rel_path,
- os.path.join(src_a_root, src_a_relpath),
- os.path.join(root, rel_path)))
+ self.src.fatal_conflicts.append(
+ MergeConflict(
+ rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+ )
+ )
return False
# If destination dir was also a src dir, remove the mkdir
@@ -236,17 +252,19 @@ class DestinationMergeVisitor(object):
# Always conflict
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
- self.src.fatal_conflicts.append(MergeConflict(
- rel_path,
- os.path.join(src_a_root, src_a_relpath),
- os.path.join(root, rel_path)))
+ self.src.fatal_conflicts.append(
+ MergeConflict(
+ rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+ )
+ )
if rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
- self.src.fatal_conflicts.append(MergeConflict(
- rel_path,
- os.path.join(src_a_root, src_a_relpath),
- os.path.join(root, rel_path)))
+ self.src.fatal_conflicts.append(
+ MergeConflict(
+ rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+ )
+ )
# Never descend into symlinked target dirs.
return False
@@ -258,17 +276,19 @@ class DestinationMergeVisitor(object):
# Can't merge a file if target already exists
if rel_path in self.src.directories:
src_a_root, src_a_relpath = self.src.directories[rel_path]
- self.src.fatal_conflicts.append(MergeConflict(
- rel_path,
- os.path.join(src_a_root, src_a_relpath),
- os.path.join(root, rel_path)))
+ self.src.fatal_conflicts.append(
+ MergeConflict(
+ rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+ )
+ )
elif rel_path in self.src.files:
src_a_root, src_a_relpath = self.src.files[rel_path]
- self.src.fatal_conflicts.append(MergeConflict(
- rel_path,
- os.path.join(src_a_root, src_a_relpath),
- os.path.join(root, rel_path)))
+ self.src.fatal_conflicts.append(
+ MergeConflict(
+ rel_path, os.path.join(src_a_root, src_a_relpath), os.path.join(root, rel_path)
+ )
+ )
class LinkTree(object):
@@ -281,30 +301,31 @@ class LinkTree(object):
symlinked to, to prevent the source directory from ever being
modified.
"""
+
def __init__(self, source_root):
if not os.path.exists(source_root):
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root
- def find_conflict(self, dest_root, ignore=None,
- ignore_file_conflicts=False):
+ def find_conflict(self, dest_root, ignore=None, ignore_file_conflicts=False):
"""Returns the first file in dest that conflicts with src"""
ignore = ignore or (lambda x: False)
conflicts = self.find_dir_conflicts(dest_root, ignore)
if not ignore_file_conflicts:
conflicts.extend(
- dst for src, dst
- in self.get_file_map(dest_root, ignore).items()
- if os.path.exists(dst))
+ dst
+ for src, dst in self.get_file_map(dest_root, ignore).items()
+ if os.path.exists(dst)
+ )
if conflicts:
return conflicts[0]
def find_dir_conflicts(self, dest_root, ignore):
conflicts = []
- kwargs = {'follow_nonexisting': False, 'ignore': ignore}
+ kwargs = {"follow_nonexisting": False, "ignore": ignore}
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
if os.path.exists(dest) and not os.path.isdir(dest):
@@ -315,7 +336,7 @@ class LinkTree(object):
def get_file_map(self, dest_root, ignore):
merge_map = {}
- kwargs = {'follow_nonexisting': True, 'ignore': ignore}
+ kwargs = {"follow_nonexisting": True, "ignore": ignore}
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if not os.path.isdir(src):
merge_map[src] = dest
@@ -337,8 +358,7 @@ class LinkTree(object):
touch(marker)
def unmerge_directories(self, dest_root, ignore):
- for src, dest in traverse_tree(
- self._root, dest_root, ignore=ignore, order='post'):
+ for src, dest in traverse_tree(self._root, dest_root, ignore=ignore, order="post"):
if os.path.isdir(src):
if not os.path.exists(dest):
continue
@@ -354,8 +374,7 @@ class LinkTree(object):
if os.path.exists(marker):
os.remove(marker)
- def merge(self, dest_root, ignore_conflicts=False, ignore=None,
- link=symlink, relative=False):
+ def merge(self, dest_root, ignore_conflicts=False, ignore=None, link=symlink, relative=False):
"""Link all files in src into dest, creating directories
if necessary.
@@ -377,7 +396,8 @@ class LinkTree(object):
ignore = lambda x: False
conflict = self.find_conflict(
- dest_root, ignore=ignore, ignore_file_conflicts=ignore_conflicts)
+ dest_root, ignore=ignore, ignore_file_conflicts=ignore_conflicts
+ )
if conflict:
raise SingleMergeConflictError(conflict)
@@ -416,8 +436,7 @@ class MergeConflictError(Exception):
class SingleMergeConflictError(MergeConflictError):
def __init__(self, path):
- super(MergeConflictError, self).__init__(
- "Package merge blocked by file: %s" % path)
+ super(MergeConflictError, self).__init__("Package merge blocked by file: %s" % path)
class MergeConflictSummary(MergeConflictError):
@@ -430,5 +449,6 @@ class MergeConflictSummary(MergeConflictError):
# show the first 3 merge conflicts.
for conflict in conflicts[:3]:
msg += "\n `{0}` and `{1}` both project to `{2}`".format(
- conflict.src_a, conflict.src_b, conflict.dst)
+ conflict.src_a, conflict.src_b, conflict.dst
+ )
super(MergeConflictSummary, self).__init__(msg)
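
Likewise, a minimal sketch of the LinkTree merge flow whose signatures are reformatted above; the source and view paths are hypothetical, and the fact that merge() raises a MergeConflictError subclass on a conflict is taken from the SingleMergeConflictError hunk:

    from llnl.util.link_tree import LinkTree, MergeConflictError

    tree = LinkTree("/opt/example-1.0")  # source root must already exist
    try:
        # merge() also accepts ignore_conflicts and an ignore callable,
        # forwarded to traverse_tree() as in the hunks above.
        tree.merge("/opt/view", ignore_conflicts=False, relative=False)
    except MergeConflictError as err:
        print(err)
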
diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
index 1ff7ceec64..0682ce059a 100644
--- a/lib/spack/llnl/util/lock.py
+++ b/lib/spack/llnl/util/lock.py
@@ -15,22 +15,22 @@ import llnl.util.tty as tty
import spack.util.string
-if sys.platform != 'win32':
+if sys.platform != "win32":
import fcntl
__all__ = [
- 'Lock',
- 'LockDowngradeError',
- 'LockUpgradeError',
- 'LockTransaction',
- 'WriteTransaction',
- 'ReadTransaction',
- 'LockError',
- 'LockTimeoutError',
- 'LockPermissionError',
- 'LockROFileError',
- 'CantCreateLockError'
+ "Lock",
+ "LockDowngradeError",
+ "LockUpgradeError",
+ "LockTransaction",
+ "WriteTransaction",
+ "ReadTransaction",
+ "LockError",
+ "LockTimeoutError",
+ "LockPermissionError",
+ "LockROFileError",
+ "CantCreateLockError",
]
@@ -47,6 +47,7 @@ class OpenFile(object):
the file descriptor from the file handle if needed -- or we could make this track
file descriptors as well in the future.
"""
+
def __init__(self, fh):
self.fh = fh
self.refs = 0
@@ -92,11 +93,11 @@ class OpenFileTracker(object):
path (str): path to lock file we want a filehandle for
"""
# Open writable files as 'r+' so we can upgrade to write later
- os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), 'r+'
+ os_mode, fh_mode = (os.O_RDWR | os.O_CREAT), "r+"
pid = os.getpid()
open_file = None # OpenFile object, if there is one
- stat = None # stat result for the lockfile, if it exists
+ stat = None # stat result for the lockfile, if it exists
try:
# see whether we've seen this inode/pid before
@@ -109,7 +110,7 @@ class OpenFileTracker(object):
raise
# path does not exist -- fail if we won't be able to create it
- parent = os.path.dirname(path) or '.'
+ parent = os.path.dirname(path) or "."
if not os.access(parent, os.W_OK):
raise CantCreateLockError(path)
@@ -119,7 +120,7 @@ class OpenFileTracker(object):
# we know path exists but not if it's writable. If it's read-only,
# only open the file for reading (and fail if we're trying to get
# an exclusive (write) lock on it)
- os_mode, fh_mode = os.O_RDONLY, 'r'
+ os_mode, fh_mode = os.O_RDONLY, "r"
fd = os.open(path, os_mode)
fh = os.fdopen(fd, fh_mode)
@@ -162,10 +163,10 @@ file_tracker = OpenFileTracker()
def _attempts_str(wait_time, nattempts):
# Don't print anything if we succeeded on the first try
if nattempts <= 1:
- return ''
+ return ""
- attempts = spack.util.string.plural(nattempts, 'attempt')
- return ' after {0:0.2f}s and {1}'.format(wait_time, attempts)
+ attempts = spack.util.string.plural(nattempts, "attempt")
+ return " after {0:0.2f}s and {1}".format(wait_time, attempts)
class LockType(object):
@@ -188,8 +189,7 @@ class LockType(object):
@staticmethod
def is_valid(op):
- return op == LockType.READ \
- or op == LockType.WRITE
+ return op == LockType.READ or op == LockType.WRITE
class Lock(object):
@@ -207,8 +207,7 @@ class Lock(object):
overlapping byte ranges in the same file).
"""
- def __init__(self, path, start=0, length=0, default_timeout=None,
- debug=False, desc=''):
+ def __init__(self, path, start=0, length=0, default_timeout=None, debug=False, desc=""):
"""Construct a new lock on the file at ``path``.
By default, the lock applies to the whole file. Optionally,
@@ -243,7 +242,7 @@ class Lock(object):
self.debug = debug
# optional debug description
- self.desc = ' ({0})'.format(desc) if desc else ''
+ self.desc = " ({0})".format(desc) if desc else ""
# If the user doesn't set a default timeout, or if they choose
# None, 0, etc. then lock attempts will not time out (unless the
@@ -280,17 +279,17 @@ class Lock(object):
def __repr__(self):
"""Formal representation of the lock."""
- rep = '{0}('.format(self.__class__.__name__)
+ rep = "{0}(".format(self.__class__.__name__)
for attr, value in self.__dict__.items():
- rep += '{0}={1}, '.format(attr, value.__repr__())
- return '{0})'.format(rep.strip(', '))
+ rep += "{0}={1}, ".format(attr, value.__repr__())
+ return "{0})".format(rep.strip(", "))
def __str__(self):
"""Readable string (with key fields) of the lock."""
- location = '{0}[{1}:{2}]'.format(self.path, self._start, self._length)
- timeout = 'timeout={0}'.format(self.default_timeout)
- activity = '#reads={0}, #writes={1}'.format(self._reads, self._writes)
- return '({0}, {1}, {2})'.format(location, timeout, activity)
+ location = "{0}[{1}:{2}]".format(self.path, self._start, self._length)
+ timeout = "timeout={0}".format(self.default_timeout)
+ activity = "#reads={0}, #writes={1}".format(self._reads, self._writes)
+ return "({0}, {1}, {2})".format(location, timeout, activity)
def _lock(self, op, timeout=None):
"""This takes a lock using POSIX locks (``fcntl.lockf``).
@@ -305,7 +304,7 @@ class Lock(object):
assert LockType.is_valid(op)
op_str = LockType.to_str(op)
- self._log_acquiring('{0} LOCK'.format(op_str))
+ self._log_acquiring("{0} LOCK".format(op_str))
timeout = timeout or self.default_timeout
# Create file and parent directories if they don't exist.
@@ -313,14 +312,16 @@ class Lock(object):
self._ensure_parent_directory()
self._file = file_tracker.get_fh(self.path)
- if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == 'r':
+ if LockType.to_module(op) == fcntl.LOCK_EX and self._file.mode == "r":
# Attempt to upgrade to write lock w/a read-only file.
# If the file were writable, we'd have opened it 'r+'
raise LockROFileError(self.path)
- self._log_debug("{0} locking [{1}:{2}]: timeout {3} sec"
- .format(op_str.lower(), self._start, self._length,
- timeout))
+ self._log_debug(
+ "{0} locking [{1}:{2}]: timeout {3} sec".format(
+ op_str.lower(), self._start, self._length, timeout
+ )
+ )
poll_intervals = iter(Lock._poll_interval_generator())
start_time = time.time()
@@ -339,8 +340,7 @@ class Lock(object):
total_wait_time = time.time() - start_time
return total_wait_time, num_attempts
- raise LockTimeoutError("Timed out waiting for a {0} lock."
- .format(op_str.lower()))
+ raise LockTimeoutError("Timed out waiting for a {0} lock.".format(op_str.lower()))
def _poll_lock(self, op):
"""Attempt to acquire the lock in a non-blocking manner. Return whether
@@ -349,16 +349,19 @@ class Lock(object):
module_op = LockType.to_module(op)
try:
# Try to get the lock (will raise if not available.)
- fcntl.lockf(self._file, module_op | fcntl.LOCK_NB,
- self._length, self._start, os.SEEK_SET)
+ fcntl.lockf(
+ self._file, module_op | fcntl.LOCK_NB, self._length, self._start, os.SEEK_SET
+ )
# help for debugging distributed locking
if self.debug:
# All locks read the owner PID and host
self._read_log_debug_data()
- self._log_debug('{0} locked {1} [{2}:{3}] (owner={4})'
- .format(LockType.to_str(op), self.path,
- self._start, self._length, self.pid))
+ self._log_debug(
+ "{0} locked {1} [{2}:{3}] (owner={4})".format(
+ LockType.to_str(op), self.path, self._start, self._length, self.pid
+ )
+ )
# Exclusive locks write their PID/host
if module_op == fcntl.LOCK_EX:
@@ -378,14 +381,13 @@ class Lock(object):
# relative paths to lockfiles in the current directory have no parent
if not parent:
- return '.'
+ return "."
try:
os.makedirs(parent)
except OSError as e:
# makedirs can fail when diretory already exists.
- if not (e.errno == errno.EEXIST and os.path.isdir(parent) or
- e.errno == errno.EISDIR):
+ if not (e.errno == errno.EEXIST and os.path.isdir(parent) or e.errno == errno.EISDIR):
raise
return parent
@@ -396,9 +398,9 @@ class Lock(object):
line = self._file.read()
if line:
- pid, host = line.strip().split(',')
- _, _, self.pid = pid.rpartition('=')
- _, _, self.host = host.rpartition('=')
+ pid, host = line.strip().split(",")
+ _, _, self.pid = pid.rpartition("=")
+ _, _, self.host = host.rpartition("=")
self.pid = int(self.pid)
def _write_log_debug_data(self):
@@ -423,8 +425,7 @@ class Lock(object):
be masquerading as write locks, but this removes either.
"""
- fcntl.lockf(self._file, fcntl.LOCK_UN,
- self._length, self._start, os.SEEK_SET)
+ fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
file_tracker.release_fh(self.path)
self._file = None
@@ -449,7 +450,7 @@ class Lock(object):
wait_time, nattempts = self._lock(LockType.READ, timeout=timeout)
self._reads += 1
# Log if acquired, which includes counts when verbose
- self._log_acquired('READ LOCK', wait_time, nattempts)
+ self._log_acquired("READ LOCK", wait_time, nattempts)
return True
else:
# Increment the read count for nested lock tracking
@@ -474,7 +475,7 @@ class Lock(object):
wait_time, nattempts = self._lock(LockType.WRITE, timeout=timeout)
self._writes += 1
# Log if acquired, which includes counts when verbose
- self._log_acquired('WRITE LOCK', wait_time, nattempts)
+ self._log_acquired("WRITE LOCK", wait_time, nattempts)
# return True only if we weren't nested in a read lock.
# TODO: we may need to return two values: whether we got
@@ -561,7 +562,7 @@ class Lock(object):
"""
assert self._reads > 0
- locktype = 'READ LOCK'
+ locktype = "READ LOCK"
if self._reads == 1 and self._writes == 0:
self._log_releasing(locktype)
@@ -569,7 +570,7 @@ class Lock(object):
release_fn = release_fn or true_fn
result = release_fn()
- self._unlock() # can raise LockError.
+ self._unlock() # can raise LockError.
self._reads = 0
self._log_released(locktype)
return result
@@ -597,14 +598,14 @@ class Lock(object):
assert self._writes > 0
release_fn = release_fn or true_fn
- locktype = 'WRITE LOCK'
+ locktype = "WRITE LOCK"
if self._writes == 1 and self._reads == 0:
self._log_releasing(locktype)
# we need to call release_fn before releasing the lock
result = release_fn()
- self._unlock() # can raise LockError.
+ self._unlock() # can raise LockError.
self._writes = 0
self._log_released(locktype)
return result
@@ -625,56 +626,55 @@ class Lock(object):
raise LockError("Attempting to cleanup active lock.")
def _get_counts_desc(self):
- return '(reads {0}, writes {1})'.format(self._reads, self._writes) \
- if tty.is_verbose() else ''
+ return (
+ "(reads {0}, writes {1})".format(self._reads, self._writes) if tty.is_verbose() else ""
+ )
def _log_acquired(self, locktype, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
- desc = 'Acquired at %s' % now.strftime("%H:%M:%S.%f")
- self._log_debug(self._status_msg(locktype, '{0}{1}'
- .format(desc, attempts_part)))
+ desc = "Acquired at %s" % now.strftime("%H:%M:%S.%f")
+ self._log_debug(self._status_msg(locktype, "{0}{1}".format(desc, attempts_part)))
def _log_acquiring(self, locktype):
- self._log_debug(self._status_msg(locktype, 'Acquiring'), level=3)
+ self._log_debug(self._status_msg(locktype, "Acquiring"), level=3)
def _log_debug(self, *args, **kwargs):
"""Output lock debug messages."""
- kwargs['level'] = kwargs.get('level', 2)
+ kwargs["level"] = kwargs.get("level", 2)
tty.debug(*args, **kwargs)
def _log_downgraded(self, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
- desc = 'Downgraded at %s' % now.strftime("%H:%M:%S.%f")
- self._log_debug(self._status_msg('READ LOCK', '{0}{1}'
- .format(desc, attempts_part)))
+ desc = "Downgraded at %s" % now.strftime("%H:%M:%S.%f")
+ self._log_debug(self._status_msg("READ LOCK", "{0}{1}".format(desc, attempts_part)))
def _log_downgrading(self):
- self._log_debug(self._status_msg('WRITE LOCK', 'Downgrading'), level=3)
+ self._log_debug(self._status_msg("WRITE LOCK", "Downgrading"), level=3)
def _log_released(self, locktype):
now = datetime.now()
- desc = 'Released at %s' % now.strftime("%H:%M:%S.%f")
+ desc = "Released at %s" % now.strftime("%H:%M:%S.%f")
self._log_debug(self._status_msg(locktype, desc))
def _log_releasing(self, locktype):
- self._log_debug(self._status_msg(locktype, 'Releasing'), level=3)
+ self._log_debug(self._status_msg(locktype, "Releasing"), level=3)
def _log_upgraded(self, wait_time, nattempts):
attempts_part = _attempts_str(wait_time, nattempts)
now = datetime.now()
- desc = 'Upgraded at %s' % now.strftime("%H:%M:%S.%f")
- self._log_debug(self._status_msg('WRITE LOCK', '{0}{1}'.
- format(desc, attempts_part)))
+ desc = "Upgraded at %s" % now.strftime("%H:%M:%S.%f")
+ self._log_debug(self._status_msg("WRITE LOCK", "{0}{1}".format(desc, attempts_part)))
def _log_upgrading(self):
- self._log_debug(self._status_msg('READ LOCK', 'Upgrading'), level=3)
+ self._log_debug(self._status_msg("READ LOCK", "Upgrading"), level=3)
def _status_msg(self, locktype, status):
- status_desc = '[{0}] {1}'.format(status, self._get_counts_desc())
- return '{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}'.format(
- locktype, self, status_desc)
+ status_desc = "[{0}] {1}".format(status, self._get_counts_desc())
+ return "{0}{1.desc}: {1.path}[{1._start}:{1._length}] {2}".format(
+ locktype, self, status_desc
+ )
class LockTransaction(object):
@@ -715,7 +715,7 @@ class LockTransaction(object):
def __enter__(self):
if self._enter() and self._acquire_fn:
self._as = self._acquire_fn()
- if hasattr(self._as, '__enter__'):
+ if hasattr(self._as, "__enter__"):
return self._as.__enter__()
else:
return self._as
@@ -727,7 +727,7 @@ class LockTransaction(object):
if self._release_fn is not None:
return self._release_fn(type, value, traceback)
- if self._as and hasattr(self._as, '__exit__'):
+ if self._as and hasattr(self._as, "__exit__"):
if self._as.__exit__(type, value, traceback):
suppress = True
@@ -739,6 +739,7 @@ class LockTransaction(object):
class ReadTransaction(LockTransaction):
"""LockTransaction context manager that does a read and releases it."""
+
def _enter(self):
return self._lock.acquire_read(self._timeout)
@@ -748,6 +749,7 @@ class ReadTransaction(LockTransaction):
class WriteTransaction(LockTransaction):
"""LockTransaction context manager that does a write and releases it."""
+
def _enter(self):
return self._lock.acquire_write(self._timeout)
@@ -761,6 +763,7 @@ class LockError(Exception):
class LockDowngradeError(LockError):
"""Raised when unable to downgrade from a write to a read lock."""
+
def __init__(self, path):
msg = "Cannot downgrade lock from write to read on file: %s" % path
super(LockDowngradeError, self).__init__(msg)
@@ -776,6 +779,7 @@ class LockTimeoutError(LockError):
class LockUpgradeError(LockError):
"""Raised when unable to upgrade from a read to a write lock."""
+
def __init__(self, path):
msg = "Cannot upgrade lock from read to write on file: %s" % path
super(LockUpgradeError, self).__init__(msg)
@@ -787,6 +791,7 @@ class LockPermissionError(LockError):
class LockROFileError(LockPermissionError):
"""Tried to take an exclusive lock on a read-only file."""
+
def __init__(self, path):
msg = "Can't take write lock on read-only file: %s" % path
super(LockROFileError, self).__init__(msg)
@@ -794,6 +799,7 @@ class LockROFileError(LockPermissionError):
class CantCreateLockError(LockPermissionError):
"""Attempt to create a lock in an unwritable location."""
+
def __init__(self, path):
msg = "cannot create lock '%s': " % path
msg += "file does not exist and location is not writable"
diff --git a/lib/spack/llnl/util/multiproc.py b/lib/spack/llnl/util/multiproc.py
index 86b9e81bcc..e6a0091191 100644
--- a/lib/spack/llnl/util/multiproc.py
+++ b/lib/spack/llnl/util/multiproc.py
@@ -10,7 +10,7 @@ to pickle functions if they're passed indirectly as parameters.
"""
from multiprocessing import Semaphore, Value
-__all__ = ['Barrier']
+__all__ = ["Barrier"]
class Barrier:
@@ -24,7 +24,7 @@ class Barrier:
def __init__(self, n, timeout=None):
self.n = n
self.to = timeout
- self.count = Value('i', 0)
+ self.count = Value("i", 0)
self.mutex = Semaphore(1)
self.turnstile1 = Semaphore(0)
self.turnstile2 = Semaphore(1)
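The Barrier above is the classic two-turnstile semaphore barrier shared between processes; a hedged sketch of how such a barrier is typically driven (the wait() entry point is assumed here, since only __init__ appears in this hunk):

    import multiprocessing
    from llnl.util.multiproc import Barrier

    def worker(barrier, rank):
        # per-process setup would happen here
        barrier.wait()                    # assumed method: block until all n arrive
        # all participants continue together from this point

    barrier = Barrier(4)                  # n=4 participants, optional timeout
    procs = [multiprocessing.Process(target=worker, args=(barrier, i)) for i in range(4)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()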
diff --git a/lib/spack/llnl/util/symlink.py b/lib/spack/llnl/util/symlink.py
index 6c55d74f66..103c5b4c38 100644
--- a/lib/spack/llnl/util/symlink.py
+++ b/lib/spack/llnl/util/symlink.py
@@ -11,7 +11,7 @@ from sys import platform as _platform
from llnl.util import lang
-is_windows = _platform == 'win32'
+is_windows = _platform == "win32"
if is_windows:
from win32file import CreateHardLink
@@ -47,7 +47,7 @@ def _win32_junction(path, link):
# os.symlink will fail if link exists, emulate the behavior here
if exists(link):
- raise OSError(errno.EEXIST, 'File exists: %s -> %s' % (link, path))
+ raise OSError(errno.EEXIST, "File exists: %s -> %s" % (link, path))
if not os.path.isabs(path):
parent = os.path.join(link, os.pardir)
@@ -61,13 +61,14 @@ def _win32_junction(path, link):
def _win32_can_symlink():
tempdir = tempfile.mkdtemp()
- dpath = join(tempdir, 'dpath')
- fpath = join(tempdir, 'fpath.txt')
+ dpath = join(tempdir, "dpath")
+ fpath = join(tempdir, "fpath.txt")
- dlink = join(tempdir, 'dlink')
- flink = join(tempdir, 'flink.txt')
+ dlink = join(tempdir, "dlink")
+ flink = join(tempdir, "flink.txt")
import llnl.util.filesystem as fs
+
fs.touchp(fpath)
try:
@@ -106,7 +107,6 @@ def _win32_is_junction(path):
FILE_ATTRIBUTE_REPARSE_POINT = 0x400
res = GetFileAttributes(path)
- return res != INVALID_FILE_ATTRIBUTES and \
- bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
+ return res != INVALID_FILE_ATTRIBUTES and bool(res & FILE_ATTRIBUTE_REPARSE_POINT)
return False
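These helpers back the module's public symlink() wrapper, which creates an ordinary symlink where possible and falls back to a junction or hard link on Windows hosts that cannot. A short illustrative sketch:

    from llnl.util.symlink import symlink

    # Same call on every platform; on win32 without symlink privileges it
    # degrades to the _win32_junction()/CreateHardLink paths shown above.
    symlink("/opt/pkg-1.2.3", "/opt/pkg-current")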
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index de49f3f77f..ed847298ef 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -54,7 +54,7 @@ def is_stacktrace():
def set_debug(level=0):
global _debug
- assert level >= 0, 'Debug level must be a positive value'
+ assert level >= 0, "Debug level must be a positive value"
_debug = level
@@ -110,10 +110,7 @@ def output_filter(filter_fn):
class SuppressOutput:
"""Class for disabling output in a scope using 'with' keyword"""
- def __init__(self,
- msg_enabled=True,
- warn_enabled=True,
- error_enabled=True):
+ def __init__(self, msg_enabled=True, warn_enabled=True, error_enabled=True):
self._msg_enabled_initial = _msg_enabled
self._warn_enabled_initial = _warn_enabled
@@ -164,11 +161,10 @@ def get_timestamp(force=False):
"""Get a string timestamp"""
if _debug or _timestamp or force:
# Note inclusion of the PID is useful for parallel builds.
- pid = ', {0}'.format(os.getpid()) if show_pid() else ''
- return '[{0}{1}] '.format(
- datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
+ pid = ", {0}".format(os.getpid()) if show_pid() else ""
+ return "[{0}{1}] ".format(datetime.now().strftime("%Y-%m-%d-%H:%M:%S.%f"), pid)
else:
- return ''
+ return ""
def msg(message, *args, **kwargs):
@@ -178,26 +174,14 @@ def msg(message, *args, **kwargs):
if isinstance(message, Exception):
message = "%s: %s" % (message.__class__.__name__, str(message))
- newline = kwargs.get('newline', True)
+ newline = kwargs.get("newline", True)
st_text = ""
if _stacktrace:
st_text = process_stacktrace(2)
if newline:
- cprint(
- "@*b{%s==>} %s%s" % (
- st_text,
- get_timestamp(),
- cescape(_output_filter(message))
- )
- )
+ cprint("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
else:
- cwrite(
- "@*b{%s==>} %s%s" % (
- st_text,
- get_timestamp(),
- cescape(_output_filter(message))
- )
- )
+ cwrite("@*b{%s==>} %s%s" % (st_text, get_timestamp(), cescape(_output_filter(message))))
for arg in args:
print(indent + _output_filter(six.text_type(arg)))
@@ -206,23 +190,19 @@ def info(message, *args, **kwargs):
if isinstance(message, Exception):
message = "%s: %s" % (message.__class__.__name__, str(message))
- format = kwargs.get('format', '*b')
- stream = kwargs.get('stream', sys.stdout)
- wrap = kwargs.get('wrap', False)
- break_long_words = kwargs.get('break_long_words', False)
- st_countback = kwargs.get('countback', 3)
+ format = kwargs.get("format", "*b")
+ stream = kwargs.get("stream", sys.stdout)
+ wrap = kwargs.get("wrap", False)
+ break_long_words = kwargs.get("break_long_words", False)
+ st_countback = kwargs.get("countback", 3)
st_text = ""
if _stacktrace:
st_text = process_stacktrace(st_countback)
cprint(
- "@%s{%s==>} %s%s" % (
- format,
- st_text,
- get_timestamp(),
- cescape(_output_filter(six.text_type(message)))
- ),
- stream=stream
+ "@%s{%s==>} %s%s"
+ % (format, st_text, get_timestamp(), cescape(_output_filter(six.text_type(message)))),
+ stream=stream,
)
for arg in args:
if wrap:
@@ -230,27 +210,25 @@ def info(message, *args, **kwargs):
_output_filter(six.text_type(arg)),
initial_indent=indent,
subsequent_indent=indent,
- break_long_words=break_long_words
+ break_long_words=break_long_words,
)
for line in lines:
- stream.write(line + '\n')
+ stream.write(line + "\n")
else:
- stream.write(
- indent + _output_filter(six.text_type(arg)) + '\n'
- )
+ stream.write(indent + _output_filter(six.text_type(arg)) + "\n")
def verbose(message, *args, **kwargs):
if _verbose:
- kwargs.setdefault('format', 'c')
+ kwargs.setdefault("format", "c")
info(message, *args, **kwargs)
def debug(message, *args, **kwargs):
- level = kwargs.get('level', 1)
+ level = kwargs.get("level", 1)
if is_debug(level):
- kwargs.setdefault('format', 'g')
- kwargs.setdefault('stream', sys.stderr)
+ kwargs.setdefault("format", "g")
+ kwargs.setdefault("stream", sys.stderr)
info(message, *args, **kwargs)
@@ -258,8 +236,8 @@ def error(message, *args, **kwargs):
if not error_enabled():
return
- kwargs.setdefault('format', '*r')
- kwargs.setdefault('stream', sys.stderr)
+ kwargs.setdefault("format", "*r")
+ kwargs.setdefault("stream", sys.stderr)
info("Error: " + six.text_type(message), *args, **kwargs)
@@ -267,27 +245,27 @@ def warn(message, *args, **kwargs):
if not warn_enabled():
return
- kwargs.setdefault('format', '*Y')
- kwargs.setdefault('stream', sys.stderr)
+ kwargs.setdefault("format", "*Y")
+ kwargs.setdefault("stream", sys.stderr)
info("Warning: " + six.text_type(message), *args, **kwargs)
def die(message, *args, **kwargs):
- kwargs.setdefault('countback', 4)
+ kwargs.setdefault("countback", 4)
error(message, *args, **kwargs)
sys.exit(1)
def get_number(prompt, **kwargs):
- default = kwargs.get('default', None)
- abort = kwargs.get('abort', None)
+ default = kwargs.get("default", None)
+ abort = kwargs.get("abort", None)
if default is not None and abort is not None:
- prompt += ' (default is %s, %s to abort) ' % (default, abort)
+ prompt += " (default is %s, %s to abort) " % (default, abort)
elif default is not None:
- prompt += ' (default is %s) ' % default
+ prompt += " (default is %s) " % default
elif abort is not None:
- prompt += ' (%s to abort) ' % abort
+ prompt += " (%s to abort) " % abort
number = None
while number is None:
@@ -310,17 +288,16 @@ def get_number(prompt, **kwargs):
def get_yes_or_no(prompt, **kwargs):
- default_value = kwargs.get('default', None)
+ default_value = kwargs.get("default", None)
if default_value is None:
- prompt += ' [y/n] '
+ prompt += " [y/n] "
elif default_value is True:
- prompt += ' [Y/n] '
+ prompt += " [Y/n] "
elif default_value is False:
- prompt += ' [y/N] '
+ prompt += " [y/N] "
else:
- raise ValueError(
- "default for get_yes_no() must be True, False, or None.")
+ raise ValueError("default for get_yes_no() must be True, False, or None.")
result = None
while result is None:
@@ -331,9 +308,9 @@ def get_yes_or_no(prompt, **kwargs):
if result is None:
print("Please enter yes or no.")
else:
- if ans == 'y' or ans == 'yes':
+ if ans == "y" or ans == "yes":
result = True
- elif ans == 'n' or ans == 'no':
+ elif ans == "n" or ans == "no":
result = False
return result
@@ -345,12 +322,12 @@ def hline(label=None, **kwargs):
char (str): Char to draw the line with. Default '-'
max_width (int): Maximum width of the line. Default is 64 chars.
"""
- char = kwargs.pop('char', '-')
- max_width = kwargs.pop('max_width', 64)
+ char = kwargs.pop("char", "-")
+ max_width = kwargs.pop("max_width", 64)
if kwargs:
raise TypeError(
- "'%s' is an invalid keyword argument for this function."
- % next(kwargs.iterkeys()))
+ "'%s' is an invalid keyword argument for this function." % next(kwargs.iterkeys())
+ )
rows, cols = terminal_size()
if not cols:
@@ -374,13 +351,14 @@ def hline(label=None, **kwargs):
def terminal_size():
"""Gets the dimensions of the console: (rows, cols)."""
if _platform != "win32":
+
def ioctl_gwinsz(fd):
try:
- rc = struct.unpack('hh', fcntl.ioctl(
- fd, termios.TIOCGWINSZ, '1234'))
+ rc = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
except BaseException:
return
return rc
+
rc = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2)
if not rc:
try:
@@ -390,12 +368,14 @@ def terminal_size():
except BaseException:
pass
if not rc:
- rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+ rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
return int(rc[0]), int(rc[1])
else:
if sys.version_info[0] < 3:
- raise RuntimeError("Terminal size not obtainable on Windows with a\
-Python version older than 3")
- rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+ raise RuntimeError(
+ "Terminal size not obtainable on Windows with a\
+Python version older than 3"
+ )
+ rc = (os.environ.get("LINES", 25), os.environ.get("COLUMNS", 80))
return int(rc[0]), int(rc[1])
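The tty module reformatted above is Spack's basic messaging layer; a quick sketch of the entry points these hunks touch:

    import llnl.util.tty as tty

    tty.set_debug(2)                      # debug level must be non-negative
    tty.msg("building zlib")              # "==>" banner, optional timestamp/PID
    tty.warn("cache is stale")            # "Warning: ..." on stderr
    tty.debug("lock acquired", level=2)   # shown only when the debug level is >= 2
    tty.error("download failed")          # "Error: ..." on stderr

    if tty.get_yes_or_no("continue?", default=True):
        pass                              # tty.die("...") would report and sys.exit(1)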
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index 65e56b5473..73c1daf0d5 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -18,29 +18,27 @@ from llnl.util.tty.color import cextra, clen
class ColumnConfig:
-
def __init__(self, cols):
self.cols = cols
self.line_length = 0
self.valid = True
- self.widths = [0] * cols # does not include ansi colors
+ self.widths = [0] * cols # does not include ansi colors
def __repr__(self):
- attrs = [(a, getattr(self, a))
- for a in dir(self) if not a.startswith("__")]
+ attrs = [(a, getattr(self, a)) for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
def config_variable_cols(elts, console_width, padding, cols=0):
"""Variable-width column fitting algorithm.
- This function determines the most columns that can fit in the
- screen width. Unlike uniform fitting, where all columns take
- the width of the longest element in the list, each column takes
- the width of its own longest element. This packs elements more
- efficiently on screen.
+ This function determines the most columns that can fit in the
+ screen width. Unlike uniform fitting, where all columns take
+ the width of the longest element in the list, each column takes
+ the width of its own longest element. This packs elements more
+ efficiently on screen.
- If cols is nonzero, force
+ If cols is nonzero, force
"""
if cols < 0:
raise ValueError("cols must be non-negative.")
@@ -64,8 +62,8 @@ def config_variable_cols(elts, console_width, padding, cols=0):
if conf.widths[col] < (length + p):
conf.line_length += length + p - conf.widths[col]
- conf.widths[col] = length + p
- conf.valid = (conf.line_length < console_width)
+ conf.widths[col] = length + p
+ conf.valid = conf.line_length < console_width
try:
config = next(conf for conf in reversed(configs) if conf.valid)
@@ -81,9 +79,9 @@ def config_variable_cols(elts, console_width, padding, cols=0):
def config_uniform_cols(elts, console_width, padding, cols=0):
"""Uniform-width column fitting algorithm.
- Determines the longest element in the list, and determines how
- many columns of that width will fit on screen. Returns a
- corresponding column config.
+ Determines the longest element in the list, and determines how
+ many columns of that width will fit on screen. Returns a
+ corresponding column config.
"""
if cols < 0:
raise ValueError("cols must be non-negative.")
@@ -122,18 +120,18 @@ def colify(elts, **options):
and fit less data on the screen
"""
# Get keyword arguments or set defaults
- cols = options.pop("cols", 0)
- output = options.pop("output", sys.stdout)
- indent = options.pop("indent", 0)
- padding = options.pop("padding", 2)
- tty = options.pop('tty', None)
- method = options.pop("method", "variable")
+ cols = options.pop("cols", 0)
+ output = options.pop("output", sys.stdout)
+ indent = options.pop("indent", 0)
+ padding = options.pop("padding", 2)
+ tty = options.pop("tty", None)
+ method = options.pop("method", "variable")
console_cols = options.pop("width", None)
if options:
raise TypeError(
- "'%s' is an invalid keyword argument for this function."
- % next(options.iterkeys()))
+ "'%s' is an invalid keyword argument for this function." % next(options.iterkeys())
+ )
# elts needs to be an array of strings so we can count the elements
elts = [text_type(elt) for elt in elts]
@@ -141,10 +139,10 @@ def colify(elts, **options):
return (0, ())
# environment size is of the form "<rows>x<cols>"
- env_size = os.environ.get('COLIFY_SIZE')
+ env_size = os.environ.get("COLIFY_SIZE")
if env_size:
try:
- r, c = env_size.split('x')
+ r, c = env_size.split("x")
console_rows, console_cols = int(r), int(c)
tty = True
except BaseException:
@@ -180,7 +178,7 @@ def colify(elts, **options):
elt = col * rows + row
width = config.widths[col] + cextra(elts[elt])
if col < cols - 1:
- fmt = '%%-%ds' % width
+ fmt = "%%-%ds" % width
output.write(fmt % elts[elt])
else:
# Don't pad the rightmost column (spaces can wrap on
@@ -198,15 +196,15 @@ def colify(elts, **options):
def colify_table(table, **options):
"""Version of ``colify()`` for data expressed in rows, (list of lists).
- Same as regular colify but:
+ Same as regular colify but:
- 1. This takes a list of lists, where each sub-list must be the
- same length, and each is interpreted as a row in a table.
- Regular colify displays a sequential list of values in columns.
+ 1. This takes a list of lists, where each sub-list must be the
+ same length, and each is interpreted as a row in a table.
+ Regular colify displays a sequential list of values in columns.
- 2. Regular colify will always print with 1 column when the output
- is not a tty. This will always print with same dimensions of
- the table argument.
+ 2. Regular colify will always print with 1 column when the output
+ is not a tty. This will always print with same dimensions of
+ the table argument.
"""
if table is None:
@@ -221,20 +219,20 @@ def colify_table(table, **options):
for row in table:
yield row[i]
- if 'cols' in options:
+ if "cols" in options:
raise ValueError("Cannot override columsn in colify_table.")
- options['cols'] = columns
+ options["cols"] = columns
# don't reduce to 1 column for non-tty
- options['tty'] = True
+ options["tty"] = True
colify(transpose(), **options)
def colified(elts, **options):
"""Invokes the ``colify()`` function but returns the result as a string
- instead of writing it to an output string."""
+ instead of writing it to an output string."""
sio = StringIO()
- options['output'] = sio
+ options["output"] = sio
colify(elts, **options)
return sio.getvalue()
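colify() lays a flat list of strings out in terminal columns, colified() returns that layout as a string, and colify_table() preserves a row-oriented table's shape; COLIFY_SIZE is the "<rows>x<cols>" override read above. A brief sketch:

    from llnl.util.tty.colify import colify, colified, colify_table

    names = ["autoconf", "cmake", "gcc", "openmpi", "python", "zlib"]

    colify(names, indent=4)               # variable-width columns to sys.stdout
    text = colified(names, cols=2)        # same layout captured as a string
    colify_table([["pkg", "version"],     # each sub-list is one table row
                  ["zlib", "1.2.13"]])

    # COLIFY_SIZE="25x80" in the environment forces the assumed terminal size.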
diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py
index 99c0a5c7ac..1bc80331d4 100644
--- a/lib/spack/llnl/util/tty/color.py
+++ b/lib/spack/llnl/util/tty/color.py
@@ -76,29 +76,33 @@ class ColorParseError(Exception):
# Text styles for ansi codes
-styles = {'*': '1', # bold
- '_': '4', # underline
- None: '0'} # plain
+styles = {"*": "1", "_": "4", None: "0"} # bold # underline # plain
# Dim and bright ansi colors
-colors = {'k': 30, 'K': 90, # black
- 'r': 31, 'R': 91, # red
- 'g': 32, 'G': 92, # green
- 'y': 33, 'Y': 93, # yellow
- 'b': 34, 'B': 94, # blue
- 'm': 35, 'M': 95, # magenta
- 'c': 36, 'C': 96, # cyan
- 'w': 37, 'W': 97} # white
+colors = {
+ "k": 30,
+ "K": 90, # black
+ "r": 31,
+ "R": 91, # red
+ "g": 32,
+ "G": 92, # green
+ "y": 33,
+ "Y": 93, # yellow
+ "b": 34,
+ "B": 94, # blue
+ "m": 35,
+ "M": 95, # magenta
+ "c": 36,
+ "C": 96, # cyan
+ "w": 37,
+ "W": 97,
+} # white
# Regex to be used for color formatting
-color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
+color_re = r"@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)"
# Mapping from color arguments to values for tty.set_color
-color_when_values = {
- 'always': True,
- 'auto': None,
- 'never': False
-}
+color_when_values = {"always": True, "auto": None, "never": False}
# Force color; None: Only color if stdout is a tty
# True: Always colorize output, False: Never colorize output
@@ -114,7 +118,7 @@ def _color_when_value(when):
if when in color_when_values:
return color_when_values[when]
elif when not in color_when_values.values():
- raise ValueError('Invalid color setting: %s' % when)
+ raise ValueError("Invalid color setting: %s" % when)
return when
@@ -146,7 +150,6 @@ def color_when(value):
class match_to_ansi(object):
-
def __init__(self, color=True):
self.color = _color_when_value(color)
@@ -155,7 +158,7 @@ class match_to_ansi(object):
if self.color:
return "\033[%sm" % s
else:
- return ''
+ return ""
def __call__(self, match):
"""Convert a match object generated by ``color_re`` into an ansi
@@ -164,22 +167,22 @@ class match_to_ansi(object):
style, color, text = match.groups()
m = match.group(0)
- if m == '@@':
- return '@'
- elif m == '@.':
+ if m == "@@":
+ return "@"
+ elif m == "@.":
return self.escape(0)
- elif m == '@':
- raise ColorParseError("Incomplete color format: '%s' in %s"
- % (m, match.string))
+ elif m == "@":
+ raise ColorParseError("Incomplete color format: '%s' in %s" % (m, match.string))
string = styles[style]
if color:
if color not in colors:
- raise ColorParseError("Invalid color specifier: '%s' in '%s'"
- % (color, match.string))
- string += ';' + str(colors[color])
+ raise ColorParseError(
+ "Invalid color specifier: '%s' in '%s'" % (color, match.string)
+ )
+ string += ";" + str(colors[color])
- colored_text = ''
+ colored_text = ""
if text:
colored_text = text + self.escape(0)
@@ -199,28 +202,28 @@ def colorize(string, **kwargs):
color (bool): If False, output will be plain text without control
codes, for output to non-console devices.
"""
- color = _color_when_value(kwargs.get('color', get_color_when()))
+ color = _color_when_value(kwargs.get("color", get_color_when()))
string = re.sub(color_re, match_to_ansi(color), string)
- string = string.replace('}}', '}')
+ string = string.replace("}}", "}")
return string
def clen(string):
"""Return the length of a string, excluding ansi color sequences."""
- return len(re.sub(r'\033[^m]*m', '', string))
+ return len(re.sub(r"\033[^m]*m", "", string))
def cextra(string):
"""Length of extra color characters in a string"""
- return len(''.join(re.findall(r'\033[^m]*m', string)))
+ return len("".join(re.findall(r"\033[^m]*m", string)))
def cwrite(string, stream=None, color=None):
"""Replace all color expressions in string with ANSI control
- codes and write the result to the stream. If color is
- False, this will write plain text with no color. If True,
- then it will always write colored output. If not supplied,
- then it will be set based on stream.isatty().
+ codes and write the result to the stream. If color is
+ False, this will write plain text with no color. If True,
+ then it will always write colored output. If not supplied,
+ then it will be set based on stream.isatty().
"""
stream = sys.stdout if stream is None else stream
if color is None:
@@ -251,20 +254,19 @@ def cescape(string):
(str): the string with color codes escaped
"""
string = six.text_type(string)
- string = string.replace('@', '@@')
- string = string.replace('}', '}}')
+ string = string.replace("@", "@@")
+ string = string.replace("}", "}}")
return string
class ColorStream(object):
-
def __init__(self, stream, color=None):
self._stream = stream
self._color = color
def write(self, string, **kwargs):
- raw = kwargs.get('raw', False)
- raw_write = getattr(self._stream, 'write')
+ raw = kwargs.get("raw", False)
+ raw_write = getattr(self._stream, "write")
color = self._color
if self._color is None:
@@ -275,6 +277,6 @@ class ColorStream(object):
raw_write(colorize(string, color=color))
def writelines(self, sequence, **kwargs):
- raw = kwargs.get('raw', False)
+ raw = kwargs.get("raw", False)
for string in sequence:
self.write(string, self.color, raw=raw)
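The color markup handled above is '@', an optional style ('*' bold, '_' underline), a color letter, and braced text. A small sketch of the public helpers:

    from llnl.util.tty.color import cescape, clen, colorize, cwrite

    s = colorize("@*b{==>} installing @g{zlib}")   # ANSI codes when color is enabled
    cwrite("@_r{error:} see the log\n")            # color decided by stream.isatty()

    safe = cescape("50% done @ stage }1{")         # escape '@' and '}' before formatting
    width = clen(s)                                # length ignoring ANSI sequences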
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index 51b9caa332..e155fa1d26 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -31,21 +31,22 @@ import llnl.util.tty as tty
termios = None # type: Optional[ModuleType]
try:
import termios as term_mod
+
termios = term_mod
except ImportError:
pass
# Use this to strip escape sequences
-_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07')
+_escape = re.compile(r"\x1b[^m]*m|\x1b\[?1034h|\x1b\][0-9]+;[^\x07]*\x07")
# control characters for enabling/disabling echo
#
# We use control characters to ensure that echo enable/disable are inline
# with the other output. We always follow these with a newline to ensure
# one per line the following newline is ignored in output.
-xon, xoff = '\x11\n', '\x13\n'
-control = re.compile('(\x11\n|\x13\n)')
+xon, xoff = "\x11\n", "\x13\n"
+control = re.compile("(\x11\n|\x13\n)")
@contextmanager
@@ -59,17 +60,13 @@ def ignore_signal(signum):
def _is_background_tty(stream):
- """True if the stream is a tty and calling process is in the background.
- """
- return (
- stream.isatty() and
- os.getpgrp() != os.tcgetpgrp(stream.fileno())
- )
+ """True if the stream is a tty and calling process is in the background."""
+ return stream.isatty() and os.getpgrp() != os.tcgetpgrp(stream.fileno())
def _strip(line):
"""Strip color and control characters from a line."""
- return _escape.sub('', line)
+ return _escape.sub("", line)
class keyboard_input(object):
@@ -147,6 +144,7 @@ class keyboard_input(object):
a TTY, ``keyboard_input`` has no effect.
"""
+
def __init__(self, stream):
"""Create a context manager that will enable keyboard input on stream.
@@ -204,7 +202,7 @@ class keyboard_input(object):
bg = self._is_background()
# restore sanity if flags are amiss -- see diagram in class docs
- if not bg and any(flags): # fg, but input not enabled
+ if not bg and any(flags): # fg, but input not enabled
self._enable_keyboard_input()
elif bg and not all(flags): # bg, but input enabled
self._restore_default_terminal_settings()
@@ -228,8 +226,7 @@ class keyboard_input(object):
# Install a signal handler to disable/enable keyboard input
# when the process moves between foreground and background.
- self.old_handlers[signal.SIGTSTP] = signal.signal(
- signal.SIGTSTP, self._tstp_handler)
+ self.old_handlers[signal.SIGTSTP] = signal.signal(signal.SIGTSTP, self._tstp_handler)
# add an atexit handler to ensure the terminal is restored
atexit.register(self._restore_default_terminal_settings)
@@ -258,6 +255,7 @@ class Unbuffered(object):
This is implemented by forcing a flush after each write.
"""
+
def __init__(self, stream):
self.stream = stream
@@ -302,6 +300,7 @@ class FileWrapper(object):
yet), or neither. When unwrapped, it returns an open file (or file-like)
object.
"""
+
def __init__(self, file_like):
# This records whether the file-like object returned by "unwrap" is
# purely in-memory. In that case a subprocess will need to explicitly
@@ -325,9 +324,9 @@ class FileWrapper(object):
if self.open:
if self.file_like:
if sys.version_info < (3,):
- self.file = open(self.file_like, 'w')
+ self.file = open(self.file_like, "w")
else:
- self.file = open(self.file_like, 'w', encoding='utf-8') # novm
+ self.file = open(self.file_like, "w", encoding="utf-8") # novm
else:
self.file = StringIO()
return self.file
@@ -343,8 +342,9 @@ class FileWrapper(object):
class MultiProcessFd(object):
"""Return an object which stores a file descriptor and can be passed as an
- argument to a function run with ``multiprocessing.Process``, such that
- the file descriptor is available in the subprocess."""
+ argument to a function run with ``multiprocessing.Process``, such that
+ the file descriptor is available in the subprocess."""
+
def __init__(self, fd):
self._connection = None
self._fd = None
@@ -434,7 +434,7 @@ def log_output(*args, **kwargs):
This method is actually a factory serving a per platform
(unix vs windows) log_output class
"""
- if sys.platform == 'win32':
+ if sys.platform == "win32":
return winlog(*args, **kwargs)
else:
return nixlog(*args, **kwargs)
@@ -454,8 +454,9 @@ class nixlog(object):
work within test frameworks like nose and pytest.
"""
- def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
- env=None, filter_fn=None):
+ def __init__(
+ self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
+ ):
"""Create a new output log context manager.
Args:
@@ -524,8 +525,7 @@ class nixlog(object):
raise RuntimeError("Can't re-enter the same log_output!")
if self.file_like is None:
- raise RuntimeError(
- "file argument must be set by either __init__ or __call__")
+ raise RuntimeError("file argument must be set by either __init__ or __call__")
# set up a stream for the daemon to write to
self.log_file = FileWrapper(self.file_like)
@@ -555,9 +555,7 @@ class nixlog(object):
input_multiprocess_fd = None
try:
if sys.stdin.isatty():
- input_multiprocess_fd = MultiProcessFd(
- os.dup(sys.stdin.fileno())
- )
+ input_multiprocess_fd = MultiProcessFd(os.dup(sys.stdin.fileno()))
except BaseException:
# just don't forward input if this fails
pass
@@ -566,9 +564,14 @@ class nixlog(object):
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
- input_multiprocess_fd, read_multiprocess_fd, write_fd,
- self.echo, self.log_file, child_pipe, self.filter_fn
- )
+ input_multiprocess_fd,
+ read_multiprocess_fd,
+ write_fd,
+ self.echo,
+ self.log_file,
+ child_pipe,
+ self.filter_fn,
+ ),
)
self.process.daemon = True # must set before start()
self.process.start()
@@ -609,7 +612,7 @@ class nixlog(object):
self._saved_stderr = sys.stderr
# create a file object for the pipe; redirect to it.
- pipe_fd_out = os.fdopen(write_fd, 'w')
+ pipe_fd_out = os.fdopen(write_fd, "w")
sys.stdout = pipe_fd_out
sys.stderr = pipe_fd_out
@@ -674,8 +677,7 @@ class nixlog(object):
def force_echo(self):
"""Context manager to force local echo, even if echo is off."""
if not self._active:
- raise RuntimeError(
- "Can't call force_echo() outside log_output region!")
+ raise RuntimeError("Can't call force_echo() outside log_output region!")
# This uses the xon/xoff to highlight regions to be echoed in the
# output. We use these control characters rather than, say, a
@@ -691,25 +693,26 @@ class nixlog(object):
class StreamWrapper:
- """ Wrapper class to handle redirection of io streams """
+ """Wrapper class to handle redirection of io streams"""
+
def __init__(self, sys_attr):
self.sys_attr = sys_attr
self.saved_stream = None
- if sys.platform.startswith('win32'):
+ if sys.platform.startswith("win32"):
if sys.version_info < (3, 5):
- libc = ctypes.CDLL(ctypes.util.find_library('c'))
+ libc = ctypes.CDLL(ctypes.util.find_library("c"))
else:
- if hasattr(sys, 'gettotalrefcount'): # debug build
- libc = ctypes.CDLL('ucrtbased')
+ if hasattr(sys, "gettotalrefcount"): # debug build
+ libc = ctypes.CDLL("ucrtbased")
else:
- libc = ctypes.CDLL('api-ms-win-crt-stdio-l1-1-0')
+ libc = ctypes.CDLL("api-ms-win-crt-stdio-l1-1-0")
- kernel32 = ctypes.WinDLL('kernel32')
+ kernel32 = ctypes.WinDLL("kernel32")
# https://docs.microsoft.com/en-us/windows/console/getstdhandle
- if self.sys_attr == 'stdout':
+ if self.sys_attr == "stdout":
STD_HANDLE = -11
- elif self.sys_attr == 'stderr':
+ elif self.sys_attr == "stderr":
STD_HANDLE = -12
else:
raise KeyError(self.sys_attr)
@@ -728,7 +731,7 @@ class StreamWrapper:
def redirect_stream(self, to_fd):
"""Redirect stdout to the given file descriptor."""
# Flush the C-level buffer stream
- if sys.platform.startswith('win32'):
+ if sys.platform.startswith("win32"):
self.libc.fflush(None)
else:
self.libc.fflush(self.c_stream)
@@ -739,13 +742,13 @@ class StreamWrapper:
# Make orig_stream_fd point to the same file as to_fd
os.dup2(to_fd, self.orig_stream_fd)
# Set sys_stream to a new stream that points to the redirected fd
- new_buffer = open(self.orig_stream_fd, 'wb')
+ new_buffer = open(self.orig_stream_fd, "wb")
new_stream = io.TextIOWrapper(new_buffer)
setattr(sys, self.sys_attr, new_stream)
self.sys_stream = getattr(sys, self.sys_attr)
def flush(self):
- if sys.platform.startswith('win32'):
+ if sys.platform.startswith("win32"):
self.libc.fflush(None)
else:
self.libc.fflush(self.c_stream)
@@ -768,14 +771,16 @@ class winlog(object):
Does not support the use of 'v' toggling as nixlog does.
"""
- def __init__(self, file_like=None, echo=False, debug=0, buffer=False,
- env=None, filter_fn=None):
+
+ def __init__(
+ self, file_like=None, echo=False, debug=0, buffer=False, env=None, filter_fn=None
+ ):
self.env = env
self.debug = debug
self.echo = echo
self.logfile = file_like
- self.stdout = StreamWrapper('stdout')
- self.stderr = StreamWrapper('stderr')
+ self.stdout = StreamWrapper("stdout")
+ self.stderr = StreamWrapper("stderr")
self._active = False
self._ioflag = False
self.old_stdout = sys.stdout
@@ -786,8 +791,7 @@ class winlog(object):
raise RuntimeError("Can't re-enter the same log_output!")
if self.logfile is None:
- raise RuntimeError(
- "file argument must be set by __init__ ")
+ raise RuntimeError("file argument must be set by __init__ ")
# Open both write and reading on logfile
if type(self.logfile) == StringIO:
@@ -796,8 +800,8 @@ class winlog(object):
sys.stdout = self.logfile
sys.stderr = self.logfile
else:
- self.writer = open(self.logfile, mode='wb+')
- self.reader = open(self.logfile, mode='rb+')
+ self.writer = open(self.logfile, mode="wb+")
+ self.reader = open(self.logfile, mode="rb+")
# Dup stdout so we can still write to it after redirection
self.echo_writer = open(os.dup(sys.stdout.fileno()), "w")
@@ -811,7 +815,7 @@ class winlog(object):
# if echo: write line to user
try:
while True:
- is_killed = _kill.wait(.1)
+ is_killed = _kill.wait(0.1)
# Flush buffered build output to file
# stdout/err fds refer to log file
self.stderr.flush()
@@ -819,7 +823,7 @@ class winlog(object):
line = reader.readline()
if self.echo and line:
- echo_writer.write('{0}'.format(line.decode()))
+ echo_writer.write("{0}".format(line.decode()))
echo_writer.flush()
if is_killed:
@@ -829,8 +833,9 @@ class winlog(object):
self._active = True
with replace_environment(self.env):
- self._thread = Thread(target=background_reader,
- args=(self.reader, self.echo_writer, self._kill))
+ self._thread = Thread(
+ target=background_reader, args=(self.reader, self.echo_writer, self._kill)
+ )
self._thread.start()
return self
@@ -854,13 +859,19 @@ class winlog(object):
def force_echo(self):
"""Context manager to force local echo, even if echo is off."""
if not self._active:
- raise RuntimeError(
- "Can't call force_echo() outside log_output region!")
+ raise RuntimeError("Can't call force_echo() outside log_output region!")
yield
-def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
- log_file_wrapper, control_pipe, filter_fn):
+def _writer_daemon(
+ stdin_multiprocess_fd,
+ read_multiprocess_fd,
+ write_fd,
+ echo,
+ log_file_wrapper,
+ control_pipe,
+ filter_fn,
+):
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``.
The daemon receives output from the parent process and writes it both
@@ -913,16 +924,16 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
# write_fd to terminate the reading loop, so we close the file descriptor
# here. Forking is the process spawning method everywhere except Mac OS
# for Python >= 3.8 and on Windows
- if sys.version_info < (3, 8) or sys.platform != 'darwin':
+ if sys.version_info < (3, 8) or sys.platform != "darwin":
os.close(write_fd)
# Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O.
if sys.version_info < (3,):
- in_pipe = os.fdopen(read_multiprocess_fd.fd, 'r', 1)
+ in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1)
else:
# Python 3.x before 3.7 does not open with UTF-8 encoding by default
- in_pipe = os.fdopen(read_multiprocess_fd.fd, 'r', 1, encoding='utf-8')
+ in_pipe = os.fdopen(read_multiprocess_fd.fd, "r", 1, encoding="utf-8")
if stdin_multiprocess_fd:
stdin = os.fdopen(stdin_multiprocess_fd.fd)
@@ -931,7 +942,7 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
# list of streams to select from
istreams = [in_pipe, stdin] if stdin else [in_pipe]
- force_echo = False # parent can force echo for certain output
+ force_echo = False # parent can force echo for certain output
log_file = log_file_wrapper.unwrap()
@@ -954,7 +965,7 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
# check and the read, so we ignore SIGTTIN here.
with ignore_signal(signal.SIGTTIN):
try:
- if stdin.read(1) == 'v':
+ if stdin.read(1) == "v":
echo = not echo
except IOError as e:
# If SIGTTIN is ignored, the system gives EIO
@@ -972,14 +983,14 @@ def _writer_daemon(stdin_multiprocess_fd, read_multiprocess_fd, write_fd, echo,
line = _retry(in_pipe.readline)()
except UnicodeDecodeError:
# installs like --test=root gpgme produce non-UTF8 logs
- line = '<line lost: output was not encoded as UTF-8>\n'
+ line = "<line lost: output was not encoded as UTF-8>\n"
if not line:
return
line_count += 1
# find control characters and strip them.
- clean_line, num_controls = control.subn('', line)
+ clean_line, num_controls = control.subn("", line)
# Echo to stdout if requested or forced.
if echo or force_echo:
@@ -1043,6 +1054,7 @@ def _retry(function):
relevant for this file.
"""
+
def wrapped(*args, **kwargs):
while True:
try:
@@ -1055,6 +1067,7 @@ def _retry(function):
if e.args[0] == errno.EINTR:
continue
raise
+
return wrapped
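log_output() is the context manager these hunks reformat: it redirects stdout/stderr of the enclosed block into a log file via a writer daemon (a reader thread on Windows) and can echo to the terminal as well. A usage sketch:

    from llnl.util.tty.log import log_output

    with log_output("spack-build.log", echo=False) as logger:
        print("captured in spack-build.log only")
        with logger.force_echo():
            print("also echoed to the terminal")
    # On a tty, pressing 'v' while the block runs toggles echo (nixlog only).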
diff --git a/lib/spack/llnl/util/tty/pty.py b/lib/spack/llnl/util/tty/pty.py
index 1a5731a5c7..8d7213a533 100644
--- a/lib/spack/llnl/util/tty/pty.py
+++ b/lib/spack/llnl/util/tty/pty.py
@@ -30,6 +30,7 @@ from spack.util.executable import which
termios = None
try:
import termios as term_mod
+
termios = term_mod
except ImportError:
pass
@@ -42,8 +43,8 @@ class ProcessController(object):
minion) similar to the way a shell would, by sending signals and I/O.
"""
- def __init__(self, pid, controller_fd,
- timeout=1, sleep_time=1e-1, debug=False):
+
+ def __init__(self, pid, controller_fd, timeout=1, sleep_time=1e-1, debug=False):
"""Create a controller to manipulate the process with id ``pid``
Args:
@@ -84,18 +85,19 @@ class ProcessController(object):
def horizontal_line(self, name):
"""Labled horizontal line for debugging."""
if self.debug:
- sys.stderr.write(
- "------------------------------------------- %s\n" % name
- )
+ sys.stderr.write("------------------------------------------- %s\n" % name)
def status(self):
"""Print debug message with status info for the minion."""
if self.debug:
canon, echo = self.get_canon_echo_attrs()
- sys.stderr.write("canon: %s, echo: %s\n" % (
- "on" if canon else "off",
- "on" if echo else "off",
- ))
+ sys.stderr.write(
+ "canon: %s, echo: %s\n"
+ % (
+ "on" if canon else "off",
+ "on" if echo else "off",
+ )
+ )
sys.stderr.write("input: %s\n" % self.input_on())
sys.stderr.write("bg: %s\n" % self.background())
sys.stderr.write("\n")
@@ -137,7 +139,7 @@ class ProcessController(object):
def wait(self, condition):
start = time.time()
- while (((time.time() - start) < self.timeout) and not condition()):
+ while ((time.time() - start) < self.timeout) and not condition():
time.sleep(1e-2)
assert condition()
@@ -219,6 +221,7 @@ class PseudoShell(object):
|_________________________________________________________|
"""
+
def __init__(self, controller_function, minion_function):
self.proc = None
self.controller_function = controller_function
@@ -242,8 +245,12 @@ class PseudoShell(object):
"""
self.proc = multiprocessing.Process(
target=PseudoShell._set_up_and_run_controller_function,
- args=(self.controller_function, self.minion_function,
- self.controller_timeout, self.sleep_time),
+ args=(
+ self.controller_function,
+ self.minion_function,
+ self.controller_timeout,
+ self.sleep_time,
+ ),
kwargs=kwargs,
)
self.proc.start()
@@ -255,7 +262,8 @@ class PseudoShell(object):
@staticmethod
def _set_up_and_run_minion_function(
- tty_name, stdout_fd, stderr_fd, ready, minion_function, **kwargs):
+ tty_name, stdout_fd, stderr_fd, ready, minion_function, **kwargs
+ ):
"""Minion process wrapper for PseudoShell.
Handles the mechanics of setting up a PTY, then calls
@@ -273,8 +281,7 @@ class PseudoShell(object):
os.close(stdin_fd)
if kwargs.get("debug"):
- sys.stderr.write(
- "minion: stdin.isatty(): %s\n" % sys.stdin.isatty())
+ sys.stderr.write("minion: stdin.isatty(): %s\n" % sys.stdin.isatty())
# tell the parent that we're really running
if kwargs.get("debug"):
@@ -288,15 +295,15 @@ class PseudoShell(object):
@staticmethod
def _set_up_and_run_controller_function(
- controller_function, minion_function, controller_timeout,
- sleep_time, **kwargs):
+ controller_function, minion_function, controller_timeout, sleep_time, **kwargs
+ ):
"""Set up a pty, spawn a minion process, execute controller_function.
Handles the mechanics of setting up a PTY, then calls
``controller_function``.
"""
- os.setsid() # new session; this process is the controller
+ os.setsid() # new session; this process is the controller
controller_fd, minion_fd = os.openpty()
pty_name = os.ttyname(minion_fd)
@@ -305,11 +312,10 @@ class PseudoShell(object):
pty_fd = os.open(pty_name, os.O_RDWR)
os.close(pty_fd)
- ready = multiprocessing.Value('i', False)
+ ready = multiprocessing.Value("i", False)
minion_process = multiprocessing.Process(
target=PseudoShell._set_up_and_run_minion_function,
- args=(pty_name, sys.stdout.fileno(), sys.stderr.fileno(),
- ready, minion_function),
+ args=(pty_name, sys.stdout.fileno(), sys.stderr.fileno(), ready, minion_function),
kwargs=kwargs,
)
minion_process.start()
@@ -329,8 +335,7 @@ class PseudoShell(object):
minion_pgid = os.getpgid(minion_process.pid)
sys.stderr.write("minion pid: %d\n" % minion_process.pid)
sys.stderr.write("minion pgid: %d\n" % minion_pgid)
- sys.stderr.write(
- "minion sid: %d\n" % os.getsid(minion_process.pid))
+ sys.stderr.write("minion sid: %d\n" % os.getsid(minion_process.pid))
sys.stderr.write("\n")
sys.stderr.flush()
# set up controller to ignore SIGTSTP, like a shell
@@ -339,7 +344,8 @@ class PseudoShell(object):
# call the controller function once the minion is ready
try:
controller = ProcessController(
- minion_process.pid, controller_fd, debug=kwargs.get("debug"))
+ minion_process.pid, controller_fd, debug=kwargs.get("debug")
+ )
controller.timeout = controller_timeout
controller.sleep_time = sleep_time
error = controller_function(minion_process, controller, **kwargs)
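PseudoShell pairs a controller function with a minion function over a PTY, the way a shell controls a foreground job. A hedged sketch of the driving pattern implied above (the minion is assumed to be called with the forwarded keyword arguments):

    from llnl.util.tty.pty import PseudoShell

    def minion_function(**kwargs):
        print("minion running on its own pty")     # executes on the minion side

    def controller_function(minion_proc, controller, **kwargs):
        controller.status()      # prints canon/echo/input/bg state when debug is on
        return 0                 # collected by the controller wrapper as its error result

    shell = PseudoShell(controller_function, minion_function)
    shell.start(debug=False)     # spawns the controller, which in turn spawns the minion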
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index e64c21f32d..e368639553 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -4,10 +4,10 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: (major, minor, micro, dev release) tuple
-spack_version_info = (0, 19, 0, 'dev0')
+spack_version_info = (0, 19, 0, "dev0")
#: PEP440 canonical <major>.<minor>.<micro>.<devN> string
-spack_version = '.'.join(str(s) for s in spack_version_info)
+spack_version = ".".join(str(s) for s in spack_version_info)
-__all__ = ['spack_version_info', 'spack_version']
+__all__ = ["spack_version_info", "spack_version"]
__version__ = spack_version
diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py
index fdf9a49695..d49be8bc0d 100644
--- a/lib/spack/spack/abi.py
+++ b/lib/spack/spack/abi.py
@@ -15,21 +15,25 @@ from spack.util.executable import Executable, ProcessError
class ABI(object):
"""This class provides methods to test ABI compatibility between specs.
- The current implementation is rather rough and could be improved."""
+ The current implementation is rather rough and could be improved."""
def architecture_compatible(self, target, constraint):
"""Return true if architecture of target spec is ABI compatible
- to the architecture of constraint spec. If either the target
- or constraint specs have no architecture, target is also defined
- as architecture ABI compatible to constraint."""
- return not target.architecture or not constraint.architecture or \
- target.architecture.satisfies(constraint.architecture)
+ to the architecture of constraint spec. If either the target
+ or constraint specs have no architecture, target is also defined
+ as architecture ABI compatible to constraint."""
+ return (
+ not target.architecture
+ or not constraint.architecture
+ or target.architecture.satisfies(constraint.architecture)
+ )
@memoized
def _gcc_get_libstdcxx_version(self, version):
"""Returns gcc ABI compatibility info by getting the library version of
- a compiler's libstdc++ or libgcc_s"""
+ a compiler's libstdc++ or libgcc_s"""
from spack.build_environment import dso_suffix
+
spec = CompilerSpec("gcc", version)
compilers = spack.compilers.compilers_for_spec(spec)
if not compilers:
@@ -50,7 +54,7 @@ class ABI(object):
# Some gcc's are actually clang and don't respond properly to
# --print-file-name (they just print the filename, not the
# full path). Ignore these and expect them to be handled as clang.
- if Clang.default_version(rungcc.exe[0]) != 'unknown':
+ if Clang.default_version(rungcc.exe[0]) != "unknown":
return None
output = rungcc("--print-file-name=%s" % libname, output=str)
@@ -66,7 +70,7 @@ class ABI(object):
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
- are ABI compatible."""
+ are ABI compatible."""
plib = self._gcc_get_libstdcxx_version(pversion)
clib = self._gcc_get_libstdcxx_version(cversion)
if not plib or not clib:
@@ -75,10 +79,10 @@ class ABI(object):
def _intel_compiler_compare(self, pversion, cversion):
"""Returns true iff the intel version pversion and cversion
- are ABI compatible"""
+ are ABI compatible"""
# Test major and minor versions. Ignore build version.
- if (len(pversion.version) < 2 or len(cversion.version) < 2):
+ if len(pversion.version) < 2 or len(cversion.version) < 2:
return False
return pversion.version[:2] == cversion.version[:2]
@@ -91,7 +95,7 @@ class ABI(object):
# Different compiler families are assumed ABI incompatible
return False
- if kwargs.get('loose', False):
+ if kwargs.get("loose", False):
return True
# TODO: Can we move the specialized ABI matching stuff
@@ -102,16 +106,19 @@ class ABI(object):
# Otherwise match on version match.
if pversion.satisfies(cversion):
return True
- elif (parent.compiler.name == "gcc" and
- self._gcc_compiler_compare(pversion, cversion)):
+ elif parent.compiler.name == "gcc" and self._gcc_compiler_compare(
+ pversion, cversion
+ ):
return True
- elif (parent.compiler.name == "intel" and
- self._intel_compiler_compare(pversion, cversion)):
+ elif parent.compiler.name == "intel" and self._intel_compiler_compare(
+ pversion, cversion
+ ):
return True
return False
def compatible(self, target, constraint, **kwargs):
"""Returns true if target spec is ABI compatible to constraint spec"""
- loosematch = kwargs.get('loose', False)
- return self.architecture_compatible(target, constraint) and \
- self.compiler_compatible(target, constraint, loose=loosematch)
+ loosematch = kwargs.get("loose", False)
+ return self.architecture_compatible(target, constraint) and self.compiler_compatible(
+ target, constraint, loose=loosematch
+ )
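The ABI class compares two specs' architectures and compiler versions. A minimal sketch of the compatible() entry point reformatted above, using abstract specs so no compiler probing is needed (loose=True returns once the compiler families match):

    import spack.abi
    import spack.spec

    abi = spack.abi.ABI()
    a = spack.spec.Spec("zlib %gcc@10.3.0")
    b = spack.spec.Spec("zlib %gcc@10.2.0")

    # Same compiler family, so the loose check passes without probing libstdc++.
    abi.compatible(a, b, loose=True)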
diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py
index 713ccc6055..acc8519a7f 100644
--- a/lib/spack/spack/audit.py
+++ b/lib/spack/spack/audit.py
@@ -59,14 +59,13 @@ GROUPS = collections.defaultdict(list)
class Error(object):
"""Information on an error reported in a test."""
+
def __init__(self, summary, details):
self.summary = summary
self.details = tuple(details)
def __str__(self):
- return self.summary + '\n' + '\n'.join([
- ' ' + detail for detail in self.details
- ])
+ return self.summary + "\n" + "\n".join([" " + detail for detail in self.details])
def __eq__(self, other):
if self.summary != other.summary or self.details != other.details:
@@ -118,11 +117,11 @@ class AuditClass(Sequence):
def run(self, **kwargs):
msg = 'please pass "{0}" as keyword arguments'
- msg = msg.format(', '.join(self.kwargs))
+ msg = msg.format(", ".join(self.kwargs))
assert set(self.kwargs) == set(kwargs), msg
errors = []
- kwargs['error_cls'] = Error
+ kwargs["error_cls"] = Error
for fn in self.callbacks:
errors.extend(fn(**kwargs))
@@ -164,19 +163,16 @@ def run_check(tag, **kwargs):
# TODO: https://github.com/spack/spack/pull/23053/files#r630265011
#: Generic checks relying on global state
generic = AuditClass(
- group='generic',
- tag='GENERIC',
- description='Generic checks relying on global variables',
- kwargs=()
+ group="generic",
+ tag="GENERIC",
+ description="Generic checks relying on global variables",
+ kwargs=(),
)
#: Sanity checks on compilers.yaml
config_compiler = AuditClass(
- group='configs',
- tag='CFG-COMPILER',
- description='Sanity checks on compilers.yaml',
- kwargs=()
+ group="configs", tag="CFG-COMPILER", description="Sanity checks on compilers.yaml", kwargs=()
)
@@ -185,34 +181,25 @@ def _search_duplicate_compilers(error_cls):
"""Report compilers with the same spec and two different definitions"""
errors = []
- compilers = list(sorted(
- spack.config.get('compilers'), key=lambda x: x['compiler']['spec']
- ))
- for spec, group in itertools.groupby(
- compilers, key=lambda x: x['compiler']['spec']
- ):
+ compilers = list(sorted(spack.config.get("compilers"), key=lambda x: x["compiler"]["spec"]))
+ for spec, group in itertools.groupby(compilers, key=lambda x: x["compiler"]["spec"]):
group = list(group)
if len(group) == 1:
continue
- error_msg = 'Compiler defined multiple times: {0}'
+ error_msg = "Compiler defined multiple times: {0}"
try:
details = [str(x._start_mark).strip() for x in group]
except Exception:
details = []
- errors.append(error_cls(
- summary=error_msg.format(spec), details=details
- ))
+ errors.append(error_cls(summary=error_msg.format(spec), details=details))
return errors
#: Sanity checks on packages.yaml
config_packages = AuditClass(
- group='configs',
- tag='CFG-PACKAGES',
- description='Sanity checks on packages.yaml',
- kwargs=()
+ group="configs", tag="CFG-PACKAGES", description="Sanity checks on packages.yaml", kwargs=()
)
@@ -220,19 +207,19 @@ config_packages = AuditClass(
def _search_duplicate_specs_in_externals(error_cls):
"""Search for duplicate specs declared as externals"""
errors, externals = [], collections.defaultdict(list)
- packages_yaml = spack.config.get('packages')
+ packages_yaml = spack.config.get("packages")
for name, pkg_config in packages_yaml.items():
# No externals can be declared under all
- if name == 'all' or 'externals' not in pkg_config:
+ if name == "all" or "externals" not in pkg_config:
continue
- current_externals = pkg_config['externals']
+ current_externals = pkg_config["externals"]
for entry in current_externals:
# Ask for the string representation of the spec to normalize
# aspects of the spec that may be represented in multiple ways
# e.g. +foo or foo=true
- key = str(spack.spec.Spec(entry['spec']))
+ key = str(spack.spec.Spec(entry["spec"]))
externals[key].append(entry)
for spec, entries in sorted(externals.items()):
@@ -241,14 +228,14 @@ def _search_duplicate_specs_in_externals(error_cls):
continue
# Otherwise we need to report an error
- error_msg = 'Multiple externals share the same spec: {0}'.format(spec)
+ error_msg = "Multiple externals share the same spec: {0}".format(spec)
try:
lines = [str(x._start_mark).strip() for x in entries]
- details = [
- 'Please remove all but one of the following entries:'
- ] + lines + [
- 'as they might result in non-deterministic hashes'
- ]
+ details = (
+ ["Please remove all but one of the following entries:"]
+ + lines
+ + ["as they might result in non-deterministic hashes"]
+ )
except TypeError:
details = []
@@ -259,20 +246,20 @@ def _search_duplicate_specs_in_externals(error_cls):
#: Sanity checks on package directives
package_directives = AuditClass(
- group='packages',
- tag='PKG-DIRECTIVES',
- description='Sanity checks on specs used in directives',
- kwargs=('pkgs',)
+ group="packages",
+ tag="PKG-DIRECTIVES",
+ description="Sanity checks on specs used in directives",
+ kwargs=("pkgs",),
)
#: Sanity checks on linting
# This can take some time, so it's run separately from packages
package_https_directives = AuditClass(
- group='packages-https',
- tag='PKG-HTTPS-DIRECTIVES',
- description='Sanity checks on https checks of package urls, etc.',
- kwargs=('pkgs',)
+ group="packages-https",
+ tag="PKG-HTTPS-DIRECTIVES",
+ description="Sanity checks on https checks of package urls, etc.",
+ kwargs=("pkgs",),
)
@@ -284,11 +271,9 @@ def _check_build_test_callbacks(pkgs, error_cls):
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
test_callbacks = pkg_cls.build_time_test_callbacks
- if test_callbacks and 'test' in test_callbacks:
- msg = ('{0} package contains "test" method in '
- 'build_time_test_callbacks')
- instr = ('Remove "test" from: [{0}]'
- .format(', '.join(test_callbacks)))
+ if test_callbacks and "test" in test_callbacks:
+ msg = '{0} package contains "test" method in ' "build_time_test_callbacks"
+ instr = 'Remove "test" from: [{0}]'.format(", ".join(test_callbacks))
errors.append(error_cls(msg.format(pkg_name), [instr]))
return errors
@@ -315,26 +300,28 @@ def _check_patch_urls(pkgs, error_cls):
full_index_arg = "?full_index=1"
if not patch.url.endswith(full_index_arg):
- errors.append(error_cls(
- "patch URL in package {0} must end with {1}".format(
- pkg_cls.name, full_index_arg,
- ),
- [patch.url],
- ))
+ errors.append(
+ error_cls(
+ "patch URL in package {0} must end with {1}".format(
+ pkg_cls.name,
+ full_index_arg,
+ ),
+ [patch.url],
+ )
+ )
return errors
@package_https_directives
def _linting_package_file(pkgs, error_cls):
- """Check for correctness of links
- """
+ """Check for correctness of links"""
errors = []
for pkg_name in pkgs:
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
# Does the homepage have http, and if so, does https work?
- if pkg_cls.homepage.startswith('http://'):
+ if pkg_cls.homepage.startswith("http://"):
https = re.sub("http", "https", pkg_cls.homepage, 1)
try:
response = urlopen(https)
@@ -370,36 +357,48 @@ def _unknown_variants_in_directives(pkgs, error_cls):
# conflict and trigger separately in that case.
# When os and target constraints can be created independently of
# the platform, TODO change this back to add an error.
- errors.extend(_analyze_variants_in_directive(
- pkg_cls, spack.spec.Spec(trigger),
- directive='conflicts', error_cls=error_cls
- ))
- errors.extend(_analyze_variants_in_directive(
- pkg_cls, vrn, directive='conflicts', error_cls=error_cls
- ))
+ errors.extend(
+ _analyze_variants_in_directive(
+ pkg_cls,
+ spack.spec.Spec(trigger),
+ directive="conflicts",
+ error_cls=error_cls,
+ )
+ )
+ errors.extend(
+ _analyze_variants_in_directive(
+ pkg_cls, vrn, directive="conflicts", error_cls=error_cls
+ )
+ )
# Check "depends_on" directive
for _, triggers in pkg_cls.dependencies.items():
triggers = list(triggers)
for trigger in list(triggers):
vrn = spack.spec.Spec(trigger)
- errors.extend(_analyze_variants_in_directive(
- pkg_cls, vrn, directive='depends_on', error_cls=error_cls
- ))
+ errors.extend(
+ _analyze_variants_in_directive(
+ pkg_cls, vrn, directive="depends_on", error_cls=error_cls
+ )
+ )
# Check "patch" directive
for _, triggers in pkg_cls.provided.items():
triggers = [spack.spec.Spec(x) for x in triggers]
for vrn in triggers:
- errors.extend(_analyze_variants_in_directive(
- pkg_cls, vrn, directive='patch', error_cls=error_cls
- ))
+ errors.extend(
+ _analyze_variants_in_directive(
+ pkg_cls, vrn, directive="patch", error_cls=error_cls
+ )
+ )
# Check "resource" directive
for vrn in pkg_cls.resources:
- errors.extend(_analyze_variants_in_directive(
- pkg_cls, vrn, directive='resource', error_cls=error_cls
- ))
+ errors.extend(
+ _analyze_variants_in_directive(
+ pkg_cls, vrn, directive="resource", error_cls=error_cls
+ )
+ )
return llnl.util.lang.dedupe(errors)
@@ -421,11 +420,10 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
except spack.repo.UnknownPackageError:
# This dependency is completely missing, so report
# and continue the analysis
- summary = (pkg_name + ": unknown package '{0}' in "
- "'depends_on' directive".format(dependency_name))
- details = [
- " in " + filename
- ]
+ summary = pkg_name + ": unknown package '{0}' in " "'depends_on' directive".format(
+ dependency_name
+ )
+ details = [" in " + filename]
errors.append(error_cls(summary=summary, details=details))
continue
@@ -436,17 +434,18 @@ def _unknown_variants_in_dependencies(pkgs, error_cls):
v, _ = dependency_pkg_cls.variants[name]
v.validate_or_raise(value, pkg_cls=dependency_pkg_cls)
except Exception as e:
- summary = (pkg_name + ": wrong variant used for a "
- "dependency in a 'depends_on' directive")
+ summary = (
+ pkg_name + ": wrong variant used for a "
+ "dependency in a 'depends_on' directive"
+ )
error_msg = str(e).strip()
if isinstance(e, KeyError):
- error_msg = ('the variant {0} does not '
- 'exist'.format(error_msg))
+ error_msg = "the variant {0} does not " "exist".format(error_msg)
error_msg += " in package '" + dependency_name + "'"
- errors.append(error_cls(
- summary=summary, details=[error_msg, 'in ' + filename]
- ))
+ errors.append(
+ error_cls(summary=summary, details=[error_msg, "in " + filename])
+ )
return errors
@@ -465,25 +464,24 @@ def _version_constraints_are_satisfiable_by_some_version_in_repo(pkgs, error_cls
if spack.repo.path.is_virtual(dependency_name):
continue
- dependencies_to_check.extend(
- [edge.spec for edge in dependency_data.values()]
- )
+ dependencies_to_check.extend([edge.spec for edge in dependency_data.values()])
for s in dependencies_to_check:
dependency_pkg_cls = None
try:
dependency_pkg_cls = spack.repo.path.get_pkg_class(s.name)
- assert any(
- v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions)
- )
+ assert any(v.satisfies(s.versions) for v in list(dependency_pkg_cls.versions))
except Exception:
- summary = ("{0}: dependency on {1} cannot be satisfied "
- "by known versions of {1.name}").format(pkg_name, s)
- details = ['happening in ' + filename]
+ summary = (
+ "{0}: dependency on {1} cannot be satisfied " "by known versions of {1.name}"
+ ).format(pkg_name, s)
+ details = ["happening in " + filename]
if dependency_pkg_cls is not None:
- details.append('known versions of {0.name} are {1}'.format(
- s, ', '.join([str(x) for x in dependency_pkg_cls.versions])
- ))
+ details.append(
+ "known versions of {0.name} are {1}".format(
+ s, ", ".join([str(x) for x in dependency_pkg_cls.versions])
+ )
+ )
errors.append(error_cls(summary=summary, details=details))
return errors
@@ -494,7 +492,7 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
spack.variant.InconsistentValidationError,
spack.variant.MultipleValuesInExclusiveVariantError,
spack.variant.InvalidVariantValueError,
- KeyError
+ KeyError,
)
errors = []
for name, v in constraint.variants.items():
@@ -508,11 +506,9 @@ def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
error_msg = str(e).strip()
if isinstance(e, KeyError):
- error_msg = 'the variant {0} does not exist'.format(error_msg)
+ error_msg = "the variant {0} does not exist".format(error_msg)
- err = error_cls(summary=summary, details=[
- error_msg, 'in ' + filename
- ])
+ err = error_cls(summary=summary, details=[error_msg, "in " + filename])
errors.append(err)
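
The audit hunks above register check functions on callable audit groups via decorators (for example, `@package_https_directives` on `_linting_package_file`). As a rough, self-contained sketch of that registration pattern, assuming nothing about spack's actual AuditClass internals:

    class CheckGroup:
        """Minimal stand-in for an audit group: collects checks and runs them."""

        def __init__(self, tag):
            self.tag = tag
            self.checks = []

        def __call__(self, func):
            # Used as a decorator: remember the check and hand it back unchanged.
            self.checks.append(func)
            return func

        def run(self, **kwargs):
            errors = []
            for check in self.checks:
                errors.extend(check(**kwargs))
            return errors

    package_checks = CheckGroup(tag="PKG-DIRECTIVES")

    @package_checks
    def _no_empty_names(pkgs):
        # A toy check: flag empty package names.
        return ["empty package name"] if any(not p for p in pkgs) else []

    print(package_checks.run(pkgs=["zlib", ""]))  # ['empty package name']
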
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index f9aa2df403..c100794e67 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -44,12 +44,13 @@ from spack.caches import misc_cache_location
from spack.spec import Spec
from spack.stage import Stage
-_build_cache_relative_path = 'build_cache'
-_build_cache_keys_relative_path = '_pgp'
+_build_cache_relative_path = "build_cache"
+_build_cache_keys_relative_path = "_pgp"
class FetchCacheError(Exception):
"""Error thrown when fetching the cache failed, usually a composite error list."""
+
def __init__(self, errors):
if not isinstance(errors, list):
raise TypeError("Expected a list of errors")
@@ -57,10 +58,12 @@ class FetchCacheError(Exception):
if len(errors) > 1:
msg = " Error {0}: {1}: {2}"
self.message = "Multiple errors during fetching:\n"
- self.message += "\n".join((
- msg.format(i + 1, err.__class__.__name__, str(err))
- for (i, err) in enumerate(errors)
- ))
+ self.message += "\n".join(
+ (
+ msg.format(i + 1, err.__class__.__name__, str(err))
+ for (i, err) in enumerate(errors)
+ )
+ )
else:
err = errors[0]
self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
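
For illustration only, a standalone class mirroring the message formatting this FetchCacheError hunk shows (not the spack class itself), together with how the composite message reads:

    class CompositeFetchError(Exception):
        def __init__(self, errors):
            if not isinstance(errors, list):
                raise TypeError("Expected a list of errors")
            if len(errors) > 1:
                line = " Error {0}: {1}: {2}"
                self.message = "Multiple errors during fetching:\n" + "\n".join(
                    line.format(i + 1, err.__class__.__name__, str(err))
                    for i, err in enumerate(errors)
                )
            else:
                err = errors[0]
                self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
            super().__init__(self.message)

    # Two failed index fetches collapse into a single readable report:
    print(CompositeFetchError([OSError("timed out"), ValueError("bad hash")]).message)
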
@@ -90,7 +93,7 @@ class BinaryCacheIndex(object):
self._index_cache_root = cache_root
# the key associated with the serialized _local_index_cache
- self._index_contents_key = 'contents.json'
+ self._index_contents_key = "contents.json"
# a FileCache instance storing copies of remote binary cache indices
self._index_file_cache = None
@@ -114,8 +117,7 @@ class BinaryCacheIndex(object):
def _init_local_index_cache(self):
if not self._index_file_cache:
- self._index_file_cache = file_cache.FileCache(
- self._index_cache_root)
+ self._index_file_cache = file_cache.FileCache(self._index_cache_root)
cache_key = self._index_contents_key
self._index_file_cache.init_entry(cache_key)
@@ -124,13 +126,12 @@ class BinaryCacheIndex(object):
self._local_index_cache = {}
if os.path.isfile(cache_path):
- with self._index_file_cache.read_transaction(
- cache_key) as cache_file:
+ with self._index_file_cache.read_transaction(cache_key) as cache_file:
self._local_index_cache = json.load(cache_file)
def clear(self):
- """ For testing purposes we need to be able to empty the cache and
- clear associated data structures. """
+ """For testing purposes we need to be able to empty the cache and
+ clear associated data structures."""
if self._index_file_cache:
self._index_file_cache.destroy()
self._index_file_cache = None
@@ -145,10 +146,10 @@ class BinaryCacheIndex(object):
json.dump(self._local_index_cache, new)
def regenerate_spec_cache(self, clear_existing=False):
- """ Populate the local cache of concrete specs (``_mirrors_for_spec``)
+ """Populate the local cache of concrete specs (``_mirrors_for_spec``)
from the locally cached buildcache index files. This is essentially a
no-op if it has already been done, as we keep track of the index
- hashes for which we have already associated the built specs. """
+ hashes for which we have already associated the built specs."""
self._init_local_index_cache()
if clear_existing:
@@ -157,20 +158,18 @@ class BinaryCacheIndex(object):
for mirror_url in self._local_index_cache:
cache_entry = self._local_index_cache[mirror_url]
- cached_index_path = cache_entry['index_path']
- cached_index_hash = cache_entry['index_hash']
+ cached_index_path = cache_entry["index_path"]
+ cached_index_hash = cache_entry["index_hash"]
if cached_index_hash not in self._specs_already_associated:
- self._associate_built_specs_with_mirror(cached_index_path,
- mirror_url)
+ self._associate_built_specs_with_mirror(cached_index_path, mirror_url)
self._specs_already_associated.add(cached_index_hash)
def _associate_built_specs_with_mirror(self, cache_key, mirror_url):
tmpdir = tempfile.mkdtemp()
try:
- db_root_dir = os.path.join(tmpdir, 'db_root')
- db = spack_db.Database(None, db_dir=db_root_dir,
- enable_transaction_locking=False)
+ db_root_dir = os.path.join(tmpdir, "db_root")
+ db = spack_db.Database(None, db_dir=db_root_dir, enable_transaction_locking=False)
self._index_file_cache.init_entry(cache_key)
cache_path = self._index_file_cache.cache_path(cache_key)
@@ -189,13 +188,15 @@ class BinaryCacheIndex(object):
# A binary mirror can only have one spec per DAG hash, so
# if we already have an entry under this DAG hash for this
# mirror url, we're done.
- if entry['mirror_url'] == mirror_url:
+ if entry["mirror_url"] == mirror_url:
break
else:
- self._mirrors_for_spec[dag_hash].append({
- "mirror_url": mirror_url,
- "spec": indexed_spec,
- })
+ self._mirrors_for_spec[dag_hash].append(
+ {
+ "mirror_url": mirror_url,
+ "spec": indexed_spec,
+ }
+ )
finally:
shutil.rmtree(tmpdir)
@@ -206,7 +207,7 @@ class BinaryCacheIndex(object):
# with the same DAG hash are equivalent, so we can just
# return the first one in the list.
if len(self._mirrors_for_spec[dag_hash]) > 0:
- spec_list.append(self._mirrors_for_spec[dag_hash][0]['spec'])
+ spec_list.append(self._mirrors_for_spec[dag_hash][0]["spec"])
return spec_list
@@ -258,7 +259,7 @@ class BinaryCacheIndex(object):
if not mirrors_to_check:
return results
mirror_urls = mirrors_to_check.values()
- return [r for r in results if r['mirror_url'] in mirror_urls]
+ return [r for r in results if r["mirror_url"] in mirror_urls]
def update_spec(self, spec, found_list):
"""
@@ -273,24 +274,24 @@ class BinaryCacheIndex(object):
current_list = self._mirrors_for_spec[spec_dag_hash]
for new_entry in found_list:
for cur_entry in current_list:
- if new_entry['mirror_url'] == cur_entry['mirror_url']:
- cur_entry['spec'] = new_entry['spec']
+ if new_entry["mirror_url"] == cur_entry["mirror_url"]:
+ cur_entry["spec"] = new_entry["spec"]
break
else:
current_list.append = {
- 'mirror_url': new_entry['mirror_url'],
- 'spec': new_entry['spec'],
+ "mirror_url": new_entry["mirror_url"],
+ "spec": new_entry["spec"],
}
def update(self):
- """ Make sure local cache of buildcache index files is up to date.
+ """Make sure local cache of buildcache index files is up to date.
If the same mirrors are configured as the last time this was called
and none of the remote buildcache indices have changed, calling this
method will only result in fetching the index hash from each mirror
to confirm it is the same as what is stored locally. Otherwise, the
buildcache ``index.json`` and ``index.json.hash`` files are retrieved
from each configured mirror and stored locally (both in memory and
- on disk under ``_index_cache_root``). """
+ on disk under ``_index_cache_root``)."""
self._init_local_index_cache()
mirrors = spack.mirror.MirrorCollection()
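
The docstring above describes the index update protocol: fetch `index.json.hash` from each mirror, compare it to the locally cached hash, and only re-download and re-verify `index.json` when it differs. A minimal sketch of that flow, with the mirror reads abstracted into callables (an illustration, not spack's implementation):

    import hashlib

    def refresh_index(fetch_hash, fetch_index, cached_hash=None):
        """fetch_hash/fetch_index are callables standing in for the mirror reads."""
        remote_hash = fetch_hash()
        if cached_hash and remote_hash == cached_hash:
            return cached_hash, None  # cache is already current, nothing to fetch
        index_text = fetch_index()
        computed = hashlib.sha256(index_text.encode("utf-8")).hexdigest()
        if remote_hash and computed != remote_hash:
            raise RuntimeError("computed hash did not match remote index hash")
        return computed, index_text

    # Example with canned data instead of a real mirror:
    body = '{"database": {"installs": {}}}'
    digest = hashlib.sha256(body.encode("utf-8")).hexdigest()
    print(refresh_index(lambda: digest, lambda: body)[0] == digest)  # True
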
@@ -327,13 +328,14 @@ class BinaryCacheIndex(object):
for cached_mirror_url in self._local_index_cache:
cache_entry = self._local_index_cache[cached_mirror_url]
- cached_index_hash = cache_entry['index_hash']
- cached_index_path = cache_entry['index_path']
+ cached_index_hash = cache_entry["index_hash"]
+ cached_index_path = cache_entry["index_path"]
if cached_mirror_url in configured_mirror_urls:
# May need to fetch the index and update the local caches
try:
needs_regen = self._fetch_and_cache_index(
- cached_mirror_url, expect_hash=cached_index_hash)
+ cached_mirror_url, expect_hash=cached_index_hash
+ )
all_methods_failed = False
except FetchCacheError as fetch_error:
needs_regen = False
@@ -343,18 +345,19 @@ class BinaryCacheIndex(object):
spec_cache_regenerate_needed |= needs_regen
else:
# No longer have this mirror, cached index should be removed
- items_to_remove.append({
- 'url': cached_mirror_url,
- 'cache_key': os.path.join(self._index_cache_root,
- cached_index_path)
- })
+ items_to_remove.append(
+ {
+ "url": cached_mirror_url,
+ "cache_key": os.path.join(self._index_cache_root, cached_index_path),
+ }
+ )
spec_cache_clear_needed = True
spec_cache_regenerate_needed = True
# Clean up items to be removed, identified above
for item in items_to_remove:
- url = item['url']
- cache_key = item['cache_key']
+ url = item["url"]
+ cache_key = item["cache_key"]
self._index_file_cache.remove(cache_key)
del self._local_index_cache[url]
@@ -383,7 +386,7 @@ class BinaryCacheIndex(object):
self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed)
def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
- """ Fetch a buildcache index file from a remote mirror and cache it.
+ """Fetch a buildcache index file from a remote mirror and cache it.
If we already have a cached index from this mirror, then we first
check if the hash has changed, and we avoid fetching it if not.
@@ -401,10 +404,8 @@ class BinaryCacheIndex(object):
Throws:
FetchCacheError: a composite exception.
"""
- index_fetch_url = url_util.join(
- mirror_url, _build_cache_relative_path, 'index.json')
- hash_fetch_url = url_util.join(
- mirror_url, _build_cache_relative_path, 'index.json.hash')
+ index_fetch_url = url_util.join(mirror_url, _build_cache_relative_path, "index.json")
+ hash_fetch_url = url_util.join(mirror_url, _build_cache_relative_path, "index.json.hash")
if not web_util.url_exists(index_fetch_url):
# A binary mirror is not required to have an index, so avoid
@@ -420,12 +421,14 @@ class BinaryCacheIndex(object):
# the index itself.
try:
_, _, fs = web_util.read_from_url(hash_fetch_url)
- fetched_hash = codecs.getreader('utf-8')(fs).read()
+ fetched_hash = codecs.getreader("utf-8")(fs).read()
except (URLError, web_util.SpackWebError) as url_err:
errors.append(
- RuntimeError("Unable to read index hash {0} due to {1}: {2}".format(
- hash_fetch_url, url_err.__class__.__name__, str(url_err)
- ))
+ RuntimeError(
+ "Unable to read index hash {0} due to {1}: {2}".format(
+ hash_fetch_url, url_err.__class__.__name__, str(url_err)
+ )
+ )
)
# The only case where we'll skip attempting to fetch the buildcache
@@ -434,8 +437,7 @@ class BinaryCacheIndex(object):
# the two hashes are the same.
if expect_hash and fetched_hash:
if fetched_hash == expect_hash:
- tty.debug('Cached index for {0} already up to date'.format(
- mirror_url))
+ tty.debug("Cached index for {0} already up to date".format(mirror_url))
return False
else:
# We expected a hash, we fetched a hash, and they were not the
@@ -444,28 +446,31 @@ class BinaryCacheIndex(object):
# existing cache file
if mirror_url in self._local_index_cache:
existing_entry = self._local_index_cache[mirror_url]
- old_cache_key = existing_entry['index_path']
+ old_cache_key = existing_entry["index_path"]
- tty.debug('Fetching index from {0}'.format(index_fetch_url))
+ tty.debug("Fetching index from {0}".format(index_fetch_url))
# Fetch index itself
try:
_, _, fs = web_util.read_from_url(index_fetch_url)
- index_object_str = codecs.getreader('utf-8')(fs).read()
+ index_object_str = codecs.getreader("utf-8")(fs).read()
except (URLError, web_util.SpackWebError) as url_err:
errors.append(
- RuntimeError("Unable to read index {0} due to {1}: {2}".format(
- index_fetch_url, url_err.__class__.__name__, str(url_err)
- ))
+ RuntimeError(
+ "Unable to read index {0} due to {1}: {2}".format(
+ index_fetch_url, url_err.__class__.__name__, str(url_err)
+ )
+ )
)
raise FetchCacheError(errors)
locally_computed_hash = compute_hash(index_object_str)
if fetched_hash is not None and locally_computed_hash != fetched_hash:
- msg = ('Computed hash ({0}) did not match remote ({1}), '
- 'indicating error in index transmission').format(
- locally_computed_hash, expect_hash)
+ msg = (
+ "Computed hash ({0}) did not match remote ({1}), "
+ "indicating error in index transmission"
+ ).format(locally_computed_hash, expect_hash)
errors.append(RuntimeError(msg))
# We somehow got an index that doesn't match the remote one, maybe
# the next time we try we'll be successful.
@@ -473,15 +478,14 @@ class BinaryCacheIndex(object):
url_hash = compute_hash(mirror_url)
- cache_key = '{0}_{1}.json'.format(
- url_hash[:10], locally_computed_hash[:10])
+ cache_key = "{0}_{1}.json".format(url_hash[:10], locally_computed_hash[:10])
self._index_file_cache.init_entry(cache_key)
with self._index_file_cache.write_transaction(cache_key) as (old, new):
new.write(index_object_str)
self._local_index_cache[mirror_url] = {
- 'index_hash': locally_computed_hash,
- 'index_path': cache_key,
+ "index_hash": locally_computed_hash,
+ "index_path": cache_key,
}
# clean up the old cache_key if necessary
@@ -495,7 +499,7 @@ class BinaryCacheIndex(object):
def binary_index_location():
"""Set up a BinaryCacheIndex for remote buildcache dbs in the user's homedir."""
- cache_root = os.path.join(misc_cache_location(), 'indices')
+ cache_root = os.path.join(misc_cache_location(), "indices")
return spack.util.path.canonicalize_path(cache_root)
@@ -552,6 +556,7 @@ class NoVerifyException(spack.error.SpackError):
"""
Raised if file fails signature verification.
"""
+
pass
@@ -559,6 +564,7 @@ class NoChecksumException(spack.error.SpackError):
"""
Raised if file fails checksum verification.
"""
+
pass
@@ -579,7 +585,7 @@ class UnsignedPackageException(spack.error.SpackError):
def compute_hash(data):
- return hashlib.sha256(data.encode('utf-8')).hexdigest()
+ return hashlib.sha256(data.encode("utf-8")).hexdigest()
def build_cache_relative_path():
@@ -607,7 +613,7 @@ def read_buildinfo_file(prefix):
Read buildinfo file
"""
filename = buildinfo_file_name(prefix)
- with open(filename, 'r') as inputfile:
+ with open(filename, "r") as inputfile:
content = inputfile.read()
buildinfo = yaml.load(content)
return buildinfo
@@ -622,9 +628,13 @@ def get_buildfile_manifest(spec):
metadata (.spack). This can be used to find a particular kind of file
in spack, or to generate the build metadata.
"""
- data = {"text_to_relocate": [], "binary_to_relocate": [],
- "link_to_relocate": [], "other": [],
- "binary_to_relocate_fullpath": []}
+ data = {
+ "text_to_relocate": [],
+ "binary_to_relocate": [],
+ "link_to_relocate": [],
+ "other": [],
+ "binary_to_relocate_fullpath": [],
+ }
exclude_list = (".spack", "man")
@@ -640,7 +650,7 @@ def get_buildfile_manifest(spec):
if os.path.islink(dir_path_name):
link = os.readlink(dir_path_name)
if os.path.isabs(link) and link.startswith(spack.store.layout.root):
- data['link_to_relocate'].append(rel_path_name)
+ data["link_to_relocate"].append(rel_path_name)
for filename in files:
path_name = os.path.join(root, filename)
@@ -653,25 +663,28 @@ def get_buildfile_manifest(spec):
if os.path.isabs(link):
# Relocate absolute links into the spack tree
if link.startswith(spack.store.layout.root):
- data['link_to_relocate'].append(rel_path_name)
+ data["link_to_relocate"].append(rel_path_name)
added = True
if relocate.needs_binary_relocation(m_type, m_subtype):
- if ((m_subtype in ('x-executable', 'x-sharedlib', 'x-pie-executable')
- and sys.platform != 'darwin') or
- (m_subtype in ('x-mach-binary')
- and sys.platform == 'darwin') or
- (not filename.endswith('.o'))):
- data['binary_to_relocate'].append(rel_path_name)
- data['binary_to_relocate_fullpath'].append(path_name)
+ if (
+ (
+ m_subtype in ("x-executable", "x-sharedlib", "x-pie-executable")
+ and sys.platform != "darwin"
+ )
+ or (m_subtype in ("x-mach-binary") and sys.platform == "darwin")
+ or (not filename.endswith(".o"))
+ ):
+ data["binary_to_relocate"].append(rel_path_name)
+ data["binary_to_relocate_fullpath"].append(path_name)
added = True
if relocate.needs_text_relocation(m_type, m_subtype):
- data['text_to_relocate'].append(rel_path_name)
+ data["text_to_relocate"].append(rel_path_name)
added = True
if not added:
- data['other'].append(path_name)
+ data["other"].append(path_name)
return data
@@ -690,18 +703,17 @@ def write_buildinfo_file(spec, workdir, rel=False):
# Create buildinfo data and write it to disk
buildinfo = {}
- buildinfo['sbang_install_path'] = spack.hooks.sbang.sbang_install_path()
- buildinfo['relative_rpaths'] = rel
- buildinfo['buildpath'] = spack.store.layout.root
- buildinfo['spackprefix'] = spack.paths.prefix
- buildinfo['relative_prefix'] = os.path.relpath(
- spec.prefix, spack.store.layout.root)
- buildinfo['relocate_textfiles'] = manifest['text_to_relocate']
- buildinfo['relocate_binaries'] = manifest['binary_to_relocate']
- buildinfo['relocate_links'] = manifest['link_to_relocate']
- buildinfo['prefix_to_hash'] = prefix_to_hash
+ buildinfo["sbang_install_path"] = spack.hooks.sbang.sbang_install_path()
+ buildinfo["relative_rpaths"] = rel
+ buildinfo["buildpath"] = spack.store.layout.root
+ buildinfo["spackprefix"] = spack.paths.prefix
+ buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
+ buildinfo["relocate_textfiles"] = manifest["text_to_relocate"]
+ buildinfo["relocate_binaries"] = manifest["binary_to_relocate"]
+ buildinfo["relocate_links"] = manifest["link_to_relocate"]
+ buildinfo["prefix_to_hash"] = prefix_to_hash
filename = buildinfo_file_name(workdir)
- with open(filename, 'w') as outfile:
+ with open(filename, "w") as outfile:
outfile.write(syaml.dump(buildinfo, default_flow_style=True))
@@ -710,9 +722,12 @@ def tarball_directory_name(spec):
Return name of the tarball directory according to the convention
<os>-<architecture>/<compiler>/<package>-<version>/
"""
- return "%s/%s/%s-%s" % (spec.architecture,
- str(spec.compiler).replace("@", "-"),
- spec.name, spec.version)
+ return "%s/%s/%s-%s" % (
+ spec.architecture,
+ str(spec.compiler).replace("@", "-"),
+ spec.name,
+ spec.version,
+ )
def tarball_name(spec, ext):
@@ -720,12 +735,14 @@ def tarball_name(spec, ext):
Return the name of the tarfile according to the convention
<os>-<architecture>-<package>-<dag_hash><ext>
"""
- return "%s-%s-%s-%s-%s%s" % (spec.architecture,
- str(spec.compiler).replace("@", "-"),
- spec.name,
- spec.version,
- spec.dag_hash(),
- ext)
+ return "%s-%s-%s-%s-%s%s" % (
+ spec.architecture,
+ str(spec.compiler).replace("@", "-"),
+ spec.name,
+ spec.version,
+ spec.dag_hash(),
+ ext,
+ )
def tarball_path_name(spec, ext):
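
Taken together, the two hunks above encode the buildcache path convention: a directory of the form `<architecture>/<compiler with '@' replaced by '-'>/<name>-<version>/` holding files named `<architecture>-<compiler>-<name>-<version>-<dag hash><ext>`. A quick standalone illustration (the `SimpleNamespace` spec and its field values are made up, not a real `spack.spec.Spec`):

    import os
    from types import SimpleNamespace

    spec = SimpleNamespace(
        architecture="linux-ubuntu20.04-x86_64",
        compiler="gcc@9.4.0",
        name="zlib",
        version="1.2.12",
        dag_hash=lambda: "abcdef1234567890abcdef1234567890abcdef12",
    )
    compiler = str(spec.compiler).replace("@", "-")
    directory = "%s/%s/%s-%s" % (spec.architecture, compiler, spec.name, spec.version)
    filename = "%s-%s-%s-%s-%s%s" % (
        spec.architecture, compiler, spec.name, spec.version, spec.dag_hash(), ".spack"
    )
    print(os.path.join(directory, filename))
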
@@ -733,15 +750,14 @@ def tarball_path_name(spec, ext):
Return the full path+name for a given spec according to the convention
<tarball_directory_name>/<tarball_name>
"""
- return os.path.join(tarball_directory_name(spec),
- tarball_name(spec, ext))
+ return os.path.join(tarball_directory_name(spec), tarball_name(spec, ext))
def checksum_tarball(file):
# calculate sha256 hash of tar file
block_size = 65536
hasher = hashlib.sha256()
- with open(file, 'rb') as tfile:
+ with open(file, "rb") as tfile:
buf = tfile.read(block_size)
while len(buf) > 0:
hasher.update(buf)
@@ -762,12 +778,13 @@ def select_signing_key(key=None):
raise NoKeyException(
"No default key available for signing.\n"
"Use spack gpg init and spack gpg create"
- " to create a default key.")
+ " to create a default key."
+ )
return key
def sign_specfile(key, force, specfile_path):
- signed_specfile_path = '%s.sig' % specfile_path
+ signed_specfile_path = "%s.sig" % specfile_path
if os.path.exists(signed_specfile_path):
if force:
os.remove(signed_specfile_path)
@@ -780,16 +797,16 @@ def sign_specfile(key, force, specfile_path):
def _fetch_spec_from_mirror(spec_url):
s = None
- tty.debug('fetching {0}'.format(spec_url))
+ tty.debug("fetching {0}".format(spec_url))
_, _, spec_file = web_util.read_from_url(spec_url)
- spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
+ spec_file_contents = codecs.getreader("utf-8")(spec_file).read()
# Need full spec.json name or this gets confused with index.json.
- if spec_url.endswith('.json.sig'):
+ if spec_url.endswith(".json.sig"):
specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
s = Spec.from_dict(specfile_json)
- elif spec_url.endswith('.json'):
+ elif spec_url.endswith(".json"):
s = Spec.from_json(spec_file_contents)
- elif spec_url.endswith('.yaml'):
+ elif spec_url.endswith(".yaml"):
s = Spec.from_yaml(spec_file_contents)
return s
@@ -799,17 +816,17 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
try:
s = _fetch_spec_from_mirror(url_util.join(cache_prefix, file_path))
except (URLError, web_util.SpackWebError) as url_err:
- tty.error('Error reading specfile: {0}'.format(file_path))
+ tty.error("Error reading specfile: {0}".format(file_path))
tty.error(url_err)
if s:
db.add(s, None)
- db.mark(s, 'in_buildcache', True)
+ db.mark(s, "in_buildcache", True)
# Now generate the index, compute its hash, and push the two files to
# the mirror.
- index_json_path = os.path.join(db_root_dir, 'index.json')
- with open(index_json_path, 'w') as f:
+ index_json_path = os.path.join(db_root_dir, "index.json")
+ with open(index_json_path, "w") as f:
db._write_to_file(f)
# Read the index back in and compute its hash
@@ -818,23 +835,25 @@ def _read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir):
index_hash = compute_hash(index_string)
# Write the hash out to a local file
- index_hash_path = os.path.join(db_root_dir, 'index.json.hash')
- with open(index_hash_path, 'w') as f:
+ index_hash_path = os.path.join(db_root_dir, "index.json.hash")
+ with open(index_hash_path, "w") as f:
f.write(index_hash)
# Push the index itself
web_util.push_to_url(
index_json_path,
- url_util.join(cache_prefix, 'index.json'),
+ url_util.join(cache_prefix, "index.json"),
keep_original=False,
- extra_args={'ContentType': 'application/json'})
+ extra_args={"ContentType": "application/json"},
+ )
# Push the hash
web_util.push_to_url(
index_hash_path,
- url_util.join(cache_prefix, 'index.json.hash'),
+ url_util.join(cache_prefix, "index.json.hash"),
keep_original=False,
- extra_args={'ContentType': 'text/plain'})
+ extra_args={"ContentType": "text/plain"},
+ )
def generate_package_index(cache_prefix):
@@ -848,38 +867,39 @@ def generate_package_index(cache_prefix):
file_list = (
entry
for entry in web_util.list_url(cache_prefix)
- if entry.endswith('.yaml') or
- entry.endswith('spec.json') or
- entry.endswith('spec.json.sig'))
+ if entry.endswith(".yaml")
+ or entry.endswith("spec.json")
+ or entry.endswith("spec.json.sig")
+ )
except KeyError as inst:
- msg = 'No packages at {0}: {1}'.format(cache_prefix, inst)
+ msg = "No packages at {0}: {1}".format(cache_prefix, inst)
tty.warn(msg)
return
except Exception as err:
# If we got some kind of S3 (access denied or other connection
# error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return
- msg = 'Encountered problem listing packages at {0}: {1}'.format(
- cache_prefix, err)
+ msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
tty.warn(msg)
return
- tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
- cache_prefix))
+ tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
tmpdir = tempfile.mkdtemp()
- db_root_dir = os.path.join(tmpdir, 'db_root')
- db = spack_db.Database(None, db_dir=db_root_dir,
- enable_transaction_locking=False,
- record_fields=['spec', 'ref_count', 'in_buildcache'])
+ db_root_dir = os.path.join(tmpdir, "db_root")
+ db = spack_db.Database(
+ None,
+ db_dir=db_root_dir,
+ enable_transaction_locking=False,
+ record_fields=["spec", "ref_count", "in_buildcache"],
+ )
try:
_read_specs_and_push_index(file_list, cache_prefix, db, db_root_dir)
except Exception as err:
- msg = 'Encountered problem pushing package index to {0}: {1}'.format(
- cache_prefix, err)
+ msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
tty.warn(msg)
- tty.debug('\n' + traceback.format_exc())
+ tty.debug("\n" + traceback.format_exc())
finally:
shutil.rmtree(tmpdir)
@@ -892,25 +912,27 @@ def generate_key_index(key_prefix, tmpdir=None):
key_prefix.
"""
- tty.debug(' '.join(('Retrieving key.pub files from',
- url_util.format(key_prefix),
- 'to build key index')))
+ tty.debug(
+ " ".join(
+ ("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
+ )
+ )
try:
fingerprints = (
entry[:-4]
for entry in web_util.list_url(key_prefix, recursive=False)
- if entry.endswith('.pub'))
+ if entry.endswith(".pub")
+ )
except KeyError as inst:
- msg = 'No keys at {0}: {1}'.format(key_prefix, inst)
+ msg = "No keys at {0}: {1}".format(key_prefix, inst)
tty.warn(msg)
return
except Exception as err:
# If we got some kind of S3 (access denied or other connection
# error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return
- msg = 'Encountered problem listing keys at {0}: {1}'.format(
- key_prefix, err)
+ msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
tty.warn(msg)
return
@@ -918,31 +940,27 @@ def generate_key_index(key_prefix, tmpdir=None):
keys_local = url_util.local_file_path(key_prefix)
if keys_local:
- target = os.path.join(keys_local, 'index.json')
+ target = os.path.join(keys_local, "index.json")
else:
if not tmpdir:
tmpdir = tempfile.mkdtemp()
remove_tmpdir = True
- target = os.path.join(tmpdir, 'index.json')
+ target = os.path.join(tmpdir, "index.json")
- index = {
- 'keys': dict(
- (fingerprint, {}) for fingerprint
- in sorted(set(fingerprints)))
- }
- with open(target, 'w') as f:
+ index = {"keys": dict((fingerprint, {}) for fingerprint in sorted(set(fingerprints)))}
+ with open(target, "w") as f:
sjson.dump(index, f)
if not keys_local:
try:
web_util.push_to_url(
target,
- url_util.join(key_prefix, 'index.json'),
+ url_util.join(key_prefix, "index.json"),
keep_original=False,
- extra_args={'ContentType': 'application/json'})
+ extra_args={"ContentType": "application/json"},
+ )
except Exception as err:
- msg = 'Encountered problem pushing key index to {0}: {1}'.format(
- key_prefix, err)
+ msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
tty.warn(msg)
finally:
if remove_tmpdir:
@@ -950,29 +968,32 @@ def generate_key_index(key_prefix, tmpdir=None):
def _build_tarball(
- spec, outdir,
- force=False, relative=False, unsigned=False,
- allow_root=False, key=None, regenerate_index=False
+ spec,
+ outdir,
+ force=False,
+ relative=False,
+ unsigned=False,
+ allow_root=False,
+ key=None,
+ regenerate_index=False,
):
"""
Build a tarball from given spec and put it into the directory structure
used at the mirror (following <tarball_directory_name>).
"""
if not spec.concrete:
- raise ValueError('spec must be concrete to build tarball')
+ raise ValueError("spec must be concrete to build tarball")
# set up some paths
tmpdir = tempfile.mkdtemp()
cache_prefix = build_cache_prefix(tmpdir)
- tarfile_name = tarball_name(spec, '.spack')
+ tarfile_name = tarball_name(spec, ".spack")
tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
tarfile_path = os.path.join(tarfile_dir, tarfile_name)
- spackfile_path = os.path.join(
- cache_prefix, tarball_path_name(spec, '.spack'))
+ spackfile_path = os.path.join(cache_prefix, tarball_path_name(spec, ".spack"))
- remote_spackfile_path = url_util.join(
- outdir, os.path.relpath(spackfile_path, tmpdir))
+ remote_spackfile_path = url_util.join(outdir, os.path.relpath(spackfile_path, tmpdir))
mkdirp(tarfile_dir)
if web_util.url_exists(remote_spackfile_path):
@@ -986,17 +1007,18 @@ def _build_tarball(
# and preferences
spec_file = spack.store.layout.spec_file_path(spec)
- specfile_name = tarball_name(spec, '.spec.json')
+ specfile_name = tarball_name(spec, ".spec.json")
specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name))
- signed_specfile_path = '{0}.sig'.format(specfile_path)
- deprecated_specfile_path = specfile_path.replace('.spec.json', '.spec.yaml')
+ signed_specfile_path = "{0}.sig".format(specfile_path)
+ deprecated_specfile_path = specfile_path.replace(".spec.json", ".spec.yaml")
remote_specfile_path = url_util.join(
- outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)))
- remote_signed_specfile_path = '{0}.sig'.format(remote_specfile_path)
+ outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir))
+ )
+ remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
remote_specfile_path_deprecated = url_util.join(
- outdir, os.path.relpath(deprecated_specfile_path,
- os.path.realpath(tmpdir)))
+ outdir, os.path.relpath(deprecated_specfile_path, os.path.realpath(tmpdir))
+ )
# If force and exists, overwrite. Otherwise raise exception on collision.
if force:
@@ -1006,9 +1028,11 @@ def _build_tarball(
web_util.remove_url(remote_signed_specfile_path)
if web_util.url_exists(remote_specfile_path_deprecated):
web_util.remove_url(remote_specfile_path_deprecated)
- elif (web_util.url_exists(remote_specfile_path) or
- web_util.url_exists(remote_signed_specfile_path) or
- web_util.url_exists(remote_specfile_path_deprecated)):
+ elif (
+ web_util.url_exists(remote_specfile_path)
+ or web_util.url_exists(remote_signed_specfile_path)
+ or web_util.url_exists(remote_specfile_path_deprecated)
+ ):
raise NoOverwriteException(url_util.format(remote_specfile_path))
# make a copy of the install directory to work with
@@ -1016,12 +1040,11 @@ def _build_tarball(
# install_tree copies hardlinks
# create a temporary tarfile from prefix and exract it to workdir
# tarfile preserves hardlinks
- temp_tarfile_name = tarball_name(spec, '.tar')
+ temp_tarfile_name = tarball_name(spec, ".tar")
temp_tarfile_path = os.path.join(tarfile_dir, temp_tarfile_name)
- with closing(tarfile.open(temp_tarfile_path, 'w')) as tar:
- tar.add(name='%s' % spec.prefix,
- arcname='.')
- with closing(tarfile.open(temp_tarfile_path, 'r')) as tar:
+ with closing(tarfile.open(temp_tarfile_path, "w")) as tar:
+ tar.add(name="%s" % spec.prefix, arcname=".")
+ with closing(tarfile.open(temp_tarfile_path, "r")) as tar:
tar.extractall(workdir)
os.remove(temp_tarfile_path)
@@ -1048,9 +1071,8 @@ def _build_tarball(
tty.die(e)
# create gzip compressed tarball of the install prefix
- with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
- tar.add(name='%s' % workdir,
- arcname='%s' % os.path.basename(spec.prefix))
+ with closing(tarfile.open(tarfile_path, "w:gz")) as tar:
+ tar.add(name="%s" % workdir, arcname="%s" % os.path.basename(spec.prefix))
# remove copy of install directory
shutil.rmtree(workdir)
@@ -1059,30 +1081,27 @@ def _build_tarball(
# add sha256 checksum to spec.json
- with open(spec_file, 'r') as inputfile:
+ with open(spec_file, "r") as inputfile:
content = inputfile.read()
- if spec_file.endswith('.yaml'):
+ if spec_file.endswith(".yaml"):
spec_dict = yaml.load(content)
- elif spec_file.endswith('.json'):
+ elif spec_file.endswith(".json"):
spec_dict = sjson.load(content)
else:
- raise ValueError(
- '{0} not a valid spec file type (json or yaml)'.format(
- spec_file))
- spec_dict['buildcache_layout_version'] = 1
+ raise ValueError("{0} not a valid spec file type (json or yaml)".format(spec_file))
+ spec_dict["buildcache_layout_version"] = 1
bchecksum = {}
- bchecksum['hash_algorithm'] = 'sha256'
- bchecksum['hash'] = checksum
- spec_dict['binary_cache_checksum'] = bchecksum
+ bchecksum["hash_algorithm"] = "sha256"
+ bchecksum["hash"] = checksum
+ spec_dict["binary_cache_checksum"] = bchecksum
# Add original install prefix relative to layout root to spec.json.
# This will be used to determine is the directory layout has changed.
buildinfo = {}
- buildinfo['relative_prefix'] = os.path.relpath(
- spec.prefix, spack.store.layout.root)
- buildinfo['relative_rpaths'] = relative
- spec_dict['buildinfo'] = buildinfo
+ buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
+ buildinfo["relative_rpaths"] = relative
+ spec_dict["buildinfo"] = buildinfo
- with open(specfile_path, 'w') as outfile:
+ with open(specfile_path, "w") as outfile:
outfile.write(sjson.dump(spec_dict))
# sign the tarball and spec file with gpg
@@ -1091,30 +1110,25 @@ def _build_tarball(
sign_specfile(key, force, specfile_path)
# push tarball and signed spec json to remote mirror
- web_util.push_to_url(
- spackfile_path, remote_spackfile_path, keep_original=False)
+ web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
web_util.push_to_url(
signed_specfile_path if not unsigned else specfile_path,
remote_signed_specfile_path if not unsigned else remote_specfile_path,
- keep_original=False)
+ keep_original=False,
+ )
- tty.debug('Buildcache for "{0}" written to \n {1}'
- .format(spec, remote_spackfile_path))
+ tty.debug('Buildcache for "{0}" written to \n {1}'.format(spec, remote_spackfile_path))
try:
# push the key to the build cache's _pgp directory so it can be
# imported
if not unsigned:
- push_keys(outdir,
- keys=[key],
- regenerate_index=regenerate_index,
- tmpdir=tmpdir)
+ push_keys(outdir, keys=[key], regenerate_index=regenerate_index, tmpdir=tmpdir)
# create an index.json for the build_cache directory so specs can be
# found
if regenerate_index:
- generate_package_index(url_util.join(
- outdir, os.path.relpath(cache_prefix, tmpdir)))
+ generate_package_index(url_util.join(outdir, os.path.relpath(cache_prefix, tmpdir)))
finally:
shutil.rmtree(tmpdir)
@@ -1143,9 +1157,12 @@ def nodes_to_be_packaged(specs, include_root=True, include_dependencies=True):
if not include_dependencies:
nodes = [current_spec]
else:
- nodes = [n for n in current_spec.traverse(
- order='post', root=include_root, deptype=('link', 'run')
- )]
+ nodes = [
+ n
+ for n in current_spec.traverse(
+ order="post", root=include_root, deptype=("link", "run")
+ )
+ ]
for node in nodes:
if not skip_node(node):
@@ -1167,7 +1184,7 @@ def push(specs, push_url, specs_kwargs=None, **kwargs):
**kwargs: TODO
"""
- specs_kwargs = specs_kwargs or {'include_root': True, 'include_dependencies': True}
+ specs_kwargs = specs_kwargs or {"include_root": True, "include_dependencies": True}
nodes = nodes_to_be_packaged(specs, **specs_kwargs)
# TODO: This seems to be an easy target for task
@@ -1189,7 +1206,7 @@ def try_verify(specfile_path):
Returns:
``True`` if the signature could be verified, ``False`` otherwise.
"""
- suppress = config.get('config:suppress_gpg_warnings', False)
+ suppress = config.get("config:suppress_gpg_warnings", False)
try:
spack.util.gpg.verify(specfile_path, suppress_warnings=suppress)
@@ -1223,8 +1240,8 @@ def try_fetch(url_to_fetch):
def _delete_staged_downloads(download_result):
"""Clean up stages used to download tarball and specfile"""
- download_result['tarball_stage'].destroy()
- download_result['specfile_stage'].destroy()
+ download_result["tarball_stage"].destroy()
+ download_result["specfile_stage"].destroy()
def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
@@ -1256,11 +1273,10 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
}
"""
if not spack.mirror.MirrorCollection():
- tty.die("Please add a spack mirror to allow " +
- "download of pre-compiled packages.")
+ tty.die("Please add a spack mirror to allow " + "download of pre-compiled packages.")
- tarball = tarball_path_name(spec, '.spack')
- specfile_prefix = tarball_name(spec, '.spec')
+ tarball = tarball_path_name(spec, ".spack")
+ specfile_prefix = tarball_name(spec, ".spec")
mirrors_to_try = []
@@ -1273,35 +1289,36 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
# look in all configured mirrors if needed, as maybe the spec
# we need was in an un-indexed mirror. No need to check any
# mirror for the spec twice though.
- try_first = [i['mirror_url'] for i in mirrors_for_spec] if mirrors_for_spec else []
+ try_first = [i["mirror_url"] for i in mirrors_for_spec] if mirrors_for_spec else []
try_next = [
- i.fetch_url for i in spack.mirror.MirrorCollection().values()
+ i.fetch_url
+ for i in spack.mirror.MirrorCollection().values()
if i.fetch_url not in try_first
]
for url in try_first + try_next:
- mirrors_to_try.append({
- 'specfile': url_util.join(url,
- _build_cache_relative_path, specfile_prefix),
- 'spackfile': url_util.join(url,
- _build_cache_relative_path, tarball)
- })
+ mirrors_to_try.append(
+ {
+ "specfile": url_util.join(url, _build_cache_relative_path, specfile_prefix),
+ "spackfile": url_util.join(url, _build_cache_relative_path, tarball),
+ }
+ )
tried_to_verify_sigs = []
# Assumes we care more about finding a spec file by preferred ext
# than by mirrory priority. This can be made less complicated as
# we remove support for deprecated spec formats and buildcache layouts.
- for ext in ['json.sig', 'json', 'yaml']:
+ for ext in ["json.sig", "json", "yaml"]:
for mirror_to_try in mirrors_to_try:
- specfile_url = '{0}.{1}'.format(mirror_to_try['specfile'], ext)
- spackfile_url = mirror_to_try['spackfile']
+ specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext)
+ spackfile_url = mirror_to_try["spackfile"]
local_specfile_stage = try_fetch(specfile_url)
if local_specfile_stage:
local_specfile_path = local_specfile_stage.save_filename
signature_verified = False
- if ext.endswith('.sig') and not unsigned:
+ if ext.endswith(".sig") and not unsigned:
# If we found a signed specfile at the root, try to verify
# the signature immediately. We will not download the
# tarball if we could not verify the signature.
@@ -1310,7 +1327,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
if not signature_verified:
tty.warn("Failed to verify: {0}".format(specfile_url))
- if unsigned or signature_verified or not ext.endswith('.sig'):
+ if unsigned or signature_verified or not ext.endswith(".sig"):
# We will download the tarball in one of three cases:
# 1. user asked for --no-check-signature
# 2. user didn't ask for --no-check-signature, but we
@@ -1330,9 +1347,9 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
tarball_stage = try_fetch(spackfile_url)
if tarball_stage:
return {
- 'tarball_stage': tarball_stage,
- 'specfile_stage': local_specfile_stage,
- 'signature_verified': signature_verified,
+ "tarball_stage": tarball_stage,
+ "specfile_stage": local_specfile_stage,
+ "signature_verified": signature_verified,
}
local_specfile_stage.destroy()
@@ -1342,14 +1359,20 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None):
# an acceptable one for which we could download a tarball.
if tried_to_verify_sigs:
- raise NoVerifyException(("Spack found new style signed binary packages, "
- "but was unable to verify any of them. Please "
- "obtain and trust the correct public key. If "
- "these are public spack binaries, please see the "
- "spack docs for locations where keys can be found."))
-
- tty.warn("download_tarball() was unable to download " +
- "{0} from any configured mirrors".format(spec))
+ raise NoVerifyException(
+ (
+ "Spack found new style signed binary packages, "
+ "but was unable to verify any of them. Please "
+ "obtain and trust the correct public key. If "
+ "these are public spack binaries, please see the "
+ "spack docs for locations where keys can be found."
+ )
+ )
+
+ tty.warn(
+ "download_tarball() was unable to download "
+ + "{0} from any configured mirrors".format(spec)
+ )
return None
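
The hunks above show the shape of the dictionary `download_tarball` returns on success (`tarball_stage`, `specfile_stage`, `signature_verified`) and that it returns `None` when nothing usable was found. A hedged sketch of how a caller might handle that result; the stage placeholders below are illustrative stand-ins, not `spack.stage.Stage` objects:

    def handle_download(download_result, unsigned=False):
        if download_result is None:
            raise RuntimeError("spec was not found on any configured mirror")
        if not unsigned and not download_result["signature_verified"]:
            raise RuntimeError("refusing to install an unverified binary package")
        return (
            download_result["tarball_stage"],
            download_result["specfile_stage"],
        )

    # Canned example result:
    fake_result = {
        "tarball_stage": "stage-for-tarball",      # placeholder, not a Stage
        "specfile_stage": "stage-for-specfile",    # placeholder, not a Stage
        "signature_verified": True,
    }
    print(handle_download(fake_result))
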
@@ -1360,26 +1383,24 @@ def make_package_relative(workdir, spec, allow_root):
"""
prefix = spec.prefix
buildinfo = read_buildinfo_file(workdir)
- old_layout_root = buildinfo['buildpath']
+ old_layout_root = buildinfo["buildpath"]
orig_path_names = list()
cur_path_names = list()
- for filename in buildinfo['relocate_binaries']:
+ for filename in buildinfo["relocate_binaries"]:
orig_path_names.append(os.path.join(prefix, filename))
cur_path_names.append(os.path.join(workdir, filename))
platform = spack.platforms.by_name(spec.platform)
- if 'macho' in platform.binary_formats:
- relocate.make_macho_binaries_relative(
- cur_path_names, orig_path_names, old_layout_root)
+ if "macho" in platform.binary_formats:
+ relocate.make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
- if 'elf' in platform.binary_formats:
- relocate.make_elf_binaries_relative(
- cur_path_names, orig_path_names, old_layout_root)
+ if "elf" in platform.binary_formats:
+ relocate.make_elf_binaries_relative(cur_path_names, orig_path_names, old_layout_root)
relocate.raise_if_not_relocatable(cur_path_names, allow_root)
orig_path_names = list()
cur_path_names = list()
- for linkname in buildinfo.get('relocate_links', []):
+ for linkname in buildinfo.get("relocate_links", []):
orig_path_names.append(os.path.join(prefix, linkname))
cur_path_names.append(os.path.join(workdir, linkname))
relocate.make_link_relative(cur_path_names, orig_path_names)
@@ -1392,7 +1413,7 @@ def check_package_relocatable(workdir, spec, allow_root):
"""
buildinfo = read_buildinfo_file(workdir)
cur_path_names = list()
- for filename in buildinfo['relocate_binaries']:
+ for filename in buildinfo["relocate_binaries"]:
cur_path_names.append(os.path.join(workdir, filename))
relocate.raise_if_not_relocatable(cur_path_names, allow_root)
@@ -1409,15 +1430,15 @@ def relocate_package(spec, allow_root):
new_spack_prefix = str(spack.paths.prefix)
old_sbang_install_path = None
- if 'sbang_install_path' in buildinfo:
- old_sbang_install_path = str(buildinfo['sbang_install_path'])
- old_layout_root = str(buildinfo['buildpath'])
- old_spack_prefix = str(buildinfo.get('spackprefix'))
- old_rel_prefix = buildinfo.get('relative_prefix')
+ if "sbang_install_path" in buildinfo:
+ old_sbang_install_path = str(buildinfo["sbang_install_path"])
+ old_layout_root = str(buildinfo["buildpath"])
+ old_spack_prefix = str(buildinfo.get("spackprefix"))
+ old_rel_prefix = buildinfo.get("relative_prefix")
old_prefix = os.path.join(old_layout_root, old_rel_prefix)
- rel = buildinfo.get('relative_rpaths')
- prefix_to_hash = buildinfo.get('prefix_to_hash', None)
- if (old_rel_prefix != new_rel_prefix and not prefix_to_hash):
+ rel = buildinfo.get("relative_rpaths")
+ prefix_to_hash = buildinfo.get("prefix_to_hash", None)
+ if old_rel_prefix != new_rel_prefix and not prefix_to_hash:
msg = "Package tarball was created from an install "
msg += "prefix with a different directory layout and an older "
msg += "buildcache create implementation. It cannot be relocated."
@@ -1428,10 +1449,10 @@ def relocate_package(spec, allow_root):
if not prefix_to_hash:
prefix_to_hash = dict()
hash_to_prefix = dict()
- hash_to_prefix[spec.format('{hash}')] = str(spec.package.prefix)
+ hash_to_prefix[spec.format("{hash}")] = str(spec.package.prefix)
new_deps = spack.build_environment.get_rpath_deps(spec.package)
for d in new_deps:
- hash_to_prefix[d.format('{hash}')] = str(d.prefix)
+ hash_to_prefix[d.format("{hash}")] = str(d.prefix)
# Spurious replacements (e.g. sbang) will cause issues with binaries
# For example, the new sbang can be longer than the old one.
# Hence 2 dictionaries are maintained here.
@@ -1453,19 +1474,18 @@ def relocate_package(spec, allow_root):
# sbang was a bash script, and it lived in the spack prefix. It is
# now a POSIX script that lives in the install prefix. Old packages
# will have the old sbang location in their shebangs.
- orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(old_spack_prefix)
+ orig_sbang = "#!/bin/bash {0}/bin/sbang".format(old_spack_prefix)
new_sbang = spack.hooks.sbang.sbang_shebang_line()
prefix_to_prefix_text[orig_sbang] = new_sbang
- tty.debug("Relocating package from",
- "%s to %s." % (old_layout_root, new_layout_root))
+ tty.debug("Relocating package from", "%s to %s." % (old_layout_root, new_layout_root))
def is_backup_file(file):
- return file.endswith('~')
+ return file.endswith("~")
# Text files containing the prefix text
text_names = list()
- for filename in buildinfo['relocate_textfiles']:
+ for filename in buildinfo["relocate_textfiles"]:
text_name = os.path.join(workdir, filename)
# Don't add backup files generated by filter_file during install step.
if not is_backup_file(text_name):
@@ -1473,31 +1493,35 @@ def relocate_package(spec, allow_root):
# If we are not installing back to the same install tree do the relocation
if old_prefix != new_prefix:
- files_to_relocate = [os.path.join(workdir, filename)
- for filename in buildinfo.get('relocate_binaries')
- ]
+ files_to_relocate = [
+ os.path.join(workdir, filename) for filename in buildinfo.get("relocate_binaries")
+ ]
# If the buildcache was not created with relativized rpaths
# do the relocation of path in binaries
platform = spack.platforms.by_name(spec.platform)
- if 'macho' in platform.binary_formats:
- relocate.relocate_macho_binaries(files_to_relocate,
- old_layout_root,
- new_layout_root,
- prefix_to_prefix_bin, rel,
- old_prefix,
- new_prefix)
- if 'elf' in platform.binary_formats:
- relocate.relocate_elf_binaries(files_to_relocate,
- old_layout_root,
- new_layout_root,
- prefix_to_prefix_bin, rel,
- old_prefix,
- new_prefix)
- # Relocate links to the new install prefix
- links = [link for link in buildinfo.get('relocate_links', [])]
- relocate.relocate_links(
- links, old_layout_root, old_prefix, new_prefix
+ if "macho" in platform.binary_formats:
+ relocate.relocate_macho_binaries(
+ files_to_relocate,
+ old_layout_root,
+ new_layout_root,
+ prefix_to_prefix_bin,
+ rel,
+ old_prefix,
+ new_prefix,
)
+ if "elf" in platform.binary_formats:
+ relocate.relocate_elf_binaries(
+ files_to_relocate,
+ old_layout_root,
+ new_layout_root,
+ prefix_to_prefix_bin,
+ rel,
+ old_prefix,
+ new_prefix,
+ )
+ # Relocate links to the new install prefix
+ links = [link for link in buildinfo.get("relocate_links", [])]
+ relocate.relocate_links(links, old_layout_root, old_prefix, new_prefix)
# For all buildcaches
# relocate the install prefixes in text files including dependencies
@@ -1505,11 +1529,17 @@ def relocate_package(spec, allow_root):
paths_to_relocate = [old_prefix, old_layout_root]
paths_to_relocate.extend(prefix_to_hash.keys())
- files_to_relocate = list(filter(
- lambda pathname: not relocate.file_is_relocatable(
- pathname, paths_to_relocate=paths_to_relocate),
- map(lambda filename: os.path.join(workdir, filename),
- buildinfo['relocate_binaries'])))
+ files_to_relocate = list(
+ filter(
+ lambda pathname: not relocate.file_is_relocatable(
+ pathname, paths_to_relocate=paths_to_relocate
+ ),
+ map(
+ lambda filename: os.path.join(workdir, filename),
+ buildinfo["relocate_binaries"],
+ ),
+ )
+ )
# relocate the install prefixes in binary files including dependencies
relocate.relocate_text_bin(files_to_relocate, prefix_to_prefix_bin)
@@ -1522,19 +1552,19 @@ def relocate_package(spec, allow_root):
def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum):
stagepath = os.path.dirname(filename)
- spackfile_name = tarball_name(spec, '.spack')
+ spackfile_name = tarball_name(spec, ".spack")
spackfile_path = os.path.join(stagepath, spackfile_name)
- tarfile_name = tarball_name(spec, '.tar.gz')
+ tarfile_name = tarball_name(spec, ".tar.gz")
tarfile_path = os.path.join(extract_to, tarfile_name)
- deprecated_yaml_name = tarball_name(spec, '.spec.yaml')
+ deprecated_yaml_name = tarball_name(spec, ".spec.yaml")
deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name)
- json_name = tarball_name(spec, '.spec.json')
+ json_name = tarball_name(spec, ".spec.json")
json_path = os.path.join(extract_to, json_name)
- with closing(tarfile.open(spackfile_path, 'r')) as tar:
+ with closing(tarfile.open(spackfile_path, "r")) as tar:
tar.extractall(extract_to)
# some buildcache tarfiles use bzip2 compression
if not os.path.exists(tarfile_path):
- tarfile_name = tarball_name(spec, '.tar.bz2')
+ tarfile_name = tarball_name(spec, ".tar.bz2")
tarfile_path = os.path.join(extract_to, tarfile_name)
if os.path.exists(json_path):
@@ -1542,34 +1572,36 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum
elif os.path.exists(deprecated_yaml_path):
specfile_path = deprecated_yaml_path
else:
- raise ValueError('Cannot find spec file for {0}.'.format(extract_to))
+ raise ValueError("Cannot find spec file for {0}.".format(extract_to))
if not unsigned:
- if os.path.exists('%s.asc' % specfile_path):
- suppress = config.get('config:suppress_gpg_warnings', False)
+ if os.path.exists("%s.asc" % specfile_path):
+ suppress = config.get("config:suppress_gpg_warnings", False)
try:
- spack.util.gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
+ spack.util.gpg.verify("%s.asc" % specfile_path, specfile_path, suppress)
except Exception:
- raise NoVerifyException("Spack was unable to verify package "
- "signature, please obtain and trust the "
- "correct public key.")
+ raise NoVerifyException(
+ "Spack was unable to verify package "
+ "signature, please obtain and trust the "
+ "correct public key."
+ )
else:
raise UnsignedPackageException(
- "To install unsigned packages, use the --no-check-signature option.")
+ "To install unsigned packages, use the --no-check-signature option."
+ )
# get the sha256 checksum of the tarball
local_checksum = checksum_tarball(tarfile_path)
# if the checksums don't match don't install
- if local_checksum != remote_checksum['hash']:
+ if local_checksum != remote_checksum["hash"]:
raise NoChecksumException(
- "Package tarball failed checksum verification.\n"
- "It cannot be installed.")
+ "Package tarball failed checksum verification.\n" "It cannot be installed."
+ )
return tarfile_path
-def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
- force=False):
+def extract_tarball(spec, download_result, allow_root=False, unsigned=False, force=False):
"""
extract binary tarball for given package into install area
"""
@@ -1579,31 +1611,32 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
else:
raise NoOverwriteException(str(spec.prefix))
- specfile_path = download_result['specfile_stage'].save_filename
+ specfile_path = download_result["specfile_stage"].save_filename
- with open(specfile_path, 'r') as inputfile:
+ with open(specfile_path, "r") as inputfile:
content = inputfile.read()
- if specfile_path.endswith('.json.sig'):
+ if specfile_path.endswith(".json.sig"):
spec_dict = Spec.extract_json_from_clearsig(content)
- elif specfile_path.endswith('.json'):
+ elif specfile_path.endswith(".json"):
spec_dict = sjson.load(content)
else:
spec_dict = syaml.load(content)
- bchecksum = spec_dict['binary_cache_checksum']
- filename = download_result['tarball_stage'].save_filename
- signature_verified = download_result['signature_verified']
+ bchecksum = spec_dict["binary_cache_checksum"]
+ filename = download_result["tarball_stage"].save_filename
+ signature_verified = download_result["signature_verified"]
tmpdir = None
- if ('buildcache_layout_version' not in spec_dict or
- int(spec_dict['buildcache_layout_version']) < 1):
+ if (
+ "buildcache_layout_version" not in spec_dict
+ or int(spec_dict["buildcache_layout_version"]) < 1
+ ):
# Handle the older buildcache layout where the .spack file
# contains a spec json/yaml, maybe an .asc file (signature),
# and another tarball containing the actual install tree.
tmpdir = tempfile.mkdtemp()
try:
- tarfile_path = _extract_inner_tarball(
- spec, filename, tmpdir, unsigned, bchecksum)
+ tarfile_path = _extract_inner_tarball(spec, filename, tmpdir, unsigned, bchecksum)
except Exception as e:
_delete_staged_downloads(download_result)
shutil.rmtree(tmpdir)
@@ -1618,38 +1651,36 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
if not unsigned and not signature_verified:
raise UnsignedPackageException(
- "To install unsigned packages, use the --no-check-signature option.")
+ "To install unsigned packages, use the --no-check-signature option."
+ )
# compute the sha256 checksum of the tarball
local_checksum = checksum_tarball(tarfile_path)
# if the checksums don't match don't install
- if local_checksum != bchecksum['hash']:
+ if local_checksum != bchecksum["hash"]:
_delete_staged_downloads(download_result)
raise NoChecksumException(
- "Package tarball failed checksum verification.\n"
- "It cannot be installed.")
+ "Package tarball failed checksum verification.\n" "It cannot be installed."
+ )
- new_relative_prefix = str(os.path.relpath(spec.prefix,
- spack.store.layout.root))
+ new_relative_prefix = str(os.path.relpath(spec.prefix, spack.store.layout.root))
# if the original relative prefix is in the spec file use it
- buildinfo = spec_dict.get('buildinfo', {})
- old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
- rel = buildinfo.get('relative_rpaths')
- info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s'
- tty.debug(info %
- (old_relative_prefix, new_relative_prefix, rel))
+ buildinfo = spec_dict.get("buildinfo", {})
+ old_relative_prefix = buildinfo.get("relative_prefix", new_relative_prefix)
+ rel = buildinfo.get("relative_rpaths")
+ info = "old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s"
+ tty.debug(info % (old_relative_prefix, new_relative_prefix, rel))
# Extract the tarball into the store root, presumably on the same filesystem.
# The directory created is the base directory name of the old prefix.
# Moving the old prefix name to the new prefix location should preserve
# hard links and symbolic links.
- extract_tmp = os.path.join(spack.store.layout.root, '.tmp')
+ extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
mkdirp(extract_tmp)
- extracted_dir = os.path.join(extract_tmp,
- old_relative_prefix.split(os.path.sep)[-1])
+ extracted_dir = os.path.join(extract_tmp, old_relative_prefix.split(os.path.sep)[-1])
- with closing(tarfile.open(tarfile_path, 'r')) as tar:
+ with closing(tarfile.open(tarfile_path, "r")) as tar:
try:
tar.extractall(path=extract_tmp)
except Exception as e:
@@ -1671,12 +1702,12 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False,
shutil.rmtree(spec.prefix)
raise e
else:
- manifest_file = os.path.join(spec.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ manifest_file = os.path.join(
+ spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
if not os.path.exists(manifest_file):
- spec_id = spec.format('{name}/{hash:7}')
- tty.warn('No manifest file in tarball for spec %s' % spec_id)
+ spec_id = spec.format("{name}/{hash:7}")
+ tty.warn("No manifest file in tarball for spec %s" % spec_id)
finally:
if tmpdir:
shutil.rmtree(tmpdir)
@@ -1718,12 +1749,12 @@ def install_root_node(spec, allow_root, unsigned=False, force=False, sha256=None
if sha256:
checker = spack.util.crypto.Checker(sha256)
msg = 'cannot verify checksum for "{0}" [expected={1}]'
- tarball_path = download_result['tarball_stage'].save_filename
+ tarball_path = download_result["tarball_stage"].save_filename
msg = msg.format(tarball_path, sha256)
if not checker.check(tarball_path):
_delete_staged_downloads(download_result)
raise spack.binary_distribution.NoChecksumException(msg)
- tty.debug('Verified SHA256 checksum of the build cache')
+ tty.debug("Verified SHA256 checksum of the build cache")
tty.msg('Installing "{0}" from a buildcache'.format(spec.format()))
extract_tarball(spec, download_result, allow_root, unsigned, force)
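
The checksum verification above (checksum_tarball earlier in extract_tarball, and spack.util.crypto.Checker here) amounts to streaming a SHA256 over the downloaded tarball and comparing it against the recorded hash. A minimal hashlib sketch of that idea, not the actual Spack implementation:

import hashlib

def sha256_of_file(path, block_size=1 << 20):
    """Stream the file in blocks so large tarballs are not loaded into memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(block_size), b""):
            digest.update(block)
    return digest.hexdigest()

# Mirrors the comparison in the diff: a mismatch aborts the install.
# if sha256_of_file(tarball_path) != bchecksum["hash"]:
#     raise NoChecksumException("Package tarball failed checksum verification.")
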
@@ -1742,7 +1773,7 @@ def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
force (bool): force installation if the spec is already present in the
local store
"""
- for node in spec.traverse(root=True, order='post', deptype=('link', 'run')):
+ for node in spec.traverse(root=True, order="post", deptype=("link", "run")):
install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
@@ -1750,20 +1781,23 @@ def try_direct_fetch(spec, mirrors=None):
"""
Try to find the spec directly on the configured mirrors
"""
- deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
- specfile_name = tarball_name(spec, '.spec.json')
- signed_specfile_name = tarball_name(spec, '.spec.json.sig')
+ deprecated_specfile_name = tarball_name(spec, ".spec.yaml")
+ specfile_name = tarball_name(spec, ".spec.json")
+ signed_specfile_name = tarball_name(spec, ".spec.json.sig")
specfile_is_signed = False
specfile_is_json = True
found_specs = []
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
buildcache_fetch_url_yaml = url_util.join(
- mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name)
+ mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name
+ )
buildcache_fetch_url_json = url_util.join(
- mirror.fetch_url, _build_cache_relative_path, specfile_name)
+ mirror.fetch_url, _build_cache_relative_path, specfile_name
+ )
buildcache_fetch_url_signed_json = url_util.join(
- mirror.fetch_url, _build_cache_relative_path, signed_specfile_name)
+ mirror.fetch_url, _build_cache_relative_path, signed_specfile_name
+ )
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
specfile_is_signed = True
@@ -1775,14 +1809,22 @@ def try_direct_fetch(spec, mirrors=None):
_, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
specfile_is_json = False
except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
- tty.debug('Did not find {0} on {1}'.format(
- specfile_name, buildcache_fetch_url_signed_json), url_err)
- tty.debug('Did not find {0} on {1}'.format(
- specfile_name, buildcache_fetch_url_json), url_err_x)
- tty.debug('Did not find {0} on {1}'.format(
- specfile_name, buildcache_fetch_url_yaml), url_err_y)
+ tty.debug(
+ "Did not find {0} on {1}".format(
+ specfile_name, buildcache_fetch_url_signed_json
+ ),
+ url_err,
+ )
+ tty.debug(
+ "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
+ url_err_x,
+ )
+ tty.debug(
+ "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_yaml),
+ url_err_y,
+ )
continue
- specfile_contents = codecs.getreader('utf-8')(fs).read()
+ specfile_contents = codecs.getreader("utf-8")(fs).read()
# read the spec from the build cache file. All specs in build caches
# are concrete (as they are built) so we need to mark this spec
@@ -1796,10 +1838,12 @@ def try_direct_fetch(spec, mirrors=None):
fetched_spec = Spec.from_yaml(specfile_contents)
fetched_spec._mark_concrete()
- found_specs.append({
- 'mirror_url': mirror.fetch_url,
- 'spec': fetched_spec,
- })
+ found_specs.append(
+ {
+ "mirror_url": mirror.fetch_url,
+ "spec": fetched_spec,
+ }
+ )
return found_specs
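
The fallback order in try_direct_fetch above is: signed JSON specfile first, then plain JSON, then the deprecated YAML name. A small sketch of how the three candidate URLs are laid out, using posixpath.join as a stand-in for url_util.join and illustrative mirror URL and file names:

import posixpath

mirror_fetch_url = "https://example.org/mirror"  # illustrative
build_cache_path = "build_cache"  # stands in for _build_cache_relative_path

# Hypothetical outputs of tarball_name(spec, ext); the real naming scheme
# is produced by tarball_name() and is not reproduced here.
candidates = [
    "example.spec.json.sig",  # signed JSON, tried first
    "example.spec.json",      # plain JSON
    "example.spec.yaml",      # deprecated YAML, last resort
]
for name in candidates:
    print(posixpath.join(mirror_fetch_url, build_cache_path, name))
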
@@ -1861,44 +1905,43 @@ def clear_spec_cache():
def get_keys(install=False, trust=False, force=False, mirrors=None):
- """Get pgp public keys available on mirror with suffix .pub
- """
- mirror_collection = (mirrors or spack.mirror.MirrorCollection())
+ """Get pgp public keys available on mirror with suffix .pub"""
+ mirror_collection = mirrors or spack.mirror.MirrorCollection()
if not mirror_collection:
- tty.die("Please add a spack mirror to allow " +
- "download of build caches.")
+ tty.die("Please add a spack mirror to allow " + "download of build caches.")
for mirror in mirror_collection.values():
fetch_url = mirror.fetch_url
- keys_url = url_util.join(fetch_url,
- _build_cache_relative_path,
- _build_cache_keys_relative_path)
- keys_index = url_util.join(keys_url, 'index.json')
+ keys_url = url_util.join(
+ fetch_url, _build_cache_relative_path, _build_cache_keys_relative_path
+ )
+ keys_index = url_util.join(keys_url, "index.json")
- tty.debug('Finding public keys in {0}'.format(
- url_util.format(fetch_url)))
+ tty.debug("Finding public keys in {0}".format(url_util.format(fetch_url)))
try:
_, _, json_file = web_util.read_from_url(keys_index)
- json_index = sjson.load(codecs.getreader('utf-8')(json_file))
+ json_index = sjson.load(codecs.getreader("utf-8")(json_file))
except (URLError, web_util.SpackWebError) as url_err:
if web_util.url_exists(keys_index):
err_msg = [
- 'Unable to find public keys in {0},',
- ' caught exception attempting to read from {1}.',
+ "Unable to find public keys in {0},",
+ " caught exception attempting to read from {1}.",
]
- tty.error(''.join(err_msg).format(
- url_util.format(fetch_url),
- url_util.format(keys_index)))
+ tty.error(
+ "".join(err_msg).format(
+ url_util.format(fetch_url), url_util.format(keys_index)
+ )
+ )
tty.debug(url_err)
continue
- for fingerprint, key_attributes in json_index['keys'].items():
- link = os.path.join(keys_url, fingerprint + '.pub')
+ for fingerprint, key_attributes in json_index["keys"].items():
+ link = os.path.join(keys_url, fingerprint + ".pub")
with Stage(link, name="build_cache", keep=True) as stage:
if os.path.exists(stage.save_filename) and force:
@@ -1909,38 +1952,39 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
except fs.FetchError:
continue
- tty.debug('Found key {0}'.format(fingerprint))
+ tty.debug("Found key {0}".format(fingerprint))
if install:
if trust:
spack.util.gpg.trust(stage.save_filename)
- tty.debug('Added this key to trusted keys.')
+ tty.debug("Added this key to trusted keys.")
else:
- tty.debug('Will not add this key to trusted keys.'
- 'Use -t to install all downloaded keys')
+ tty.debug(
+                    "Will not add this key to trusted keys. "
+ "Use -t to install all downloaded keys"
+ )
def push_keys(*mirrors, **kwargs):
"""
Upload pgp public keys to the given mirrors
"""
- keys = kwargs.get('keys')
- regenerate_index = kwargs.get('regenerate_index', False)
- tmpdir = kwargs.get('tmpdir')
+ keys = kwargs.get("keys")
+ regenerate_index = kwargs.get("regenerate_index", False)
+ tmpdir = kwargs.get("tmpdir")
remove_tmpdir = False
keys = spack.util.gpg.public_keys(*(keys or []))
try:
for mirror in mirrors:
- push_url = getattr(mirror, 'push_url', mirror)
- keys_url = url_util.join(push_url,
- _build_cache_relative_path,
- _build_cache_keys_relative_path)
+ push_url = getattr(mirror, "push_url", mirror)
+ keys_url = url_util.join(
+ push_url, _build_cache_relative_path, _build_cache_keys_relative_path
+ )
keys_local = url_util.local_file_path(keys_url)
- verb = 'Writing' if keys_local else 'Uploading'
- tty.debug('{0} public keys to {1}'.format(
- verb, url_util.format(push_url)))
+ verb = "Writing" if keys_local else "Uploading"
+ tty.debug("{0} public keys to {1}".format(verb, url_util.format(push_url)))
if keys_local: # mirror is local, don't bother with the tmpdir
prefix = keys_local
@@ -1955,8 +1999,8 @@ def push_keys(*mirrors, **kwargs):
prefix = tmpdir
for fingerprint in keys:
- tty.debug(' ' + fingerprint)
- filename = fingerprint + '.pub'
+ tty.debug(" " + fingerprint)
+ filename = fingerprint + ".pub"
export_target = os.path.join(prefix, filename)
@@ -1970,9 +2014,8 @@ def push_keys(*mirrors, **kwargs):
# uploaded to the mirror.
if not keys_local:
spack.util.web.push_to_url(
- export_target,
- url_util.join(keys_url, filename),
- keep_original=False)
+ export_target, url_util.join(keys_url, filename), keep_original=False
+ )
if regenerate_index:
if keys_local:
@@ -1986,21 +2029,20 @@ def push_keys(*mirrors, **kwargs):
def needs_rebuild(spec, mirror_url):
if not spec.concrete:
- raise ValueError('spec must be concrete to check against mirror')
+ raise ValueError("spec must be concrete to check against mirror")
pkg_name = spec.name
pkg_version = spec.version
pkg_hash = spec.dag_hash()
- tty.debug('Checking {0}-{1}, dag_hash = {2}'.format(
- pkg_name, pkg_version, pkg_hash))
+ tty.debug("Checking {0}-{1}, dag_hash = {2}".format(pkg_name, pkg_version, pkg_hash))
tty.debug(spec.tree())
# Try to retrieve the specfile directly, based on the known
# format of the name, in order to determine if the package
# needs to be rebuilt.
cache_prefix = build_cache_prefix(mirror_url)
- specfile_name = tarball_name(spec, '.spec.json')
+ specfile_name = tarball_name(spec, ".spec.json")
specfile_path = os.path.join(cache_prefix, specfile_name)
# Only check for the presence of the json version of the spec. If the
@@ -2026,26 +2068,23 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
"""
rebuilds = {}
for mirror in spack.mirror.MirrorCollection(mirrors).values():
- tty.debug('Checking for built specs at {0}'.format(mirror.fetch_url))
+ tty.debug("Checking for built specs at {0}".format(mirror.fetch_url))
rebuild_list = []
for spec in specs:
if needs_rebuild(spec, mirror.fetch_url):
- rebuild_list.append({
- 'short_spec': spec.short_spec,
- 'hash': spec.dag_hash()
- })
+ rebuild_list.append({"short_spec": spec.short_spec, "hash": spec.dag_hash()})
if rebuild_list:
rebuilds[mirror.fetch_url] = {
- 'mirrorName': mirror.name,
- 'mirrorUrl': mirror.fetch_url,
- 'rebuildSpecs': rebuild_list
+ "mirrorName": mirror.name,
+ "mirrorUrl": mirror.fetch_url,
+ "rebuildSpecs": rebuild_list,
}
if output_file:
- with open(output_file, 'w') as outf:
+ with open(output_file, "w") as outf:
outf.write(json.dumps(rebuilds))
return 1 if rebuilds else 0
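
For reference, the report written by check_specs_against_mirrors above is keyed by mirror fetch URL, with one rebuildSpecs list per mirror that has out-of-date entries. A sketch of that shape with illustrative values:

import json

rebuilds = {
    "https://example.org/mirror": {  # illustrative mirror URL
        "mirrorName": "example",
        "mirrorUrl": "https://example.org/mirror",
        "rebuildSpecs": [{"short_spec": "zlib@1.2.12", "hash": "abcdef1234567890"}],
    }
}
print(json.dumps(rebuilds, indent=2))
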
@@ -2053,13 +2092,12 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None):
def _download_buildcache_entry(mirror_root, descriptions):
for description in descriptions:
- path = description['path']
+ path = description["path"]
mkdirp(path)
- fail_if_missing = description['required']
- for url in description['url']:
+ fail_if_missing = description["required"]
+ for url in description["url"]:
description_url = os.path.join(mirror_root, url)
- stage = Stage(
- description_url, name="build_cache", path=path, keep=True)
+ stage = Stage(description_url, name="build_cache", path=path, keep=True)
try:
stage.fetch()
break
@@ -2067,26 +2105,23 @@ def _download_buildcache_entry(mirror_root, descriptions):
tty.debug(e)
else:
if fail_if_missing:
- tty.error('Failed to download required url {0}'.format(
- description_url))
+ tty.error("Failed to download required url {0}".format(description_url))
return False
return True
def download_buildcache_entry(file_descriptions, mirror_url=None):
if not mirror_url and not spack.mirror.MirrorCollection():
- tty.die("Please provide or add a spack mirror to allow " +
- "download of buildcache entries.")
+ tty.die(
+ "Please provide or add a spack mirror to allow " + "download of buildcache entries."
+ )
if mirror_url:
- mirror_root = os.path.join(
- mirror_url, _build_cache_relative_path)
+ mirror_root = os.path.join(mirror_url, _build_cache_relative_path)
return _download_buildcache_entry(mirror_root, file_descriptions)
for mirror in spack.mirror.MirrorCollection().values():
- mirror_root = os.path.join(
- mirror.fetch_url,
- _build_cache_relative_path)
+ mirror_root = os.path.join(mirror.fetch_url, _build_cache_relative_path)
if _download_buildcache_entry(mirror_root, file_descriptions):
return True
@@ -2096,9 +2131,7 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
return False
-def download_single_spec(
- concrete_spec, destination, mirror_url=None
-):
+def download_single_spec(concrete_spec, destination, mirror_url=None):
"""Download the buildcache files for a single concrete spec.
Args:
@@ -2106,22 +2139,25 @@ def download_single_spec(
destination (str): path where to put the downloaded buildcache
mirror_url (str): url of the mirror from which to download
"""
- tarfile_name = tarball_name(concrete_spec, '.spack')
+ tarfile_name = tarball_name(concrete_spec, ".spack")
tarball_dir_name = tarball_directory_name(concrete_spec)
tarball_path_name = os.path.join(tarball_dir_name, tarfile_name)
local_tarball_path = os.path.join(destination, tarball_dir_name)
files_to_fetch = [
{
- 'url': [tarball_path_name],
- 'path': local_tarball_path,
- 'required': True,
- }, {
- 'url': [tarball_name(concrete_spec, '.spec.json.sig'),
- tarball_name(concrete_spec, '.spec.json'),
- tarball_name(concrete_spec, '.spec.yaml')],
- 'path': destination,
- 'required': True,
+ "url": [tarball_path_name],
+ "path": local_tarball_path,
+ "required": True,
+ },
+ {
+ "url": [
+ tarball_name(concrete_spec, ".spec.json.sig"),
+ tarball_name(concrete_spec, ".spec.json"),
+ tarball_name(concrete_spec, ".spec.yaml"),
+ ],
+ "path": destination,
+ "required": True,
},
]
@@ -2130,6 +2166,7 @@ def download_single_spec(
class BinaryCacheQuery(object):
"""Callable object to query if a spec is in a binary cache"""
+
def __init__(self, all_architectures):
"""
Args:
@@ -2148,15 +2185,13 @@ class BinaryCacheQuery(object):
def __call__(self, spec, **kwargs):
matches = []
- if spec.startswith('/'):
+ if spec.startswith("/"):
# Matching a DAG hash
- query_hash = spec.replace('/', '')
+ query_hash = spec.replace("/", "")
for candidate_spec in self.possible_specs:
if candidate_spec.dag_hash().startswith(query_hash):
matches.append(candidate_spec)
else:
# Matching a spec constraint
- matches = [
- s for s in self.possible_specs if s.satisfies(spec)
- ]
+ matches = [s for s in self.possible_specs if s.satisfies(spec)]
return matches
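
BinaryCacheQuery.__call__ above supports two query forms: a leading "/" triggers a DAG-hash prefix match, anything else is treated as a spec constraint. A self-contained sketch of that branch, with a hypothetical FakeSpec standing in for spack.spec.Spec:

class FakeSpec(object):
    """Hypothetical stand-in for a concrete spack.spec.Spec."""

    def __init__(self, name, dag_hash):
        self.name = name
        self._hash = dag_hash

    def dag_hash(self):
        return self._hash

    def satisfies(self, constraint):
        # Simplification: the real Spec.satisfies also checks versions,
        # variants, compilers, targets, ...
        return self.name == constraint


def query(spec, possible_specs):
    if spec.startswith("/"):
        # Matching a DAG hash prefix, e.g. "/abc1"
        query_hash = spec.replace("/", "")
        return [s for s in possible_specs if s.dag_hash().startswith(query_hash)]
    # Matching a spec constraint, e.g. "cmake"
    return [s for s in possible_specs if s.satisfies(spec)]


candidates = [FakeSpec("zlib", "abc123def"), FakeSpec("cmake", "ffee0011")]
print([s.name for s in query("/abc1", candidates)])  # ['zlib']
print([s.name for s in query("cmake", candidates)])  # ['cmake']
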
diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py
index 8cd820f6c0..fa6a0e29c2 100644
--- a/lib/spack/spack/bootstrap.py
+++ b/lib/spack/spack/bootstrap.py
@@ -42,7 +42,7 @@ import spack.util.spack_yaml
import spack.util.url
#: Name of the file containing metadata about the bootstrapping source
-METADATA_YAML_FILENAME = 'metadata.yaml'
+METADATA_YAML_FILENAME = "metadata.yaml"
#: Map a bootstrapper type to the corresponding class
_bootstrap_methods = {}
@@ -55,9 +55,11 @@ def _bootstrapper(type):
Args:
type (str): string identifying the class
"""
+
def _register(cls):
_bootstrap_methods[type] = cls
return cls
+
return _register
@@ -74,12 +76,12 @@ def _try_import_from_store(module, query_spec, query_info=None):
# If it is a string assume it's one of the root specs by this module
if isinstance(query_spec, six.string_types):
# We have to run as part of this python interpreter
- query_spec += ' ^' + spec_for_current_python()
+ query_spec += " ^" + spec_for_current_python()
installed_specs = spack.store.db.query(query_spec, installed=True)
for candidate_spec in installed_specs:
- pkg = candidate_spec['python'].package
+ pkg = candidate_spec["python"].package
module_paths = [
os.path.join(candidate_spec.prefix, pkg.purelib),
os.path.join(candidate_spec.prefix, pkg.platlib),
@@ -98,17 +100,19 @@ def _try_import_from_store(module, query_spec, query_info=None):
try:
_fix_ext_suffix(candidate_spec)
if _python_import(module):
- msg = ('[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
- 'provides the "{0}" Python module').format(
- module, query_spec, candidate_spec.dag_hash()
- )
+ msg = (
+ '[BOOTSTRAP MODULE {0}] The installed spec "{1}/{2}" '
+ 'provides the "{0}" Python module'
+ ).format(module, query_spec, candidate_spec.dag_hash())
tty.debug(msg)
if query_info is not None:
- query_info['spec'] = candidate_spec
+ query_info["spec"] = candidate_spec
return True
except Exception as e:
- msg = ('unexpected error while trying to import module '
- '"{0}" from spec "{1}" [error="{2}"]')
+ msg = (
+ "unexpected error while trying to import module "
+ '"{0}" from spec "{1}" [error="{2}"]'
+ )
tty.warn(msg.format(module, candidate_spec, str(e)))
else:
msg = "Spec {0} did not provide module {1}"
@@ -134,10 +138,10 @@ def _fix_ext_suffix(candidate_spec):
# [RHEL + ppc64le]: https://github.com/spack/spack/issues/25734
#
_suffix_to_be_checked = {
- 'ppc64le': {
- 'glob': '*.cpython-*-powerpc64le-linux-gnu.so',
- 're': r'.cpython-[\w]*-powerpc64le-linux-gnu.so',
- 'fmt': r'{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so'
+ "ppc64le": {
+ "glob": "*.cpython-*-powerpc64le-linux-gnu.so",
+ "re": r".cpython-[\w]*-powerpc64le-linux-gnu.so",
+ "fmt": r"{module}.cpython-{major}{minor}m-powerpc64le-linux-gnu.so",
}
}
@@ -148,12 +152,12 @@ def _fix_ext_suffix(candidate_spec):
# If there's no EXT_SUFFIX (Python < 3.5) or the suffix matches
# the expectations, return since the package is surely good
- ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
+ ext_suffix = sysconfig.get_config_var("EXT_SUFFIX")
if ext_suffix is None:
return
expected = _suffix_to_be_checked[str(generic_target)]
- if fnmatch.fnmatch(ext_suffix, expected['glob']):
+ if fnmatch.fnmatch(ext_suffix, expected["glob"]):
return
# If we are here it means the current interpreter expects different names
@@ -163,8 +167,8 @@ def _fix_ext_suffix(candidate_spec):
# Check if standard names are installed and if we have to create
# link for this interpreter
- standard_extensions = fs.find(candidate_spec.prefix, expected['glob'])
- link_names = [re.sub(expected['re'], ext_suffix, s) for s in standard_extensions]
+ standard_extensions = fs.find(candidate_spec.prefix, expected["glob"])
+ link_names = [re.sub(expected["re"], ext_suffix, s) for s in standard_extensions]
for file_name, link_name in zip(standard_extensions, link_names):
if os.path.exists(link_name):
continue
@@ -172,12 +176,15 @@ def _fix_ext_suffix(candidate_spec):
# Check if this interpreter installed something and we have to create
# links for a standard CPython interpreter
- non_standard_extensions = fs.find(candidate_spec.prefix, '*' + ext_suffix)
+ non_standard_extensions = fs.find(candidate_spec.prefix, "*" + ext_suffix)
for abs_path in non_standard_extensions:
directory, filename = os.path.split(abs_path)
- module = filename.split('.')[0]
- link_name = os.path.join(directory, expected['fmt'].format(
- module=module, major=sys.version_info[0], minor=sys.version_info[1])
+ module = filename.split(".")[0]
+ link_name = os.path.join(
+ directory,
+ expected["fmt"].format(
+ module=module, major=sys.version_info[0], minor=sys.version_info[1]
+ ),
)
if os.path.exists(link_name):
continue
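
The link-name computation above (part of the ppc64le EXT_SUFFIX workaround) is a plain re.sub: the standard CPython suffix is swapped for the suffix this interpreter expects. A sketch with illustrative file names and suffix:

import re

expected_re = r".cpython-[\w]*-powerpc64le-linux-gnu.so"
ext_suffix = ".cpython-38-powerpc64le-linux-gnu.so"  # hypothetical EXT_SUFFIX

standard_name = "clingo.cpython-38m-powerpc64le-linux-gnu.so"
link_name = re.sub(expected_re, ext_suffix, standard_name)
print(link_name)  # clingo.cpython-38-powerpc64le-linux-gnu.so
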
@@ -198,7 +205,7 @@ def _executables_in_store(executables, query_spec, query_info=None):
query_info (dict or None): if a dict is passed it is populated with the
command found and the concrete spec providing it
"""
- executables_str = ', '.join(executables)
+ executables_str = ", ".join(executables)
msg = "[BOOTSTRAP EXECUTABLES {0}] Try installed specs with query '{1}'"
tty.debug(msg.format(executables_str, query_spec))
installed_specs = spack.store.db.query(query_spec, installed=True)
@@ -207,25 +214,27 @@ def _executables_in_store(executables, query_spec, query_info=None):
bin_dir = concrete_spec.prefix.bin
# IF we have a "bin" directory and it contains
# the executables we are looking for
- if (os.path.exists(bin_dir) and os.path.isdir(bin_dir) and
- spack.util.executable.which_string(*executables, path=bin_dir)):
- spack.util.environment.path_put_first('PATH', [bin_dir])
+ if (
+ os.path.exists(bin_dir)
+ and os.path.isdir(bin_dir)
+ and spack.util.executable.which_string(*executables, path=bin_dir)
+ ):
+ spack.util.environment.path_put_first("PATH", [bin_dir])
if query_info is not None:
- query_info['command'] = spack.util.executable.which(
- *executables, path=bin_dir
- )
- query_info['spec'] = concrete_spec
+ query_info["command"] = spack.util.executable.which(*executables, path=bin_dir)
+ query_info["spec"] = concrete_spec
return True
return False
class _BootstrapperBase(object):
"""Base class to derive types that can bootstrap software for Spack"""
- config_scope_name = ''
+
+ config_scope_name = ""
def __init__(self, conf):
- self.name = conf['name']
- self.url = conf['info']['url']
+ self.name = conf["name"]
+ self.url = conf["info"]["url"]
@property
def mirror_url(self):
@@ -234,7 +243,7 @@ class _BootstrapperBase(object):
return spack.util.url.format(self.url)
# Check for :// and assume it's an url if we find it
- if '://' in self.url:
+ if "://" in self.url:
return self.url
# Otherwise, it's a relative path
@@ -243,19 +252,19 @@ class _BootstrapperBase(object):
@property
def mirror_scope(self):
return spack.config.InternalConfigScope(
- self.config_scope_name, {'mirrors:': {self.name: self.mirror_url}}
+ self.config_scope_name, {"mirrors:": {self.name: self.mirror_url}}
)
-@_bootstrapper(type='buildcache')
+@_bootstrapper(type="buildcache")
class _BuildcacheBootstrapper(_BootstrapperBase):
"""Install the software needed during bootstrapping from a buildcache."""
- config_scope_name = 'bootstrap_buildcache'
+ config_scope_name = "bootstrap_buildcache"
def __init__(self, conf):
super(_BuildcacheBootstrapper, self).__init__(conf)
- self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
+ self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
self.last_search = None
@staticmethod
@@ -277,7 +286,7 @@ class _BuildcacheBootstrapper(_BootstrapperBase):
def _read_metadata(self, package_name):
"""Return metadata about the given package."""
- json_filename = '{0}.json'.format(package_name)
+ json_filename = "{0}.json".format(package_name)
json_dir = self.metadata_dir
json_path = os.path.join(json_dir, json_filename)
with open(json_path) as f:
@@ -294,32 +303,22 @@ class _BuildcacheBootstrapper(_BootstrapperBase):
"cc": "/dev/null",
"cxx": "/dev/null",
"f77": "/dev/null",
- "fc": "/dev/null"
+ "fc": "/dev/null",
},
"spec": str(index_spec.compiler),
- "target": str(index_spec.target.family)
+ "target": str(index_spec.target.family),
}
with spack.platforms.use_platform(bincache_platform):
- with spack.config.override(
- 'compilers', [{'compiler': compiler_entry}]
- ):
- spec_str = '/' + pkg_hash
- query = spack.binary_distribution.BinaryCacheQuery(
- all_architectures=True
- )
+ with spack.config.override("compilers", [{"compiler": compiler_entry}]):
+ spec_str = "/" + pkg_hash
+ query = spack.binary_distribution.BinaryCacheQuery(all_architectures=True)
matches = spack.store.find([spec_str], multiple=False, query_fn=query)
for match in matches:
spack.binary_distribution.install_root_node(
- match,
- allow_root=True,
- unsigned=True,
- force=True,
- sha256=pkg_sha256
+ match, allow_root=True, unsigned=True, force=True, sha256=pkg_sha256
)
- def _install_and_test(
- self, abstract_spec, bincache_platform, bincache_data, test_fn
- ):
+ def _install_and_test(self, abstract_spec, bincache_platform, bincache_data, test_fn):
# Ensure we see only the buildcache being used to bootstrap
with spack.config.override(self.mirror_scope):
# This index is currently needed to get the compiler used to build some
@@ -330,10 +329,10 @@ class _BuildcacheBootstrapper(_BootstrapperBase):
if not index:
raise RuntimeError("The binary index is empty")
- for item in bincache_data['verified']:
- candidate_spec = item['spec']
+ for item in bincache_data["verified"]:
+ candidate_spec = item["spec"]
# This will be None for things that don't depend on python
- python_spec = item.get('python', None)
+ python_spec = item.get("python", None)
# Skip specs which are not compatible
if not abstract_spec.satisfies(candidate_spec):
continue
@@ -341,11 +340,9 @@ class _BuildcacheBootstrapper(_BootstrapperBase):
if python_spec is not None and python_spec not in abstract_spec:
continue
- for pkg_name, pkg_hash, pkg_sha256 in item['binaries']:
+ for pkg_name, pkg_hash, pkg_sha256 in item["binaries"]:
# TODO: undo installations that didn't complete?
- self._install_by_hash(
- pkg_hash, pkg_sha256, index, bincache_platform
- )
+ self._install_by_hash(pkg_hash, pkg_sha256, index, bincache_platform)
info = {}
if test_fn(query_spec=abstract_spec, query_info=info):
@@ -360,12 +357,10 @@ class _BuildcacheBootstrapper(_BootstrapperBase):
tty.info("Bootstrapping {0} from pre-built binaries".format(module))
abstract_spec, bincache_platform = self._spec_and_platform(
- abstract_spec_str + ' ^' + spec_for_current_python()
+ abstract_spec_str + " ^" + spec_for_current_python()
)
data = self._read_metadata(module)
- return self._install_and_test(
- abstract_spec, bincache_platform, data, test_fn
- )
+ return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)
def try_search_path(self, executables, abstract_spec_str):
test_fn, info = functools.partial(_executables_in_store, executables), {}
@@ -376,19 +371,18 @@ class _BuildcacheBootstrapper(_BootstrapperBase):
abstract_spec, bincache_platform = self._spec_and_platform(abstract_spec_str)
tty.info("Bootstrapping {0} from pre-built binaries".format(abstract_spec.name))
data = self._read_metadata(abstract_spec.name)
- return self._install_and_test(
- abstract_spec, bincache_platform, data, test_fn
- )
+ return self._install_and_test(abstract_spec, bincache_platform, data, test_fn)
-@_bootstrapper(type='install')
+@_bootstrapper(type="install")
class _SourceBootstrapper(_BootstrapperBase):
"""Install the software needed during bootstrapping from sources."""
- config_scope_name = 'bootstrap_source'
+
+ config_scope_name = "bootstrap_source"
def __init__(self, conf):
super(_SourceBootstrapper, self).__init__(conf)
- self.metadata_dir = spack.util.path.canonicalize_path(conf['metadata'])
+ self.metadata_dir = spack.util.path.canonicalize_path(conf["metadata"])
self.conf = conf
self.last_search = None
@@ -407,11 +401,9 @@ class _SourceBootstrapper(_BootstrapperBase):
# Try to build and install from sources
with spack_python_interpreter():
# Add hint to use frontend operating system on Cray
- concrete_spec = spack.spec.Spec(
- abstract_spec_str + ' ^' + spec_for_current_python()
- )
+ concrete_spec = spack.spec.Spec(abstract_spec_str + " ^" + spec_for_current_python())
- if module == 'clingo':
+ if module == "clingo":
# TODO: remove when the old concretizer is deprecated
concrete_spec._old_concretize(deprecation_warning=False)
else:
@@ -442,7 +434,7 @@ class _SourceBootstrapper(_BootstrapperBase):
_add_externals_if_missing()
concrete_spec = spack.spec.Spec(abstract_spec_str)
- if concrete_spec.name == 'patchelf':
+ if concrete_spec.name == "patchelf":
concrete_spec._old_concretize(deprecation_warning=False)
else:
concrete_spec.concretize()
@@ -461,15 +453,15 @@ def _make_bootstrapper(conf):
"""Return a bootstrap object built according to the
configuration argument
"""
- btype = conf['type']
+ btype = conf["type"]
return _bootstrap_methods[btype](conf)
def source_is_enabled_or_raise(conf):
"""Raise ValueError if the source is not enabled for bootstrapping"""
- trusted, name = spack.config.get('bootstrap:trusted'), conf['name']
+ trusted, name = spack.config.get("bootstrap:trusted"), conf["name"]
if not trusted.get(name, False):
- raise ValueError('source is not trusted')
+ raise ValueError("source is not trusted")
def spec_for_current_python():
@@ -481,13 +473,13 @@ def spec_for_current_python():
https://www.python.org/dev/peps/pep-0513/
https://stackoverflow.com/a/35801395/771663
"""
- version_str = '.'.join(str(x) for x in sys.version_info[:2])
- variant_str = ''
+ version_str = ".".join(str(x) for x in sys.version_info[:2])
+ variant_str = ""
if sys.version_info[0] == 2 and sys.version_info[1] == 7:
- unicode_size = sysconfig.get_config_var('Py_UNICODE_SIZE')
- variant_str = '+ucs4' if unicode_size == 4 else '~ucs4'
+ unicode_size = sysconfig.get_config_var("Py_UNICODE_SIZE")
+ variant_str = "+ucs4" if unicode_size == 4 else "~ucs4"
- spec_fmt = 'python@{0} {1}'
+ spec_fmt = "python@{0} {1}"
return spec_fmt.format(version_str, variant_str)
@@ -501,13 +493,11 @@ def spack_python_interpreter():
external_python = spec_for_current_python()
entry = {
- 'buildable': False,
- 'externals': [
- {'prefix': python_prefix, 'spec': str(external_python)}
- ]
+ "buildable": False,
+ "externals": [{"prefix": python_prefix, "spec": str(external_python)}],
}
- with spack.config.override('packages:python::', entry):
+ with spack.config.override("packages:python::", entry):
yield
@@ -538,7 +528,7 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
h = GroupedExceptionHandler()
for current_config in bootstrapping_sources():
- with h.forward(current_config['name']):
+ with h.forward(current_config["name"]):
source_is_enabled_or_raise(current_config)
b = _make_bootstrapper(current_config)
@@ -556,7 +546,7 @@ def ensure_module_importable_or_raise(module, abstract_spec=None):
msg += h.grouped_message(with_tracebacks=True)
else:
msg += h.grouped_message(with_tracebacks=False)
- msg += '\nRun `spack --debug ...` for more detailed errors'
+ msg += "\nRun `spack --debug ...` for more detailed errors"
raise ImportError(msg)
@@ -578,21 +568,21 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
if cmd:
return cmd
- executables_str = ', '.join(executables)
+ executables_str = ", ".join(executables)
h = GroupedExceptionHandler()
for current_config in bootstrapping_sources():
- with h.forward(current_config['name']):
+ with h.forward(current_config["name"]):
source_is_enabled_or_raise(current_config)
b = _make_bootstrapper(current_config)
if b.try_search_path(executables, abstract_spec):
# Additional environment variables needed
- concrete_spec, cmd = b.last_search['spec'], b.last_search['command']
+ concrete_spec, cmd = b.last_search["spec"], b.last_search["command"]
env_mods = spack.util.environment.EnvironmentModifications()
for dep in concrete_spec.traverse(
- root=True, order='post', deptype=('link', 'run')
+ root=True, order="post", deptype=("link", "run")
):
env_mods.extend(
spack.user_environment.environment_modifications_for_spec(
@@ -606,14 +596,14 @@ def ensure_executables_in_path_or_raise(executables, abstract_spec):
"expected at least one exception to have been raised at this point: "
"while bootstrapping {0}".format(executables_str)
)
- msg = 'cannot bootstrap any of the {0} executables '.format(executables_str)
+ msg = "cannot bootstrap any of the {0} executables ".format(executables_str)
if abstract_spec:
msg += 'from spec "{0}" '.format(abstract_spec)
if tty.is_debug():
msg += h.grouped_message(with_tracebacks=True)
else:
msg += h.grouped_message(with_tracebacks=False)
- msg += '\nRun `spack --debug ...` for more detailed errors'
+ msg += "\nRun `spack --debug ...` for more detailed errors"
raise RuntimeError(msg)
@@ -626,22 +616,17 @@ def _python_import(module):
def _bootstrap_config_scopes():
- tty.debug('[BOOTSTRAP CONFIG SCOPE] name=_builtin')
- config_scopes = [
- spack.config.InternalConfigScope('_builtin', spack.config.config_defaults)
- ]
- configuration_paths = (
- spack.config.configuration_defaults_path,
- ('bootstrap', _config_path())
- )
+ tty.debug("[BOOTSTRAP CONFIG SCOPE] name=_builtin")
+ config_scopes = [spack.config.InternalConfigScope("_builtin", spack.config.config_defaults)]
+ configuration_paths = (spack.config.configuration_defaults_path, ("bootstrap", _config_path()))
for name, path in configuration_paths:
platform = spack.platforms.host().name
platform_scope = spack.config.ConfigScope(
- '/'.join([name, platform]), os.path.join(path, platform)
+ "/".join([name, platform]), os.path.join(path, platform)
)
generic_scope = spack.config.ConfigScope(name, path)
config_scopes.extend([generic_scope, platform_scope])
- msg = '[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}'
+ msg = "[BOOTSTRAP CONFIG SCOPE] name={0}, path={1}"
tty.debug(msg.format(generic_scope.name, generic_scope.path))
tty.debug(msg.format(platform_scope.name, platform_scope.path))
return config_scopes
@@ -658,13 +643,13 @@ def _add_compilers_if_missing():
def _add_externals_if_missing():
search_list = [
# clingo
- spack.repo.path.get_pkg_class('cmake'),
- spack.repo.path.get_pkg_class('bison'),
+ spack.repo.path.get_pkg_class("cmake"),
+ spack.repo.path.get_pkg_class("bison"),
# GnuPG
- spack.repo.path.get_pkg_class('gawk')
+ spack.repo.path.get_pkg_class("gawk"),
]
detected_packages = spack.detection.by_executable(search_list)
- spack.detection.update_configuration(detected_packages, scope='bootstrap')
+ spack.detection.update_configuration(detected_packages, scope="bootstrap")
#: Reference counter for the bootstrapping configuration context manager
@@ -705,12 +690,8 @@ def _ensure_bootstrap_configuration():
# We may need to compile code from sources, so ensure we
# have compilers for the current platform
_add_compilers_if_missing()
- spack.config.set(
- 'bootstrap', user_configuration['bootstrap']
- )
- spack.config.set(
- 'config', user_configuration['config']
- )
+ spack.config.set("bootstrap", user_configuration["bootstrap"])
+ spack.config.set("config", user_configuration["config"])
with spack.modules.disable_modules():
with spack_python_interpreter():
yield
@@ -723,21 +704,17 @@ def _read_and_sanitize_configuration():
# Read the "config" section but pop the install tree (the entry will not be
# considered due to the use_store context manager, so it will be confusing
# to have it in the configuration).
- config_yaml = spack.config.get('config')
- config_yaml.pop('install_tree', None)
- user_configuration = {
- 'bootstrap': spack.config.get('bootstrap'),
- 'config': config_yaml
- }
+ config_yaml = spack.config.get("config")
+ config_yaml.pop("install_tree", None)
+ user_configuration = {"bootstrap": spack.config.get("bootstrap"), "config": config_yaml}
return user_configuration
def store_path():
"""Path to the store used for bootstrapped software"""
- enabled = spack.config.get('bootstrap:enable', True)
+ enabled = spack.config.get("bootstrap:enable", True)
if not enabled:
- msg = ('bootstrapping is currently disabled. '
- 'Use "spack bootstrap enable" to enable it')
+ msg = "bootstrapping is currently disabled. " 'Use "spack bootstrap enable" to enable it'
raise RuntimeError(msg)
return _store_path()
@@ -745,23 +722,17 @@ def store_path():
def _root_path():
"""Root of all the bootstrap related folders"""
- return spack.config.get(
- 'bootstrap:root', spack.paths.default_user_bootstrap_path
- )
+ return spack.config.get("bootstrap:root", spack.paths.default_user_bootstrap_path)
def _store_path():
bootstrap_root_path = _root_path()
- return spack.util.path.canonicalize_path(
- os.path.join(bootstrap_root_path, 'store')
- )
+ return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "store"))
def _config_path():
bootstrap_root_path = _root_path()
- return spack.util.path.canonicalize_path(
- os.path.join(bootstrap_root_path, 'config')
- )
+ return spack.util.path.canonicalize_path(os.path.join(bootstrap_root_path, "config"))
def _root_spec(spec_str):
@@ -772,41 +743,39 @@ def _root_spec(spec_str):
"""
# Add a proper compiler hint to the root spec. We use GCC for
# everything but MacOS and Windows.
- if str(spack.platforms.host()) == 'darwin':
- spec_str += ' %apple-clang'
- elif str(spack.platforms.host()) == 'windows':
- spec_str += ' %msvc'
+ if str(spack.platforms.host()) == "darwin":
+ spec_str += " %apple-clang"
+ elif str(spack.platforms.host()) == "windows":
+ spec_str += " %msvc"
else:
- spec_str += ' %gcc'
+ spec_str += " %gcc"
target = archspec.cpu.host().family
- spec_str += ' target={0}'.format(target)
+ spec_str += " target={0}".format(target)
- tty.debug('[BOOTSTRAP ROOT SPEC] {0}'.format(spec_str))
+ tty.debug("[BOOTSTRAP ROOT SPEC] {0}".format(spec_str))
return spec_str
def clingo_root_spec():
"""Return the root spec used to bootstrap clingo"""
- return _root_spec('clingo-bootstrap@spack+python')
+ return _root_spec("clingo-bootstrap@spack+python")
def ensure_clingo_importable_or_raise():
"""Ensure that the clingo module is available for import."""
- ensure_module_importable_or_raise(
- module='clingo', abstract_spec=clingo_root_spec()
- )
+ ensure_module_importable_or_raise(module="clingo", abstract_spec=clingo_root_spec())
def gnupg_root_spec():
"""Return the root spec used to bootstrap GnuPG"""
- return _root_spec('gnupg@2.3:')
+ return _root_spec("gnupg@2.3:")
def ensure_gpg_in_path_or_raise():
"""Ensure gpg or gpg2 are in the PATH or raise."""
return ensure_executables_in_path_or_raise(
- executables=['gpg2', 'gpg'], abstract_spec=gnupg_root_spec()
+ executables=["gpg2", "gpg"], abstract_spec=gnupg_root_spec()
)
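
The _root_spec logic in this hunk amounts to: append a compiler hint based on the host platform, then pin the target family. A sketch with illustrative platform and target values (the real code queries spack.platforms.host() and archspec.cpu.host()):

def sketch_root_spec(spec_str, host_platform="linux", target_family="x86_64"):
    # Compiler hint: apple-clang on macOS, msvc on Windows, gcc elsewhere
    if host_platform == "darwin":
        spec_str += " %apple-clang"
    elif host_platform == "windows":
        spec_str += " %msvc"
    else:
        spec_str += " %gcc"
    # Pin the microarchitecture family, as target=archspec.cpu.host().family
    spec_str += " target={0}".format(target_family)
    return spec_str


print(sketch_root_spec("clingo-bootstrap@spack+python"))
# clingo-bootstrap@spack+python %gcc target=x86_64
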
@@ -815,13 +784,13 @@ def patchelf_root_spec():
# TODO: patchelf is restricted to v0.13 since earlier versions have
    # TODO: bugs that we don't want to deal with, while v0.14 requires C++17
# TODO: which may not be available on all platforms.
- return _root_spec('patchelf@0.13.1:0.13.99')
+ return _root_spec("patchelf@0.13.1:0.13.99")
def ensure_patchelf_in_path_or_raise():
"""Ensure patchelf is in the PATH or raise."""
return ensure_executables_in_path_or_raise(
- executables=['patchelf'], abstract_spec=patchelf_root_spec()
+ executables=["patchelf"], abstract_spec=patchelf_root_spec()
)
@@ -831,44 +800,44 @@ def ensure_patchelf_in_path_or_raise():
def isort_root_spec():
- return _root_spec('py-isort@4.3.5:')
+ return _root_spec("py-isort@4.3.5:")
def ensure_isort_in_path_or_raise():
"""Ensure that isort is in the PATH or raise."""
- executable, root_spec = 'isort', isort_root_spec()
+ executable, root_spec = "isort", isort_root_spec()
return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
def mypy_root_spec():
- return _root_spec('py-mypy@0.900:')
+ return _root_spec("py-mypy@0.900:")
def ensure_mypy_in_path_or_raise():
"""Ensure that mypy is in the PATH or raise."""
- executable, root_spec = 'mypy', mypy_root_spec()
+ executable, root_spec = "mypy", mypy_root_spec()
return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
def black_root_spec():
# black v21 is the last version to support Python 2.7.
# Upgrade when we no longer support Python 2.7
- return _root_spec('py-black@:21')
+ return _root_spec("py-black@:21")
def ensure_black_in_path_or_raise():
"""Ensure that black is in the PATH or raise."""
- executable, root_spec = 'black', black_root_spec()
+ executable, root_spec = "black", black_root_spec()
return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
def flake8_root_spec():
- return _root_spec('py-flake8')
+ return _root_spec("py-flake8")
def ensure_flake8_in_path_or_raise():
"""Ensure that flake8 is in the PATH or raise."""
- executable, root_spec = 'flake8', flake8_root_spec()
+ executable, root_spec = "flake8", flake8_root_spec()
return ensure_executables_in_path_or_raise([executable], abstract_spec=root_spec)
@@ -880,8 +849,7 @@ def all_root_specs(development=False):
"""
specs = [clingo_root_spec(), gnupg_root_spec(), patchelf_root_spec()]
if development:
- specs += [isort_root_spec(), mypy_root_spec(),
- black_root_spec(), flake8_root_spec()]
+ specs += [isort_root_spec(), mypy_root_spec(), black_root_spec(), flake8_root_spec()]
return specs
@@ -889,8 +857,8 @@ def _missing(name, purpose, system_only=True):
"""Message to be printed if an executable is not found"""
msg = '[{2}] MISSING "{0}": {1}'
if not system_only:
- return msg.format(name, purpose, '@*y{{B}}')
- return msg.format(name, purpose, '@*y{{-}}')
+ return msg.format(name, purpose, "@*y{{B}}")
+ return msg.format(name, purpose, "@*y{{-}}")
def _required_system_executable(exes, msg):
@@ -915,82 +883,84 @@ def _required_executable(exes, query_spec, msg):
"""Search for an executable in the system path or in the bootstrap store."""
if isinstance(exes, six.string_types):
exes = (exes,)
- if (spack.util.executable.which_string(*exes) or
- _executables_in_store(exes, query_spec)):
+ if spack.util.executable.which_string(*exes) or _executables_in_store(exes, query_spec):
return True, None
return False, msg
def _core_requirements():
_core_system_exes = {
- 'make': _missing('make', 'required to build software from sources'),
- 'patch': _missing('patch', 'required to patch source code before building'),
- 'bash': _missing('bash', 'required for Spack compiler wrapper'),
- 'tar': _missing('tar', 'required to manage code archives'),
- 'gzip': _missing('gzip', 'required to compress/decompress code archives'),
- 'unzip': _missing('unzip', 'required to compress/decompress code archives'),
- 'bzip2': _missing('bzip2', 'required to compress/decompress code archives'),
- 'git': _missing('git', 'required to fetch/manage git repositories')
+ "make": _missing("make", "required to build software from sources"),
+ "patch": _missing("patch", "required to patch source code before building"),
+ "bash": _missing("bash", "required for Spack compiler wrapper"),
+ "tar": _missing("tar", "required to manage code archives"),
+ "gzip": _missing("gzip", "required to compress/decompress code archives"),
+ "unzip": _missing("unzip", "required to compress/decompress code archives"),
+ "bzip2": _missing("bzip2", "required to compress/decompress code archives"),
+ "git": _missing("git", "required to fetch/manage git repositories"),
}
- if platform.system().lower() == 'linux':
- _core_system_exes['xz'] = _missing(
- 'xz', 'required to compress/decompress code archives'
- )
+ if platform.system().lower() == "linux":
+ _core_system_exes["xz"] = _missing("xz", "required to compress/decompress code archives")
# Executables that are not bootstrapped yet
- result = [_required_system_executable(exe, msg)
- for exe, msg in _core_system_exes.items()]
+ result = [_required_system_executable(exe, msg) for exe, msg in _core_system_exes.items()]
# Python modules
- result.append(_required_python_module(
- 'clingo', clingo_root_spec(),
- _missing('clingo', 'required to concretize specs', False)
- ))
+ result.append(
+ _required_python_module(
+ "clingo", clingo_root_spec(), _missing("clingo", "required to concretize specs", False)
+ )
+ )
return result
def _buildcache_requirements():
_buildcache_exes = {
- 'file': _missing('file', 'required to analyze files for buildcaches'),
- ('gpg2', 'gpg'): _missing('gpg2', 'required to sign/verify buildcaches', False)
+ "file": _missing("file", "required to analyze files for buildcaches"),
+ ("gpg2", "gpg"): _missing("gpg2", "required to sign/verify buildcaches", False),
}
- if platform.system().lower() == 'darwin':
- _buildcache_exes['otool'] = _missing('otool', 'required to relocate binaries')
+ if platform.system().lower() == "darwin":
+ _buildcache_exes["otool"] = _missing("otool", "required to relocate binaries")
# Executables that are not bootstrapped yet
- result = [_required_system_executable(exe, msg)
- for exe, msg in _buildcache_exes.items()]
-
- if platform.system().lower() == 'linux':
- result.append(_required_executable(
- 'patchelf', patchelf_root_spec(),
- _missing('patchelf', 'required to relocate binaries', False)
- ))
+ result = [_required_system_executable(exe, msg) for exe, msg in _buildcache_exes.items()]
+
+ if platform.system().lower() == "linux":
+ result.append(
+ _required_executable(
+ "patchelf",
+ patchelf_root_spec(),
+ _missing("patchelf", "required to relocate binaries", False),
+ )
+ )
return result
def _optional_requirements():
_optional_exes = {
- 'zstd': _missing('zstd', 'required to compress/decompress code archives'),
- 'svn': _missing('svn', 'required to manage subversion repositories'),
- 'hg': _missing('hg', 'required to manage mercurial repositories')
+ "zstd": _missing("zstd", "required to compress/decompress code archives"),
+ "svn": _missing("svn", "required to manage subversion repositories"),
+ "hg": _missing("hg", "required to manage mercurial repositories"),
}
# Executables that are not bootstrapped yet
- result = [_required_system_executable(exe, msg)
- for exe, msg in _optional_exes.items()]
+ result = [_required_system_executable(exe, msg) for exe, msg in _optional_exes.items()]
return result
def _development_requirements():
return [
- _required_executable('isort', isort_root_spec(),
- _missing('isort', 'required for style checks', False)),
- _required_executable('mypy', mypy_root_spec(),
- _missing('mypy', 'required for style checks', False)),
- _required_executable('flake8', flake8_root_spec(),
- _missing('flake8', 'required for style checks', False)),
- _required_executable('black', black_root_spec(),
- _missing('black', 'required for code formatting', False))
+ _required_executable(
+ "isort", isort_root_spec(), _missing("isort", "required for style checks", False)
+ ),
+ _required_executable(
+ "mypy", mypy_root_spec(), _missing("mypy", "required for style checks", False)
+ ),
+ _required_executable(
+ "flake8", flake8_root_spec(), _missing("flake8", "required for style checks", False)
+ ),
+ _required_executable(
+ "black", black_root_spec(), _missing("black", "required for code formatting", False)
+ ),
]
@@ -1002,14 +972,14 @@ def status_message(section):
Args:
section (str): either 'core' or 'buildcache' or 'optional' or 'develop'
"""
- pass_token, fail_token = '@*g{[PASS]}', '@*r{[FAIL]}'
+ pass_token, fail_token = "@*g{[PASS]}", "@*r{[FAIL]}"
    # Contains the header of the section and a list of requirements
spack_sections = {
- 'core': ("{0} @*{{Core Functionalities}}", _core_requirements),
- 'buildcache': ("{0} @*{{Binary packages}}", _buildcache_requirements),
- 'optional': ("{0} @*{{Optional Features}}", _optional_requirements),
- 'develop': ("{0} @*{{Development Dependencies}}", _development_requirements)
+ "core": ("{0} @*{{Core Functionalities}}", _core_requirements),
+ "buildcache": ("{0} @*{{Binary packages}}", _buildcache_requirements),
+ "optional": ("{0} @*{{Optional Features}}", _optional_requirements),
+ "develop": ("{0} @*{{Development Dependencies}}", _development_requirements),
}
msg, required_software = spack_sections[section]
@@ -1019,7 +989,7 @@ def status_message(section):
if not found:
missing_software = True
msg += "\n " + err_msg
- msg += '\n'
+ msg += "\n"
msg = msg.format(pass_token if not missing_software else fail_token)
return msg, missing_software
@@ -1031,12 +1001,12 @@ def bootstrapping_sources(scope=None):
scope (str or None): if a valid configuration scope is given, return the
list only from that scope
"""
- source_configs = spack.config.get('bootstrap:sources', default=None, scope=scope)
+ source_configs = spack.config.get("bootstrap:sources", default=None, scope=scope)
source_configs = source_configs or []
list_of_sources = []
for entry in source_configs:
current = copy.copy(entry)
- metadata_dir = spack.util.path.canonicalize_path(entry['metadata'])
+ metadata_dir = spack.util.path.canonicalize_path(entry["metadata"])
metadata_yaml = os.path.join(metadata_dir, METADATA_YAML_FILENAME)
with open(metadata_yaml) as f:
current.update(spack.util.spack_yaml.load(f))
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index ae961a695b..2d0fd2b5e6 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -84,31 +84,31 @@ from spack.util.string import plural
#
# This can be set by the user to globally disable parallel builds.
#
-SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
+SPACK_NO_PARALLEL_MAKE = "SPACK_NO_PARALLEL_MAKE"
#
# These environment variables are set by
# set_wrapper_variables and used to pass parameters to
# Spack's compiler wrappers.
#
-SPACK_ENV_PATH = 'SPACK_ENV_PATH'
-SPACK_INCLUDE_DIRS = 'SPACK_INCLUDE_DIRS'
-SPACK_LINK_DIRS = 'SPACK_LINK_DIRS'
-SPACK_RPATH_DIRS = 'SPACK_RPATH_DIRS'
-SPACK_RPATH_DEPS = 'SPACK_RPATH_DEPS'
-SPACK_LINK_DEPS = 'SPACK_LINK_DEPS'
-SPACK_PREFIX = 'SPACK_PREFIX'
-SPACK_INSTALL = 'SPACK_INSTALL'
-SPACK_DEBUG = 'SPACK_DEBUG'
-SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
-SPACK_DEBUG_LOG_ID = 'SPACK_DEBUG_LOG_ID'
-SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
-SPACK_CCACHE_BINARY = 'SPACK_CCACHE_BINARY'
-SPACK_SYSTEM_DIRS = 'SPACK_SYSTEM_DIRS'
+SPACK_ENV_PATH = "SPACK_ENV_PATH"
+SPACK_INCLUDE_DIRS = "SPACK_INCLUDE_DIRS"
+SPACK_LINK_DIRS = "SPACK_LINK_DIRS"
+SPACK_RPATH_DIRS = "SPACK_RPATH_DIRS"
+SPACK_RPATH_DEPS = "SPACK_RPATH_DEPS"
+SPACK_LINK_DEPS = "SPACK_LINK_DEPS"
+SPACK_PREFIX = "SPACK_PREFIX"
+SPACK_INSTALL = "SPACK_INSTALL"
+SPACK_DEBUG = "SPACK_DEBUG"
+SPACK_SHORT_SPEC = "SPACK_SHORT_SPEC"
+SPACK_DEBUG_LOG_ID = "SPACK_DEBUG_LOG_ID"
+SPACK_DEBUG_LOG_DIR = "SPACK_DEBUG_LOG_DIR"
+SPACK_CCACHE_BINARY = "SPACK_CCACHE_BINARY"
+SPACK_SYSTEM_DIRS = "SPACK_SYSTEM_DIRS"
# Platform-specific library suffix.
-dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
+dso_suffix = "dylib" if sys.platform == "darwin" else "so"
def should_set_parallel_jobs(jobserver_support=False):
@@ -117,9 +117,9 @@ def should_set_parallel_jobs(jobserver_support=False):
- jobserver_support is enabled, and a jobserver was found.
"""
if (
- jobserver_support and
- 'MAKEFLAGS' in os.environ and
- '--jobserver' in os.environ['MAKEFLAGS']
+ jobserver_support
+ and "MAKEFLAGS" in os.environ
+ and "--jobserver" in os.environ["MAKEFLAGS"]
):
return False
return not env_flag(SPACK_NO_PARALLEL_MAKE)
@@ -127,13 +127,13 @@ def should_set_parallel_jobs(jobserver_support=False):
class MakeExecutable(Executable):
"""Special callable executable object for make so the user can specify
- parallelism options on a per-invocation basis. Specifying
- 'parallel' to the call will override whatever the package's
- global setting is, so you can either default to true or false and
- override particular calls. Specifying 'jobs_env' to a particular
- call will name an environment variable which will be set to the
- parallelism level (without affecting the normal invocation with
- -j).
+ parallelism options on a per-invocation basis. Specifying
+ 'parallel' to the call will override whatever the package's
+ global setting is, so you can either default to true or false and
+ override particular calls. Specifying 'jobs_env' to a particular
+ call will name an environment variable which will be set to the
+ parallelism level (without affecting the normal invocation with
+ -j).
"""
def __init__(self, name, jobs):
@@ -144,25 +144,26 @@ class MakeExecutable(Executable):
"""parallel, and jobs_env from kwargs are swallowed and used here;
remaining arguments are passed through to the superclass.
"""
- parallel = should_set_parallel_jobs(jobserver_support=True) and \
- kwargs.pop('parallel', self.jobs > 1)
+ parallel = should_set_parallel_jobs(jobserver_support=True) and kwargs.pop(
+ "parallel", self.jobs > 1
+ )
if parallel:
- args = ('-j{0}'.format(self.jobs),) + args
- jobs_env = kwargs.pop('jobs_env', None)
+ args = ("-j{0}".format(self.jobs),) + args
+ jobs_env = kwargs.pop("jobs_env", None)
if jobs_env:
# Caller wants us to set an environment variable to
# control the parallelism.
- kwargs['extra_env'] = {jobs_env: str(self.jobs)}
+ kwargs["extra_env"] = {jobs_env: str(self.jobs)}
return super(MakeExecutable, self).__call__(*args, **kwargs)
def _on_cray():
host_platform = spack.platforms.host()
- host_os = host_platform.operating_system('default_os')
- on_cray = str(host_platform) == 'cray'
- using_cnl = re.match(r'cnl\d+', str(host_os))
+ host_os = host_platform.operating_system("default_os")
+ on_cray = str(host_platform) == "cray"
+ using_cnl = re.match(r"cnl\d+", str(host_os))
return on_cray, using_cnl
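
MakeExecutable.__call__ above lets callers control parallelism per invocation: "parallel" toggles the -jN flag, and "jobs_env" names an environment variable that receives the job count instead. A reduced sketch (the jobserver check and the Executable base class are left out):

class FakeMake(object):
    """Hypothetical, reduced stand-in for MakeExecutable."""

    def __init__(self, jobs):
        self.jobs = jobs

    def __call__(self, *args, **kwargs):
        parallel = kwargs.pop("parallel", self.jobs > 1)
        if parallel:
            args = ("-j{0}".format(self.jobs),) + args
        jobs_env = kwargs.pop("jobs_env", None)
        extra_env = {jobs_env: str(self.jobs)} if jobs_env else {}
        return args, extra_env


make = FakeMake(jobs=8)
print(make("install", jobs_env="JOBS"))  # "JOBS" is an illustrative variable name
# (('-j8', 'install'), {'JOBS': '8'})
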
@@ -175,33 +176,33 @@ def clean_environment():
# Remove these vars from the environment during build because they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
- env.unset('LD_LIBRARY_PATH')
- env.unset('LD_RUN_PATH')
- env.unset('DYLD_LIBRARY_PATH')
- env.unset('DYLD_FALLBACK_LIBRARY_PATH')
+ env.unset("LD_LIBRARY_PATH")
+ env.unset("LD_RUN_PATH")
+ env.unset("DYLD_LIBRARY_PATH")
+ env.unset("DYLD_FALLBACK_LIBRARY_PATH")
# These vars affect how the compiler finds libraries and include dirs.
- env.unset('LIBRARY_PATH')
- env.unset('CPATH')
- env.unset('C_INCLUDE_PATH')
- env.unset('CPLUS_INCLUDE_PATH')
- env.unset('OBJC_INCLUDE_PATH')
+ env.unset("LIBRARY_PATH")
+ env.unset("CPATH")
+ env.unset("C_INCLUDE_PATH")
+ env.unset("CPLUS_INCLUDE_PATH")
+ env.unset("OBJC_INCLUDE_PATH")
- env.unset('CMAKE_PREFIX_PATH')
- env.unset('PYTHONPATH')
+ env.unset("CMAKE_PREFIX_PATH")
+ env.unset("PYTHONPATH")
# Affects GNU make, can e.g. indirectly inhibit enabling parallel build
# env.unset('MAKEFLAGS')
# Avoid that libraries of build dependencies get hijacked.
- env.unset('LD_PRELOAD')
- env.unset('DYLD_INSERT_LIBRARIES')
+ env.unset("LD_PRELOAD")
+ env.unset("DYLD_INSERT_LIBRARIES")
# Avoid <packagename>_ROOT user variables overriding spack dependencies
# https://cmake.org/cmake/help/latest/variable/PackageName_ROOT.html
# Spack needs SPACK_ROOT though, so we need to exclude that
for varname in os.environ.keys():
- if varname.endswith('_ROOT') and varname != 'SPACK_ROOT':
+ if varname.endswith("_ROOT") and varname != "SPACK_ROOT":
env.unset(varname)
# On Cray "cluster" systems, unset CRAY_LD_LIBRARY_PATH to avoid
@@ -210,46 +211,55 @@ def clean_environment():
# depending on the CNL version).
on_cray, using_cnl = _on_cray()
if on_cray and not using_cnl:
- env.unset('CRAY_LD_LIBRARY_PATH')
+ env.unset("CRAY_LD_LIBRARY_PATH")
for varname in os.environ.keys():
- if 'PKGCONF' in varname:
+ if "PKGCONF" in varname:
env.unset(varname)
# Unset the following variables because they can affect installation of
# Autotools and CMake packages.
build_system_vars = [
- 'CC', 'CFLAGS', 'CPP', 'CPPFLAGS', # C variables
- 'CXX', 'CCC', 'CXXFLAGS', 'CXXCPP', # C++ variables
- 'F77', 'FFLAGS', 'FLIBS', # Fortran77 variables
- 'FC', 'FCFLAGS', 'FCLIBS', # Fortran variables
- 'LDFLAGS', 'LIBS' # linker variables
+ "CC",
+ "CFLAGS",
+ "CPP",
+ "CPPFLAGS", # C variables
+ "CXX",
+ "CCC",
+ "CXXFLAGS",
+ "CXXCPP", # C++ variables
+ "F77",
+ "FFLAGS",
+ "FLIBS", # Fortran77 variables
+ "FC",
+ "FCFLAGS",
+ "FCLIBS", # Fortran variables
+ "LDFLAGS",
+ "LIBS", # linker variables
]
for v in build_system_vars:
env.unset(v)
# Unset mpi environment vars. These flags should only be set by
# mpi providers for packages with mpi dependencies
- mpi_vars = [
- 'MPICC', 'MPICXX', 'MPIFC', 'MPIF77', 'MPIF90'
- ]
+ mpi_vars = ["MPICC", "MPICXX", "MPIFC", "MPIF77", "MPIF90"]
for v in mpi_vars:
env.unset(v)
- build_lang = spack.config.get('config:build_language')
+ build_lang = spack.config.get("config:build_language")
if build_lang:
# Override language-related variables. This can be used to force
# English compiler messages etc., which allows parse_log_events to
# show useful matches.
- env.set('LC_ALL', build_lang)
+ env.set("LC_ALL", build_lang)
# Remove any macports installs from the PATH. The macports ld can
# cause conflicts with the built-in linker on el capitan. Solves
# assembler issues, e.g.:
# suffix or operands invalid for `movq'"
- path = get_path('PATH')
+ path = get_path("PATH")
for p in path:
- if '/macports/' in p:
- env.remove_path('PATH', p)
+ if "/macports/" in p:
+ env.remove_path("PATH", p)
return env
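
One detail worth calling out in clean_environment above: every *_ROOT variable is unset except SPACK_ROOT, so CMake-style <PackageName>_ROOT hints from the user's shell cannot leak into the build. A small sketch of that filter:

def roots_to_unset(environ):
    return [
        varname
        for varname in environ
        if varname.endswith("_ROOT") and varname != "SPACK_ROOT"
    ]


example = {"ZLIB_ROOT": "/usr", "SPACK_ROOT": "/opt/spack", "PATH": "/usr/bin"}
print(roots_to_unset(example))  # ['ZLIB_ROOT']
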
@@ -263,8 +273,7 @@ def set_compiler_environment_variables(pkg, env):
compiler.verify_executables()
# Set compiler variables used by CMake and autotools
- assert all(key in compiler.link_paths for key in (
- 'cc', 'cxx', 'f77', 'fc'))
+ assert all(key in compiler.link_paths for key in ("cc", "cxx", "f77", "fc"))
# Populate an object with the list of environment modifications
# and return it
@@ -274,36 +283,36 @@ def set_compiler_environment_variables(pkg, env):
# Set SPACK compiler variables so that our wrapper knows what to call
if compiler.cc:
- env.set('SPACK_CC', compiler.cc)
- env.set('CC', os.path.join(link_dir, compiler.link_paths['cc']))
+ env.set("SPACK_CC", compiler.cc)
+ env.set("CC", os.path.join(link_dir, compiler.link_paths["cc"]))
if compiler.cxx:
- env.set('SPACK_CXX', compiler.cxx)
- env.set('CXX', os.path.join(link_dir, compiler.link_paths['cxx']))
+ env.set("SPACK_CXX", compiler.cxx)
+ env.set("CXX", os.path.join(link_dir, compiler.link_paths["cxx"]))
if compiler.f77:
- env.set('SPACK_F77', compiler.f77)
- env.set('F77', os.path.join(link_dir, compiler.link_paths['f77']))
+ env.set("SPACK_F77", compiler.f77)
+ env.set("F77", os.path.join(link_dir, compiler.link_paths["f77"]))
if compiler.fc:
- env.set('SPACK_FC', compiler.fc)
- env.set('FC', os.path.join(link_dir, compiler.link_paths['fc']))
+ env.set("SPACK_FC", compiler.fc)
+ env.set("FC", os.path.join(link_dir, compiler.link_paths["fc"]))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
- env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
- env.set('SPACK_CXX_RPATH_ARG', compiler.cxx_rpath_arg)
- env.set('SPACK_F77_RPATH_ARG', compiler.f77_rpath_arg)
- env.set('SPACK_FC_RPATH_ARG', compiler.fc_rpath_arg)
- env.set('SPACK_LINKER_ARG', compiler.linker_arg)
+ env.set("SPACK_CC_RPATH_ARG", compiler.cc_rpath_arg)
+ env.set("SPACK_CXX_RPATH_ARG", compiler.cxx_rpath_arg)
+ env.set("SPACK_F77_RPATH_ARG", compiler.f77_rpath_arg)
+ env.set("SPACK_FC_RPATH_ARG", compiler.fc_rpath_arg)
+ env.set("SPACK_LINKER_ARG", compiler.linker_arg)
# Check whether we want to force RPATH or RUNPATH
- if spack.config.get('config:shared_linking') == 'rpath':
- env.set('SPACK_DTAGS_TO_STRIP', compiler.enable_new_dtags)
- env.set('SPACK_DTAGS_TO_ADD', compiler.disable_new_dtags)
+ if spack.config.get("config:shared_linking") == "rpath":
+ env.set("SPACK_DTAGS_TO_STRIP", compiler.enable_new_dtags)
+ env.set("SPACK_DTAGS_TO_ADD", compiler.disable_new_dtags)
else:
- env.set('SPACK_DTAGS_TO_STRIP', compiler.disable_new_dtags)
- env.set('SPACK_DTAGS_TO_ADD', compiler.enable_new_dtags)
+ env.set("SPACK_DTAGS_TO_STRIP", compiler.disable_new_dtags)
+ env.set("SPACK_DTAGS_TO_ADD", compiler.enable_new_dtags)
# Set the target parameters that the compiler will add
isa_arg = spec.architecture.target.optimization_flags(compiler)
- env.set('SPACK_TARGET_ARGS', isa_arg)
+ env.set("SPACK_TARGET_ARGS", isa_arg)
# Trap spack-tracked compiler flags as appropriate.
# env_flags are easy to accidentally override.
@@ -333,16 +342,16 @@ def set_compiler_environment_variables(pkg, env):
# Concreteness guarantees key safety here
if inject_flags[flag]:
# variables SPACK_<FLAG> inject flags through wrapper
- var_name = 'SPACK_{0}'.format(flag.upper())
- env.set(var_name, ' '.join(f for f in inject_flags[flag]))
+ var_name = "SPACK_{0}".format(flag.upper())
+ env.set(var_name, " ".join(f for f in inject_flags[flag]))
if env_flags[flag]:
# implicit variables
- env.set(flag.upper(), ' '.join(f for f in env_flags[flag]))
+ env.set(flag.upper(), " ".join(f for f in env_flags[flag]))
pkg.flags_to_build_system_args(build_system_flags)
- env.set('SPACK_COMPILER_SPEC', str(spec.compiler))
+ env.set("SPACK_COMPILER_SPEC", str(spec.compiler))
- env.set('SPACK_SYSTEM_DIRS', ':'.join(system_dirs))
+ env.set("SPACK_SYSTEM_DIRS", ":".join(system_dirs))
compiler.setup_custom_environment(pkg, env)
@@ -351,17 +360,17 @@ def set_compiler_environment_variables(pkg, env):
def set_wrapper_variables(pkg, env):
"""Set environment variables used by the Spack compiler wrapper
- (which have the prefix `SPACK_`) and also add the compiler wrappers
- to PATH.
-
- This determines the injected -L/-I/-rpath options; each
- of these specifies a search order and this function computes these
- options in a manner that is intended to match the DAG traversal order
- in `modifications_from_dependencies`: that method uses a post-order
- traversal so that `PrependPath` actions from dependencies take lower
- precedence; we use a post-order traversal here to match the visitation
- order of `modifications_from_dependencies` (so we are visiting the
- lowest priority packages first).
+ (which have the prefix `SPACK_`) and also add the compiler wrappers
+ to PATH.
+
+ This determines the injected -L/-I/-rpath options; each
+ of these specifies a search order and this function computes these
+ options in a manner that is intended to match the DAG traversal order
+ in `modifications_from_dependencies`: that method uses a post-order
+ traversal so that `PrependPath` actions from dependencies take lower
+ precedence; we use a post-order traversal here to match the visitation
+ order of `modifications_from_dependencies` (so we are visiting the
+ lowest priority packages first).
"""
# Set environment variables if specified for
# the given compiler
@@ -369,8 +378,8 @@ def set_wrapper_variables(pkg, env):
env.extend(spack.schema.environment.parse(compiler.environment))
if compiler.extra_rpaths:
- extra_rpaths = ':'.join(compiler.extra_rpaths)
- env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)
+ extra_rpaths = ":".join(compiler.extra_rpaths)
+ env.set("SPACK_COMPILER_EXTRA_RPATHS", extra_rpaths)
# Add spack build environment path with compiler wrappers first in
# the path. We add the compiler wrapper path, which includes default
@@ -384,35 +393,35 @@ def set_wrapper_variables(pkg, env):
# directory. Add that to the path too.
env_paths = []
compiler_specific = os.path.join(
- spack.paths.build_env_path,
- os.path.dirname(pkg.compiler.link_paths['cc']))
+ spack.paths.build_env_path, os.path.dirname(pkg.compiler.link_paths["cc"])
+ )
for item in [spack.paths.build_env_path, compiler_specific]:
env_paths.append(item)
- ci = os.path.join(item, 'case-insensitive')
+ ci = os.path.join(item, "case-insensitive")
if os.path.isdir(ci):
env_paths.append(ci)
tty.debug("Adding compiler bin/ paths: " + " ".join(env_paths))
for item in env_paths:
- env.prepend_path('PATH', item)
+ env.prepend_path("PATH", item)
env.set_path(SPACK_ENV_PATH, env_paths)
# Working directory for the spack command itself, for debug logs.
- if spack.config.get('config:debug'):
- env.set(SPACK_DEBUG, 'TRUE')
+ if spack.config.get("config:debug"):
+ env.set(SPACK_DEBUG, "TRUE")
env.set(SPACK_SHORT_SPEC, pkg.spec.short_spec)
- env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format('{name}-{hash:7}'))
+ env.set(SPACK_DEBUG_LOG_ID, pkg.spec.format("{name}-{hash:7}"))
env.set(SPACK_DEBUG_LOG_DIR, spack.main.spack_working_dir)
# Find ccache binary and hand it to build environment
- if spack.config.get('config:ccache'):
- ccache = Executable('ccache')
+ if spack.config.get("config:ccache"):
+ ccache = Executable("ccache")
if not ccache:
raise RuntimeError("No ccache binary found in PATH")
env.set(SPACK_CCACHE_BINARY, ccache)
# Gather information about various types of dependencies
- link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
+ link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
rpath_deps = get_rpath_deps(pkg)
link_dirs = []
@@ -434,9 +443,8 @@ def set_wrapper_variables(pkg, env):
except NoLibrariesError:
tty.debug("No libraries found for {0}".format(dep.name))
- for default_lib_dir in ['lib', 'lib64']:
- default_lib_prefix = os.path.join(
- dep.prefix, default_lib_dir)
+ for default_lib_dir in ["lib", "lib64"]:
+ default_lib_prefix = os.path.join(dep.prefix, default_lib_dir)
if os.path.isdir(default_lib_prefix):
dep_link_dirs.append(default_lib_prefix)
@@ -449,21 +457,21 @@ def set_wrapper_variables(pkg, env):
except NoHeadersError:
tty.debug("No headers found for {0}".format(dep.name))
- for dspec in pkg.spec.traverse(root=False, order='post'):
+ for dspec in pkg.spec.traverse(root=False, order="post"):
if dspec.external:
update_compiler_args_for_dep(dspec)
# Just above, we prepended entries for -L/-rpath for externals. We
# now do this for non-external packages so that Spack-built packages
# are searched first for libraries etc.
- for dspec in pkg.spec.traverse(root=False, order='post'):
+ for dspec in pkg.spec.traverse(root=False, order="post"):
if not dspec.external:
update_compiler_args_for_dep(dspec)
# The top-level package is always RPATHed. It hasn't been installed yet
# so the RPATHs are added unconditionally (e.g. even though lib64/ may
# not be created for the install).
- for libdir in ['lib64', 'lib']:
+ for libdir in ["lib64", "lib"]:
lib_path = os.path.join(pkg.prefix, libdir)
rpath_dirs.insert(0, lib_path)
@@ -471,13 +479,14 @@ def set_wrapper_variables(pkg, env):
include_dirs = list(dedupe(filter_system_paths(include_dirs)))
rpath_dirs = list(dedupe(filter_system_paths(rpath_dirs)))
- env.set(SPACK_LINK_DIRS, ':'.join(link_dirs))
- env.set(SPACK_INCLUDE_DIRS, ':'.join(include_dirs))
- env.set(SPACK_RPATH_DIRS, ':'.join(rpath_dirs))
+ env.set(SPACK_LINK_DIRS, ":".join(link_dirs))
+ env.set(SPACK_INCLUDE_DIRS, ":".join(include_dirs))
+ env.set(SPACK_RPATH_DIRS, ":".join(rpath_dirs))
def determine_number_of_jobs(
- parallel=False, command_line=None, config_default=None, max_cpus=None):
+ parallel=False, command_line=None, config_default=None, max_cpus=None
+):
"""
Packages that require sequential builds need 1 job. Otherwise we use the
number of jobs set on the command line. If not set, then we use the config
@@ -494,8 +503,8 @@ def determine_number_of_jobs(
if not parallel:
return 1
- if command_line is None and 'command_line' in spack.config.scopes():
- command_line = spack.config.get('config:build_jobs', scope='command_line')
+ if command_line is None and "command_line" in spack.config.scopes():
+ command_line = spack.config.get("config:build_jobs", scope="command_line")
if command_line is not None:
return command_line
@@ -503,7 +512,7 @@ def determine_number_of_jobs(
max_cpus = max_cpus or cpus_available()
# in some rare cases _builtin config may not be set, so default to max 16
- config_default = config_default or spack.config.get('config:build_jobs', 16)
+ config_default = config_default or spack.config.get("config:build_jobs", 16)
return min(max_cpus, config_default)
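A quick illustration of the precedence this function encodes, with made-up numbers, all values passed explicitly, and assuming no -j was given on the command line:

    determine_number_of_jobs(parallel=False)                                  # -> 1
    determine_number_of_jobs(parallel=True, command_line=4, max_cpus=64)      # -> 4
    determine_number_of_jobs(parallel=True, config_default=16, max_cpus=64)   # -> 16
    determine_number_of_jobs(parallel=True, config_default=16, max_cpus=8)    # -> 8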
@@ -512,7 +521,7 @@ def _set_variables_for_single_module(pkg, module):
"""Helper function to set module variables for single module."""
# Put a marker on this module so that it won't execute the body of this
# function again, since it is not needed
- marker = '_set_run_already_called'
+ marker = "_set_run_already_called"
if getattr(module, marker, False):
return
@@ -522,24 +531,24 @@ def _set_variables_for_single_module(pkg, module):
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
- m.make = MakeExecutable('make', jobs)
- m.gmake = MakeExecutable('gmake', jobs)
- m.scons = MakeExecutable('scons', jobs)
- m.ninja = MakeExecutable('ninja', jobs)
+ m.make = MakeExecutable("make", jobs)
+ m.gmake = MakeExecutable("gmake", jobs)
+ m.scons = MakeExecutable("scons", jobs)
+ m.ninja = MakeExecutable("ninja", jobs)
# easy shortcut to os.environ
m.env = os.environ
# Find the configure script in the archive path
# Don't use which for this; we want to find it in the current dir.
- m.configure = Executable('./configure')
+ m.configure = Executable("./configure")
- m.meson = Executable('meson')
- m.cmake = Executable('cmake')
- m.ctest = MakeExecutable('ctest', jobs)
+ m.meson = Executable("meson")
+ m.cmake = Executable("cmake")
+ m.ctest = MakeExecutable("ctest", jobs)
- if sys.platform == 'win32':
- m.nmake = Executable('nmake')
+ if sys.platform == "win32":
+ m.nmake = Executable("nmake")
# Standard CMake arguments
m.std_cmake_args = spack.build_systems.cmake.CMakePackage._std_args(pkg)
m.std_meson_args = spack.build_systems.meson.MesonPackage._std_args(pkg)
@@ -547,10 +556,10 @@ def _set_variables_for_single_module(pkg, module):
# Put spack compiler paths in module scope.
link_dir = spack.paths.build_env_path
- m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths['cc'])
- m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths['cxx'])
- m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths['f77'])
- m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths['fc'])
+ m.spack_cc = os.path.join(link_dir, pkg.compiler.link_paths["cc"])
+ m.spack_cxx = os.path.join(link_dir, pkg.compiler.link_paths["cxx"])
+ m.spack_f77 = os.path.join(link_dir, pkg.compiler.link_paths["f77"])
+ m.spack_fc = os.path.join(link_dir, pkg.compiler.link_paths["fc"])
# Emulate some shell commands for convenience
m.pwd = os.getcwd
@@ -575,11 +584,12 @@ def _set_variables_for_single_module(pkg, module):
m.dso_suffix = dso_suffix
def static_to_shared_library(static_lib, shared_lib=None, **kwargs):
- compiler_path = kwargs.get('compiler', m.spack_cc)
+ compiler_path = kwargs.get("compiler", m.spack_cc)
compiler = Executable(compiler_path)
- return _static_to_shared_library(pkg.spec.architecture, compiler,
- static_lib, shared_lib, **kwargs)
+ return _static_to_shared_library(
+ pkg.spec.architecture, compiler, static_lib, shared_lib, **kwargs
+ )
m.static_to_shared_library = static_to_shared_library
@@ -590,7 +600,7 @@ def _set_variables_for_single_module(pkg, module):
def set_module_variables_for_package(pkg):
"""Populate the module scope of install() with some useful functions.
- This makes things easier for package writers.
+ This makes things easier for package writers.
"""
# If a user makes their own package repo, e.g.
# spack.pkg.mystuff.libelf.Libelf, and they inherit from an existing class
@@ -602,8 +612,7 @@ def set_module_variables_for_package(pkg):
_set_variables_for_single_module(pkg, mod)
-def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
- **kwargs):
+def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None, **kwargs):
"""
Converts a static library to a shared library. The static library has to
be built with PIC for the conversion to work.
@@ -621,50 +630,49 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
compat_version (str): Library compatibility version. Default is
version.
"""
- compiler_output = kwargs.get('compiler_output', None)
- arguments = kwargs.get('arguments', [])
- version = kwargs.get('version', None)
- compat_version = kwargs.get('compat_version', version)
+ compiler_output = kwargs.get("compiler_output", None)
+ arguments = kwargs.get("arguments", [])
+ version = kwargs.get("version", None)
+ compat_version = kwargs.get("compat_version", version)
if not shared_lib:
- shared_lib = '{0}.{1}'.format(os.path.splitext(static_lib)[0],
- dso_suffix)
+ shared_lib = "{0}.{1}".format(os.path.splitext(static_lib)[0], dso_suffix)
compiler_args = []
# TODO: Compiler arguments should not be hardcoded but provided by
# the different compiler classes.
- if 'linux' in arch or 'cray' in arch:
+ if "linux" in arch or "cray" in arch:
soname = os.path.basename(shared_lib)
if compat_version:
- soname += '.{0}'.format(compat_version)
+ soname += ".{0}".format(compat_version)
compiler_args = [
- '-shared',
- '-Wl,-soname,{0}'.format(soname),
- '-Wl,--whole-archive',
+ "-shared",
+ "-Wl,-soname,{0}".format(soname),
+ "-Wl,--whole-archive",
static_lib,
- '-Wl,--no-whole-archive'
+ "-Wl,--no-whole-archive",
]
- elif 'darwin' in arch:
+ elif "darwin" in arch:
install_name = shared_lib
if compat_version:
- install_name += '.{0}'.format(compat_version)
+ install_name += ".{0}".format(compat_version)
compiler_args = [
- '-dynamiclib',
- '-install_name', '{0}'.format(install_name),
- '-Wl,-force_load,{0}'.format(static_lib)
+ "-dynamiclib",
+ "-install_name",
+ "{0}".format(install_name),
+ "-Wl,-force_load,{0}".format(static_lib),
]
if compat_version:
- compiler_args.extend(['-compatibility_version', '{0}'.format(
- compat_version)])
+ compiler_args.extend(["-compatibility_version", "{0}".format(compat_version)])
if version:
- compiler_args.extend(['-current_version', '{0}'.format(version)])
+ compiler_args.extend(["-current_version", "{0}".format(version)])
if len(arguments) > 0:
compiler_args.extend(arguments)
@@ -672,11 +680,11 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
shared_lib_base = shared_lib
if version:
- shared_lib += '.{0}'.format(version)
+ shared_lib += ".{0}".format(version)
elif compat_version:
- shared_lib += '.{0}'.format(compat_version)
+ shared_lib += ".{0}".format(compat_version)
- compiler_args.extend(['-o', shared_lib])
+ compiler_args.extend(["-o", shared_lib])
# Create symlinks for version and compat_version
shared_lib_link = os.path.basename(shared_lib)
@@ -685,8 +693,7 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
symlink(shared_lib_link, shared_lib_base)
if compat_version and compat_version != version:
- symlink(shared_lib_link, '{0}.{1}'.format(shared_lib_base,
- compat_version))
+ symlink(shared_lib_link, "{0}.{1}".format(shared_lib_base, compat_version))
return compiler(*compiler_args, output=compiler_output)
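For context, once set_module_variables_for_package() has injected the wrapper defined earlier into a package module, an install() body can call it roughly like this; the archive name and versions are hypothetical:

    # On Linux this builds libfoo.so.1.2.3 from the PIC static archive, and the
    # symlink code above adds libfoo.so and libfoo.so.1 next to it.
    static_to_shared_library("libfoo.a", version="1.2.3", compat_version="1")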
@@ -694,19 +701,17 @@ def _static_to_shared_library(arch, compiler, static_lib, shared_lib=None,
def get_rpath_deps(pkg):
"""Return immediate or transitive RPATHs depending on the package."""
if pkg.transitive_rpaths:
- return [d for d in pkg.spec.traverse(root=False, deptype=('link'))]
+ return [d for d in pkg.spec.traverse(root=False, deptype=("link"))]
else:
- return pkg.spec.dependencies(deptype='link')
+ return pkg.spec.dependencies(deptype="link")
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
deps = get_rpath_deps(pkg)
- rpaths.extend(d.prefix.lib for d in deps
- if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in deps
- if os.path.isdir(d.prefix.lib64))
+ rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
+ rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
@@ -752,8 +757,9 @@ def parent_class_modules(cls):
Includes cls.__module__
"""
- if (not issubclass(cls, spack.package_base.PackageBase) or
- issubclass(spack.package_base.PackageBase, cls)):
+ if not issubclass(cls, spack.package_base.PackageBase) or issubclass(
+ spack.package_base.PackageBase, cls
+ ):
return []
result = []
module = sys.modules.get(cls.__module__)
@@ -779,12 +785,10 @@ def load_external_modules(pkg):
load_module(external_module)
-def setup_package(pkg, dirty, context='build'):
+def setup_package(pkg, dirty, context="build"):
"""Execute all environment setup routines."""
- if context not in ['build', 'test']:
- raise ValueError(
- "'context' must be one of ['build', 'test'] - got: {0}"
- .format(context))
+ if context not in ["build", "test"]:
+ raise ValueError("'context' must be one of ['build', 'test'] - got: {0}".format(context))
set_module_variables_for_package(pkg)
@@ -794,37 +798,37 @@ def setup_package(pkg, dirty, context='build'):
env_mods = EnvironmentModifications()
# setup compilers for build contexts
- need_compiler = context == 'build' or (context == 'test' and
- pkg.test_requires_compiler)
+ need_compiler = context == "build" or (context == "test" and pkg.test_requires_compiler)
if need_compiler:
set_compiler_environment_variables(pkg, env_mods)
set_wrapper_variables(pkg, env_mods)
- env_mods.extend(modifications_from_dependencies(
- pkg.spec, context, custom_mods_only=False))
+ env_mods.extend(modifications_from_dependencies(pkg.spec, context, custom_mods_only=False))
# architecture specific setup
platform = spack.platforms.by_name(pkg.spec.architecture.platform)
target = platform.target(pkg.spec.architecture.target)
platform.setup_platform_environment(pkg, env_mods)
- if context == 'build':
+ if context == "build":
pkg.setup_build_environment(env_mods)
- if (not dirty) and (not env_mods.is_unset('CPATH')):
- tty.debug("A dependency has updated CPATH, this may lead pkg-"
- "config to assume that the package is part of the system"
- " includes and omit it when invoked with '--cflags'.")
- elif context == 'test':
+ if (not dirty) and (not env_mods.is_unset("CPATH")):
+ tty.debug(
+ "A dependency has updated CPATH, this may lead pkg-"
+ "config to assume that the package is part of the system"
+ " includes and omit it when invoked with '--cflags'."
+ )
+ elif context == "test":
env_mods.extend(
inspect_path(
pkg.spec.prefix,
spack.user_environment.prefix_inspections(pkg.spec.platform),
- exclude=is_system_path
+ exclude=is_system_path,
)
)
pkg.setup_run_environment(env_mods)
- env_mods.prepend_path('PATH', '.')
+ env_mods.prepend_path("PATH", ".")
# First apply the clean environment changes
env_base.apply_modifications()
@@ -840,8 +844,8 @@ def setup_package(pkg, dirty, context='build'):
# unnecessary
on_cray, _ = _on_cray()
if on_cray and not dirty:
- for mod in ['cray-mpich', 'cray-libsci']:
- module('unload', mod)
+ for mod in ["cray-mpich", "cray-libsci"]:
+ module("unload", mod)
if target.module_name:
load_module(target.module_name)
@@ -850,8 +854,7 @@ def setup_package(pkg, dirty, context='build'):
implicit_rpaths = pkg.compiler.implicit_rpaths()
if implicit_rpaths:
- env_mods.set('SPACK_COMPILER_IMPLICIT_RPATHS',
- ':'.join(implicit_rpaths))
+ env_mods.set("SPACK_COMPILER_IMPLICIT_RPATHS", ":".join(implicit_rpaths))
# Make sure nothing's strange about the Spack environment.
validate(env_mods, tty.warn)
@@ -867,14 +870,14 @@ def _make_runnable(pkg, env):
# environment variable
prefix = pkg.prefix
- for dirname in ['bin', 'bin64']:
+ for dirname in ["bin", "bin64"]:
bin_dir = os.path.join(prefix, dirname)
if os.path.isdir(bin_dir):
- env.prepend_path('PATH', bin_dir)
+ env.prepend_path("PATH", bin_dir)
def modifications_from_dependencies(
- spec, context, custom_mods_only=True, set_package_py_globals=True
+ spec, context, custom_mods_only=True, set_package_py_globals=True
):
"""Returns the environment modifications that are required by
the dependencies of a spec and also applies modifications
@@ -914,60 +917,57 @@ def modifications_from_dependencies(
package.py files (this may be problematic when using buildcaches that have
been built on a different but compatible OS)
"""
- if context not in ['build', 'run', 'test']:
+ if context not in ["build", "run", "test"]:
raise ValueError(
- "Expecting context to be one of ['build', 'run', 'test'], "
- "got: {0}".format(context))
+ "Expecting context to be one of ['build', 'run', 'test'], " "got: {0}".format(context)
+ )
env = EnvironmentModifications()
# Note: see computation of 'custom_mod_deps' and 'exe_deps' later in this
# function; these sets form the building blocks of those collections.
- build_deps = set(spec.dependencies(deptype=('build', 'test')))
- link_deps = set(spec.traverse(root=False, deptype='link'))
+ build_deps = set(spec.dependencies(deptype=("build", "test")))
+ link_deps = set(spec.traverse(root=False, deptype="link"))
build_link_deps = build_deps | link_deps
build_and_supporting_deps = set()
for build_dep in build_deps:
- build_and_supporting_deps.update(build_dep.traverse(deptype='run'))
- run_and_supporting_deps = set(
- spec.traverse(root=False, deptype=('run', 'link')))
+ build_and_supporting_deps.update(build_dep.traverse(deptype="run"))
+ run_and_supporting_deps = set(spec.traverse(root=False, deptype=("run", "link")))
test_and_supporting_deps = set()
- for test_dep in set(spec.dependencies(deptype='test')):
- test_and_supporting_deps.update(test_dep.traverse(deptype='run'))
+ for test_dep in set(spec.dependencies(deptype="test")):
+ test_and_supporting_deps.update(test_dep.traverse(deptype="run"))
# All dependencies that might have environment modifications to apply
custom_mod_deps = set()
- if context == 'build':
+ if context == "build":
custom_mod_deps.update(build_and_supporting_deps)
# Tests may be performed after build
custom_mod_deps.update(test_and_supporting_deps)
else:
# test/run context
custom_mod_deps.update(run_and_supporting_deps)
- if context == 'test':
+ if context == "test":
custom_mod_deps.update(test_and_supporting_deps)
custom_mod_deps.update(link_deps)
# Determine 'exe_deps': the set of packages with binaries we want to use
- if context == 'build':
+ if context == "build":
exe_deps = build_and_supporting_deps | test_and_supporting_deps
- elif context == 'run':
- exe_deps = set(spec.traverse(deptype='run'))
- elif context == 'test':
+ elif context == "run":
+ exe_deps = set(spec.traverse(deptype="run"))
+ elif context == "test":
exe_deps = test_and_supporting_deps
def default_modifications_for_dep(dep):
- if (dep in build_link_deps and
- not is_system_path(dep.prefix) and
- context == 'build'):
+ if dep in build_link_deps and not is_system_path(dep.prefix) and context == "build":
prefix = dep.prefix
- env.prepend_path('CMAKE_PREFIX_PATH', prefix)
+ env.prepend_path("CMAKE_PREFIX_PATH", prefix)
- for directory in ('lib', 'lib64', 'share'):
- pcdir = os.path.join(prefix, directory, 'pkgconfig')
+ for directory in ("lib", "lib64", "share"):
+ pcdir = os.path.join(prefix, directory, "pkgconfig")
if os.path.isdir(pcdir):
- env.prepend_path('PKG_CONFIG_PATH', pcdir)
+ env.prepend_path("PKG_CONFIG_PATH", pcdir)
if dep in exe_deps and not is_system_path(dep.prefix):
_make_runnable(dep, env)
@@ -990,7 +990,7 @@ def modifications_from_dependencies(
set_module_variables_for_package(dpkg)
# Allow dependencies to modify the module
dpkg.setup_dependent_package(spec.package.module, spec)
- if context == 'build':
+ if context == "build":
dpkg.setup_dependent_build_environment(env, spec)
else:
dpkg.setup_dependent_run_environment(env, spec)
@@ -999,11 +999,11 @@ def modifications_from_dependencies(
# The Spec.traverse method provides this: i.e. in addition to
# the post-order semantics, it also guarantees a fixed traversal order
# among dependencies which are not constrained by post-order semantics.
- for dspec in spec.traverse(root=False, order='post'):
+ for dspec in spec.traverse(root=False, order="post"):
if dspec.external:
add_modifications_for_dep(dspec)
- for dspec in spec.traverse(root=False, order='post'):
+ for dspec in spec.traverse(root=False, order="post"):
# Default env modifications for non-external packages can override
# custom modifications of external packages (this can only occur
# for modifications to PATH, CMAKE_PREFIX_PATH, and PKG_CONFIG_PATH)
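setup_package() above is the main caller, passing custom_mods_only=False; a hedged sketch of the same call for a single concretized spec:

    env = modifications_from_dependencies(spec, context="build", custom_mods_only=False)
    # env now carries the CMAKE_PREFIX_PATH / PKG_CONFIG_PATH / PATH prepends for the
    # dependencies, ordered by the two post-order traversal passes shown above.
    env.apply_modifications()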
@@ -1017,14 +1017,14 @@ def get_cmake_prefix_path(pkg):
# Note that unlike modifications_from_dependencies, this does not include
# any edits to CMAKE_PREFIX_PATH defined in custom
# setup_dependent_build_environment implementations of dependency packages
- build_deps = set(pkg.spec.dependencies(deptype=('build', 'test')))
- link_deps = set(pkg.spec.traverse(root=False, deptype=('link')))
+ build_deps = set(pkg.spec.dependencies(deptype=("build", "test")))
+ link_deps = set(pkg.spec.traverse(root=False, deptype=("link")))
build_link_deps = build_deps | link_deps
spack_built = []
externals = []
# modifications_from_dependencies updates CMAKE_PREFIX_PATH by first
# prepending all externals and then all non-externals
- for dspec in pkg.spec.traverse(root=False, order='post'):
+ for dspec in pkg.spec.traverse(root=False, order="post"):
if dspec in build_link_deps:
if dspec.external:
externals.insert(0, dspec)
@@ -1032,15 +1032,15 @@ def get_cmake_prefix_path(pkg):
spack_built.insert(0, dspec)
ordered_build_link_deps = spack_built + externals
- build_link_prefixes = filter_system_paths(
- x.prefix for x in ordered_build_link_deps)
+ build_link_prefixes = filter_system_paths(x.prefix for x in ordered_build_link_deps)
return build_link_prefixes
-def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
- input_multiprocess_fd, jsfd1, jsfd2):
+def _setup_pkg_and_run(
+ serialized_pkg, function, kwargs, child_pipe, input_multiprocess_fd, jsfd1, jsfd2
+):
- context = kwargs.get('context', 'build')
+ context = kwargs.get("context", "build")
try:
# We are in the child process. Python sets sys.stdin to
@@ -1053,10 +1053,11 @@ def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
pkg = serialized_pkg.restore()
- if not kwargs.get('fake', False):
- kwargs['unmodified_env'] = os.environ.copy()
- kwargs['env_modifications'] = setup_package(
- pkg, dirty=kwargs.get('dirty', False), context=context)
+ if not kwargs.get("fake", False):
+ kwargs["unmodified_env"] = os.environ.copy()
+ kwargs["env_modifications"] = setup_package(
+ pkg, dirty=kwargs.get("dirty", False), context=context
+ )
return_value = function(pkg, kwargs)
child_pipe.send(return_value)
@@ -1077,25 +1078,30 @@ def _setup_pkg_and_run(serialized_pkg, function, kwargs, child_pipe,
package_context = get_package_context(tb)
logfile = None
- if context == 'build':
+ if context == "build":
try:
- if hasattr(pkg, 'log_path'):
+ if hasattr(pkg, "log_path"):
logfile = pkg.log_path
except NameError:
# 'pkg' is not defined yet
pass
- elif context == 'test':
+ elif context == "test":
logfile = os.path.join(
- pkg.test_suite.stage,
- spack.install_test.TestSuite.test_log_name(pkg.spec))
+ pkg.test_suite.stage, spack.install_test.TestSuite.test_log_name(pkg.spec)
+ )
# make a pickleable exception to send to parent.
msg = "%s: %s" % (exc_type.__name__, str(exc))
- ce = ChildError(msg,
- exc_type.__module__,
- exc_type.__name__,
- tb_string, logfile, context, package_context)
+ ce = ChildError(
+ msg,
+ exc_type.__module__,
+ exc_type.__name__,
+ tb_string,
+ logfile,
+ context,
+ package_context,
+ )
child_pipe.send(ce)
finally:
@@ -1152,21 +1158,28 @@ def start_build_process(pkg, function, kwargs):
try:
# Forward sys.stdin when appropriate, to allow toggling verbosity
- if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin,
- 'fileno'):
+ if sys.platform != "win32" and sys.stdin.isatty() and hasattr(sys.stdin, "fileno"):
input_fd = os.dup(sys.stdin.fileno())
input_multiprocess_fd = MultiProcessFd(input_fd)
- mflags = os.environ.get('MAKEFLAGS', False)
+ mflags = os.environ.get("MAKEFLAGS", False)
if mflags:
- m = re.search(r'--jobserver-[^=]*=(\d),(\d)', mflags)
+ m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
if m:
jobserver_fd1 = MultiProcessFd(int(m.group(1)))
jobserver_fd2 = MultiProcessFd(int(m.group(2)))
p = multiprocessing.Process(
target=_setup_pkg_and_run,
- args=(serialized_pkg, function, kwargs, child_pipe,
- input_multiprocess_fd, jobserver_fd1, jobserver_fd2))
+ args=(
+ serialized_pkg,
+ function,
+ kwargs,
+ child_pipe,
+ input_multiprocess_fd,
+ jobserver_fd1,
+ jobserver_fd2,
+ ),
+ )
p.start()
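The MAKEFLAGS handling a few lines up is easiest to read against a concrete value; this standalone snippet uses a made-up string and the same regular expression:

    import re

    mflags = "-j8 --jobserver-auth=3,4"   # hypothetical value inherited from a parent make
    m = re.search(r"--jobserver-[^=]*=(\d),(\d)", mflags)
    if m:
        read_fd, write_fd = int(m.group(1)), int(m.group(2))   # -> 3, 4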
@@ -1217,6 +1230,7 @@ def get_package_context(traceback, context=3):
from there.
"""
+
def make_stack(tb, stack=None):
"""Tracebacks come out of the system in caller -> callee order. Return
an array in callee -> caller order so we can traverse it."""
@@ -1231,19 +1245,19 @@ def get_package_context(traceback, context=3):
for tb in stack:
frame = tb.tb_frame
- if 'self' in frame.f_locals:
+ if "self" in frame.f_locals:
# Find the first proper subclass of PackageBase.
- obj = frame.f_locals['self']
+ obj = frame.f_locals["self"]
if isinstance(obj, spack.package_base.PackageBase):
break
# We found obj, the Package implementation we care about.
# Point out the location in the install method where we failed.
lines = [
- '{0}:{1:d}, in {2}:'.format(
+ "{0}:{1:d}, in {2}:".format(
inspect.getfile(frame.f_code),
frame.f_lineno - 1, # subtract 1 because f_lineno is 0-indexed
- frame.f_code.co_name
+ frame.f_code.co_name,
)
]
@@ -1254,16 +1268,15 @@ def get_package_context(traceback, context=3):
# Subtract 1 because f_lineno is 0-indexed.
fun_lineno = frame.f_lineno - start - 1
start_ctx = max(0, fun_lineno - context)
- sourcelines = sourcelines[start_ctx:fun_lineno + context + 1]
+ sourcelines = sourcelines[start_ctx : fun_lineno + context + 1]
for i, line in enumerate(sourcelines):
is_error = start_ctx + i == fun_lineno
- mark = '>> ' if is_error else ' '
+ mark = ">> " if is_error else " "
# Add start to get lineno relative to start of file, not function.
- marked = ' {0}{1:-6d}{2}'.format(
- mark, start + start_ctx + i, line.rstrip())
+ marked = " {0}{1:-6d}{2}".format(mark, start + start_ctx + i, line.rstrip())
if is_error:
- marked = colorize('@R{%s}' % cescape(marked))
+ marked = colorize("@R{%s}" % cescape(marked))
lines.append(marked)
return lines
@@ -1311,12 +1324,12 @@ class ChildError(InstallError):
mode with spack -d.
"""
+
# List of errors considered "build errors", for which we'll show log
# context instead of Python context.
- build_errors = [('spack.util.executable', 'ProcessError')]
+ build_errors = [("spack.util.executable", "ProcessError")]
- def __init__(self, msg, module, classname, traceback_string, log_name,
- log_type, context):
+ def __init__(self, msg, module, classname, traceback_string, log_name, log_type, context):
super(ChildError, self).__init__(msg)
self.module = module
self.name = classname
@@ -1328,7 +1341,7 @@ class ChildError(InstallError):
@property
def long_message(self):
out = StringIO()
- out.write(self._long_message if self._long_message else '')
+ out.write(self._long_message if self._long_message else "")
have_log = self.log_name and os.path.exists(self.log_name)
@@ -1342,16 +1355,16 @@ class ChildError(InstallError):
# The error happened in the Python code, so try to show
# some context from the Package itself.
if self.context:
- out.write('\n')
- out.write('\n'.join(self.context))
- out.write('\n')
+ out.write("\n")
+ out.write("\n".join(self.context))
+ out.write("\n")
if out.getvalue():
- out.write('\n')
+ out.write("\n")
if have_log:
- out.write('See {0} log for details:\n'.format(self.log_type))
- out.write(' {0}\n'.format(self.log_name))
+ out.write("See {0} log for details:\n".format(self.log_type))
+ out.write(" {0}\n".format(self.log_name))
return out.getvalue()
@@ -1371,7 +1384,8 @@ class ChildError(InstallError):
self.traceback,
self.log_name,
self.log_type,
- self.context)
+ self.context,
+ )
def _make_child_error(msg, module, name, traceback, log, log_type, context):
@@ -1381,6 +1395,7 @@ def _make_child_error(msg, module, name, traceback, log, log_type, context):
class StopPhase(spack.error.SpackError):
"""Pickle-able exception to control stopped builds."""
+
def __reduce__(self):
return _make_stop_phase, (self.message, self.long_message)
@@ -1400,9 +1415,7 @@ def write_log_summary(out, log_type, log, last=None):
nerr = last
# If errors are found, only display errors
- out.write(
- "\n%s found in %s log:\n" %
- (plural(nerr, 'error'), log_type))
+ out.write("\n%s found in %s log:\n" % (plural(nerr, "error"), log_type))
out.write(make_log_context(errors))
elif nwar > 0:
if last and nwar > last:
@@ -1410,7 +1423,5 @@ def write_log_summary(out, log_type, log, last=None):
nwar = last
# If no errors are found but warnings are, display warnings
- out.write(
- "\n%s found in %s log:\n" %
- (plural(nwar, 'warning'), log_type))
+ out.write("\n%s found in %s log:\n" % (plural(nwar, "warning"), log_type))
out.write(make_log_context(warnings))
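For reference, with two errors detected in a build log the reformatted header above renders as follows (the count is hypothetical):

    "\n%s found in %s log:\n" % ("2 errors", "build")
    # -> "\n2 errors found in build log:\n", followed by make_log_context(errors)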
diff --git a/lib/spack/spack/build_systems/aspell_dict.py b/lib/spack/spack/build_systems/aspell_dict.py
index 5927973906..f1e41cc3df 100644
--- a/lib/spack/spack/build_systems/aspell_dict.py
+++ b/lib/spack/spack/build_systems/aspell_dict.py
@@ -23,29 +23,32 @@ from spack.util.executable import which
class AspellDictPackage(AutotoolsPackage):
"""Specialized class for building aspell dictionairies."""
- extends('aspell')
+ extends("aspell")
def view_destination(self, view):
- aspell_spec = self.spec['aspell']
+ aspell_spec = self.spec["aspell"]
if view.get_projection_for_spec(aspell_spec) != aspell_spec.prefix:
- raise ExtensionError(
- 'aspell does not support non-global extensions')
+ raise ExtensionError("aspell does not support non-global extensions")
aspell = aspell_spec.command
- return aspell('dump', 'config', 'dict-dir', output=str).strip()
+ return aspell("dump", "config", "dict-dir", output=str).strip()
def view_source(self):
return self.prefix.lib
def patch(self):
- filter_file(r'^dictdir=.*$', 'dictdir=/lib', 'configure')
- filter_file(r'^datadir=.*$', 'datadir=/lib', 'configure')
+ filter_file(r"^dictdir=.*$", "dictdir=/lib", "configure")
+ filter_file(r"^datadir=.*$", "datadir=/lib", "configure")
def configure(self, spec, prefix):
- aspell = spec['aspell'].prefix.bin.aspell
- prezip = spec['aspell'].prefix.bin.prezip
+ aspell = spec["aspell"].prefix.bin.aspell
+ prezip = spec["aspell"].prefix.bin.prezip
destdir = prefix
- sh = which('sh')
- sh('./configure', '--vars', "ASPELL={0}".format(aspell),
- "PREZIP={0}".format(prezip),
- "DESTDIR={0}".format(destdir))
+ sh = which("sh")
+ sh(
+ "./configure",
+ "--vars",
+ "ASPELL={0}".format(aspell),
+ "PREZIP={0}".format(prezip),
+ "DESTDIR={0}".format(destdir),
+ )
diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py
index 6e2b820c90..0a9946a1c0 100644
--- a/lib/spack/spack/build_systems/autotools.py
+++ b/lib/spack/spack/build_systems/autotools.py
@@ -52,11 +52,12 @@ class AutotoolsPackage(PackageBase):
+-----------------------------------------------+--------------------+
"""
+
#: Phases of a GNU Autotools package
- phases = ['autoreconf', 'configure', 'build', 'install']
+ phases = ["autoreconf", "configure", "build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'AutotoolsPackage'
+ build_system_class = "AutotoolsPackage"
@property
def patch_config_files(self):
@@ -71,9 +72,11 @@ class AutotoolsPackage(PackageBase):
the directory containing the system ``config.guess`` and ``config.sub``
files.
"""
- return (self.spec.satisfies('target=ppc64le:')
- or self.spec.satisfies('target=aarch64:')
- or self.spec.satisfies('target=riscv64:'))
+ return (
+ self.spec.satisfies("target=ppc64le:")
+ or self.spec.satisfies("target=aarch64:")
+ or self.spec.satisfies("target=riscv64:")
+ )
#: Whether or not to update ``libtool``
#: (currently only for Arm/Clang/Fujitsu/NVHPC compilers)
@@ -84,13 +87,13 @@ class AutotoolsPackage(PackageBase):
build_targets = [] # type: List[str]
#: Targets for ``make`` during the :py:meth:`~.AutotoolsPackage.install`
#: phase
- install_targets = ['install']
+ install_targets = ["install"]
#: Callback names for build-time test
- build_time_test_callbacks = ['check']
+ build_time_test_callbacks = ["check"]
#: Callback names for install-time test
- install_time_test_callbacks = ['installcheck']
+ install_time_test_callbacks = ["installcheck"]
#: Set to true to force the autoreconf step even if configure is present
force_autoreconf = False
@@ -101,10 +104,10 @@ class AutotoolsPackage(PackageBase):
#: after the installation. If True instead it installs them.
install_libtool_archives = False
- depends_on('gnuconfig', type='build', when='target=ppc64le:')
- depends_on('gnuconfig', type='build', when='target=aarch64:')
- depends_on('gnuconfig', type='build', when='target=riscv64:')
- conflicts('platform=windows')
+ depends_on("gnuconfig", type="build", when="target=ppc64le:")
+ depends_on("gnuconfig", type="build", when="target=aarch64:")
+ depends_on("gnuconfig", type="build", when="target=riscv64:")
+ conflicts("platform=windows")
@property
def _removed_la_files_log(self):
@@ -112,17 +115,17 @@ class AutotoolsPackage(PackageBase):
build_dir = self.build_directory
if not os.path.isabs(self.build_directory):
build_dir = os.path.join(self.stage.path, build_dir)
- return os.path.join(build_dir, 'removed_la_files.txt')
+ return os.path.join(build_dir, "removed_la_files.txt")
@property
def archive_files(self):
"""Files to archive for packages based on autotools"""
- files = [os.path.join(self.build_directory, 'config.log')]
+ files = [os.path.join(self.build_directory, "config.log")]
if not self.install_libtool_archives:
files.append(self._removed_la_files_log)
return files
- @run_after('autoreconf')
+ @run_after("autoreconf")
def _do_patch_config_files(self):
"""Some packages ship with older config.guess/config.sub files and
need to have these updated when installed on a newer architecture.
@@ -136,20 +139,18 @@ class AutotoolsPackage(PackageBase):
# TODO: Expand this to select the 'config.sub'-compatible architecture
# for each platform (e.g. 'config.sub' doesn't accept 'power9le', but
# does accept 'ppc64le').
- if self.spec.satisfies('target=ppc64le:'):
- config_arch = 'ppc64le'
- elif self.spec.satisfies('target=aarch64:'):
- config_arch = 'aarch64'
- elif self.spec.satisfies('target=riscv64:'):
- config_arch = 'riscv64'
+ if self.spec.satisfies("target=ppc64le:"):
+ config_arch = "ppc64le"
+ elif self.spec.satisfies("target=aarch64:"):
+ config_arch = "aarch64"
+ elif self.spec.satisfies("target=riscv64:"):
+ config_arch = "riscv64"
else:
- config_arch = 'local'
+ config_arch = "local"
def runs_ok(script_abs_path):
# Construct the list of arguments for the call
- additional_args = {
- 'config.sub': [config_arch]
- }
+ additional_args = {"config.sub": [config_arch]}
script_name = os.path.basename(script_abs_path)
args = [script_abs_path] + additional_args.get(script_name, [])
@@ -162,7 +163,7 @@ class AutotoolsPackage(PackageBase):
return True
# Get the list of files that needs to be patched
- to_be_patched = fs.find(self.stage.path, files=['config.sub', 'config.guess'])
+ to_be_patched = fs.find(self.stage.path, files=["config.sub", "config.guess"])
to_be_patched = [f for f in to_be_patched if not runs_ok(f)]
# If there are no files to be patched, return early
@@ -171,33 +172,37 @@ class AutotoolsPackage(PackageBase):
# Otherwise, require `gnuconfig` to be a build dependency
self._require_build_deps(
- pkgs=['gnuconfig'],
- spec=self.spec,
- err="Cannot patch config files")
+ pkgs=["gnuconfig"], spec=self.spec, err="Cannot patch config files"
+ )
# Get the config files we need to patch (config.sub / config.guess).
to_be_found = list(set(os.path.basename(f) for f in to_be_patched))
- gnuconfig = self.spec['gnuconfig']
+ gnuconfig = self.spec["gnuconfig"]
gnuconfig_dir = gnuconfig.prefix
# An external gnuconfig may not have a prefix.
if gnuconfig_dir is None:
- raise InstallError("Spack could not find substitutes for GNU config "
- "files because no prefix is available for the "
- "`gnuconfig` package. Make sure you set a prefix "
- "path instead of modules for external `gnuconfig`.")
+ raise InstallError(
+ "Spack could not find substitutes for GNU config "
+ "files because no prefix is available for the "
+ "`gnuconfig` package. Make sure you set a prefix "
+ "path instead of modules for external `gnuconfig`."
+ )
candidates = fs.find(gnuconfig_dir, files=to_be_found, recursive=False)
# For external packages the user may have specified an incorrect prefix.
# otherwise the installation is just corrupt.
if not candidates:
- msg = ("Spack could not find `config.guess` and `config.sub` "
- "files in the `gnuconfig` prefix `{0}`. This means the "
- "`gnuconfig` package is broken").format(gnuconfig_dir)
+ msg = (
+ "Spack could not find `config.guess` and `config.sub` "
+ "files in the `gnuconfig` prefix `{0}`. This means the "
+ "`gnuconfig` package is broken"
+ ).format(gnuconfig_dir)
if gnuconfig.external:
- msg += (" or the `gnuconfig` package prefix is misconfigured as"
- " an external package")
+ msg += (
+ " or the `gnuconfig` package prefix is misconfigured as" " an external package"
+ )
raise InstallError(msg)
# Filter working substitutes
@@ -223,7 +228,7 @@ To resolve this problem, please try the following:
and set the prefix to the directory containing the `config.guess` and
`config.sub` files.
"""
- raise InstallError(msg.format(', '.join(to_be_found), self.name))
+ raise InstallError(msg.format(", ".join(to_be_found), self.name))
# Copy the good files over the bad ones
for abs_path in to_be_patched:
@@ -233,7 +238,7 @@ To resolve this problem, please try the following:
fs.copy(substitutes[name], abs_path)
os.chmod(abs_path, mode)
- @run_before('configure')
+ @run_before("configure")
def _set_autotools_environment_variables(self):
"""Many autotools builds use a version of mknod.m4 that fails when
running as root unless FORCE_UNSAFE_CONFIGURE is set to 1.
@@ -248,7 +253,7 @@ To resolve this problem, please try the following:
"""
os.environ["FORCE_UNSAFE_CONFIGURE"] = "1"
- @run_after('configure')
+ @run_after("configure")
def _do_patch_libtool(self):
"""If configure generates a "libtool" script that does not correctly
detect the compiler (and patch_libtool is set), patch in the correct
@@ -258,29 +263,34 @@ To resolve this problem, please try the following:
if not self.patch_libtool:
return
- for libtool_path in fs.find(
- self.build_directory, 'libtool', recursive=True):
+ for libtool_path in fs.find(self.build_directory, "libtool", recursive=True):
self._patch_libtool(libtool_path)
def _patch_libtool(self, libtool_path):
if (
- self.spec.satisfies('%arm') or
- self.spec.satisfies('%clang') or
- self.spec.satisfies('%fj') or
- self.spec.satisfies('%nvhpc')
+ self.spec.satisfies("%arm")
+ or self.spec.satisfies("%clang")
+ or self.spec.satisfies("%fj")
+ or self.spec.satisfies("%nvhpc")
):
fs.filter_file('wl=""\n', 'wl="-Wl,"\n', libtool_path)
- fs.filter_file('pic_flag=""\n',
- 'pic_flag="{0}"\n'
- .format(self.compiler.cc_pic_flag),
- libtool_path)
- if self.spec.satisfies('%fj'):
- fs.filter_file('-nostdlib', '', libtool_path)
- rehead = r'/\S*/'
- objfile = ['fjhpctag.o', 'fjcrt0.o', 'fjlang08.o', 'fjomp.o',
- 'crti.o', 'crtbeginS.o', 'crtendS.o']
+ fs.filter_file(
+ 'pic_flag=""\n', 'pic_flag="{0}"\n'.format(self.compiler.cc_pic_flag), libtool_path
+ )
+ if self.spec.satisfies("%fj"):
+ fs.filter_file("-nostdlib", "", libtool_path)
+ rehead = r"/\S*/"
+ objfile = [
+ "fjhpctag.o",
+ "fjcrt0.o",
+ "fjlang08.o",
+ "fjomp.o",
+ "crti.o",
+ "crtbeginS.o",
+ "crtendS.o",
+ ]
for o in objfile:
- fs.filter_file(rehead + o, '', libtool_path)
+ fs.filter_file(rehead + o, "", libtool_path)
@property
def configure_directory(self):
@@ -293,9 +303,7 @@ To resolve this problem, please try the following:
@property
def configure_abs_path(self):
# Absolute path to configure
- configure_abs_path = os.path.join(
- os.path.abspath(self.configure_directory), 'configure'
- )
+ configure_abs_path = os.path.join(os.path.abspath(self.configure_directory), "configure")
return configure_abs_path
@property
@@ -303,7 +311,7 @@ To resolve this problem, please try the following:
"""Override to provide another place to build the package"""
return self.configure_directory
- @run_before('autoreconf')
+ @run_before("autoreconf")
def delete_configure_to_force_update(self):
if self.force_autoreconf:
force_remove(self.configure_abs_path)
@@ -312,20 +320,20 @@ To resolve this problem, please try the following:
"""Require `pkgs` to be direct build dependencies of `spec`. Raises a
RuntimeError with a helpful error message when any dep is missing."""
- build_deps = [d.name for d in spec.dependencies(deptype='build')]
+ build_deps = [d.name for d in spec.dependencies(deptype="build")]
missing_deps = [x for x in pkgs if x not in build_deps]
if not missing_deps:
return
# Raise an exception on missing deps.
- msg = ("{0}: missing dependencies: {1}.\n\nPlease add "
- "the following lines to the package:\n\n"
- .format(err, ", ".join(missing_deps)))
+ msg = (
+ "{0}: missing dependencies: {1}.\n\nPlease add "
+ "the following lines to the package:\n\n".format(err, ", ".join(missing_deps))
+ )
for dep in missing_deps:
- msg += (" depends_on('{0}', type='build', when='@{1}')\n"
- .format(dep, spec.version))
+ msg += " depends_on('{0}', type='build', when='@{1}')\n".format(dep, spec.version)
msg += "\nUpdate the version (when='@{0}') as needed.".format(spec.version)
raise RuntimeError(msg)
@@ -339,20 +347,19 @@ To resolve this problem, please try the following:
# Else try to regenerate it, which requires a few build dependencies
self._require_build_deps(
- pkgs=['autoconf', 'automake', 'libtool'],
- spec=spec,
- err="Cannot generate configure")
-
- tty.msg('Configure script not found: trying to generate it')
- tty.warn('*********************************************************')
- tty.warn('* If the default procedure fails, consider implementing *')
- tty.warn('* a custom AUTORECONF phase in the package *')
- tty.warn('*********************************************************')
+ pkgs=["autoconf", "automake", "libtool"], spec=spec, err="Cannot generate configure"
+ )
+
+ tty.msg("Configure script not found: trying to generate it")
+ tty.warn("*********************************************************")
+ tty.warn("* If the default procedure fails, consider implementing *")
+ tty.warn("* a custom AUTORECONF phase in the package *")
+ tty.warn("*********************************************************")
with working_dir(self.configure_directory):
m = inspect.getmodule(self)
# This line is what is needed most of the time
# --install, --verbose, --force
- autoreconf_args = ['-ivf']
+ autoreconf_args = ["-ivf"]
autoreconf_args += self.autoreconf_search_path_args
autoreconf_args += self.autoreconf_extra_args
m.autoreconf(*autoreconf_args)
@@ -365,7 +372,7 @@ To resolve this problem, please try the following:
spack dependencies."""
return _autoreconf_search_path_args(self.spec)
- @run_after('autoreconf')
+ @run_after("autoreconf")
def set_configure_or_die(self):
"""Checks the presence of a ``configure`` file after the
autoreconf phase. If it is found sets a module attribute
@@ -376,13 +383,11 @@ To resolve this problem, please try the following:
"""
# Check if a configure script is there. If not raise a RuntimeError.
if not os.path.exists(self.configure_abs_path):
- msg = 'configure script not found in {0}'
+ msg = "configure script not found in {0}"
raise RuntimeError(msg.format(self.configure_directory))
# Monkey-patch the configure script in the corresponding module
- inspect.getmodule(self).configure = Executable(
- self.configure_abs_path
- )
+ inspect.getmodule(self).configure = Executable(self.configure_abs_path)
def configure_args(self):
"""Produces a list containing all the arguments that must be passed to
@@ -396,16 +401,16 @@ To resolve this problem, please try the following:
"""Produces a list of all command line arguments to pass specified
compiler flags to configure."""
# Has to be dynamic attribute due to caching.
- setattr(self, 'configure_flag_args', [])
+ setattr(self, "configure_flag_args", [])
for flag, values in flags.items():
if values:
- values_str = '{0}={1}'.format(flag.upper(), ' '.join(values))
+ values_str = "{0}={1}".format(flag.upper(), " ".join(values))
self.configure_flag_args.append(values_str)
# Spack's fflags are meant for both F77 and FC, therefore we
# additionally set FCFLAGS if required.
- values = flags.get('fflags', None)
+ values = flags.get("fflags", None)
if values:
- values_str = 'FCFLAGS={0}'.format(' '.join(values))
+ values_str = "FCFLAGS={0}".format(" ".join(values))
self.configure_flag_args.append(values_str)
def configure(self, spec, prefix):
@@ -413,26 +418,25 @@ To resolve this problem, please try the following:
:meth:`~spack.build_systems.autotools.AutotoolsPackage.configure_args`
and an appropriately set prefix.
"""
- options = getattr(self, 'configure_flag_args', [])
- options += ['--prefix={0}'.format(prefix)]
+ options = getattr(self, "configure_flag_args", [])
+ options += ["--prefix={0}".format(prefix)]
options += self.configure_args()
with working_dir(self.build_directory, create=True):
inspect.getmodule(self).configure(*options)
def setup_build_environment(self, env):
- if (self.spec.platform == 'darwin'
- and macos_version() >= Version('11')):
+ if self.spec.platform == "darwin" and macos_version() >= Version("11"):
# Many configure files rely on matching '10.*' for macOS version
# detection and fail to add flags if it shows as version 11.
- env.set('MACOSX_DEPLOYMENT_TARGET', '10.16')
+ env.set("MACOSX_DEPLOYMENT_TARGET", "10.16")
def build(self, spec, prefix):
"""Makes the build targets specified by
:py:attr:``~.AutotoolsPackage.build_targets``
"""
# See https://autotools.io/automake/silent.html
- params = ['V=1']
+ params = ["V=1"]
params += self.build_targets
with working_dir(self.build_directory):
inspect.getmodule(self).make(*params)
@@ -444,23 +448,18 @@ To resolve this problem, please try the following:
with working_dir(self.build_directory):
inspect.getmodule(self).make(*self.install_targets)
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
def check(self):
"""Searches the Makefile for targets ``test`` and ``check``
and runs them if found.
"""
with working_dir(self.build_directory):
- self._if_make_target_execute('test')
- self._if_make_target_execute('check')
+ self._if_make_target_execute("test")
+ self._if_make_target_execute("check")
def _activate_or_not(
- self,
- name,
- activation_word,
- deactivation_word,
- activation_value=None,
- variant=None
+ self, name, activation_word, deactivation_word, activation_value=None, variant=None
):
"""This function contains the current implementation details of
:meth:`~spack.build_systems.autotools.AutotoolsPackage.with_or_without` and
@@ -523,7 +522,7 @@ To resolve this problem, please try the following:
spec = self.spec
args = []
- if activation_value == 'prefix':
+ if activation_value == "prefix":
activation_value = lambda x: spec[x].prefix
variant = variant or name
@@ -544,45 +543,41 @@ To resolve this problem, please try the following:
# BoolValuedVariant carry information about a single option.
# Nonetheless, for uniformity of treatment we'll package them
# in an iterable of one element.
- condition = '+{name}'.format(name=variant)
+ condition = "+{name}".format(name=variant)
options = [(name, condition in spec)]
else:
- condition = '{variant}={value}'
+ condition = "{variant}={value}"
# "feature_values" is used to track values which correspond to
# features which can be enabled or disabled as understood by the
# package's build system. It excludes values which have special
# meanings and do not correspond to features (e.g. "none")
- feature_values = getattr(
- variant_desc.values, 'feature_values', None
- ) or variant_desc.values
+ feature_values = (
+ getattr(variant_desc.values, "feature_values", None) or variant_desc.values
+ )
options = [
- (value,
- condition.format(variant=variant,
- value=value) in spec)
+ (value, condition.format(variant=variant, value=value) in spec)
for value in feature_values
]
# For each allowed value in the list of values
for option_value, activated in options:
# Search for an override in the package for this value
- override_name = '{0}_or_{1}_{2}'.format(
+ override_name = "{0}_or_{1}_{2}".format(
activation_word, deactivation_word, option_value
)
line_generator = getattr(self, override_name, None)
# If not available use a sensible default
if line_generator is None:
+
def _default_generator(is_activated):
if is_activated:
- line = '--{0}-{1}'.format(
- activation_word, option_value
- )
+ line = "--{0}-{1}".format(activation_word, option_value)
if activation_value is not None and activation_value(option_value):
- line += '={0}'.format(
- activation_value(option_value)
- )
+ line += "={0}".format(activation_value(option_value))
return line
- return '--{0}-{1}'.format(deactivation_word, option_value)
+ return "--{0}-{1}".format(deactivation_word, option_value)
+
line_generator = _default_generator
args.append(line_generator(activated))
return args
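A hedged usage sketch for the with/without and enable/disable helpers built on this method; the package variant and prefix are hypothetical:

    # In a package that declares variant("mpi", default=True):
    self.with_or_without("mpi")
    # +mpi -> ["--with-mpi"]        ~mpi -> ["--without-mpi"]

    self.with_or_without("mpi", activation_value="prefix")
    # +mpi -> ["--with-mpi=/path/to/mpich/prefix"]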
@@ -613,8 +608,7 @@ To resolve this problem, please try the following:
Returns:
list of arguments to configure
"""
- return self._activate_or_not(name, 'with', 'without', activation_value,
- variant)
+ return self._activate_or_not(name, "with", "without", activation_value, variant)
def enable_or_disable(self, name, activation_value=None, variant=None):
"""Same as
@@ -633,23 +627,21 @@ To resolve this problem, please try the following:
Returns:
list of arguments to configure
"""
- return self._activate_or_not(
- name, 'enable', 'disable', activation_value, variant
- )
+ return self._activate_or_not(name, "enable", "disable", activation_value, variant)
- run_after('install')(PackageBase._run_default_install_time_test_callbacks)
+ run_after("install")(PackageBase._run_default_install_time_test_callbacks)
def installcheck(self):
"""Searches the Makefile for an ``installcheck`` target
and runs it if found.
"""
with working_dir(self.build_directory):
- self._if_make_target_execute('installcheck')
+ self._if_make_target_execute("installcheck")
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
- @run_after('install')
+ @run_after("install")
def remove_libtool_archives(self):
"""Remove all .la files in prefix sub-folders if the package sets
``install_libtool_archives`` to be False.
@@ -659,14 +651,14 @@ To resolve this problem, please try the following:
return
# Remove the files and create a log of what was removed
- libtool_files = fs.find(str(self.prefix), '*.la', recursive=True)
+ libtool_files = fs.find(str(self.prefix), "*.la", recursive=True)
with fs.safe_remove(*libtool_files):
fs.mkdirp(os.path.dirname(self._removed_la_files_log))
- with open(self._removed_la_files_log, mode='w') as f:
- f.write('\n'.join(libtool_files))
+ with open(self._removed_la_files_log, mode="w") as f:
+ f.write("\n".join(libtool_files))
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
- run_after('install')(PackageBase.apply_macos_rpath_fixups)
+ run_after("install")(PackageBase.apply_macos_rpath_fixups)
def _autoreconf_search_path_args(spec):
@@ -674,7 +666,7 @@ def _autoreconf_search_path_args(spec):
flags_spack, flags_external = [], []
# We don't want to add an include flag for automake's default search path.
- for automake in spec.dependencies(name='automake', deptype='build'):
+ for automake in spec.dependencies(name="automake", deptype="build"):
try:
s = os.stat(automake.prefix.share.aclocal)
if stat.S_ISDIR(s.st_mode):
@@ -682,7 +674,7 @@ def _autoreconf_search_path_args(spec):
except OSError:
pass
- for dep in spec.dependencies(deptype='build'):
+ for dep in spec.dependencies(deptype="build"):
path = dep.prefix.share.aclocal
# Skip non-existing aclocal paths
try:
@@ -694,5 +686,5 @@ def _autoreconf_search_path_args(spec):
continue
dirs_seen.add((s.st_ino, s.st_dev))
flags = flags_external if dep.external else flags_spack
- flags.extend(['-I', path])
+ flags.extend(["-I", path])
return flags_spack + flags_external
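For readers skimming the reformatted helpers above, a minimal sketch of how a package typically calls with_or_without/enable_or_disable from configure_args(). The package name, variants, and resulting flags are illustrative assumptions, not part of this diff; the preamble import is the usual Spack package one.

    from spack.package import *  # assumed standard package preamble

    class Libfoo(AutotoolsPackage):
        """Hypothetical package, for illustration only."""

        variant("shared", default=True, description="Build shared libraries")
        variant("codecs", values=("jpeg", "png"), default="jpeg", multi=True,
                description="Optional codecs to enable")

        def configure_args(self):
            args = []
            # +shared -> --enable-shared, ~shared -> --disable-shared
            args.extend(self.enable_or_disable("shared"))
            # codecs=jpeg -> --with-jpeg --without-png, and so on per value,
            # following the _activate_or_not logic shown above
            args.extend(self.with_or_without("codecs"))
            return args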
diff --git a/lib/spack/spack/build_systems/cached_cmake.py b/lib/spack/spack/build_systems/cached_cmake.py
index 47b95f6f89..52e3d82411 100644
--- a/lib/spack/spack/build_systems/cached_cmake.py
+++ b/lib/spack/spack/build_systems/cached_cmake.py
@@ -36,7 +36,7 @@ class CachedCMakePackage(CMakePackage):
sidestep certain parsing bugs in extremely long ``cmake`` commands, and to
avoid system limits on the length of the command line."""
- phases = ['initconfig', 'cmake', 'build', 'install']
+ phases = ["initconfig", "cmake", "build", "install"]
@property
def cache_name(self):
@@ -52,7 +52,7 @@ class CachedCMakePackage(CMakePackage):
return os.path.join(self.stage.source_path, self.cache_name)
def flag_handler(self, name, flags):
- if name in ('cflags', 'cxxflags', 'cppflags', 'fflags'):
+ if name in ("cflags", "cxxflags", "cppflags", "fflags"):
return (None, None, None) # handled in the cmake cache
return (flags, None, None)
@@ -64,10 +64,8 @@ class CachedCMakePackage(CMakePackage):
# Fortran compiler is optional
if "FC" in os.environ:
- spack_fc_entry = cmake_cache_path(
- "CMAKE_Fortran_COMPILER", os.environ['FC'])
- system_fc_entry = cmake_cache_path(
- "CMAKE_Fortran_COMPILER", self.compiler.fc)
+ spack_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", os.environ["FC"])
+ system_fc_entry = cmake_cache_path("CMAKE_Fortran_COMPILER", self.compiler.fc)
else:
spack_fc_entry = "# No Fortran compiler defined in spec"
system_fc_entry = "# No Fortran compiler defined in spec"
@@ -78,33 +76,29 @@ class CachedCMakePackage(CMakePackage):
"#------------------{0}".format("-" * 60),
"# Compiler Spec: {0}".format(spec.compiler),
"#------------------{0}".format("-" * 60),
- 'if(DEFINED ENV{SPACK_CC})\n',
- ' ' + cmake_cache_path(
- "CMAKE_C_COMPILER", os.environ['CC']),
- ' ' + cmake_cache_path(
- "CMAKE_CXX_COMPILER", os.environ['CXX']),
- ' ' + spack_fc_entry,
- 'else()\n',
- ' ' + cmake_cache_path(
- "CMAKE_C_COMPILER", self.compiler.cc),
- ' ' + cmake_cache_path(
- "CMAKE_CXX_COMPILER", self.compiler.cxx),
- ' ' + system_fc_entry,
- 'endif()\n'
+ "if(DEFINED ENV{SPACK_CC})\n",
+ " " + cmake_cache_path("CMAKE_C_COMPILER", os.environ["CC"]),
+ " " + cmake_cache_path("CMAKE_CXX_COMPILER", os.environ["CXX"]),
+ " " + spack_fc_entry,
+ "else()\n",
+ " " + cmake_cache_path("CMAKE_C_COMPILER", self.compiler.cc),
+ " " + cmake_cache_path("CMAKE_CXX_COMPILER", self.compiler.cxx),
+ " " + system_fc_entry,
+ "endif()\n",
]
# use global spack compiler flags
- cppflags = ' '.join(spec.compiler_flags['cppflags'])
+ cppflags = " ".join(spec.compiler_flags["cppflags"])
if cppflags:
# avoid always ending up with ' ' with no flags defined
- cppflags += ' '
- cflags = cppflags + ' '.join(spec.compiler_flags['cflags'])
+ cppflags += " "
+ cflags = cppflags + " ".join(spec.compiler_flags["cflags"])
if cflags:
entries.append(cmake_cache_string("CMAKE_C_FLAGS", cflags))
- cxxflags = cppflags + ' '.join(spec.compiler_flags['cxxflags'])
+ cxxflags = cppflags + " ".join(spec.compiler_flags["cxxflags"])
if cxxflags:
entries.append(cmake_cache_string("CMAKE_CXX_FLAGS", cxxflags))
- fflags = ' '.join(spec.compiler_flags['fflags'])
+ fflags = " ".join(spec.compiler_flags["fflags"])
if fflags:
entries.append(cmake_cache_string("CMAKE_Fortran_FLAGS", fflags))
@@ -113,7 +107,7 @@ class CachedCMakePackage(CMakePackage):
def initconfig_mpi_entries(self):
spec = self.spec
- if not spec.satisfies('^mpi'):
+ if not spec.satisfies("^mpi"):
return []
entries = [
@@ -122,32 +116,27 @@ class CachedCMakePackage(CMakePackage):
"#------------------{0}\n".format("-" * 60),
]
- entries.append(cmake_cache_path("MPI_C_COMPILER",
- spec['mpi'].mpicc))
- entries.append(cmake_cache_path("MPI_CXX_COMPILER",
- spec['mpi'].mpicxx))
- entries.append(cmake_cache_path("MPI_Fortran_COMPILER",
- spec['mpi'].mpifc))
+ entries.append(cmake_cache_path("MPI_C_COMPILER", spec["mpi"].mpicc))
+ entries.append(cmake_cache_path("MPI_CXX_COMPILER", spec["mpi"].mpicxx))
+ entries.append(cmake_cache_path("MPI_Fortran_COMPILER", spec["mpi"].mpifc))
# Check for slurm
using_slurm = False
- slurm_checks = ['+slurm',
- 'schedulers=slurm',
- 'process_managers=slurm']
- if any(spec['mpi'].satisfies(variant) for variant in slurm_checks):
+ slurm_checks = ["+slurm", "schedulers=slurm", "process_managers=slurm"]
+ if any(spec["mpi"].satisfies(variant) for variant in slurm_checks):
using_slurm = True
# Determine MPIEXEC
if using_slurm:
- if spec['mpi'].external:
+ if spec["mpi"].external:
# Heuristic until we have dependents on externals
- mpiexec = '/usr/bin/srun'
+ mpiexec = "/usr/bin/srun"
else:
- mpiexec = os.path.join(spec['slurm'].prefix.bin, 'srun')
+ mpiexec = os.path.join(spec["slurm"].prefix.bin, "srun")
else:
- mpiexec = os.path.join(spec['mpi'].prefix.bin, 'mpirun')
+ mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpirun")
if not os.path.exists(mpiexec):
- mpiexec = os.path.join(spec['mpi'].prefix.bin, 'mpiexec')
+ mpiexec = os.path.join(spec["mpi"].prefix.bin, "mpiexec")
if not os.path.exists(mpiexec):
msg = "Unable to determine MPIEXEC, %s tests may fail" % self.name
@@ -156,9 +145,8 @@ class CachedCMakePackage(CMakePackage):
else:
# starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
# vs the older versions which expect MPIEXEC
- if self.spec["cmake"].satisfies('@3.10:'):
- entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE",
- mpiexec))
+ if self.spec["cmake"].satisfies("@3.10:"):
+ entries.append(cmake_cache_path("MPIEXEC_EXECUTABLE", mpiexec))
else:
entries.append(cmake_cache_path("MPIEXEC", mpiexec))
@@ -179,24 +167,22 @@ class CachedCMakePackage(CMakePackage):
"#------------------{0}\n".format("-" * 60),
]
- if spec.satisfies('^cuda'):
+ if spec.satisfies("^cuda"):
entries.append("#------------------{0}".format("-" * 30))
entries.append("# Cuda")
entries.append("#------------------{0}\n".format("-" * 30))
- cudatoolkitdir = spec['cuda'].prefix
- entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR",
- cudatoolkitdir))
+ cudatoolkitdir = spec["cuda"].prefix
+ entries.append(cmake_cache_path("CUDA_TOOLKIT_ROOT_DIR", cudatoolkitdir))
cudacompiler = "${CUDA_TOOLKIT_ROOT_DIR}/bin/nvcc"
- entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER",
- cudacompiler))
+ entries.append(cmake_cache_path("CMAKE_CUDA_COMPILER", cudacompiler))
- if spec.satisfies('^mpi'):
- entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER",
- "${MPI_CXX_COMPILER}"))
+ if spec.satisfies("^mpi"):
+ entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${MPI_CXX_COMPILER}"))
else:
- entries.append(cmake_cache_path("CMAKE_CUDA_HOST_COMPILER",
- "${CMAKE_CXX_COMPILER}"))
+ entries.append(
+ cmake_cache_path("CMAKE_CUDA_HOST_COMPILER", "${CMAKE_CXX_COMPILER}")
+ )
return entries
@@ -205,8 +191,7 @@ class CachedCMakePackage(CMakePackage):
"#------------------{0}".format("-" * 60),
"# !!!! This is a generated file, edit at own risk !!!!",
"#------------------{0}".format("-" * 60),
- "# CMake executable path: {0}".format(
- self.spec['cmake'].command.path),
+ "# CMake executable path: {0}".format(self.spec["cmake"].command.path),
"#------------------{0}\n".format("-" * 60),
]
@@ -215,24 +200,26 @@ class CachedCMakePackage(CMakePackage):
return []
def initconfig(self, spec, prefix):
- cache_entries = (self.std_initconfig_entries() +
- self.initconfig_compiler_entries() +
- self.initconfig_mpi_entries() +
- self.initconfig_hardware_entries() +
- self.initconfig_package_entries())
+ cache_entries = (
+ self.std_initconfig_entries()
+ + self.initconfig_compiler_entries()
+ + self.initconfig_mpi_entries()
+ + self.initconfig_hardware_entries()
+ + self.initconfig_package_entries()
+ )
- with open(self.cache_name, 'w') as f:
+ with open(self.cache_name, "w") as f:
for entry in cache_entries:
- f.write('%s\n' % entry)
- f.write('\n')
+ f.write("%s\n" % entry)
+ f.write("\n")
@property
def std_cmake_args(self):
args = super(CachedCMakePackage, self).std_cmake_args
- args.extend(['-C', self.cache_path])
+ args.extend(["-C", self.cache_path])
return args
- @run_after('install')
+ @run_after("install")
def install_cmake_cache(self):
mkdirp(self.spec.prefix.share.cmake)
install(self.cache_path, self.spec.prefix.share.cmake)
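As a quick illustration of the API touched above: a client package only supplies its own cache entries; the base class concatenates them with the compiler/MPI/hardware entries, writes the initconfig file, and passes it to cmake through the "-C" flag added in std_cmake_args. The package and option names below are hypothetical, and the cache helpers are assumed to be available from the package preamble.

    from spack.package import *  # assumed standard package preamble

    class Libbar(CachedCMakePackage):
        """Hypothetical package, for illustration only."""

        def initconfig_package_entries(self):
            # Appended after the generated compiler/MPI/hardware entries.
            return [
                cmake_cache_string("LIBBAR_BACKEND", "serial"),        # hypothetical option
                cmake_cache_option("LIBBAR_ENABLE_TESTS", self.run_tests),
            ]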
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
index 265341344a..de54bcd0e3 100644
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -23,7 +23,7 @@ from spack.util.path import convert_to_posix_path
# Regex to extract the primary generator from the CMake generator
# string.
-_primary_generator_extractor = re.compile(r'(?:.* - )?(.*)')
+_primary_generator_extractor = re.compile(r"(?:.* - )?(.*)")
def _extract_primary_generator(generator):
@@ -74,16 +74,17 @@ class CMakePackage(PackageBase):
if the generator string does not follow the prescribed format, or if
the primary generator is not supported.
"""
+
#: Phases of a CMake package
- phases = ['cmake', 'build', 'install']
+ phases = ["cmake", "build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'CMakePackage'
+ build_system_class = "CMakePackage"
build_targets = [] # type: List[str]
- install_targets = ['install']
+ install_targets = ["install"]
- build_time_test_callbacks = ['check']
+ build_time_test_callbacks = ["check"]
#: The build system generator to use.
#:
@@ -96,28 +97,29 @@ class CMakePackage(PackageBase):
generator = "Unix Makefiles"
- if sys.platform == 'win32':
+ if sys.platform == "win32":
generator = "Ninja"
- depends_on('ninja')
+ depends_on("ninja")
# https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html
- variant('build_type', default='RelWithDebInfo',
- description='CMake build type',
- values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
+ variant(
+ "build_type",
+ default="RelWithDebInfo",
+ description="CMake build type",
+ values=("Debug", "Release", "RelWithDebInfo", "MinSizeRel"),
+ )
# https://cmake.org/cmake/help/latest/variable/CMAKE_INTERPROCEDURAL_OPTIMIZATION.html
- variant('ipo', default=False,
- description='CMake interprocedural optimization')
+ variant("ipo", default=False, description="CMake interprocedural optimization")
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
- conflicts('+ipo', when='^cmake@:3.8',
- msg='+ipo is not supported by CMake < 3.9')
+ conflicts("+ipo", when="^cmake@:3.8", msg="+ipo is not supported by CMake < 3.9")
- depends_on('cmake', type='build')
+ depends_on("cmake", type="build")
@property
def archive_files(self):
"""Files to archive for packages based on CMake"""
- return [os.path.join(self.build_directory, 'CMakeCache.txt')]
+ return [os.path.join(self.build_directory, "CMakeCache.txt")]
@property
def root_cmakelists_dir(self):
@@ -139,7 +141,7 @@ class CMakePackage(PackageBase):
"""
# standard CMake arguments
std_cmake_args = CMakePackage._std_args(self)
- std_cmake_args += getattr(self, 'cmake_flag_args', [])
+ std_cmake_args += getattr(self, "cmake_flag_args", [])
return std_cmake_args
@staticmethod
@@ -152,54 +154,56 @@ class CMakePackage(PackageBase):
generator = CMakePackage.generator
# Make sure a valid generator was chosen
- valid_primary_generators = ['Unix Makefiles', 'Ninja']
+ valid_primary_generators = ["Unix Makefiles", "Ninja"]
primary_generator = _extract_primary_generator(generator)
if primary_generator not in valid_primary_generators:
- msg = "Invalid CMake generator: '{0}'\n".format(generator)
+ msg = "Invalid CMake generator: '{0}'\n".format(generator)
msg += "CMakePackage currently supports the following "
- msg += "primary generators: '{0}'".\
- format("', '".join(valid_primary_generators))
+ msg += "primary generators: '{0}'".format("', '".join(valid_primary_generators))
raise InstallError(msg)
try:
- build_type = pkg.spec.variants['build_type'].value
+ build_type = pkg.spec.variants["build_type"].value
except KeyError:
- build_type = 'RelWithDebInfo'
+ build_type = "RelWithDebInfo"
try:
- ipo = pkg.spec.variants['ipo'].value
+ ipo = pkg.spec.variants["ipo"].value
except KeyError:
ipo = False
define = CMakePackage.define
args = [
- '-G', generator,
- define('CMAKE_INSTALL_PREFIX', convert_to_posix_path(pkg.prefix)),
- define('CMAKE_BUILD_TYPE', build_type),
- define('BUILD_TESTING', pkg.run_tests),
+ "-G",
+ generator,
+ define("CMAKE_INSTALL_PREFIX", convert_to_posix_path(pkg.prefix)),
+ define("CMAKE_BUILD_TYPE", build_type),
+ define("BUILD_TESTING", pkg.run_tests),
]
# CMAKE_INTERPROCEDURAL_OPTIMIZATION only exists for CMake >= 3.9
- if pkg.spec.satisfies('^cmake@3.9:'):
- args.append(define('CMAKE_INTERPROCEDURAL_OPTIMIZATION', ipo))
+ if pkg.spec.satisfies("^cmake@3.9:"):
+ args.append(define("CMAKE_INTERPROCEDURAL_OPTIMIZATION", ipo))
- if primary_generator == 'Unix Makefiles':
- args.append(define('CMAKE_VERBOSE_MAKEFILE', True))
+ if primary_generator == "Unix Makefiles":
+ args.append(define("CMAKE_VERBOSE_MAKEFILE", True))
if platform.mac_ver()[0]:
- args.extend([
- define('CMAKE_FIND_FRAMEWORK', "LAST"),
- define('CMAKE_FIND_APPBUNDLE', "LAST"),
- ])
+ args.extend(
+ [
+ define("CMAKE_FIND_FRAMEWORK", "LAST"),
+ define("CMAKE_FIND_APPBUNDLE", "LAST"),
+ ]
+ )
# Set up CMake rpath
- args.extend([
- define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', True),
- define('CMAKE_INSTALL_RPATH',
- spack.build_environment.get_rpaths(pkg)),
- define('CMAKE_PREFIX_PATH',
- spack.build_environment.get_cmake_prefix_path(pkg))
- ])
+ args.extend(
+ [
+ define("CMAKE_INSTALL_RPATH_USE_LINK_PATH", True),
+ define("CMAKE_INSTALL_RPATH", spack.build_environment.get_rpaths(pkg)),
+ define("CMAKE_PREFIX_PATH", spack.build_environment.get_cmake_prefix_path(pkg)),
+ ]
+ )
return args
@staticmethod
@@ -230,10 +234,10 @@ class CMakePackage(PackageBase):
# Create a list of pairs. Each pair includes a configuration
# option and whether or not that option is activated
if isinstance(value, bool):
- kind = 'BOOL'
+ kind = "BOOL"
value = "ON" if value else "OFF"
else:
- kind = 'STRING'
+ kind = "STRING"
if isinstance(value, Sequence) and not isinstance(value, six.string_types):
value = ";".join(str(v) for v in value)
else:
@@ -289,11 +293,10 @@ class CMakePackage(PackageBase):
variant = cmake_var.lower()
if variant not in self.variants:
- raise KeyError(
- '"{0}" is not a variant of "{1}"'.format(variant, self.name))
+ raise KeyError('"{0}" is not a variant of "{1}"'.format(variant, self.name))
if variant not in self.spec.variants:
- return ''
+ return ""
value = self.spec.variants[variant].value
if isinstance(value, (tuple, list)):
@@ -308,37 +311,34 @@ class CMakePackage(PackageBase):
so cppflags will be added to cflags, cxxflags, and fflags to mimic the
behavior in other tools."""
# Has to be dynamic attribute due to caching
- setattr(self, 'cmake_flag_args', [])
+ setattr(self, "cmake_flag_args", [])
- flag_string = '-DCMAKE_{0}_FLAGS={1}'
- langs = {'C': 'c', 'CXX': 'cxx', 'Fortran': 'f'}
+ flag_string = "-DCMAKE_{0}_FLAGS={1}"
+ langs = {"C": "c", "CXX": "cxx", "Fortran": "f"}
# Handle language compiler flags
for lang, pre in langs.items():
- flag = pre + 'flags'
+ flag = pre + "flags"
# cmake has no explicit cppflags support -> add it to all langs
- lang_flags = ' '.join(flags.get(flag, []) + flags.get('cppflags',
- []))
+ lang_flags = " ".join(flags.get(flag, []) + flags.get("cppflags", []))
if lang_flags:
- self.cmake_flag_args.append(flag_string.format(lang,
- lang_flags))
+ self.cmake_flag_args.append(flag_string.format(lang, lang_flags))
# Cmake has different linker arguments for different build types.
# We specify for each of them.
- if flags['ldflags']:
- ldflags = ' '.join(flags['ldflags'])
- ld_string = '-DCMAKE_{0}_LINKER_FLAGS={1}'
+ if flags["ldflags"]:
+ ldflags = " ".join(flags["ldflags"])
+ ld_string = "-DCMAKE_{0}_LINKER_FLAGS={1}"
# cmake has separate linker arguments for types of builds.
- for type in ['EXE', 'MODULE', 'SHARED', 'STATIC']:
+ for type in ["EXE", "MODULE", "SHARED", "STATIC"]:
self.cmake_flag_args.append(ld_string.format(type, ldflags))
# CMake has libs options separated by language. Apply ours to each.
- if flags['ldlibs']:
- libs_flags = ' '.join(flags['ldlibs'])
- libs_string = '-DCMAKE_{0}_STANDARD_LIBRARIES={1}'
+ if flags["ldlibs"]:
+ libs_flags = " ".join(flags["ldlibs"])
+ libs_string = "-DCMAKE_{0}_STANDARD_LIBRARIES={1}"
for lang in langs:
- self.cmake_flag_args.append(libs_string.format(lang,
- libs_flags))
+ self.cmake_flag_args.append(libs_string.format(lang, libs_flags))
@property
def build_dirname(self):
@@ -346,7 +346,7 @@ class CMakePackage(PackageBase):
:return: name of the subdirectory for building the package
"""
- return 'spack-build-%s' % self.spec.dag_hash(7)
+ return "spack-build-%s" % self.spec.dag_hash(7)
@property
def build_directory(self):
@@ -381,35 +381,33 @@ class CMakePackage(PackageBase):
def build(self, spec, prefix):
"""Make the build targets"""
with working_dir(self.build_directory):
- if self.generator == 'Unix Makefiles':
+ if self.generator == "Unix Makefiles":
inspect.getmodule(self).make(*self.build_targets)
- elif self.generator == 'Ninja':
+ elif self.generator == "Ninja":
self.build_targets.append("-v")
inspect.getmodule(self).ninja(*self.build_targets)
def install(self, spec, prefix):
"""Make the install targets"""
with working_dir(self.build_directory):
- if self.generator == 'Unix Makefiles':
+ if self.generator == "Unix Makefiles":
inspect.getmodule(self).make(*self.install_targets)
- elif self.generator == 'Ninja':
+ elif self.generator == "Ninja":
inspect.getmodule(self).ninja(*self.install_targets)
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
def check(self):
"""Searches the CMake-generated Makefile for the target ``test``
and runs it if found.
"""
with working_dir(self.build_directory):
- if self.generator == 'Unix Makefiles':
- self._if_make_target_execute('test',
- jobs_env='CTEST_PARALLEL_LEVEL')
- self._if_make_target_execute('check')
- elif self.generator == 'Ninja':
- self._if_ninja_target_execute('test',
- jobs_env='CTEST_PARALLEL_LEVEL')
- self._if_ninja_target_execute('check')
+ if self.generator == "Unix Makefiles":
+ self._if_make_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
+ self._if_make_target_execute("check")
+ elif self.generator == "Ninja":
+ self._if_ninja_target_execute("test", jobs_env="CTEST_PARALLEL_LEVEL")
+ self._if_ninja_target_execute("check")
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
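For context on the define() helper reformatted above, a minimal sketch of how a package builds cmake_args() with it; the package and option names are hypothetical, and define_from_variant is the companion helper in the same class.

    from spack.package import *  # assumed standard package preamble

    class Libbaz(CMakePackage):
        """Hypothetical package, for illustration only."""

        variant("shared", default=True, description="Build shared libraries")

        def cmake_args(self):
            return [
                # bool value -> -DBAZ_BUILD_TESTS:BOOL=ON/OFF, per the kind logic above
                self.define("BAZ_BUILD_TESTS", self.run_tests),
                # reads the 'shared' variant and emits -DBUILD_SHARED_LIBS:BOOL=...
                self.define_from_variant("BUILD_SHARED_LIBS", "shared"),
            ]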
diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py
index 81994f0106..ff4c525266 100644
--- a/lib/spack/spack/build_systems/cuda.py
+++ b/lib/spack/spack/build_systems/cuda.py
@@ -20,66 +20,85 @@ class CudaPackage(PackageBase):
# https://developer.nvidia.com/cuda-gpus
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
cuda_arch_values = (
- '10', '11', '12', '13',
- '20', '21',
- '30', '32', '35', '37',
- '50', '52', '53',
- '60', '61', '62',
- '70', '72', '75',
- '80', '86'
+ "10",
+ "11",
+ "12",
+ "13",
+ "20",
+ "21",
+ "30",
+ "32",
+ "35",
+ "37",
+ "50",
+ "52",
+ "53",
+ "60",
+ "61",
+ "62",
+ "70",
+ "72",
+ "75",
+ "80",
+ "86",
)
# FIXME: keep cuda and cuda_arch separate to make usage easier until
# Spack has depends_on(cuda, when='cuda_arch!=None') or alike
- variant('cuda', default=False,
- description='Build with CUDA')
-
- variant('cuda_arch',
- description='CUDA architecture',
- values=spack.variant.any_combination_of(*cuda_arch_values),
- sticky=True,
- when='+cuda')
+ variant("cuda", default=False, description="Build with CUDA")
+
+ variant(
+ "cuda_arch",
+ description="CUDA architecture",
+ values=spack.variant.any_combination_of(*cuda_arch_values),
+ sticky=True,
+ when="+cuda",
+ )
# https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
# https://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
@staticmethod
def cuda_flags(arch_list):
- return [('--generate-code arch=compute_{0},code=sm_{0} '
- '--generate-code arch=compute_{0},code=compute_{0}').format(s)
- for s in arch_list]
+ return [
+ (
+ "--generate-code arch=compute_{0},code=sm_{0} "
+ "--generate-code arch=compute_{0},code=compute_{0}"
+ ).format(s)
+ for s in arch_list
+ ]
- depends_on('cuda', when='+cuda')
+ depends_on("cuda", when="+cuda")
# CUDA version vs Architecture
# https://en.wikipedia.org/wiki/CUDA#GPUs_supported
# https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features
- depends_on('cuda@:6.0', when='cuda_arch=10')
- depends_on('cuda@:6.5', when='cuda_arch=11')
- depends_on('cuda@2.1:6.5', when='cuda_arch=12')
- depends_on('cuda@2.1:6.5', when='cuda_arch=13')
+ depends_on("cuda@:6.0", when="cuda_arch=10")
+ depends_on("cuda@:6.5", when="cuda_arch=11")
+ depends_on("cuda@2.1:6.5", when="cuda_arch=12")
+ depends_on("cuda@2.1:6.5", when="cuda_arch=13")
- depends_on('cuda@3.0:8.0', when='cuda_arch=20')
- depends_on('cuda@3.2:8.0', when='cuda_arch=21')
+ depends_on("cuda@3.0:8.0", when="cuda_arch=20")
+ depends_on("cuda@3.2:8.0", when="cuda_arch=21")
- depends_on('cuda@5.0:10.2', when='cuda_arch=30')
- depends_on('cuda@5.0:10.2', when='cuda_arch=32')
- depends_on('cuda@5.0:', when='cuda_arch=35')
- depends_on('cuda@6.5:', when='cuda_arch=37')
+ depends_on("cuda@5.0:10.2", when="cuda_arch=30")
+ depends_on("cuda@5.0:10.2", when="cuda_arch=32")
+ depends_on("cuda@5.0:", when="cuda_arch=35")
+ depends_on("cuda@6.5:", when="cuda_arch=37")
- depends_on('cuda@6.0:', when='cuda_arch=50')
- depends_on('cuda@6.5:', when='cuda_arch=52')
- depends_on('cuda@6.5:', when='cuda_arch=53')
+ depends_on("cuda@6.0:", when="cuda_arch=50")
+ depends_on("cuda@6.5:", when="cuda_arch=52")
+ depends_on("cuda@6.5:", when="cuda_arch=53")
- depends_on('cuda@8.0:', when='cuda_arch=60')
- depends_on('cuda@8.0:', when='cuda_arch=61')
- depends_on('cuda@8.0:', when='cuda_arch=62')
+ depends_on("cuda@8.0:", when="cuda_arch=60")
+ depends_on("cuda@8.0:", when="cuda_arch=61")
+ depends_on("cuda@8.0:", when="cuda_arch=62")
- depends_on('cuda@9.0:', when='cuda_arch=70')
- depends_on('cuda@9.0:', when='cuda_arch=72')
- depends_on('cuda@10.0:', when='cuda_arch=75')
+ depends_on("cuda@9.0:", when="cuda_arch=70")
+ depends_on("cuda@9.0:", when="cuda_arch=72")
+ depends_on("cuda@10.0:", when="cuda_arch=75")
- depends_on('cuda@11.0:', when='cuda_arch=80')
- depends_on('cuda@11.1:', when='cuda_arch=86')
+ depends_on("cuda@11.0:", when="cuda_arch=80")
+ depends_on("cuda@11.1:", when="cuda_arch=86")
# From the NVIDIA install guide we know of conflicts for particular
# platforms (linux, darwin), architectures (x86, powerpc) and compilers
@@ -90,15 +109,15 @@ class CudaPackage(PackageBase):
# Linux x86_64 compiler conflicts from here:
# https://gist.github.com/ax3l/9489132
- with when('^cuda~allow-unsupported-compilers'):
+ with when("^cuda~allow-unsupported-compilers"):
# GCC
# According to
# https://github.com/spack/spack/pull/25054#issuecomment-886531664
# these conflicts are valid independently from the architecture
# minimum supported versions
- conflicts('%gcc@:4', when='+cuda ^cuda@11.0:')
- conflicts('%gcc@:5', when='+cuda ^cuda@11.4:')
+ conflicts("%gcc@:4", when="+cuda ^cuda@11.0:")
+ conflicts("%gcc@:5", when="+cuda ^cuda@11.4:")
# maximum supported version
# NOTE:
@@ -106,39 +125,38 @@ class CudaPackage(PackageBase):
# it has been decided to use an upper bound for the latest version.
# This implies that the last one in the list has to be updated at
# each release of a new cuda minor version.
- conflicts('%gcc@10:', when='+cuda ^cuda@:11.0')
- conflicts('%gcc@11:', when='+cuda ^cuda@:11.4.0')
- conflicts('%gcc@12:', when='+cuda ^cuda@:11.7')
- conflicts('%clang@12:', when='+cuda ^cuda@:11.4.0')
- conflicts('%clang@13:', when='+cuda ^cuda@:11.5')
- conflicts('%clang@14:', when='+cuda ^cuda@:11.7')
+ conflicts("%gcc@10:", when="+cuda ^cuda@:11.0")
+ conflicts("%gcc@11:", when="+cuda ^cuda@:11.4.0")
+ conflicts("%gcc@12:", when="+cuda ^cuda@:11.7")
+ conflicts("%clang@12:", when="+cuda ^cuda@:11.4.0")
+ conflicts("%clang@13:", when="+cuda ^cuda@:11.5")
+ conflicts("%clang@14:", when="+cuda ^cuda@:11.7")
# https://gist.github.com/ax3l/9489132#gistcomment-3860114
- conflicts('%gcc@10', when='+cuda ^cuda@:11.4.0')
- conflicts('%gcc@5:', when='+cuda ^cuda@:7.5 target=x86_64:')
- conflicts('%gcc@6:', when='+cuda ^cuda@:8 target=x86_64:')
- conflicts('%gcc@7:', when='+cuda ^cuda@:9.1 target=x86_64:')
- conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=x86_64:')
- conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89 target=x86_64:')
- conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27 target=x86_64:')
- conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5 target=x86_64:')
- conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8 target=x86_64:')
- conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1 target=x86_64:')
- conflicts('%pgi@:16,19:', when='+cuda ^cuda@9.2.88:10 target=x86_64:')
- conflicts('%pgi@:17,20:', when='+cuda ^cuda@10.1.105:10.2.89 target=x86_64:')
- conflicts('%pgi@:17,21:', when='+cuda ^cuda@11.0.2:11.1.0 target=x86_64:')
- conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5 target=x86_64:')
- conflicts('%clang@:3.7,4:', when='+cuda ^cuda@8.0:9.0 target=x86_64:')
- conflicts('%clang@:3.7,4.1:', when='+cuda ^cuda@9.1 target=x86_64:')
- conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2 target=x86_64:')
- conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130 target=x86_64:')
- conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105 target=x86_64:')
- conflicts('%clang@:3.7,8.1:',
- when='+cuda ^cuda@10.1.105:10.1.243 target=x86_64:')
- conflicts('%clang@:3.2,9:', when='+cuda ^cuda@10.2.89 target=x86_64:')
- conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=x86_64:')
- conflicts('%clang@10:', when='+cuda ^cuda@:11.0.3 target=x86_64:')
- conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=x86_64:')
+ conflicts("%gcc@10", when="+cuda ^cuda@:11.4.0")
+ conflicts("%gcc@5:", when="+cuda ^cuda@:7.5 target=x86_64:")
+ conflicts("%gcc@6:", when="+cuda ^cuda@:8 target=x86_64:")
+ conflicts("%gcc@7:", when="+cuda ^cuda@:9.1 target=x86_64:")
+ conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=x86_64:")
+ conflicts("%gcc@9:", when="+cuda ^cuda@:10.2.89 target=x86_64:")
+ conflicts("%pgi@:14.8", when="+cuda ^cuda@:7.0.27 target=x86_64:")
+ conflicts("%pgi@:15.3,15.5:", when="+cuda ^cuda@7.5 target=x86_64:")
+ conflicts("%pgi@:16.2,16.0:16.3", when="+cuda ^cuda@8 target=x86_64:")
+ conflicts("%pgi@:15,18:", when="+cuda ^cuda@9.0:9.1 target=x86_64:")
+ conflicts("%pgi@:16,19:", when="+cuda ^cuda@9.2.88:10 target=x86_64:")
+ conflicts("%pgi@:17,20:", when="+cuda ^cuda@10.1.105:10.2.89 target=x86_64:")
+ conflicts("%pgi@:17,21:", when="+cuda ^cuda@11.0.2:11.1.0 target=x86_64:")
+ conflicts("%clang@:3.4", when="+cuda ^cuda@:7.5 target=x86_64:")
+ conflicts("%clang@:3.7,4:", when="+cuda ^cuda@8.0:9.0 target=x86_64:")
+ conflicts("%clang@:3.7,4.1:", when="+cuda ^cuda@9.1 target=x86_64:")
+ conflicts("%clang@:3.7,5.1:", when="+cuda ^cuda@9.2 target=x86_64:")
+ conflicts("%clang@:3.7,6.1:", when="+cuda ^cuda@10.0.130 target=x86_64:")
+ conflicts("%clang@:3.7,7.1:", when="+cuda ^cuda@10.1.105 target=x86_64:")
+ conflicts("%clang@:3.7,8.1:", when="+cuda ^cuda@10.1.105:10.1.243 target=x86_64:")
+ conflicts("%clang@:3.2,9:", when="+cuda ^cuda@10.2.89 target=x86_64:")
+ conflicts("%clang@:5", when="+cuda ^cuda@11.0.2: target=x86_64:")
+ conflicts("%clang@10:", when="+cuda ^cuda@:11.0.3 target=x86_64:")
+ conflicts("%clang@11:", when="+cuda ^cuda@:11.1.0 target=x86_64:")
# x86_64 vs. ppc64le differ according to NVidia docs
# Linux ppc64le compiler conflicts from Table from the docs below:
@@ -149,43 +167,43 @@ class CudaPackage(PackageBase):
# https://docs.nvidia.com/cuda/archive/8.0/cuda-installation-guide-linux/index.html
# information prior to CUDA 9 difficult to find
- conflicts('%gcc@6:', when='+cuda ^cuda@:9 target=ppc64le:')
- conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130 target=ppc64le:')
- conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243 target=ppc64le:')
+ conflicts("%gcc@6:", when="+cuda ^cuda@:9 target=ppc64le:")
+ conflicts("%gcc@8:", when="+cuda ^cuda@:10.0.130 target=ppc64le:")
+ conflicts("%gcc@9:", when="+cuda ^cuda@:10.1.243 target=ppc64le:")
# officially, CUDA 11.0.2 only supports the system GCC 8.3 on ppc64le
- conflicts('%pgi', when='+cuda ^cuda@:8 target=ppc64le:')
- conflicts('%pgi@:16', when='+cuda ^cuda@:9.1.185 target=ppc64le:')
- conflicts('%pgi@:17', when='+cuda ^cuda@:10 target=ppc64le:')
- conflicts('%clang@4:', when='+cuda ^cuda@:9.0.176 target=ppc64le:')
- conflicts('%clang@5:', when='+cuda ^cuda@:9.1 target=ppc64le:')
- conflicts('%clang@6:', when='+cuda ^cuda@:9.2 target=ppc64le:')
- conflicts('%clang@7:', when='+cuda ^cuda@10.0.130 target=ppc64le:')
- conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105 target=ppc64le:')
- conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89 target=ppc64le:')
- conflicts('%clang@:5', when='+cuda ^cuda@11.0.2: target=ppc64le:')
- conflicts('%clang@10:', when='+cuda ^cuda@:11.0.2 target=ppc64le:')
- conflicts('%clang@11:', when='+cuda ^cuda@:11.1.0 target=ppc64le:')
+ conflicts("%pgi", when="+cuda ^cuda@:8 target=ppc64le:")
+ conflicts("%pgi@:16", when="+cuda ^cuda@:9.1.185 target=ppc64le:")
+ conflicts("%pgi@:17", when="+cuda ^cuda@:10 target=ppc64le:")
+ conflicts("%clang@4:", when="+cuda ^cuda@:9.0.176 target=ppc64le:")
+ conflicts("%clang@5:", when="+cuda ^cuda@:9.1 target=ppc64le:")
+ conflicts("%clang@6:", when="+cuda ^cuda@:9.2 target=ppc64le:")
+ conflicts("%clang@7:", when="+cuda ^cuda@10.0.130 target=ppc64le:")
+ conflicts("%clang@7.1:", when="+cuda ^cuda@:10.1.105 target=ppc64le:")
+ conflicts("%clang@8.1:", when="+cuda ^cuda@:10.2.89 target=ppc64le:")
+ conflicts("%clang@:5", when="+cuda ^cuda@11.0.2: target=ppc64le:")
+ conflicts("%clang@10:", when="+cuda ^cuda@:11.0.2 target=ppc64le:")
+ conflicts("%clang@11:", when="+cuda ^cuda@:11.1.0 target=ppc64le:")
# Intel is mostly relevant for x86_64 Linux, even though it also
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
- conflicts('%intel@:11.0', when='+cuda ^cuda@:3.1')
- conflicts('%intel@:12.0', when='+cuda ^cuda@5.5:')
- conflicts('%intel@:13.0', when='+cuda ^cuda@6.0:')
- conflicts('%intel@:13.2', when='+cuda ^cuda@6.5:')
- conflicts('%intel@:14.9', when='+cuda ^cuda@7:')
+ conflicts("%intel@:11.0", when="+cuda ^cuda@:3.1")
+ conflicts("%intel@:12.0", when="+cuda ^cuda@5.5:")
+ conflicts("%intel@:13.0", when="+cuda ^cuda@6.0:")
+ conflicts("%intel@:13.2", when="+cuda ^cuda@6.5:")
+ conflicts("%intel@:14.9", when="+cuda ^cuda@7:")
# Intel 15.x is compatible with CUDA 7 thru current CUDA
- conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
- conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
- conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
- conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
- conflicts('%intel@19.1:', when='+cuda ^cuda@:10.1')
- conflicts('%intel@19.2:', when='+cuda ^cuda@:11.1.0')
+ conflicts("%intel@16.0:", when="+cuda ^cuda@:8.0.43")
+ conflicts("%intel@17.0:", when="+cuda ^cuda@:8.0.60")
+ conflicts("%intel@18.0:", when="+cuda ^cuda@:9.9")
+ conflicts("%intel@19.0:", when="+cuda ^cuda@:10.0")
+ conflicts("%intel@19.1:", when="+cuda ^cuda@:10.1")
+ conflicts("%intel@19.2:", when="+cuda ^cuda@:11.1.0")
# XL is mostly relevant for ppc64le Linux
- conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
- conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
- conflicts('%xl@:12,17:', when='+cuda ^cuda@:11.1.0')
+ conflicts("%xl@:12,14:", when="+cuda ^cuda@:9.1")
+ conflicts("%xl@:12,14:15,17:", when="+cuda ^cuda@9.2")
+ conflicts("%xl@:12,17:", when="+cuda ^cuda@:11.1.0")
# Darwin.
# TODO: add missing conflicts for %apple-clang cuda@:10
- conflicts('platform=darwin', when='+cuda ^cuda@11.0.2: ')
+ conflicts("platform=darwin", when="+cuda ^cuda@11.0.2: ")
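To make the mixin's intended use concrete, a sketch of a dependent package forwarding cuda_arch through cuda_flags(). The package name and the CMake wiring are one common, illustrative pattern and are not taken from this diff.

    from spack.package import *  # assumed standard package preamble

    class Hydro(CMakePackage, CudaPackage):
        """Hypothetical package, for illustration only."""

        def cmake_args(self):
            args = [self.define_from_variant("ENABLE_CUDA", "cuda")]
            if "+cuda" in self.spec:
                arch_list = self.spec.variants["cuda_arch"].value
                # e.g. cuda_arch=70,80 expands to two --generate-code pairs
                nvcc_flags = " ".join(self.cuda_flags(arch_list))
                args.append(self.define("CMAKE_CUDA_FLAGS", nvcc_flags))
            return args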
diff --git a/lib/spack/spack/build_systems/gnu.py b/lib/spack/spack/build_systems/gnu.py
index 2bca018872..336991c721 100644
--- a/lib/spack/spack/build_systems/gnu.py
+++ b/lib/spack/spack/build_systems/gnu.py
@@ -11,16 +11,17 @@ import spack.util.url
class GNUMirrorPackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for GNU packages."""
+
#: Path of the package in a GNU mirror
gnu_mirror_path = None # type: Optional[str]
#: List of GNU mirrors used by Spack
base_mirrors = [
- 'https://ftpmirror.gnu.org/',
- 'https://ftp.gnu.org/gnu/',
+ "https://ftpmirror.gnu.org/",
+ "https://ftp.gnu.org/gnu/",
# Fall back to http if https didn't work (for instance because
# Spack is bootstrapping curl)
- 'http://ftpmirror.gnu.org/'
+ "http://ftpmirror.gnu.org/",
]
@property
@@ -34,6 +35,5 @@ class GNUMirrorPackage(spack.package_base.PackageBase):
def _ensure_gnu_mirror_path_is_set_or_raise(self):
if self.gnu_mirror_path is None:
cls_name = type(self).__name__
- msg = ('{0} must define a `gnu_mirror_path` attribute'
- ' [none defined]')
+ msg = "{0} must define a `gnu_mirror_path` attribute" " [none defined]"
raise AttributeError(msg.format(cls_name))
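A minimal sketch of the mixin in use (hypothetical package and path): a client only sets gnu_mirror_path, which the base class joins with each base_mirrors entry to form the candidate fetch URLs; leaving it unset raises the AttributeError shown above.

    from spack.package import *  # assumed standard package preamble

    class Hello(AutotoolsPackage, GNUMirrorPackage):
        """Hypothetical package, for illustration only."""

        # Relative path on the GNU mirrors; combined with base_mirrors above.
        gnu_mirror_path = "hello/hello-2.12.tar.gz"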
diff --git a/lib/spack/spack/build_systems/intel.py b/lib/spack/spack/build_systems/intel.py
index a249afff90..133b5030de 100644
--- a/lib/spack/spack/build_systems/intel.py
+++ b/lib/spack/spack/build_systems/intel.py
@@ -38,25 +38,24 @@ from spack.version import Version, ver
def debug_print(msg, *args):
- '''Prints a message (usu. a variable) and the callers' names for a couple
+ """Prints a message (usu. a variable) and the callers' names for a couple
of stack frames.
- '''
+ """
# https://docs.python.org/2/library/inspect.html#the-interpreter-stack
stack = inspect.stack()
_func_name = 3
- tty.debug("%s.%s:\t%s" % (stack[2][_func_name], stack[1][_func_name], msg),
- *args)
+ tty.debug("%s.%s:\t%s" % (stack[2][_func_name], stack[1][_func_name], msg), *args)
def raise_lib_error(*args):
- '''Bails out with an error message. Shows args after the first as one per
+ """Bails out with an error message. Shows args after the first as one per
line, tab-indented, useful for long paths to line up and stand out.
- '''
+ """
raise InstallError("\n\t".join(str(i) for i in args))
def _expand_fields(s):
- '''[Experimental] Expand arch-related fields in a string, typically a
+ """[Experimental] Expand arch-related fields in a string, typically a
filename.
Supported fields and their typical expansions are::
@@ -66,24 +65,24 @@ def _expand_fields(s):
{libarch} intel64, empty on Mac
{bits} 64
- '''
+ """
# Python-native string formatting requires arg list counts to match the
# replacement field count; optional fields are far easier with regexes.
- _bits = '64'
- _arch = 'intel64' # TBD: ia32
+ _bits = "64"
+ _arch = "intel64" # TBD: ia32
- if 'linux' in sys.platform: # NB: linux2 vs. linux
- s = re.sub('{platform}', 'linux', s)
- s = re.sub('{libarch}', _arch, s)
- elif 'darwin' in sys.platform:
- s = re.sub('{platform}', 'mac', s)
- s = re.sub('{libarch}', '', s) # no arch dirs are used (as of 2018)
+ if "linux" in sys.platform: # NB: linux2 vs. linux
+ s = re.sub("{platform}", "linux", s)
+ s = re.sub("{libarch}", _arch, s)
+ elif "darwin" in sys.platform:
+ s = re.sub("{platform}", "mac", s)
+ s = re.sub("{libarch}", "", s) # no arch dirs are used (as of 2018)
# elif 'win' in sys.platform: # TBD
# s = re.sub('{platform}', 'windows', s)
- s = re.sub('{arch}', _arch, s)
- s = re.sub('{bits}', _bits, s)
+ s = re.sub("{arch}", _arch, s)
+ s = re.sub("{bits}", _bits, s)
return s
@@ -99,12 +98,13 @@ class IntelPackage(PackageBase):
only thing necessary will be to override setup_run_environment
to set the appropriate environment variables.
"""
+
#: Phases of an Intel package
- phases = ['configure', 'install']
+ phases = ["configure", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'IntelPackage'
+ build_system_class = "IntelPackage"
#: A dict that maps Spack version specs to release years, needed to infer
#: the installation directory layout for pre-2016 versions in the family of
@@ -117,54 +117,65 @@ class IntelPackage(PackageBase):
# that satisfies self.spec will be used.
version_years = {
# intel-daal is versioned 2016 and later, no divining is needed
- 'intel-ipp@9.0:9': 2016,
- 'intel-mkl@11.3.0:11.3': 2016,
- 'intel-mpi@5.1:5': 2016,
+ "intel-ipp@9.0:9": 2016,
+ "intel-mkl@11.3.0:11.3": 2016,
+ "intel-mpi@5.1:5": 2016,
}
# Below is the list of possible values for setting auto dispatch functions
# for the Intel compilers. Using these allows for the building of fat
# binaries that will detect the CPU SIMD capabilities at run time and
# activate the appropriate extensions.
- auto_dispatch_options = ('COMMON-AVX512', 'MIC-AVX512', 'CORE-AVX512',
- 'CORE-AVX2', 'CORE-AVX-I', 'AVX', 'SSE4.2',
- 'SSE4.1', 'SSSE3', 'SSE3', 'SSE2')
+ auto_dispatch_options = (
+ "COMMON-AVX512",
+ "MIC-AVX512",
+ "CORE-AVX512",
+ "CORE-AVX2",
+ "CORE-AVX-I",
+ "AVX",
+ "SSE4.2",
+ "SSE4.1",
+ "SSSE3",
+ "SSE3",
+ "SSE2",
+ )
@property
def license_required(self):
# The Intel libraries are provided without requiring a license as of
# version 2017.2. Trying to specify one anyway will fail. See:
# https://software.intel.com/en-us/articles/free-ipsxe-tools-and-libraries
- return self._has_compilers or self.version < ver('2017.2')
+ return self._has_compilers or self.version < ver("2017.2")
#: Comment symbol used in the license.lic file
- license_comment = '#'
+ license_comment = "#"
#: Environment variables that Intel searches for a license file
- license_vars = ['INTEL_LICENSE_FILE']
+ license_vars = ["INTEL_LICENSE_FILE"]
#: URL providing information on how to acquire a license key
- license_url = 'https://software.intel.com/en-us/articles/intel-license-manager-faq'
+ license_url = "https://software.intel.com/en-us/articles/intel-license-manager-faq"
#: Location where Intel searches for a license file
@property
def license_files(self):
- dirs = ['Licenses']
+ dirs = ["Licenses"]
if self._has_compilers:
- dirs.append(self.component_bin_dir('compiler'))
+ dirs.append(self.component_bin_dir("compiler"))
for variant, component_suite_dir in {
- '+advisor': 'advisor',
- '+inspector': 'inspector',
- '+itac': 'itac',
- '+vtune': 'vtune_profiler',
+ "+advisor": "advisor",
+ "+inspector": "inspector",
+ "+itac": "itac",
+ "+vtune": "vtune_profiler",
}.items():
if variant in self.spec:
- dirs.append(self.normalize_path(
- 'licenses', component_suite_dir, relative=True))
+ dirs.append(
+ self.normalize_path("licenses", component_suite_dir, relative=True)
+ )
- files = [os.path.join(d, 'license.lic') for d in dirs]
+ files = [os.path.join(d, "license.lic") for d in dirs]
return files
#: Components to install (list of name patterns from pset/mediaconfig.xml)
@@ -173,7 +184,7 @@ class IntelPackage(PackageBase):
def pset_components(self):
# Do not detail single-purpose client packages.
if not self._has_compilers:
- return ['ALL']
+ return ["ALL"]
# tty.warn('DEBUG: installing ALL components')
# return ['ALL']
@@ -183,34 +194,35 @@ class IntelPackage(PackageBase):
# Later releases have overlapping minor parts that differ by "edition".
# NB: The spack package 'intel' is a subset of
# 'intel-parallel-studio@composer' without the lib variants.
- c = ' intel-icc intel-ifort' \
- ' intel-ccomp intel-fcomp intel-comp-' \
- ' intel-compilerproc intel-compilerprof intel-compilerpro-' \
- ' intel-psxe intel-openmp'
+ c = (
+ " intel-icc intel-ifort"
+ " intel-ccomp intel-fcomp intel-comp-"
+ " intel-compilerproc intel-compilerprof intel-compilerpro-"
+ " intel-psxe intel-openmp"
+ )
additions_for = {
- 'cluster': ' intel-icsxe',
- 'professional': ' intel-ips-',
- 'composer': ' intel-compxe',
+ "cluster": " intel-icsxe",
+ "professional": " intel-ips-",
+ "composer": " intel-compxe",
}
if self._edition in additions_for:
c += additions_for[self._edition]
for variant, components_to_add in {
- '+daal': ' intel-daal', # Data Analytics Acceleration Lib
- '+gdb': ' intel-gdb', # Integrated Performance Primitives
- '+ipp': ' intel-ipp intel-crypto-ipp',
- '+mkl': ' intel-mkl', # Math Kernel Library
- '+mpi': ' intel-mpi intel-imb', # MPI runtime, SDK, benchm.
- '+tbb': ' intel-tbb', # Threading Building Blocks
- '+advisor': ' intel-advisor',
- '+clck': ' intel_clck', # Cluster Checker
- '+inspector': ' intel-inspector',
- '+itac': ' intel-itac intel-ta intel-tc'
- ' intel-trace-analyzer intel-trace-collector',
- # Trace Analyzer and Collector
- '+vtune': ' intel-vtune'
- # VTune, ..-profiler since 2020, ..-amplifier before
+ "+daal": " intel-daal", # Data Analytics Acceleration Lib
+ "+gdb": " intel-gdb", # Integrated Performance Primitives
+ "+ipp": " intel-ipp intel-crypto-ipp",
+ "+mkl": " intel-mkl", # Math Kernel Library
+ "+mpi": " intel-mpi intel-imb", # MPI runtime, SDK, benchm.
+ "+tbb": " intel-tbb", # Threading Building Blocks
+ "+advisor": " intel-advisor",
+ "+clck": " intel_clck", # Cluster Checker
+ "+inspector": " intel-inspector",
+ "+itac": " intel-itac intel-ta intel-tc" " intel-trace-analyzer intel-trace-collector",
+ # Trace Analyzer and Collector
+ "+vtune": " intel-vtune"
+ # VTune, ..-profiler since 2020, ..-amplifier before
}.items():
if variant in self.spec:
c += components_to_add
@@ -223,11 +235,11 @@ class IntelPackage(PackageBase):
# ---------------------------------------------------------------------
@property
def _filtered_components(self):
- '''Expands the list of desired component patterns to the exact names
+ """Expands the list of desired component patterns to the exact names
present in the given download.
- '''
+ """
c = self.pset_components
- if 'ALL' in c or 'DEFAULTS' in c: # No filter needed
+ if "ALL" in c or "DEFAULTS" in c: # No filter needed
return c
# mediaconfig.xml is known to contain duplicate components.
@@ -243,8 +255,8 @@ class IntelPackage(PackageBase):
#
# https://software.intel.com/en-us/articles/configuration-file-format
#
- xmltree = ElementTree.parse('pset/mediaconfig.xml')
- for entry in xmltree.getroot().findall('.//Abbr'): # XPath expression
+ xmltree = ElementTree.parse("pset/mediaconfig.xml")
+ for entry in xmltree.getroot().findall(".//Abbr"): # XPath expression
name_present = entry.text
for name_requested in requested:
if name_present.startswith(name_requested):
@@ -254,36 +266,36 @@ class IntelPackage(PackageBase):
@property
def intel64_int_suffix(self):
- '''Provide the suffix for Intel library names to match a client
+ """Provide the suffix for Intel library names to match a client
application's desired int size, conveyed by the active spec variant.
The possible suffixes and their meanings are:
``ilp64`` all of int, long, and pointer are 64 bit,
`` lp64`` only long and pointer are 64 bit; int will be 32bit.
- '''
- if '+ilp64' in self.spec:
- return 'ilp64'
+ """
+ if "+ilp64" in self.spec:
+ return "ilp64"
else:
- return 'lp64'
+ return "lp64"
@property
def _has_compilers(self):
- return self.name in ['intel', 'intel-parallel-studio']
+ return self.name in ["intel", "intel-parallel-studio"]
@property
def _edition(self):
- if self.name == 'intel-parallel-studio':
- return self.version[0] # clearer than .up_to(1), I think.
- elif self.name == 'intel':
- return 'composer'
+ if self.name == "intel-parallel-studio":
+ return self.version[0] # clearer than .up_to(1), I think.
+ elif self.name == "intel":
+ return "composer"
else:
- return ''
+ return ""
@property
def version_yearlike(self):
- '''Return the version in a unified style, suitable for Version class
+ """Return the version in a unified style, suitable for Version class
conditionals.
- '''
+ """
# Input data for this routine: self.version
# Returns: YYYY.Nupdate[.Buildseq]
#
@@ -309,18 +321,18 @@ class IntelPackage(PackageBase):
# (*) YYYY is taken from @property "version_years" (a dict of specs)
#
try:
- if self.name == 'intel':
+ if self.name == "intel":
# Has a "Minor" version element, but it is always set as 0. To
# be useful for comparisons, drop it and get YYYY.Nupdate.
- v_tail = self.version[2:] # coerced just fine via __getitem__
+ v_tail = self.version[2:] # coerced just fine via __getitem__
else:
v_tail = self.version[1:]
except IndexError:
# Hmm - this happens on "spack install intel-mkl@11".
# I thought concretization picks an actual version??
- return self.version # give up
+ return self.version # give up
- if self.name == 'intel-parallel-studio':
+ if self.name == "intel-parallel-studio":
return v_tail
v_year = self.version[0]
@@ -332,7 +344,7 @@ class IntelPackage(PackageBase):
v_year = year
break
- return ver('%s.%s' % (v_year, v_tail))
+ return ver("%s.%s" % (v_year, v_tail))
# ---------------------------------------------------------------------
# Directory handling common to all Intel components
@@ -345,8 +357,8 @@ class IntelPackage(PackageBase):
# Not using class IntelPackage:
# intel-gpu-tools/ intel-mkl-dnn/ intel-tbb/
#
- def normalize_suite_dir(self, suite_dir_name, version_globs=['*.*.*']):
- '''Returns the version-specific and absolute path to the directory of
+ def normalize_suite_dir(self, suite_dir_name, version_globs=["*.*.*"]):
+ """Returns the version-specific and absolute path to the directory of
an Intel product or a suite of product components.
Parameters:
@@ -373,7 +385,7 @@ class IntelPackage(PackageBase):
first) expected to qualify suite_dir_name to its fully
version-specific install directory (as opposed to a
compatibility directory or symlink).
- '''
+ """
# See ./README-intel.rst for background and analysis of dir layouts.
d = self.prefix
@@ -381,7 +393,7 @@ class IntelPackage(PackageBase):
# Distinguish between product installations that were done external to
# Spack (integrated via packages.yaml) and Spack-internal ones. The
# resulting prefixes may differ in directory depth and specificity.
- unversioned_dirname = ''
+ unversioned_dirname = ""
if suite_dir_name and suite_dir_name in d:
# If e.g. MKL was installed outside of Spack, it is likely just one
# product or product component among possibly many other Intel
@@ -391,8 +403,7 @@ class IntelPackage(PackageBase):
# version-specific directory. This is what we want and need, and
# nothing more specific than that, i.e., if needed, convert, e.g.:
# .../compilers_and_libraries*/* -> .../compilers_and_libraries*
- d = re.sub('(%s%s.*?)%s.*' %
- (os.sep, re.escape(suite_dir_name), os.sep), r'\1', d)
+ d = re.sub("(%s%s.*?)%s.*" % (os.sep, re.escape(suite_dir_name), os.sep), r"\1", d)
# The Intel installer scripts try hard to place compatibility links
# named like this in the install dir to convey upgrade benefits to
@@ -448,15 +459,14 @@ class IntelPackage(PackageBase):
if unversioned_dirname:
for g in version_globs:
try_glob = unversioned_dirname + g
- debug_print('trying %s' % try_glob)
+ debug_print("trying %s" % try_glob)
matching_dirs = sorted(glob.glob(try_glob))
# NB: Python glob() returns results in arbitrary order - ugh!
# NB2: sorted() is a shortcut that is NOT number-aware.
if matching_dirs:
- debug_print('found %d:' % len(matching_dirs),
- matching_dirs)
+ debug_print("found %d:" % len(matching_dirs), matching_dirs)
# Take the highest and thus presumably newest match, which
# better be the sole one anyway.
d = matching_dirs[-1]
@@ -469,9 +479,8 @@ class IntelPackage(PackageBase):
debug_print(d)
return Prefix(d)
- def normalize_path(self, component_path, component_suite_dir=None,
- relative=False):
- '''Returns the absolute or relative path to a component or file under a
+ def normalize_path(self, component_path, component_suite_dir=None, relative=False):
+ """Returns the absolute or relative path to a component or file under a
component suite directory.
Intel's product names, scope, and directory layout changed over the
@@ -498,7 +507,7 @@ class IntelPackage(PackageBase):
relative (bool): When True, return path relative to self.prefix,
otherwise, return an absolute path (the default).
- '''
+ """
# Design note: Choosing the default for `component_suite_dir` was a bit
# tricky since there better be a sensible means to specify direct
# parentage under self.prefix (even though you normally shouldn't need
@@ -518,60 +527,59 @@ class IntelPackage(PackageBase):
# are not natively versioned by year.
cs = component_suite_dir
- if cs is None and component_path.startswith('ism'):
- cs = 'parallel_studio_xe'
+ if cs is None and component_path.startswith("ism"):
+ cs = "parallel_studio_xe"
v = self.version_yearlike
# Glob variants to complete component_suite_dir.
# Helper var for older MPI versions - those are reparented, with each
# version in their own version-named dir.
- standalone_glob = '[1-9]*.*.*'
+ standalone_glob = "[1-9]*.*.*"
# Most other components; try most specific glob first.
# flake8 is far too opinionated about lists - ugh.
normalize_kwargs = {
- 'version_globs': [
- '_%s' % self.version,
- '_%s.*' % v.up_to(2), # should be: YYYY.Nupdate
- '_*.*.*', # last resort
+ "version_globs": [
+ "_%s" % self.version,
+ "_%s.*" % v.up_to(2), # should be: YYYY.Nupdate
+ "_*.*.*", # last resort
]
}
for rename_rule in [
# cs given as arg, in years, dir actually used, [version_globs]
- [None, ':2015', 'composer_xe'],
- [None, '2016:', 'compilers_and_libraries'],
- ['advisor', ':2016', 'advisor_xe'],
- ['inspector', ':2016', 'inspector_xe'],
- ['vtune_profiler', ':2017', 'vtune_amplifier_xe'],
- ['vtune', ':2017', 'vtune_amplifier_xe'], # alt.
- ['vtune_profiler', ':2019', 'vtune_amplifier'],
- ['itac', ':', 'itac', [os.sep + standalone_glob]],
+ [None, ":2015", "composer_xe"],
+ [None, "2016:", "compilers_and_libraries"],
+ ["advisor", ":2016", "advisor_xe"],
+ ["inspector", ":2016", "inspector_xe"],
+ ["vtune_profiler", ":2017", "vtune_amplifier_xe"],
+ ["vtune", ":2017", "vtune_amplifier_xe"], # alt.
+ ["vtune_profiler", ":2019", "vtune_amplifier"],
+ ["itac", ":", "itac", [os.sep + standalone_glob]],
]:
if cs == rename_rule[0] and v.satisfies(ver(rename_rule[1])):
cs = rename_rule[2]
if len(rename_rule) > 3:
- normalize_kwargs = {'version_globs': rename_rule[3]}
+ normalize_kwargs = {"version_globs": rename_rule[3]}
break
d = self.normalize_suite_dir(cs, **normalize_kwargs)
# Help find components not located directly under d.
# NB: ancestor() not well suited if version_globs may contain os.sep .
- parent_dir = re.sub(os.sep + re.escape(cs) + '.*', '', d)
+ parent_dir = re.sub(os.sep + re.escape(cs) + ".*", "", d)
reparent_as = {}
- if cs == 'compilers_and_libraries': # must qualify further
- d = os.path.join(d, _expand_fields('{platform}'))
- elif cs == 'composer_xe':
- reparent_as = {'mpi': 'impi'}
+ if cs == "compilers_and_libraries": # must qualify further
+ d = os.path.join(d, _expand_fields("{platform}"))
+ elif cs == "composer_xe":
+ reparent_as = {"mpi": "impi"}
# ignore 'imb' (MPI Benchmarks)
for nominal_p, actual_p in reparent_as.items():
if component_path.startswith(nominal_p):
- dirs = glob.glob(
- os.path.join(parent_dir, actual_p, standalone_glob))
- debug_print('reparent dirs: %s' % dirs)
+ dirs = glob.glob(os.path.join(parent_dir, actual_p, standalone_glob))
+ debug_print("reparent dirs: %s" % dirs)
# Brazenly assume last match is the most recent version;
# convert back to relative of parent_dir, and re-assemble.
rel_dir = dirs[-1].split(parent_dir + os.sep, 1)[-1]
@@ -589,31 +597,31 @@ class IntelPackage(PackageBase):
def component_bin_dir(self, component, **kwargs):
d = self.normalize_path(component, **kwargs)
- if component == 'compiler': # bin dir is always under PARENT
- d = os.path.join(ancestor(d), 'bin', _expand_fields('{libarch}'))
- d = d.rstrip(os.sep) # cosmetics, when {libarch} is empty
+ if component == "compiler": # bin dir is always under PARENT
+ d = os.path.join(ancestor(d), "bin", _expand_fields("{libarch}"))
+ d = d.rstrip(os.sep) # cosmetics, when {libarch} is empty
# NB: Works fine even with relative=True, e.g.:
# composer_xe/compiler -> composer_xe/bin/intel64
- elif component == 'mpi':
- d = os.path.join(d, _expand_fields('{libarch}'), 'bin')
+ elif component == "mpi":
+ d = os.path.join(d, _expand_fields("{libarch}"), "bin")
else:
- d = os.path.join(d, 'bin')
+ d = os.path.join(d, "bin")
debug_print(d)
return d
def component_lib_dir(self, component, **kwargs):
- '''Provide directory suitable for find_libraries() and
+ """Provide directory suitable for find_libraries() and
SPACK_COMPILER_EXTRA_RPATHS.
- '''
+ """
d = self.normalize_path(component, **kwargs)
- if component == 'mpi':
- d = os.path.join(d, _expand_fields('{libarch}'), 'lib')
+ if component == "mpi":
+ d = os.path.join(d, _expand_fields("{libarch}"), "lib")
else:
- d = os.path.join(d, 'lib', _expand_fields('{libarch}'))
- d = d.rstrip(os.sep) # cosmetics, when {libarch} is empty
+ d = os.path.join(d, "lib", _expand_fields("{libarch}"))
+ d = d.rstrip(os.sep) # cosmetics, when {libarch} is empty
- if component == 'tbb': # must qualify further for abi
+ if component == "tbb": # must qualify further for abi
d = os.path.join(d, self._tbb_abi)
debug_print(d)
@@ -622,42 +630,42 @@ class IntelPackage(PackageBase):
def component_include_dir(self, component, **kwargs):
d = self.normalize_path(component, **kwargs)
- if component == 'mpi':
- d = os.path.join(d, _expand_fields('{libarch}'), 'include')
+ if component == "mpi":
+ d = os.path.join(d, _expand_fields("{libarch}"), "include")
else:
- d = os.path.join(d, 'include')
+ d = os.path.join(d, "include")
debug_print(d)
return d
@property
def file_to_source(self):
- '''Full path of file to source for initializing an Intel package.
+ """Full path of file to source for initializing an Intel package.
A client package could override as follows:
` @property`
` def file_to_source(self):`
` return self.normalize_path("apsvars.sh", "vtune_amplifier")`
- '''
+ """
vars_file_info_for = {
# key (usu. spack package name) -> [rel_path, component_suite_dir]
# Extension note: handle additions by Spack name or ad-hoc keys.
- '@early_compiler': ['bin/compilervars', None],
- 'intel-parallel-studio': ['bin/psxevars', 'parallel_studio_xe'],
- 'intel': ['bin/compilervars', None],
- 'intel-daal': ['daal/bin/daalvars', None],
- 'intel-ipp': ['ipp/bin/ippvars', None],
- 'intel-mkl': ['mkl/bin/mklvars', None],
- 'intel-mpi': ['mpi/{libarch}/bin/mpivars', None],
+ "@early_compiler": ["bin/compilervars", None],
+ "intel-parallel-studio": ["bin/psxevars", "parallel_studio_xe"],
+ "intel": ["bin/compilervars", None],
+ "intel-daal": ["daal/bin/daalvars", None],
+ "intel-ipp": ["ipp/bin/ippvars", None],
+ "intel-mkl": ["mkl/bin/mklvars", None],
+ "intel-mpi": ["mpi/{libarch}/bin/mpivars", None],
}
key = self.name
- if self.version_yearlike.satisfies(ver(':2015')):
+ if self.version_yearlike.satisfies(ver(":2015")):
# Same file as 'intel' but 'None' for component_suite_dir will
# resolve differently. Listed as a separate entry to serve as
# example and to avoid pitfalls upon possible refactoring.
- key = '@early_compiler'
+ key = "@early_compiler"
f, component_suite_dir = vars_file_info_for[key]
- f = _expand_fields(f) + '.sh'
+ f = _expand_fields(f) + ".sh"
# TODO?? win32 would have to handle os.sep, '.bat' (unless POSIX??)
f = self.normalize_path(f, component_suite_dir)
@@ -668,50 +676,53 @@ class IntelPackage(PackageBase):
# ---------------------------------------------------------------------
@property
def openmp_libs(self):
- '''Supply LibraryList for linking OpenMP'''
+ """Supply LibraryList for linking OpenMP"""
- if '%intel' in self.spec:
+ if "%intel" in self.spec:
# NB: Hunting down explicit library files may be the Spack way of
# doing things, but be aware that "{icc|ifort} --help openmp"
# steers us towards options instead: -qopenmp-link={dynamic,static}
- omp_libnames = ['libiomp5']
+ omp_libnames = ["libiomp5"]
omp_libs = find_libraries(
omp_libnames,
- root=self.component_lib_dir('compiler'),
- shared=('+shared' in self.spec))
+ root=self.component_lib_dir("compiler"),
+ shared=("+shared" in self.spec),
+ )
# Note about search root here: For MKL, the directory
# "$MKLROOT/../compiler" will be present even for an MKL-only
# product installation (as opposed to one being ghosted via
# packages.yaml), specifically to provide the 'iomp5' libs.
- elif '%gcc' in self.spec:
+ elif "%gcc" in self.spec:
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
- '--print-file-name', 'libgomp.%s' % dso_suffix, output=str)
+ "--print-file-name", "libgomp.%s" % dso_suffix, output=str
+ )
omp_libs = LibraryList(omp_lib_path.strip())
- elif '%clang' in self.spec:
+ elif "%clang" in self.spec:
with self.compiler.compiler_environment():
omp_lib_path = Executable(self.compiler.cc)(
- '--print-file-name', 'libomp.%s' % dso_suffix, output=str)
+ "--print-file-name", "libomp.%s" % dso_suffix, output=str
+ )
omp_libs = LibraryList(omp_lib_path.strip())
if len(omp_libs) < 1:
- raise_lib_error('Cannot locate OpenMP libraries:', omp_libnames)
+ raise_lib_error("Cannot locate OpenMP libraries:", omp_libnames)
debug_print(omp_libs)
return omp_libs
@property
def _gcc_executable(self):
- '''Return GCC executable'''
+ """Return GCC executable"""
# Match the available gcc, as it's done in tbbvars.sh.
- gcc_name = 'gcc'
+ gcc_name = "gcc"
# but first check if -gcc-name is specified in cflags
- for flag in self.spec.compiler_flags['cflags']:
- if flag.startswith('-gcc-name='):
- gcc_name = flag.split('-gcc-name=')[1]
+ for flag in self.spec.compiler_flags["cflags"]:
+ if flag.startswith("-gcc-name="):
+ gcc_name = flag.split("-gcc-name=")[1]
break
debug_print(gcc_name)
return Executable(gcc_name)
@@ -720,24 +731,21 @@ class IntelPackage(PackageBase):
def tbb_headers(self):
# Note: TBB is included as
# #include <tbb/task_scheduler_init.h>
- return HeaderList([
- self.component_include_dir('tbb') + '/dummy.h'])
+ return HeaderList([self.component_include_dir("tbb") + "/dummy.h"])
@property
def tbb_libs(self):
- '''Supply LibraryList for linking TBB'''
+ """Supply LibraryList for linking TBB"""
# TODO: When is 'libtbbmalloc' needed?
- tbb_lib = find_libraries(
- ['libtbb'], root=self.component_lib_dir('tbb'))
+ tbb_lib = find_libraries(["libtbb"], root=self.component_lib_dir("tbb"))
# NB: Like icc with -qopenmp, so does icpc steer us towards using an
# option: "icpc -tbb"
# TODO: clang(?)
- gcc = self._gcc_executable # must be gcc, not self.compiler.cc
+ gcc = self._gcc_executable # must be gcc, not self.compiler.cc
with self.compiler.compiler_environment():
- cxx_lib_path = gcc(
- '--print-file-name', 'libstdc++.%s' % dso_suffix, output=str)
+ cxx_lib_path = gcc("--print-file-name", "libstdc++.%s" % dso_suffix, output=str)
libs = tbb_lib + LibraryList(cxx_lib_path.rstrip())
debug_print(libs)
@@ -745,23 +753,26 @@ class IntelPackage(PackageBase):
@property
def _tbb_abi(self):
- '''Select the ABI needed for linking TBB'''
+ """Select the ABI needed for linking TBB"""
gcc = self._gcc_executable
with self.compiler.compiler_environment():
- matches = re.search(r'(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*',
- gcc('--version', output=str), re.I | re.M)
- abi = ''
- if sys.platform == 'darwin':
+ matches = re.search(
+ r"(gcc|LLVM).* ([0-9]+\.[0-9]+\.[0-9]+).*",
+ gcc("--version", output=str),
+ re.I | re.M,
+ )
+ abi = ""
+ if sys.platform == "darwin":
pass
elif matches:
# TODO: Confirm that this covers clang (needed on Linux only)
gcc_version = Version(matches.groups()[1])
- if gcc_version >= ver('4.7'):
- abi = 'gcc4.7'
- elif gcc_version >= ver('4.4'):
- abi = 'gcc4.4'
+ if gcc_version >= ver("4.7"):
+ abi = "gcc4.7"
+ elif gcc_version >= ver("4.4"):
+ abi = "gcc4.4"
else:
- abi = 'gcc4.1' # unlikely, one hopes.
+ abi = "gcc4.1" # unlikely, one hopes.
# Alrighty then ...
debug_print(abi)
@@ -776,38 +787,41 @@ class IntelPackage(PackageBase):
# For reference, see The Intel Math Kernel Library Link Line Advisor:
# https://software.intel.com/en-us/articles/intel-mkl-link-line-advisor/
- mkl_integer = 'libmkl_intel_' + self.intel64_int_suffix
+ mkl_integer = "libmkl_intel_" + self.intel64_int_suffix
- if self.spec.satisfies('threads=openmp'):
- if '%intel' in self.spec:
- mkl_threading = 'libmkl_intel_thread'
- elif '%gcc' in self.spec or '%clang' in self.spec:
- mkl_threading = 'libmkl_gnu_thread'
+ if self.spec.satisfies("threads=openmp"):
+ if "%intel" in self.spec:
+ mkl_threading = "libmkl_intel_thread"
+ elif "%gcc" in self.spec or "%clang" in self.spec:
+ mkl_threading = "libmkl_gnu_thread"
threading_engine_libs = self.openmp_libs
- elif self.spec.satisfies('threads=tbb'):
- mkl_threading = 'libmkl_tbb_thread'
+ elif self.spec.satisfies("threads=tbb"):
+ mkl_threading = "libmkl_tbb_thread"
threading_engine_libs = self.tbb_libs
- elif self.spec.satisfies('threads=none'):
- mkl_threading = 'libmkl_sequential'
+ elif self.spec.satisfies("threads=none"):
+ mkl_threading = "libmkl_sequential"
threading_engine_libs = LibraryList([])
else:
- raise_lib_error('Cannot determine MKL threading libraries.')
+ raise_lib_error("Cannot determine MKL threading libraries.")
- mkl_libnames = [mkl_integer, mkl_threading, 'libmkl_core']
+ mkl_libnames = [mkl_integer, mkl_threading, "libmkl_core"]
mkl_libs = find_libraries(
- mkl_libnames,
- root=self.component_lib_dir('mkl'),
- shared=('+shared' in self.spec))
+ mkl_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
+ )
debug_print(mkl_libs)
if len(mkl_libs) < 3:
- raise_lib_error('Cannot locate core MKL libraries:', mkl_libnames,
- 'in:', self.component_lib_dir('mkl'))
+ raise_lib_error(
+ "Cannot locate core MKL libraries:",
+ mkl_libnames,
+ "in:",
+ self.component_lib_dir("mkl"),
+ )
# The Intel MKL link line advisor recommends these system libraries
system_libs = find_system_libraries(
- 'libpthread libm libdl'.split(),
- shared=('+shared' in self.spec))
+ "libpthread libm libdl".split(), shared=("+shared" in self.spec)
+ )
debug_print(system_libs)
return mkl_libs + threading_engine_libs + system_libs
@@ -823,40 +837,40 @@ class IntelPackage(PackageBase):
# we must supply a personality matching the MPI implementation that
# is active for the root package that asked for ScaLapack.
spec_root = self.spec.root
- if sys.platform == 'darwin' and '^mpich' in spec_root:
+ if sys.platform == "darwin" and "^mpich" in spec_root:
# The only supported choice for MKL 2018 on Mac.
- blacs_lib = 'libmkl_blacs_mpich'
- elif '^openmpi' in spec_root:
- blacs_lib = 'libmkl_blacs_openmpi'
- elif '^mpich@1' in spec_root:
+ blacs_lib = "libmkl_blacs_mpich"
+ elif "^openmpi" in spec_root:
+ blacs_lib = "libmkl_blacs_openmpi"
+ elif "^mpich@1" in spec_root:
# Was supported only up to 2015.
- blacs_lib = 'libmkl_blacs'
- elif ('^mpich@2:' in spec_root or
- '^cray-mpich' in spec_root or
- '^mvapich2' in spec_root or
- '^intel-mpi' in spec_root or
- '^intel-oneapi-mpi' in spec_root or
- '^intel-parallel-studio' in spec_root):
- blacs_lib = 'libmkl_blacs_intelmpi'
- elif '^mpt' in spec_root:
- blacs_lib = 'libmkl_blacs_sgimpt'
+ blacs_lib = "libmkl_blacs"
+ elif (
+ "^mpich@2:" in spec_root
+ or "^cray-mpich" in spec_root
+ or "^mvapich2" in spec_root
+ or "^intel-mpi" in spec_root
+ or "^intel-oneapi-mpi" in spec_root
+ or "^intel-parallel-studio" in spec_root
+ ):
+ blacs_lib = "libmkl_blacs_intelmpi"
+ elif "^mpt" in spec_root:
+ blacs_lib = "libmkl_blacs_sgimpt"
else:
- raise_lib_error('Cannot find a BLACS library for the given MPI.')
+ raise_lib_error("Cannot find a BLACS library for the given MPI.")
- int_suff = '_' + self.intel64_int_suffix
+ int_suff = "_" + self.intel64_int_suffix
scalapack_libnames = [
- 'libmkl_scalapack' + int_suff,
+ "libmkl_scalapack" + int_suff,
blacs_lib + int_suff,
]
sca_libs = find_libraries(
- scalapack_libnames,
- root=self.component_lib_dir('mkl'),
- shared=('+shared' in self.spec))
+ scalapack_libnames, root=self.component_lib_dir("mkl"), shared=("+shared" in self.spec)
+ )
debug_print(sca_libs)
if len(sca_libs) < 2:
- raise_lib_error(
- 'Cannot locate ScaLapack/BLACS libraries:', scalapack_libnames)
+ raise_lib_error("Cannot locate ScaLapack/BLACS libraries:", scalapack_libnames)
# NB: ScaLapack is installed as "cluster" components within MKL or
# MKL-encompassing products. But those were *optional* for the ca.
# 2015/2016 product releases, which was easy to overlook, and I have
@@ -871,8 +885,7 @@ class IntelPackage(PackageBase):
# ---------------------------------------------------------------------
@property
def mpi_compiler_wrappers(self):
- '''Return paths to compiler wrappers as a dict of env-like names
- '''
+ """Return paths to compiler wrappers as a dict of env-like names"""
# Intel comes with 2 different flavors of MPI wrappers:
#
# * mpiicc, mpiicpc, and mpiifort are hardcoded to wrap around
@@ -885,30 +898,29 @@ class IntelPackage(PackageBase):
# and friends are set to point to the Intel compilers, but in
# practice, mpicc fails to compile some applications while
# mpiicc works.
- bindir = self.component_bin_dir('mpi')
- if self.compiler.name == 'intel':
+ bindir = self.component_bin_dir("mpi")
+ if self.compiler.name == "intel":
wrapper_vars = {
# eschew Prefix objects -- emphasize the command strings.
- 'MPICC': os.path.join(bindir, 'mpiicc'),
- 'MPICXX': os.path.join(bindir, 'mpiicpc'),
- 'MPIF77': os.path.join(bindir, 'mpiifort'),
- 'MPIF90': os.path.join(bindir, 'mpiifort'),
- 'MPIFC': os.path.join(bindir, 'mpiifort'),
+ "MPICC": os.path.join(bindir, "mpiicc"),
+ "MPICXX": os.path.join(bindir, "mpiicpc"),
+ "MPIF77": os.path.join(bindir, "mpiifort"),
+ "MPIF90": os.path.join(bindir, "mpiifort"),
+ "MPIFC": os.path.join(bindir, "mpiifort"),
}
else:
wrapper_vars = {
- 'MPICC': os.path.join(bindir, 'mpicc'),
- 'MPICXX': os.path.join(bindir, 'mpicxx'),
- 'MPIF77': os.path.join(bindir, 'mpif77'),
- 'MPIF90': os.path.join(bindir, 'mpif90'),
- 'MPIFC': os.path.join(bindir, 'mpif90'),
+ "MPICC": os.path.join(bindir, "mpicc"),
+ "MPICXX": os.path.join(bindir, "mpicxx"),
+ "MPIF77": os.path.join(bindir, "mpif77"),
+ "MPIF90": os.path.join(bindir, "mpif90"),
+ "MPIFC": os.path.join(bindir, "mpif90"),
}
# debug_print("wrapper_vars =", wrapper_vars)
return wrapper_vars
- def mpi_setup_dependent_build_environment(
- self, env, dependent_spec, compilers_of_client={}):
- '''Unified back-end for setup_dependent_build_environment() of
+ def mpi_setup_dependent_build_environment(self, env, dependent_spec, compilers_of_client={}):
+ """Unified back-end for setup_dependent_build_environment() of
Intel packages that provide 'mpi'.
Parameters:
@@ -918,16 +930,16 @@ class IntelPackage(PackageBase):
compilers_of_client (dict): Conveys spack_cc, spack_cxx, etc.,
from the scope of dependent packages; constructed in caller.
- '''
+ """
# See also: setup_dependent_package()
wrapper_vars = {
- 'I_MPI_CC': compilers_of_client['CC'],
- 'I_MPI_CXX': compilers_of_client['CXX'],
- 'I_MPI_F77': compilers_of_client['F77'],
- 'I_MPI_F90': compilers_of_client['F90'],
- 'I_MPI_FC': compilers_of_client['FC'],
+ "I_MPI_CC": compilers_of_client["CC"],
+ "I_MPI_CXX": compilers_of_client["CXX"],
+ "I_MPI_F77": compilers_of_client["F77"],
+ "I_MPI_F90": compilers_of_client["F90"],
+ "I_MPI_FC": compilers_of_client["FC"],
# NB: Normally set by the modulefile, but that is not active here:
- 'I_MPI_ROOT': self.normalize_path('mpi'),
+ "I_MPI_ROOT": self.normalize_path("mpi"),
}
# CAUTION - SIMILAR code in:
@@ -936,27 +948,31 @@ class IntelPackage(PackageBase):
# var/spack/repos/builtin/packages/mvapich2/package.py
#
# On Cray, the regular compiler wrappers *are* the MPI wrappers.
- if 'platform=cray' in self.spec:
+ if "platform=cray" in self.spec:
# TODO: Confirm
- wrapper_vars.update({
- 'MPICC': compilers_of_client['CC'],
- 'MPICXX': compilers_of_client['CXX'],
- 'MPIF77': compilers_of_client['F77'],
- 'MPIF90': compilers_of_client['F90'],
- })
+ wrapper_vars.update(
+ {
+ "MPICC": compilers_of_client["CC"],
+ "MPICXX": compilers_of_client["CXX"],
+ "MPIF77": compilers_of_client["F77"],
+ "MPIF90": compilers_of_client["F90"],
+ }
+ )
else:
compiler_wrapper_commands = self.mpi_compiler_wrappers
- wrapper_vars.update({
- 'MPICC': compiler_wrapper_commands['MPICC'],
- 'MPICXX': compiler_wrapper_commands['MPICXX'],
- 'MPIF77': compiler_wrapper_commands['MPIF77'],
- 'MPIF90': compiler_wrapper_commands['MPIF90'],
- })
+ wrapper_vars.update(
+ {
+ "MPICC": compiler_wrapper_commands["MPICC"],
+ "MPICXX": compiler_wrapper_commands["MPICXX"],
+ "MPIF77": compiler_wrapper_commands["MPIF77"],
+ "MPIF90": compiler_wrapper_commands["MPIF90"],
+ }
+ )
# Ensure that the directory containing the compiler wrappers is in the
# PATH. Spack packages add `prefix.bin` to their dependents' paths,
# but because of the intel directory hierarchy that is insufficient.
- env.prepend_path('PATH', os.path.dirname(wrapper_vars['MPICC']))
+ env.prepend_path("PATH", os.path.dirname(wrapper_vars["MPICC"]))
for key, value in wrapper_vars.items():
env.set(key, value)
@@ -969,17 +985,17 @@ class IntelPackage(PackageBase):
@property
def headers(self):
result = HeaderList([])
- if '+mpi' in self.spec or self.provides('mpi'):
+ if "+mpi" in self.spec or self.provides("mpi"):
result += find_headers(
- ['mpi'],
- root=self.component_include_dir('mpi'),
- recursive=False)
- if '+mkl' in self.spec or self.provides('mkl'):
+ ["mpi"], root=self.component_include_dir("mpi"), recursive=False
+ )
+ if "+mkl" in self.spec or self.provides("mkl"):
result += find_headers(
- ['mkl_cblas', 'mkl_lapacke'],
- root=self.component_include_dir('mkl'),
- recursive=False)
- if '+tbb' in self.spec or self.provides('tbb'):
+ ["mkl_cblas", "mkl_lapacke"],
+ root=self.component_include_dir("mkl"),
+ recursive=False,
+ )
+ if "+tbb" in self.spec or self.provides("tbb"):
result += self.tbb_headers
debug_print(result)
@@ -988,35 +1004,35 @@ class IntelPackage(PackageBase):
@property
def libs(self):
result = LibraryList([])
- if '+tbb' in self.spec or self.provides('tbb'):
+ if "+tbb" in self.spec or self.provides("tbb"):
result = self.tbb_libs + result
- if '+mkl' in self.spec or self.provides('blas'):
+ if "+mkl" in self.spec or self.provides("blas"):
result = self.blas_libs + result
- if '+mkl' in self.spec or self.provides('lapack'):
+ if "+mkl" in self.spec or self.provides("lapack"):
result = self.lapack_libs + result
- if '+mpi' in self.spec or self.provides('mpi'):
+ if "+mpi" in self.spec or self.provides("mpi"):
# If prefix is too general, recursive searches may get files from
# supported but inappropriate sub-architectures like 'mic'.
- libnames = ['libmpifort', 'libmpi']
- if 'cxx' in self.spec.last_query.extra_parameters:
- libnames = ['libmpicxx'] + libnames
- result = find_libraries(
- libnames,
- root=self.component_lib_dir('mpi'),
- shared=True, recursive=True) + result
+ libnames = ["libmpifort", "libmpi"]
+ if "cxx" in self.spec.last_query.extra_parameters:
+ libnames = ["libmpicxx"] + libnames
+ result = (
+ find_libraries(
+ libnames, root=self.component_lib_dir("mpi"), shared=True, recursive=True
+ )
+ + result
+ )
# Intel MPI since 2019 depends on libfabric which is not in the
# lib directory but in a directory of its own which should be
# included in the rpath
- if self.version_yearlike >= ver('2019'):
- d = ancestor(self.component_lib_dir('mpi'))
- if '+external-libfabric' in self.spec:
- result += self.spec['libfabric'].libs
+ if self.version_yearlike >= ver("2019"):
+ d = ancestor(self.component_lib_dir("mpi"))
+ if "+external-libfabric" in self.spec:
+ result += self.spec["libfabric"].libs
else:
- result += find_libraries(['libfabric'],
- os.path.join(d, 'libfabric', 'lib'))
+ result += find_libraries(["libfabric"], os.path.join(d, "libfabric", "lib"))
- if '^mpi' in self.spec.root and ('+mkl' in self.spec or
- self.provides('scalapack')):
+ if "^mpi" in self.spec.root and ("+mkl" in self.spec or self.provides("scalapack")):
result = self.scalapack_libs + result
debug_print(result)
@@ -1037,7 +1053,7 @@ class IntelPackage(PackageBase):
# All Intel packages expect at least the architecture as argument.
# Some accept more args, but those are not (yet?) handled here.
- args = (_expand_fields('{arch}'),)
+ args = (_expand_fields("{arch}"),)
# On Mac, the platform is *also required*, at least as of 2018.
# I am not sure about earlier versions.
@@ -1046,14 +1062,14 @@ class IntelPackage(PackageBase):
env.extend(EnvironmentModifications.from_sourcing_file(f, *args))
- if self.spec.name in ('intel', 'intel-parallel-studio'):
+ if self.spec.name in ("intel", "intel-parallel-studio"):
# this package provides compilers
# TODO: fix check above when compilers are dependencies
- env.set('CC', self.prefix.bin.icc)
- env.set('CXX', self.prefix.bin.icpc)
- env.set('FC', self.prefix.bin.ifort)
- env.set('F77', self.prefix.bin.ifort)
- env.set('F90', self.prefix.bin.ifort)
+ env.set("CC", self.prefix.bin.icc)
+ env.set("CXX", self.prefix.bin.icpc)
+ env.set("FC", self.prefix.bin.ifort)
+ env.set("F77", self.prefix.bin.ifort)
+ env.set("F90", self.prefix.bin.ifort)
def setup_dependent_build_environment(self, env, dependent_spec):
# NB: This function is overwritten by 'mpi' provider packages:
@@ -1067,13 +1083,12 @@ class IntelPackage(PackageBase):
# Handle everything in a callback version.
self._setup_dependent_env_callback(env, dependent_spec)
- def _setup_dependent_env_callback(
- self, env, dependent_spec, compilers_of_client={}):
+ def _setup_dependent_env_callback(self, env, dependent_spec, compilers_of_client={}):
# Expected to be called from a client's
# setup_dependent_build_environment(),
# with args extended to convey the client's compilers as needed.
- if '+mkl' in self.spec or self.provides('mkl'):
+ if "+mkl" in self.spec or self.provides("mkl"):
# Spack's env philosophy demands that we replicate some of the
# settings normally handled by file_to_source ...
#
@@ -1084,49 +1099,48 @@ class IntelPackage(PackageBase):
#
# Use a local dict to facilitate debug_print():
env_mods = {
- 'MKLROOT': self.normalize_path('mkl'),
- 'SPACK_COMPILER_EXTRA_RPATHS': self.component_lib_dir('mkl'),
- 'CMAKE_PREFIX_PATH': self.normalize_path('mkl'),
- 'CMAKE_LIBRARY_PATH': self.component_lib_dir('mkl'),
- 'CMAKE_INCLUDE_PATH': self.component_include_dir('mkl'),
+ "MKLROOT": self.normalize_path("mkl"),
+ "SPACK_COMPILER_EXTRA_RPATHS": self.component_lib_dir("mkl"),
+ "CMAKE_PREFIX_PATH": self.normalize_path("mkl"),
+ "CMAKE_LIBRARY_PATH": self.component_lib_dir("mkl"),
+ "CMAKE_INCLUDE_PATH": self.component_include_dir("mkl"),
}
- env.set('MKLROOT', env_mods['MKLROOT'])
- env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
- env_mods['SPACK_COMPILER_EXTRA_RPATHS'])
- env.append_path('CMAKE_PREFIX_PATH', env_mods['CMAKE_PREFIX_PATH'])
- env.append_path('CMAKE_LIBRARY_PATH',
- env_mods['CMAKE_LIBRARY_PATH'])
- env.append_path('CMAKE_INCLUDE_PATH',
- env_mods['CMAKE_INCLUDE_PATH'])
+ env.set("MKLROOT", env_mods["MKLROOT"])
+ env.append_path("SPACK_COMPILER_EXTRA_RPATHS", env_mods["SPACK_COMPILER_EXTRA_RPATHS"])
+ env.append_path("CMAKE_PREFIX_PATH", env_mods["CMAKE_PREFIX_PATH"])
+ env.append_path("CMAKE_LIBRARY_PATH", env_mods["CMAKE_LIBRARY_PATH"])
+ env.append_path("CMAKE_INCLUDE_PATH", env_mods["CMAKE_INCLUDE_PATH"])
debug_print("adding/modifying build env:", env_mods)
- if '+mpi' in self.spec or self.provides('mpi'):
+ if "+mpi" in self.spec or self.provides("mpi"):
if compilers_of_client:
self.mpi_setup_dependent_build_environment(
- env, dependent_spec, compilers_of_client)
+ env, dependent_spec, compilers_of_client
+ )
# We could forego this nonce function and inline its code here,
# but (a) it sisters mpi_compiler_wrappers() [needed twice]
# which performs dizzyingly similar but necessarily different
# actions, and (b) function code leaves a bit more breathing
# room within the suffocating corset of flake8 line length.
else:
- raise InstallError('compilers_of_client arg required for MPI')
+ raise InstallError("compilers_of_client arg required for MPI")
def setup_dependent_package(self, module, dep_spec):
# https://spack.readthedocs.io/en/latest/spack.html#spack.package_base.PackageBase.setup_dependent_package
# Reminder: "module" refers to Python module.
# Called before the install() method of dependents.
- if '+mpi' in self.spec or self.provides('mpi'):
+ if "+mpi" in self.spec or self.provides("mpi"):
compiler_wrapper_commands = self.mpi_compiler_wrappers
- self.spec.mpicc = compiler_wrapper_commands['MPICC']
- self.spec.mpicxx = compiler_wrapper_commands['MPICXX']
- self.spec.mpif77 = compiler_wrapper_commands['MPIF77']
- self.spec.mpifc = compiler_wrapper_commands['MPIFC']
- debug_print(("spec '%s' received .mpi* properties:" % self.spec),
- compiler_wrapper_commands)
+ self.spec.mpicc = compiler_wrapper_commands["MPICC"]
+ self.spec.mpicxx = compiler_wrapper_commands["MPICXX"]
+ self.spec.mpif77 = compiler_wrapper_commands["MPIF77"]
+ self.spec.mpifc = compiler_wrapper_commands["MPIFC"]
+ debug_print(
+ ("spec '%s' received .mpi* properties:" % self.spec), compiler_wrapper_commands
+ )
# ---------------------------------------------------------------------
# Specifics for installation phase
@@ -1137,19 +1151,20 @@ class IntelPackage(PackageBase):
All Intel software shares the same license, so we store it in a
common 'intel' directory."""
- return os.path.join(self.global_license_dir, 'intel', 'license.lic')
+ return os.path.join(self.global_license_dir, "intel", "license.lic")
@property
def _determine_license_type(self):
- '''Provide appropriate license tokens for the installer (silent.cfg).
- '''
+ """Provide appropriate license tokens for the installer (silent.cfg)."""
# See:
# ./README-intel.rst, section "Details for licensing tokens".
# ./build_systems/README-intel.rst, section "Licenses"
#
# Ideally, we just tell the installer to look around on the system.
# Thankfully, we neither need to care nor emulate where it looks:
- license_type = {'ACTIVATION_TYPE': 'exist_lic', }
+ license_type = {
+ "ACTIVATION_TYPE": "exist_lic",
+ }
# However (and only), if the spack-internal Intel license file has been
# populated beyond its templated explanatory comments, proffer it to
@@ -1160,21 +1175,20 @@ class IntelPackage(PackageBase):
# self.license_files having been populated, so the "if" is usually
# true by the time the present function runs; ../hooks/licensing.py
with open(f) as fh:
- if re.search(r'^[ \t]*[^' + self.license_comment + '\n]',
- fh.read(), re.MULTILINE):
+ if re.search(r"^[ \t]*[^" + self.license_comment + "\n]", fh.read(), re.MULTILINE):
license_type = {
- 'ACTIVATION_TYPE': 'license_file',
- 'ACTIVATION_LICENSE_FILE': f,
+ "ACTIVATION_TYPE": "license_file",
+ "ACTIVATION_LICENSE_FILE": f,
}
debug_print(license_type)
return license_type
def configure(self, spec, prefix):
- '''Generates the silent.cfg file to pass to installer.sh.
+ """Generates the silent.cfg file to pass to installer.sh.
See https://software.intel.com/en-us/articles/configuration-file-format
- '''
+ """
# Both tokens AND values of the configuration file are validated during
# the run of the underlying binary installer. Any unknown token or
@@ -1191,136 +1205,130 @@ class IntelPackage(PackageBase):
# our configuration accordingly. We can do this because the tokens are
# quite long and specific.
- validator_code = open('pset/check.awk', 'r').read()
+ validator_code = open("pset/check.awk", "r").read()
# Let's go a little further and distill the tokens (plus some noise).
- tokenlike_words = set(re.findall(r'[A-Z_]{4,}', validator_code))
+ tokenlike_words = set(re.findall(r"[A-Z_]{4,}", validator_code))
# NB: .cfg files generated with the "--duplicate filename" option have
# the COMPONENTS string begin with a separator - do not worry about it.
- components_joined = ';'.join(self._filtered_components)
- nonrpm_db_dir = os.path.join(prefix, 'nonrpm-db')
+ components_joined = ";".join(self._filtered_components)
+ nonrpm_db_dir = os.path.join(prefix, "nonrpm-db")
config_draft = {
# Basics first - these should be accepted in all products.
- 'ACCEPT_EULA': 'accept',
- 'PSET_MODE': 'install',
- 'CONTINUE_WITH_OPTIONAL_ERROR': 'yes',
- 'CONTINUE_WITH_INSTALLDIR_OVERWRITE': 'yes',
- 'SIGNING_ENABLED': 'no',
-
+ "ACCEPT_EULA": "accept",
+ "PSET_MODE": "install",
+ "CONTINUE_WITH_OPTIONAL_ERROR": "yes",
+ "CONTINUE_WITH_INSTALLDIR_OVERWRITE": "yes",
+ "SIGNING_ENABLED": "no",
# Highly variable package specifics:
- 'PSET_INSTALL_DIR': prefix,
- 'NONRPM_DB_DIR': nonrpm_db_dir,
- 'COMPONENTS': components_joined,
-
+ "PSET_INSTALL_DIR": prefix,
+ "NONRPM_DB_DIR": nonrpm_db_dir,
+ "COMPONENTS": components_joined,
# Conditional tokens; the first is supported post-2015 only.
# Ignore ia32; most recent products don't even provide it.
- 'ARCH_SELECTED': 'INTEL64', # was: 'ALL'
-
+ "ARCH_SELECTED": "INTEL64", # was: 'ALL'
# 'ism' component -- see uninstall_ism(); also varies by release.
- 'PHONEHOME_SEND_USAGE_DATA': 'no',
+ "PHONEHOME_SEND_USAGE_DATA": "no",
# Ah, as of 2018.2, that somewhat loaded term got replaced by one
# in business-speak. We uphold our preference, both out of general
# principles and for technical reasons like overhead and non-routed
# compute nodes.
- 'INTEL_SW_IMPROVEMENT_PROGRAM_CONSENT': 'no',
+ "INTEL_SW_IMPROVEMENT_PROGRAM_CONSENT": "no",
}
# Deal with licensing only if truly needed.
# NB: Token was 'ACTIVATION' pre ~2013, so basically irrelevant here.
- if 'ACTIVATION_TYPE' in tokenlike_words:
+ if "ACTIVATION_TYPE" in tokenlike_words:
config_draft.update(self._determine_license_type)
# Write sorted *by token* so the file looks less like a hash dump.
- f = open('silent.cfg', 'w')
+ f = open("silent.cfg", "w")
for token, value in sorted(config_draft.items()):
if token in tokenlike_words:
- f.write('%s=%s\n' % (token, value))
+ f.write("%s=%s\n" % (token, value))
f.close()
def install(self, spec, prefix):
- '''Runs Intel's install.sh installation script. Afterwards, save the
+ """Runs Intel's install.sh installation script. Afterwards, save the
installer config and logs to <prefix>/.spack
- '''
+ """
# prepare
- tmpdir = tempfile.mkdtemp(prefix='spack-intel-')
+ tmpdir = tempfile.mkdtemp(prefix="spack-intel-")
- install_script = Executable('./install.sh')
- install_script.add_default_env('TMPDIR', tmpdir)
+ install_script = Executable("./install.sh")
+ install_script.add_default_env("TMPDIR", tmpdir)
# Need to set HOME to avoid using ~/intel
- install_script.add_default_env('HOME', prefix)
+ install_script.add_default_env("HOME", prefix)
# perform
- install_script('--silent', 'silent.cfg')
+ install_script("--silent", "silent.cfg")
# preserve config and logs
- dst = os.path.join(self.prefix, '.spack')
- install('silent.cfg', dst)
- for f in glob.glob('%s/intel*log' % tmpdir):
+ dst = os.path.join(self.prefix, ".spack")
+ install("silent.cfg", dst)
+ for f in glob.glob("%s/intel*log" % tmpdir):
install(f, dst)
- @run_after('install')
+ @run_after("install")
def validate_install(self):
# Sometimes the installer exits with an error but doesn't pass a
# non-zero exit code to spack. Check for the existence of a 'bin'
# directory to catch this error condition.
if not os.path.exists(self.prefix.bin):
- raise InstallError('The installer has failed to install anything.')
+ raise InstallError("The installer has failed to install anything.")
- @run_after('install')
+ @run_after("install")
def configure_rpath(self):
- if '+rpath' not in self.spec:
+ if "+rpath" not in self.spec:
return
# https://software.intel.com/en-us/cpp-compiler-18.0-developer-guide-and-reference-using-configuration-files
- compilers_bin_dir = self.component_bin_dir('compiler')
- compilers_lib_dir = self.component_lib_dir('compiler')
+ compilers_bin_dir = self.component_bin_dir("compiler")
+ compilers_lib_dir = self.component_lib_dir("compiler")
- for compiler_name in 'icc icpc ifort'.split():
+ for compiler_name in "icc icpc ifort".split():
f = os.path.join(compilers_bin_dir, compiler_name)
if not os.path.isfile(f):
- raise InstallError(
- 'Cannot find compiler command to configure rpath:\n\t' + f)
+ raise InstallError("Cannot find compiler command to configure rpath:\n\t" + f)
- compiler_cfg = os.path.abspath(f + '.cfg')
- with open(compiler_cfg, 'w') as fh:
- fh.write('-Xlinker -rpath={0}\n'.format(compilers_lib_dir))
+ compiler_cfg = os.path.abspath(f + ".cfg")
+ with open(compiler_cfg, "w") as fh:
+ fh.write("-Xlinker -rpath={0}\n".format(compilers_lib_dir))
- @run_after('install')
+ @run_after("install")
def configure_auto_dispatch(self):
if self._has_compilers:
- if ('auto_dispatch=none' in self.spec):
+ if "auto_dispatch=none" in self.spec:
return
- compilers_bin_dir = self.component_bin_dir('compiler')
+ compilers_bin_dir = self.component_bin_dir("compiler")
- for compiler_name in 'icc icpc ifort'.split():
+ for compiler_name in "icc icpc ifort".split():
f = os.path.join(compilers_bin_dir, compiler_name)
if not os.path.isfile(f):
raise InstallError(
- 'Cannot find compiler command to configure '
- 'auto_dispatch:\n\t' + f)
+ "Cannot find compiler command to configure " "auto_dispatch:\n\t" + f
+ )
ad = []
for x in IntelPackage.auto_dispatch_options:
- if 'auto_dispatch={0}'.format(x) in self.spec:
+ if "auto_dispatch={0}".format(x) in self.spec:
ad.append(x)
- compiler_cfg = os.path.abspath(f + '.cfg')
- with open(compiler_cfg, 'a') as fh:
- fh.write('-ax{0}\n'.format(','.join(ad)))
+ compiler_cfg = os.path.abspath(f + ".cfg")
+ with open(compiler_cfg, "a") as fh:
+ fh.write("-ax{0}\n".format(",".join(ad)))
- @run_after('install')
+ @run_after("install")
def filter_compiler_wrappers(self):
- if (('+mpi' in self.spec or self.provides('mpi')) and
- '~newdtags' in self.spec):
- bin_dir = self.component_bin_dir('mpi')
- for f in 'mpif77 mpif90 mpigcc mpigxx mpiicc mpiicpc ' \
- 'mpiifort'.split():
+ if ("+mpi" in self.spec or self.provides("mpi")) and "~newdtags" in self.spec:
+ bin_dir = self.component_bin_dir("mpi")
+ for f in "mpif77 mpif90 mpigcc mpigxx mpiicc mpiicpc " "mpiifort".split():
f = os.path.join(bin_dir, f)
- filter_file('-Xlinker --enable-new-dtags', ' ', f, string=True)
+ filter_file("-Xlinker --enable-new-dtags", " ", f, string=True)
- @run_after('install')
+ @run_after("install")
def uninstall_ism(self):
# The "Intel(R) Software Improvement Program" [ahem] gets installed,
# apparently regardless of PHONEHOME_SEND_USAGE_DATA.
@@ -1331,12 +1339,11 @@ class IntelPackage(PackageBase):
# "... you can also uninstall the Intel(R) Software Manager
# completely: <installdir>/intel/ism/uninstall.sh"
- f = os.path.join(self.normalize_path('ism'), 'uninstall.sh')
+ f = os.path.join(self.normalize_path("ism"), "uninstall.sh")
if os.path.isfile(f):
- tty.warn('Uninstalling "Intel Software Improvement Program"'
- 'component')
+ tty.warn('Uninstalling "Intel Software Improvement Program"' "component")
uninstall = Executable(f)
- uninstall('--silent')
+ uninstall("--silent")
# TODO? also try
# ~/intel/ism/uninstall --silent
@@ -1346,15 +1353,14 @@ class IntelPackage(PackageBase):
@property
def base_lib_dir(self):
- """Provide the library directory located in the base of Intel installation.
- """
- d = self.normalize_path('')
- d = os.path.join(d, 'lib')
+ """Provide the library directory located in the base of Intel installation."""
+ d = self.normalize_path("")
+ d = os.path.join(d, "lib")
debug_print(d)
return d
- @run_after('install')
+ @run_after("install")
def modify_LLVMgold_rpath(self):
"""Add libimf.so and other required libraries to the RUNPATH of LLVMgold.so.
@@ -1362,8 +1368,9 @@ class IntelPackage(PackageBase):
`ld -plugin LLVMgold.so` is called by the compiler.
"""
if self._has_compilers:
- LLVMgold_libs = find_libraries('LLVMgold', self.base_lib_dir,
- shared=True, recursive=True)
+ LLVMgold_libs = find_libraries(
+ "LLVMgold", self.base_lib_dir, shared=True, recursive=True
+ )
# Ignore ia32 entries, as they are mostly ignored throughout the rest
# of the file.
# The first entry in rpath preserves the original, the second entry
@@ -1371,15 +1378,19 @@ class IntelPackage(PackageBase):
# in compiler releases, then we need to search for libimf.so instead
# of this static path.
for lib in LLVMgold_libs:
- if not self.spec.satisfies('^patchelf'):
+ if not self.spec.satisfies("^patchelf"):
raise spack.error.SpackError(
- 'Attempting to patch RPATH in LLVMgold.so.'
- + '`patchelf` dependency should be set in package.py'
+ "Attempting to patch RPATH in LLVMgold.so."
+ + "`patchelf` dependency should be set in package.py"
)
- patchelf = Executable('patchelf')
- rpath = ':'.join([patchelf('--print-rpath', lib, output=str).strip(),
- '$ORIGIN/../compiler/lib/intel64_lin'])
- patchelf('--set-rpath', rpath, lib)
+ patchelf = Executable("patchelf")
+ rpath = ":".join(
+ [
+ patchelf("--print-rpath", lib, output=str).strip(),
+ "$ORIGIN/../compiler/lib/intel64_lin",
+ ]
+ )
+ patchelf("--set-rpath", rpath, lib)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
diff --git a/lib/spack/spack/build_systems/lua.py b/lib/spack/spack/build_systems/lua.py
index ed71aac9e9..c0d4321097 100644
--- a/lib/spack/spack/build_systems/lua.py
+++ b/lib/spack/spack/build_systems/lua.py
@@ -17,27 +17,27 @@ from spack.util.executable import Executable
class LuaPackage(PackageBase):
"""Specialized class for lua packages"""
- phases = ['unpack', 'generate_luarocks_config', 'preprocess', 'install']
+ phases = ["unpack", "generate_luarocks_config", "preprocess", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'LuaPackage'
+ build_system_class = "LuaPackage"
list_depth = 1 # LuaRocks requires at least one level of spidering to find versions
- depends_on('lua-lang')
- extends('lua', when='^lua')
- with when('^lua-luajit'):
- extends('lua-luajit')
- depends_on('luajit')
- depends_on('lua-luajit+lualinks')
- with when('^lua-luajit-openresty'):
- extends('lua-luajit-openresty')
- depends_on('luajit')
- depends_on('lua-luajit-openresty+lualinks')
+ depends_on("lua-lang")
+ extends("lua", when="^lua")
+ with when("^lua-luajit"):
+ extends("lua-luajit")
+ depends_on("luajit")
+ depends_on("lua-luajit+lualinks")
+ with when("^lua-luajit-openresty"):
+ extends("lua-luajit-openresty")
+ depends_on("luajit")
+ depends_on("lua-luajit-openresty+lualinks")
def unpack(self, spec, prefix):
- if os.path.splitext(self.stage.archive_file)[1] == '.rock':
- directory = self.luarocks('unpack', self.stage.archive_file, output=str)
- dirlines = directory.split('\n')
+ if os.path.splitext(self.stage.archive_file)[1] == ".rock":
+ directory = self.luarocks("unpack", self.stage.archive_file, output=str)
+ dirlines = directory.split("\n")
# TODO: figure out how to scope this better
os.chdir(dirlines[2])
@@ -48,19 +48,17 @@ class LuaPackage(PackageBase):
)
def _luarocks_config_path(self):
- return os.path.join(self.stage.source_path, 'spack_luarocks.lua')
+ return os.path.join(self.stage.source_path, "spack_luarocks.lua")
def generate_luarocks_config(self, spec, prefix):
spec = self.spec
table_entries = []
- for d in spec.traverse(
- deptypes=("build", "run"), deptype_query="run"
- ):
+ for d in spec.traverse(deptypes=("build", "run"), deptype_query="run"):
if d.package.extends(self.extendee_spec):
table_entries.append(self._generate_tree_line(d.name, d.prefix))
path = self._luarocks_config_path()
- with open(path, 'w') as config:
+ with open(path, "w") as config:
config.write(
"""
deps_mode="all"
@@ -74,7 +72,7 @@ class LuaPackage(PackageBase):
return path
def setup_build_environment(self, env):
- env.set('LUAROCKS_CONFIG', self._luarocks_config_path())
+ env.set("LUAROCKS_CONFIG", self._luarocks_config_path())
def preprocess(self, spec, prefix):
"""Override this to preprocess source before building with luarocks"""
@@ -82,21 +80,21 @@ class LuaPackage(PackageBase):
@property
def lua(self):
- return Executable(self.spec['lua-lang'].prefix.bin.lua)
+ return Executable(self.spec["lua-lang"].prefix.bin.lua)
@property
def luarocks(self):
- lr = Executable(self.spec['lua-lang'].prefix.bin.luarocks)
+ lr = Executable(self.spec["lua-lang"].prefix.bin.luarocks)
return lr
def luarocks_args(self):
return []
def install(self, spec, prefix):
- rock = '.'
- specs = find('.', '*.rockspec', recursive=False)
+ rock = "."
+ specs = find(".", "*.rockspec", recursive=False)
if specs:
rock = specs[0]
rocks_args = self.luarocks_args()
rocks_args.append(rock)
- self.luarocks('--tree=' + prefix, 'make', *rocks_args)
+ self.luarocks("--tree=" + prefix, "make", *rocks_args)
diff --git a/lib/spack/spack/build_systems/makefile.py b/lib/spack/spack/build_systems/makefile.py
index be5986da33..e2bb8c0c26 100644
--- a/lib/spack/spack/build_systems/makefile.py
+++ b/lib/spack/spack/build_systems/makefile.py
@@ -43,25 +43,26 @@ class MakefilePackage(PackageBase):
| | Makefile is located|
+-----------------------------------------------+--------------------+
"""
+
#: Phases of a package that is built with an hand-written Makefile
- phases = ['edit', 'build', 'install']
+ phases = ["edit", "build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'MakefilePackage'
+ build_system_class = "MakefilePackage"
#: Targets for ``make`` during the :py:meth:`~.MakefilePackage.build`
#: phase
build_targets = [] # type: List[str]
#: Targets for ``make`` during the :py:meth:`~.MakefilePackage.install`
#: phase
- install_targets = ['install']
+ install_targets = ["install"]
- conflicts('platform=windows')
+ conflicts("platform=windows")
#: Callback names for build-time test
- build_time_test_callbacks = ['check']
+ build_time_test_callbacks = ["check"]
#: Callback names for install-time test
- install_time_test_callbacks = ['installcheck']
+ install_time_test_callbacks = ["installcheck"]
@property
def build_directory(self):
@@ -75,7 +76,7 @@ class MakefilePackage(PackageBase):
"""Edits the Makefile before calling make. This phase cannot
be defaulted.
"""
- tty.msg('Using default implementation: skipping edit phase.')
+ tty.msg("Using default implementation: skipping edit phase.")
def build(self, spec, prefix):
"""Calls make, passing :py:attr:`~.MakefilePackage.build_targets`
@@ -91,27 +92,27 @@ class MakefilePackage(PackageBase):
with working_dir(self.build_directory):
inspect.getmodule(self).make(*self.install_targets)
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
def check(self):
"""Searches the Makefile for targets ``test`` and ``check``
and runs them if found.
"""
with working_dir(self.build_directory):
- self._if_make_target_execute('test')
- self._if_make_target_execute('check')
+ self._if_make_target_execute("test")
+ self._if_make_target_execute("check")
- run_after('install')(PackageBase._run_default_install_time_test_callbacks)
+ run_after("install")(PackageBase._run_default_install_time_test_callbacks)
def installcheck(self):
"""Searches the Makefile for an ``installcheck`` target
and runs it if found.
"""
with working_dir(self.build_directory):
- self._if_make_target_execute('installcheck')
+ self._if_make_target_execute("installcheck")
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
- run_after('install')(PackageBase.apply_macos_rpath_fixups)
+ run_after("install")(PackageBase.apply_macos_rpath_fixups)
diff --git a/lib/spack/spack/build_systems/maven.py b/lib/spack/spack/build_systems/maven.py
index ffa67346ab..1ff1882e13 100644
--- a/lib/spack/spack/build_systems/maven.py
+++ b/lib/spack/spack/build_systems/maven.py
@@ -21,15 +21,16 @@ class MavenPackage(PackageBase):
* build
* install
"""
+
# Default phases
- phases = ['build', 'install']
+ phases = ["build", "install"]
# To be used in UI queries that require to know which
# build-system class we are using
- build_system_class = 'MavenPackage'
+ build_system_class = "MavenPackage"
- depends_on('java', type=('build', 'run'))
- depends_on('maven', type='build')
+ depends_on("java", type=("build", "run"))
+ depends_on("maven", type="build")
@property
def build_directory(self):
@@ -44,17 +45,17 @@ class MavenPackage(PackageBase):
"""Compile code and package into a JAR file."""
with working_dir(self.build_directory):
- mvn = which('mvn')
+ mvn = which("mvn")
if self.run_tests:
- mvn('verify', *self.build_args())
+ mvn("verify", *self.build_args())
else:
- mvn('package', '-DskipTests', *self.build_args())
+ mvn("package", "-DskipTests", *self.build_args())
def install(self, spec, prefix):
"""Copy to installation prefix."""
with working_dir(self.build_directory):
- install_tree('.', prefix)
+ install_tree(".", prefix)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
diff --git a/lib/spack/spack/build_systems/meson.py b/lib/spack/spack/build_systems/meson.py
index 9e3bbf20c8..886a7bee50 100644
--- a/lib/spack/spack/build_systems/meson.py
+++ b/lib/spack/spack/build_systems/meson.py
@@ -42,31 +42,40 @@ class MesonPackage(PackageBase):
"""
+
#: Phases of a Meson package
- phases = ['meson', 'build', 'install']
+ phases = ["meson", "build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'MesonPackage'
+ build_system_class = "MesonPackage"
build_targets = [] # type: List[str]
- install_targets = ['install']
-
- build_time_test_callbacks = ['check']
-
- variant('buildtype', default='debugoptimized',
- description='Meson build type',
- values=('plain', 'debug', 'debugoptimized', 'release', 'minsize'))
- variant('default_library', default='shared', values=('shared', 'static'),
- multi=True, description='Build shared libs, static libs or both')
- variant('strip', default=False, description='Strip targets on install')
-
- depends_on('meson', type='build')
- depends_on('ninja', type='build')
+ install_targets = ["install"]
+
+ build_time_test_callbacks = ["check"]
+
+ variant(
+ "buildtype",
+ default="debugoptimized",
+ description="Meson build type",
+ values=("plain", "debug", "debugoptimized", "release", "minsize"),
+ )
+ variant(
+ "default_library",
+ default="shared",
+ values=("shared", "static"),
+ multi=True,
+ description="Build shared libs, static libs or both",
+ )
+ variant("strip", default=False, description="Strip targets on install")
+
+ depends_on("meson", type="build")
+ depends_on("ninja", type="build")
@property
def archive_files(self):
"""Files to archive for packages based on Meson"""
- return [os.path.join(self.build_directory, 'meson-logs/meson-log.txt')]
+ return [os.path.join(self.build_directory, "meson-logs/meson-log.txt")]
@property
def root_mesonlists_dir(self):
@@ -88,7 +97,7 @@ class MesonPackage(PackageBase):
"""
# standard Meson arguments
std_meson_args = MesonPackage._std_args(self)
- std_meson_args += getattr(self, 'meson_flag_args', [])
+ std_meson_args += getattr(self, "meson_flag_args", [])
return std_meson_args
@staticmethod
@@ -96,29 +105,29 @@ class MesonPackage(PackageBase):
"""Computes the standard meson arguments for a generic package"""
try:
- build_type = pkg.spec.variants['buildtype'].value
+ build_type = pkg.spec.variants["buildtype"].value
except KeyError:
- build_type = 'release'
+ build_type = "release"
- strip = 'true' if '+strip' in pkg.spec else 'false'
+ strip = "true" if "+strip" in pkg.spec else "false"
- if 'default_library=static,shared' in pkg.spec:
- default_library = 'both'
- elif 'default_library=static' in pkg.spec:
- default_library = 'static'
+ if "default_library=static,shared" in pkg.spec:
+ default_library = "both"
+ elif "default_library=static" in pkg.spec:
+ default_library = "static"
else:
- default_library = 'shared'
+ default_library = "shared"
args = [
- '--prefix={0}'.format(pkg.prefix),
+ "--prefix={0}".format(pkg.prefix),
# If we do not specify libdir explicitly, Meson chooses something
# like lib/x86_64-linux-gnu, which causes problems when trying to
# find libraries and pkg-config files.
# See https://github.com/mesonbuild/meson/issues/2197
- '--libdir={0}'.format(pkg.prefix.lib),
- '-Dbuildtype={0}'.format(build_type),
- '-Dstrip={0}'.format(strip),
- '-Ddefault_library={0}'.format(default_library)
+ "--libdir={0}".format(pkg.prefix.lib),
+ "-Dbuildtype={0}".format(build_type),
+ "-Dstrip={0}".format(strip),
+ "-Ddefault_library={0}".format(default_library),
]
return args
@@ -127,7 +136,7 @@ class MesonPackage(PackageBase):
"""Produces a list of all command line arguments to pass the specified
compiler flags to meson."""
# Has to be dynamic attribute due to caching
- setattr(self, 'meson_flag_args', [])
+ setattr(self, "meson_flag_args", [])
@property
def build_directory(self):
@@ -135,7 +144,7 @@ class MesonPackage(PackageBase):
:return: directory where to build the package
"""
- return os.path.join(self.stage.source_path, 'spack-build')
+ return os.path.join(self.stage.source_path, "spack-build")
def meson_args(self):
"""Produces a list containing all the arguments that must be passed to
@@ -163,7 +172,7 @@ class MesonPackage(PackageBase):
def build(self, spec, prefix):
"""Make the build targets"""
- options = ['-v']
+ options = ["-v"]
options += self.build_targets
with working_dir(self.build_directory):
inspect.getmodule(self).ninja(*options)
@@ -173,15 +182,15 @@ class MesonPackage(PackageBase):
with working_dir(self.build_directory):
inspect.getmodule(self).ninja(*self.install_targets)
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
def check(self):
"""Searches the Meson-generated file for the target ``test``
and runs it if found.
"""
with working_dir(self.build_directory):
- self._if_ninja_target_execute('test')
- self._if_ninja_target_execute('check')
+ self._if_ninja_target_execute("test")
+ self._if_ninja_target_execute("check")
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
diff --git a/lib/spack/spack/build_systems/octave.py b/lib/spack/spack/build_systems/octave.py
index 13dd864e3b..9916c319b0 100644
--- a/lib/spack/spack/build_systems/octave.py
+++ b/lib/spack/spack/build_systems/octave.py
@@ -19,32 +19,34 @@ class OctavePackage(PackageBase):
1. :py:meth:`~.OctavePackage.install`
"""
+
# Default phases
- phases = ['install']
+ phases = ["install"]
# To be used in UI queries that require to know which
# build-system class we are using
- build_system_class = 'OctavePackage'
+ build_system_class = "OctavePackage"
- extends('octave')
+ extends("octave")
def setup_build_environment(self, env):
# octave does not like those environment variables to be set:
- env.unset('CC')
- env.unset('CXX')
- env.unset('FC')
+ env.unset("CC")
+ env.unset("CXX")
+ env.unset("FC")
def install(self, spec, prefix):
"""Install the package from the archive file"""
inspect.getmodule(self).octave(
- '--quiet',
- '--norc',
- '--built-in-docstrings-file=/dev/null',
- '--texi-macros-file=/dev/null',
- '--eval', 'pkg prefix %s; pkg install %s' %
- (prefix, self.stage.archive_file))
+ "--quiet",
+ "--norc",
+ "--built-in-docstrings-file=/dev/null",
+ "--texi-macros-file=/dev/null",
+ "--eval",
+ "pkg prefix %s; pkg install %s" % (prefix, self.stage.archive_file),
+ )
# Testing
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
diff --git a/lib/spack/spack/build_systems/oneapi.py b/lib/spack/spack/build_systems/oneapi.py
index c0da2fdb53..669c66fe8f 100644
--- a/lib/spack/spack/build_systems/oneapi.py
+++ b/lib/spack/spack/build_systems/oneapi.py
@@ -22,9 +22,9 @@ from spack.util.executable import Executable
class IntelOneApiPackage(Package):
"""Base class for Intel oneAPI packages."""
- homepage = 'https://software.intel.com/oneapi'
+ homepage = "https://software.intel.com/oneapi"
- phases = ['install']
+ phases = ["install"]
# oneAPI license does not allow mirroring outside of the
# organization (e.g. University/Company).
@@ -55,7 +55,7 @@ class IntelOneApiPackage(Package):
def install_component(self, installer_path):
"""Shared install method for all oneapi packages."""
- if platform.system() == 'Linux':
+ if platform.system() == "Linux":
# Intel installer assumes and enforces that all components
# are installed into a single prefix. Spack wants to
# install each component in a separate prefix. The
@@ -69,28 +69,35 @@ class IntelOneApiPackage(Package):
# with other install depends on the userid. For root, we
# delete the installercache before and after install. For
# non root we redefine the HOME environment variable.
- if getpass.getuser() == 'root':
- shutil.rmtree('/var/intel/installercache', ignore_errors=True)
+ if getpass.getuser() == "root":
+ shutil.rmtree("/var/intel/installercache", ignore_errors=True)
- bash = Executable('bash')
+ bash = Executable("bash")
# Installer writes files in ~/intel set HOME so it goes to prefix
- bash.add_default_env('HOME', self.prefix)
+ bash.add_default_env("HOME", self.prefix)
# Installer checks $XDG_RUNTIME_DIR/.bootstrapper_lock_file as well
- bash.add_default_env('XDG_RUNTIME_DIR',
- join_path(self.stage.path, 'runtime'))
-
- bash(installer_path,
- '-s', '-a', '-s', '--action', 'install',
- '--eula', 'accept',
- '--install-dir', self.prefix)
-
- if getpass.getuser() == 'root':
- shutil.rmtree('/var/intel/installercache', ignore_errors=True)
+ bash.add_default_env("XDG_RUNTIME_DIR", join_path(self.stage.path, "runtime"))
+
+ bash(
+ installer_path,
+ "-s",
+ "-a",
+ "-s",
+ "--action",
+ "install",
+ "--eula",
+ "accept",
+ "--install-dir",
+ self.prefix,
+ )
+
+ if getpass.getuser() == "root":
+ shutil.rmtree("/var/intel/installercache", ignore_errors=True)
# Some installers have a bug and do not return an error code when failing
if not isdir(join_path(self.prefix, self.component_dir)):
- raise RuntimeError('install failed')
+ raise RuntimeError("install failed")
def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
@@ -101,8 +108,11 @@ class IntelOneApiPackage(Package):
$ source {prefix}/{component}/{version}/env/vars.sh
"""
- env.extend(EnvironmentModifications.from_sourcing_file(
- join_path(self.component_prefix, 'env', 'vars.sh')))
+ env.extend(
+ EnvironmentModifications.from_sourcing_file(
+ join_path(self.component_prefix, "env", "vars.sh")
+ )
+ )
class IntelOneApiLibraryPackage(IntelOneApiPackage):
@@ -116,14 +126,14 @@ class IntelOneApiLibraryPackage(IntelOneApiPackage):
@property
def headers(self):
- include_path = join_path(self.component_prefix, 'include')
- return find_headers('*', include_path, recursive=True)
+ include_path = join_path(self.component_prefix, "include")
+ return find_headers("*", include_path, recursive=True)
@property
def libs(self):
- lib_path = join_path(self.component_prefix, 'lib', 'intel64')
+ lib_path = join_path(self.component_prefix, "lib", "intel64")
lib_path = lib_path if isdir(lib_path) else dirname(lib_path)
- return find_libraries('*', root=lib_path, shared=True, recursive=True)
+ return find_libraries("*", root=lib_path, shared=True, recursive=True)
class IntelOneApiStaticLibraryList(object):
@@ -151,9 +161,10 @@ class IntelOneApiStaticLibraryList(object):
@property
def link_flags(self):
- return '-Wl,--start-group {0} -Wl,--end-group {1}'.format(
- ' '.join(self.static_libs.libraries), self.dynamic_libs.link_flags)
+ return "-Wl,--start-group {0} -Wl,--end-group {1}".format(
+ " ".join(self.static_libs.libraries), self.dynamic_libs.link_flags
+ )
@property
def ld_flags(self):
- return '{0} {1}'.format(self.search_flags, self.link_flags)
+ return "{0} {1}".format(self.search_flags, self.link_flags)
diff --git a/lib/spack/spack/build_systems/perl.py b/lib/spack/spack/build_systems/perl.py
index 60530abd05..1f354beece 100644
--- a/lib/spack/spack/build_systems/perl.py
+++ b/lib/spack/spack/build_systems/perl.py
@@ -34,17 +34,18 @@ class PerlPackage(PackageBase):
:py:meth:`~.PerlPackage.configure`.
Arguments should not include the installation base directory.
"""
+
#: Phases of a Perl package
- phases = ['configure', 'build', 'install']
+ phases = ["configure", "build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'PerlPackage'
+ build_system_class = "PerlPackage"
#: Callback names for build-time test
- build_time_test_callbacks = ['check']
+ build_time_test_callbacks = ["check"]
- extends('perl')
+ extends("perl")
def configure_args(self):
"""Produces a list containing the arguments that must be passed to
@@ -62,20 +63,19 @@ class PerlPackage(PackageBase):
:raise RuntimeError: if neither Makefile.PL or Build.PL exist
"""
- if os.path.isfile('Makefile.PL'):
- self.build_method = 'Makefile.PL'
+ if os.path.isfile("Makefile.PL"):
+ self.build_method = "Makefile.PL"
self.build_executable = inspect.getmodule(self).make
- elif os.path.isfile('Build.PL'):
- self.build_method = 'Build.PL'
- self.build_executable = Executable(
- os.path.join(self.stage.source_path, 'Build'))
+ elif os.path.isfile("Build.PL"):
+ self.build_method = "Build.PL"
+ self.build_executable = Executable(os.path.join(self.stage.source_path, "Build"))
else:
- raise RuntimeError('Unknown build_method for perl package')
+ raise RuntimeError("Unknown build_method for perl package")
- if self.build_method == 'Makefile.PL':
- options = ['Makefile.PL', 'INSTALL_BASE={0}'.format(prefix)]
- elif self.build_method == 'Build.PL':
- options = ['Build.PL', '--install_base', prefix]
+ if self.build_method == "Makefile.PL":
+ options = ["Makefile.PL", "INSTALL_BASE={0}".format(prefix)]
+ elif self.build_method == "Build.PL":
+ options = ["Build.PL", "--install_base", prefix]
options += self.configure_args()
inspect.getmodule(self).perl(*options)
@@ -84,27 +84,27 @@ class PerlPackage(PackageBase):
# Build.PL may be too long causing the build to fail. Patching the shebang
# does not happen until after install so set '/usr/bin/env perl' here in
# the Build script.
- @run_after('configure')
+ @run_after("configure")
def fix_shebang(self):
- if self.build_method == 'Build.PL':
- pattern = '#!{0}'.format(self.spec['perl'].command.path)
- repl = '#!/usr/bin/env perl'
- filter_file(pattern, repl, 'Build', backup=False)
+ if self.build_method == "Build.PL":
+ pattern = "#!{0}".format(self.spec["perl"].command.path)
+ repl = "#!/usr/bin/env perl"
+ filter_file(pattern, repl, "Build", backup=False)
def build(self, spec, prefix):
"""Builds a Perl package."""
self.build_executable()
# Ensure that tests run after build (if requested):
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
def check(self):
"""Runs built-in tests of a Perl package."""
- self.build_executable('test')
+ self.build_executable("test")
def install(self, spec, prefix):
"""Installs a Perl package."""
- self.build_executable('install')
+ self.build_executable("install")
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
index 4e109b628d..1a69d79f40 100644
--- a/lib/spack/spack/build_systems/python.py
+++ b/lib/spack/spack/build_systems/python.py
@@ -28,27 +28,28 @@ from spack.package_base import PackageBase, run_after
class PythonPackage(PackageBase):
"""Specialized class for packages that are built using pip."""
+
#: Package name, version, and extension on PyPI
pypi = None # type: Optional[str]
- maintainers = ['adamjstewart']
+ maintainers = ["adamjstewart"]
# Default phases
- phases = ['install']
+ phases = ["install"]
# To be used in UI queries that require to know which
# build-system class we are using
- build_system_class = 'PythonPackage'
+ build_system_class = "PythonPackage"
#: Callback names for install-time test
- install_time_test_callbacks = ['test']
+ install_time_test_callbacks = ["test"]
- extends('python')
- depends_on('py-pip', type='build')
+ extends("python")
+ depends_on("py-pip", type="build")
# FIXME: technically wheel is only needed when building from source, not when
# installing a downloaded wheel, but I don't want to add wheel as a dep to every
# package manually
- depends_on('py-wheel', type='build')
+ depends_on("py-wheel", type="build")
py_namespace = None # type: Optional[str]
@@ -56,43 +57,43 @@ class PythonPackage(PackageBase):
def _std_args(cls):
return [
# Verbose
- '-vvv',
+ "-vvv",
# Disable prompting for input
- '--no-input',
+ "--no-input",
# Disable the cache
- '--no-cache-dir',
+ "--no-cache-dir",
# Don't check to see if pip is up-to-date
- '--disable-pip-version-check',
+ "--disable-pip-version-check",
# Install packages
- 'install',
+ "install",
# Don't install package dependencies
- '--no-deps',
+ "--no-deps",
# Overwrite existing packages
- '--ignore-installed',
+ "--ignore-installed",
# Use env vars like PYTHONPATH
- '--no-build-isolation',
+ "--no-build-isolation",
# Don't warn that prefix.bin is not in PATH
- '--no-warn-script-location',
+ "--no-warn-script-location",
# Ignore the PyPI package index
- '--no-index',
+ "--no-index",
]
@classproperty
def homepage(cls):
if cls.pypi:
- name = cls.pypi.split('/')[0]
- return 'https://pypi.org/project/' + name + '/'
+ name = cls.pypi.split("/")[0]
+ return "https://pypi.org/project/" + name + "/"
@classproperty
def url(cls):
if cls.pypi:
- return 'https://files.pythonhosted.org/packages/source/' + cls.pypi[0] + '/' + cls.pypi
+ return "https://files.pythonhosted.org/packages/source/" + cls.pypi[0] + "/" + cls.pypi
@classproperty
def list_url(cls):
if cls.pypi:
- name = cls.pypi.split('/')[0]
- return 'https://pypi.org/simple/' + name + '/'
+ name = cls.pypi.split("/")[0]
+ return "https://pypi.org/simple/" + name + "/"
@property
def import_modules(self):
@@ -113,7 +114,7 @@ class PythonPackage(PackageBase):
list: list of strings of module names
"""
modules = []
- pkg = self.spec['python'].package
+ pkg = self.spec["python"].package
# Packages may be installed in platform-specific or platform-independent
# site-packages directories
@@ -122,19 +123,23 @@ class PythonPackage(PackageBase):
# Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files
- for path in find(root, '__init__.py', recursive=True):
- modules.append(path.replace(root + os.sep, '', 1).replace(
- os.sep + '__init__.py', '').replace('/', '.'))
+ for path in find(root, "__init__.py", recursive=True):
+ modules.append(
+ path.replace(root + os.sep, "", 1)
+ .replace(os.sep + "__init__.py", "")
+ .replace("/", ".")
+ )
# Some Python libraries are modules: individual *.py files
# found in the site-packages directory
- for path in find(root, '*.py', recursive=False):
- modules.append(path.replace(root + os.sep, '', 1).replace(
- '.py', '').replace('/', '.'))
+ for path in find(root, "*.py", recursive=False):
+ modules.append(
+ path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", ".")
+ )
- modules = [mod for mod in modules if re.match('[a-zA-Z0-9._]+$', mod)]
+ modules = [mod for mod in modules if re.match("[a-zA-Z0-9._]+$", mod)]
- tty.debug('Detected the following modules: {0}'.format(modules))
+ tty.debug("Detected the following modules: {0}".format(modules))
return modules
@@ -162,17 +167,17 @@ class PythonPackage(PackageBase):
def install(self, spec, prefix):
"""Install everything from build directory."""
- args = PythonPackage._std_args(self) + ['--prefix=' + prefix]
+ args = PythonPackage._std_args(self) + ["--prefix=" + prefix]
for option in self.install_options(spec, prefix):
- args.append('--install-option=' + option)
+ args.append("--install-option=" + option)
for option in self.global_options(spec, prefix):
- args.append('--global-option=' + option)
+ args.append("--global-option=" + option)
- if self.stage.archive_file and self.stage.archive_file.endswith('.whl'):
+ if self.stage.archive_file and self.stage.archive_file.endswith(".whl"):
args.append(self.stage.archive_file)
else:
- args.append('.')
+ args.append(".")
pip = inspect.getmodule(self).pip
with working_dir(self.build_directory):
@@ -190,7 +195,7 @@ class PythonPackage(PackageBase):
if headers:
return headers
- msg = 'Unable to locate {} headers in {} or {}'
+ msg = "Unable to locate {} headers in {} or {}"
raise NoHeadersError(msg.format(self.spec.name, include, platlib))
@property
@@ -198,7 +203,7 @@ class PythonPackage(PackageBase):
"""Discover libraries in platlib."""
# Remove py- prefix in package name
- library = 'lib' + self.spec.name[3:].replace('-', '?')
+ library = "lib" + self.spec.name[3:].replace("-", "?")
root = inspect.getmodule(self).platlib
for shared in [True, False]:
@@ -206,7 +211,7 @@ class PythonPackage(PackageBase):
if libs:
return libs
- msg = 'Unable to recursively locate {} libraries in {}'
+ msg = "Unable to recursively locate {} libraries in {}"
raise NoLibrariesError(msg.format(self.spec.name, root))
# Testing
@@ -217,34 +222,32 @@ class PythonPackage(PackageBase):
# Make sure we are importing the installed modules,
# not the ones in the source directory
for module in self.import_modules:
- self.run_test(inspect.getmodule(self).python.path,
- ['-c', 'import {0}'.format(module)],
- purpose='checking import of {0}'.format(module),
- work_dir='spack-test')
+ self.run_test(
+ inspect.getmodule(self).python.path,
+ ["-c", "import {0}".format(module)],
+ purpose="checking import of {0}".format(module),
+ work_dir="spack-test",
+ )
- run_after('install')(PackageBase._run_default_install_time_test_callbacks)
+ run_after("install")(PackageBase._run_default_install_time_test_callbacks)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
def view_file_conflicts(self, view, merge_map):
"""Report all file conflicts, excepting special cases for python.
- Specifically, this does not report errors for duplicate
- __init__.py files for packages in the same namespace.
+ Specifically, this does not report errors for duplicate
+ __init__.py files for packages in the same namespace.
"""
- conflicts = list(dst for src, dst in merge_map.items()
- if os.path.exists(dst))
+ conflicts = list(dst for src, dst in merge_map.items() if os.path.exists(dst))
if conflicts and self.py_namespace:
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
- namespaces = set(
- x.package.py_namespace for x in ext_map.values())
- namespace_re = (
- r'site-packages/{0}/__init__.py'.format(self.py_namespace))
+ namespaces = set(x.package.py_namespace for x in ext_map.values())
+ namespace_re = r"site-packages/{0}/__init__.py".format(self.py_namespace)
find_namespace = match_predicate(namespace_re)
if self.py_namespace in namespaces:
- conflicts = list(
- x for x in conflicts if not find_namespace(x))
+ conflicts = list(x for x in conflicts if not find_namespace(x))
return conflicts
@@ -252,9 +255,7 @@ class PythonPackage(PackageBase):
bin_dir = self.spec.prefix.bin
python_prefix = self.extendee_spec.prefix
python_is_external = self.extendee_spec.external
- global_view = same_path(python_prefix, view.get_projection_for_spec(
- self.spec
- ))
+ global_view = same_path(python_prefix, view.get_projection_for_spec(self.spec))
for src, dst in merge_map.items():
if os.path.exists(dst):
continue
@@ -265,8 +266,9 @@ class PythonPackage(PackageBase):
is_script = is_nonsymlink_exe_with_shebang(src)
if is_script and not python_is_external:
filter_file(
- python_prefix, os.path.abspath(
- view.get_projection_for_spec(self.spec)), dst
+ python_prefix,
+ os.path.abspath(view.get_projection_for_spec(self.spec)),
+ dst,
)
else:
orig_link_target = os.path.realpath(src)
@@ -278,19 +280,16 @@ class PythonPackage(PackageBase):
if self.py_namespace:
ext_map = view.extensions_layout.extension_map(self.extendee_spec)
remaining_namespaces = set(
- spec.package.py_namespace for name, spec in ext_map.items()
- if name != self.name)
+ spec.package.py_namespace for name, spec in ext_map.items() if name != self.name
+ )
if self.py_namespace in remaining_namespaces:
namespace_init = match_predicate(
- r'site-packages/{0}/__init__.py'.format(self.py_namespace))
+ r"site-packages/{0}/__init__.py".format(self.py_namespace)
+ )
ignore_namespace = True
bin_dir = self.spec.prefix.bin
- global_view = (
- self.extendee_spec.prefix == view.get_projection_for_spec(
- self.spec
- )
- )
+ global_view = self.extendee_spec.prefix == view.get_projection_for_spec(self.spec)
to_remove = []
for src, dst in merge_map.items():
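As a rough illustration of how the pypi attribute feeds the derived URLs in the classproperties above, a hypothetical recipe (name and checksum are made up; the import line may vary between Spack versions) might read:

    from spack.package import *


    class PyExample(PythonPackage):
        """Hypothetical PyPI package, used only as an illustration."""

        # "<project>/<sdist filename>" on PyPI
        pypi = "example/example-1.2.3.tar.gz"
        # homepage -> https://pypi.org/project/example/
        # url      -> https://files.pythonhosted.org/packages/source/e/example/example-1.2.3.tar.gz
        # list_url -> https://pypi.org/simple/example/

        version("1.2.3", sha256="0" * 64)  # placeholder checksum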
diff --git a/lib/spack/spack/build_systems/qmake.py b/lib/spack/spack/build_systems/qmake.py
index b2330ce963..c2af684592 100644
--- a/lib/spack/spack/build_systems/qmake.py
+++ b/lib/spack/spack/build_systems/qmake.py
@@ -27,17 +27,18 @@ class QMakePackage(PackageBase):
They all have sensible defaults and for many packages the only thing
necessary will be to override :py:meth:`~.QMakePackage.qmake_args`.
"""
+
#: Phases of a qmake package
- phases = ['qmake', 'build', 'install']
+ phases = ["qmake", "build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'QMakePackage'
+ build_system_class = "QMakePackage"
#: Callback names for build-time test
- build_time_test_callbacks = ['check']
+ build_time_test_callbacks = ["check"]
- depends_on('qt', type='build')
+ depends_on("qt", type="build")
@property
def build_directory(self):
@@ -66,18 +67,17 @@ class QMakePackage(PackageBase):
"""Make the install targets"""
with working_dir(self.build_directory):
- inspect.getmodule(self).make('install')
+ inspect.getmodule(self).make("install")
# Tests
def check(self):
- """Searches the Makefile for a ``check:`` target and runs it if found.
- """
+ """Searches the Makefile for a ``check:`` target and runs it if found."""
with working_dir(self.build_directory):
- self._if_make_target_execute('check')
+ self._if_make_target_execute("check")
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
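A minimal hypothetical QMakePackage recipe, overriding qmake_args as the docstring above suggests (all names and values illustrative):

    from spack.package import *


    class Qtexample(QMakePackage):
        """Hypothetical Qt-based project, used only as an illustration."""

        homepage = "https://example.org/qtexample"
        url = "https://example.org/qtexample-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum

        def qmake_args(self):
            # Extra arguments handed to qmake during the qmake phase.
            return ["PREFIX={0}".format(self.prefix)]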
diff --git a/lib/spack/spack/build_systems/r.py b/lib/spack/spack/build_systems/r.py
index 9a94152b82..450cae733b 100644
--- a/lib/spack/spack/build_systems/r.py
+++ b/lib/spack/spack/build_systems/r.py
@@ -24,7 +24,8 @@ class RPackage(PackageBase):
It has sensible defaults, and for many packages the only thing
necessary will be to add dependencies
"""
- phases = ['install']
+
+ phases = ["install"]
# package attributes that can be expanded to set the homepage, url,
# list_url, and git values
@@ -34,41 +35,41 @@ class RPackage(PackageBase):
# For Bioconductor packages
bioc = None # type: Optional[str]
- maintainers = ['glennpj']
+ maintainers = ["glennpj"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'RPackage'
+ build_system_class = "RPackage"
- extends('r')
+ extends("r")
@lang.classproperty
def homepage(cls):
if cls.cran:
- return 'https://cloud.r-project.org/package=' + cls.cran
+ return "https://cloud.r-project.org/package=" + cls.cran
elif cls.bioc:
- return 'https://bioconductor.org/packages/' + cls.bioc
+ return "https://bioconductor.org/packages/" + cls.bioc
@lang.classproperty
def url(cls):
if cls.cran:
return (
- 'https://cloud.r-project.org/src/contrib/'
- + cls.cran + '_' + str(list(cls.versions)[0]) + '.tar.gz'
+ "https://cloud.r-project.org/src/contrib/"
+ + cls.cran
+ + "_"
+ + str(list(cls.versions)[0])
+ + ".tar.gz"
)
@lang.classproperty
def list_url(cls):
if cls.cran:
- return (
- 'https://cloud.r-project.org/src/contrib/Archive/'
- + cls.cran + '/'
- )
+ return "https://cloud.r-project.org/src/contrib/Archive/" + cls.cran + "/"
@property
def git(self):
if self.bioc:
- return 'https://git.bioconductor.org/packages/' + self.bioc
+ return "https://git.bioconductor.org/packages/" + self.bioc
def configure_args(self):
"""Arguments to pass to install via ``--configure-args``."""
@@ -84,24 +85,17 @@ class RPackage(PackageBase):
config_args = self.configure_args()
config_vars = self.configure_vars()
- args = [
- '--vanilla',
- 'CMD',
- 'INSTALL'
- ]
+ args = ["--vanilla", "CMD", "INSTALL"]
if config_args:
- args.append('--configure-args={0}'.format(' '.join(config_args)))
+ args.append("--configure-args={0}".format(" ".join(config_args)))
if config_vars:
- args.append('--configure-vars={0}'.format(' '.join(config_vars)))
+ args.append("--configure-vars={0}".format(" ".join(config_vars)))
- args.extend([
- '--library={0}'.format(self.module.r_lib_dir),
- self.stage.source_path
- ])
+ args.extend(["--library={0}".format(self.module.r_lib_dir), self.stage.source_path])
inspect.getmodule(self).R(*args)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
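To show how the cran attribute drives the derived URLs above, a hypothetical CRAN recipe (name, option and checksum made up) might look like:

    from spack.package import *


    class RExample(RPackage):
        """Hypothetical CRAN package, used only as an illustration."""

        cran = "example"
        # homepage -> https://cloud.r-project.org/package=example
        # url      -> https://cloud.r-project.org/src/contrib/example_1.0.tar.gz
        # list_url -> https://cloud.r-project.org/src/contrib/Archive/example/

        version("1.0", sha256="0" * 64)  # placeholder checksum

        def configure_args(self):
            # Forwarded as --configure-args to `R CMD INSTALL` by the install phase above.
            return ["--with-feature"]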
diff --git a/lib/spack/spack/build_systems/racket.py b/lib/spack/spack/build_systems/racket.py
index b2419f04e6..c6984f1d9a 100644
--- a/lib/spack/spack/build_systems/racket.py
+++ b/lib/spack/spack/build_systems/racket.py
@@ -25,17 +25,18 @@ class RacketPackage(PackageBase):
* install
* setup
"""
+
#: Maintainer of the Racket build system support

- maintainers = ['elfprince13']
+ maintainers = ["elfprince13"]
# Default phases
- phases = ['install']
+ phases = ["install"]
# To be used in UI queries that require to know which
# build-system class we are using
- build_system_class = 'RacketPackage'
+ build_system_class = "RacketPackage"
- extends('racket')
+ extends("racket")
pkgs = False
subdirectory = None # type: Optional[str]
@@ -45,7 +46,7 @@ class RacketPackage(PackageBase):
@lang.classproperty
def homepage(cls):
if cls.pkgs:
- return 'https://pkgs.racket-lang.org/package/{0}'.format(cls.name)
+ return "https://pkgs.racket-lang.org/package/{0}".format(cls.name)
@property
def build_directory(self):
@@ -59,14 +60,31 @@ class RacketPackage(PackageBase):
raco = Executable("raco")
with working_dir(self.build_directory):
allow_parallel = self.parallel and (not env_flag(SPACK_NO_PARALLEL_MAKE))
- args = ['pkg', 'install', '-t', 'dir', '-n', self.name, '--deps', 'fail',
- '--ignore-implies', '--copy', '-i', '-j',
- str(determine_number_of_jobs(allow_parallel)),
- '--', os.getcwd()]
+ args = [
+ "pkg",
+ "install",
+ "-t",
+ "dir",
+ "-n",
+ self.name,
+ "--deps",
+ "fail",
+ "--ignore-implies",
+ "--copy",
+ "-i",
+ "-j",
+ str(determine_number_of_jobs(allow_parallel)),
+ "--",
+ os.getcwd(),
+ ]
try:
raco(*args)
except ProcessError:
args.insert(-2, "--skip-installed")
raco(*args)
- tty.warn(("Racket package {0} was already installed, uninstalling via "
- "Spack may make someone unhappy!").format(self.name))
+ tty.warn(
+ (
+ "Racket package {0} was already installed, uninstalling via "
+ "Spack may make someone unhappy!"
+ ).format(self.name)
+ )
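For orientation, the install phase above assembles a raco invocation of roughly the following shape, run from the build directory (values are illustrative):

    raco pkg install -t dir -n <name> --deps fail --ignore-implies --copy -i -j <jobs> -- <cwd>

and, on a ProcessError, it retries with --skip-installed inserted just before the trailing `-- <cwd>`.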
diff --git a/lib/spack/spack/build_systems/rocm.py b/lib/spack/spack/build_systems/rocm.py
index 3f8f31cfc8..1ece110636 100644
--- a/lib/spack/spack/build_systems/rocm.py
+++ b/lib/spack/spack/build_systems/rocm.py
@@ -90,28 +90,44 @@ class ROCmPackage(PackageBase):
# https://llvm.org/docs/AMDGPUUsage.html
# Possible architectures
amdgpu_targets = (
- 'gfx701', 'gfx801', 'gfx802', 'gfx803', 'gfx900', 'gfx900:xnack-',
- 'gfx906', 'gfx908', 'gfx90a',
- 'gfx906:xnack-', 'gfx908:xnack-', 'gfx90a:xnack-', 'gfx90a:xnack+',
- 'gfx1010', 'gfx1011', 'gfx1012', 'gfx1030', 'gfx1031',
+ "gfx701",
+ "gfx801",
+ "gfx802",
+ "gfx803",
+ "gfx900",
+ "gfx900:xnack-",
+ "gfx906",
+ "gfx908",
+ "gfx90a",
+ "gfx906:xnack-",
+ "gfx908:xnack-",
+ "gfx90a:xnack-",
+ "gfx90a:xnack+",
+ "gfx1010",
+ "gfx1011",
+ "gfx1012",
+ "gfx1030",
+ "gfx1031",
)
- variant('rocm', default=False, description='Enable ROCm support')
+ variant("rocm", default=False, description="Enable ROCm support")
# possible amd gpu targets for rocm builds
- variant('amdgpu_target',
- description='AMD GPU architecture',
- values=spack.variant.any_combination_of(*amdgpu_targets),
- when='+rocm')
+ variant(
+ "amdgpu_target",
+ description="AMD GPU architecture",
+ values=spack.variant.any_combination_of(*amdgpu_targets),
+ when="+rocm",
+ )
- depends_on('llvm-amdgpu', when='+rocm')
- depends_on('hsa-rocr-dev', when='+rocm')
- depends_on('hip', when='+rocm')
+ depends_on("llvm-amdgpu", when="+rocm")
+ depends_on("hsa-rocr-dev", when="+rocm")
+ depends_on("hip", when="+rocm")
- conflicts('^blt@:0.3.6', when='+rocm')
+ conflicts("^blt@:0.3.6", when="+rocm")
# need amd gpu type for rocm builds
- conflicts('amdgpu_target=none', when='+rocm')
+ conflicts("amdgpu_target=none", when="+rocm")
# https://github.com/ROCm-Developer-Tools/HIP/blob/master/bin/hipcc
# It seems that hip-clang does not (yet?) accept this flag, in which case
@@ -120,7 +136,7 @@ class ROCmPackage(PackageBase):
@staticmethod
def hip_flags(amdgpu_target):
archs = ",".join(amdgpu_target)
- return '--amdgpu-target={0}'.format(archs)
+ return "--amdgpu-target={0}".format(archs)
# HIP version vs Architecture
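A hypothetical package combining this mixin with CMakePackage and using the hip_flags helper above might look like the following sketch (package name, version and flag wiring are illustrative):

    from spack.package import *


    class ExampleHip(CMakePackage, ROCmPackage):
        """Hypothetical package with optional ROCm support."""

        version("1.0", sha256="0" * 64)  # placeholder checksum

        def cmake_args(self):
            args = []
            if "+rocm" in self.spec:
                targets = self.spec.variants["amdgpu_target"].value
                # e.g. hip_flags(("gfx906", "gfx908")) == "--amdgpu-target=gfx906,gfx908"
                args.append("-DCMAKE_CXX_FLAGS={0}".format(self.hip_flags(targets)))
            return args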
diff --git a/lib/spack/spack/build_systems/ruby.py b/lib/spack/spack/build_systems/ruby.py
index cec1116477..fcc071f19e 100644
--- a/lib/spack/spack/build_systems/ruby.py
+++ b/lib/spack/spack/build_systems/ruby.py
@@ -19,29 +19,29 @@ class RubyPackage(PackageBase):
#. :py:meth:`~.RubyPackage.install`
"""
- maintainers = ['Kerilk']
+ maintainers = ["Kerilk"]
#: Phases of a Ruby package
- phases = ['build', 'install']
+ phases = ["build", "install"]
#: This attribute is used in UI queries that need to know the build
#: system base class
- build_system_class = 'RubyPackage'
+ build_system_class = "RubyPackage"
- extends('ruby')
+ extends("ruby")
def build(self, spec, prefix):
"""Build a Ruby gem."""
# ruby-rake provides both rake.gemspec and Rakefile, but only
# rake.gemspec can be built without an existing rake installation
- gemspecs = glob.glob('*.gemspec')
- rakefiles = glob.glob('Rakefile')
+ gemspecs = glob.glob("*.gemspec")
+ rakefiles = glob.glob("Rakefile")
if gemspecs:
- inspect.getmodule(self).gem('build', '--norc', gemspecs[0])
+ inspect.getmodule(self).gem("build", "--norc", gemspecs[0])
elif rakefiles:
jobs = inspect.getmodule(self).make_jobs
- inspect.getmodule(self).rake('package', '-j{0}'.format(jobs))
+ inspect.getmodule(self).rake("package", "-j{0}".format(jobs))
else:
# Some Ruby packages only ship `*.gem` files, so nothing to build
pass
@@ -51,14 +51,14 @@ class RubyPackage(PackageBase):
The ruby package sets ``GEM_HOME`` to tell gem where to install to."""
- gems = glob.glob('*.gem')
+ gems = glob.glob("*.gem")
if gems:
# if --install-dir is not used, GEM_PATH is deleted from the
# environment, and Gems required to build native extensions will
# not be found. Those extensions are built during `gem install`.
inspect.getmodule(self).gem(
- 'install', '--norc', '--ignore-dependencies',
- '--install-dir', prefix, gems[0])
+ "install", "--norc", "--ignore-dependencies", "--install-dir", prefix, gems[0]
+ )
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
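A minimal hypothetical RubyPackage recipe (all values illustrative); pre-built *.gem files are typically fetched with expand=False, so the build phase above has nothing to do:

    from spack.package import *


    class RubyExample(RubyPackage):
        """Hypothetical gem, used only as an illustration."""

        homepage = "https://rubygems.org/gems/example"
        url = "https://rubygems.org/downloads/example-1.0.0.gem"

        version("1.0.0", sha256="0" * 64, expand=False)  # placeholder checksum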
diff --git a/lib/spack/spack/build_systems/scons.py b/lib/spack/spack/build_systems/scons.py
index 55ae072085..5470091121 100644
--- a/lib/spack/spack/build_systems/scons.py
+++ b/lib/spack/spack/build_systems/scons.py
@@ -25,17 +25,18 @@ class SConsPackage(PackageBase):
variables that control the build. You will likely need to override
:py:meth:`~.SConsPackage.build_args` to pass the appropriate variables.
"""
+
#: Phases of a SCons package
- phases = ['build', 'install']
+ phases = ["build", "install"]
#: To be used in UI queries that require to know which
#: build-system class we are using
- build_system_class = 'SConsPackage'
+ build_system_class = "SConsPackage"
#: Callback names for build-time test
- build_time_test_callbacks = ['build_test']
+ build_time_test_callbacks = ["build_test"]
- depends_on('scons', type='build')
+ depends_on("scons", type="build")
def build_args(self, spec, prefix):
"""Arguments to pass to build."""
@@ -55,7 +56,7 @@ class SConsPackage(PackageBase):
"""Install the package."""
args = self.install_args(spec, prefix)
- inspect.getmodule(self).scons('install', *args)
+ inspect.getmodule(self).scons("install", *args)
# Testing
@@ -67,7 +68,7 @@ class SConsPackage(PackageBase):
"""
pass
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
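A hypothetical SConsPackage recipe overriding build_args and install_args as the docstring above suggests (names and variables illustrative):

    from spack.package import *


    class SconsExample(SConsPackage):
        """Hypothetical SCons-based project, used only as an illustration."""

        url = "https://example.org/scons-example-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum

        def build_args(self, spec, prefix):
            # Variables handed to `scons` in the build phase.
            return ["PREFIX={0}".format(prefix)]

        def install_args(self, spec, prefix):
            # Variables handed to `scons install` in the install phase.
            return ["PREFIX={0}".format(prefix)]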
diff --git a/lib/spack/spack/build_systems/sip.py b/lib/spack/spack/build_systems/sip.py
index bb039b2600..4d16f6731e 100644
--- a/lib/spack/spack/build_systems/sip.py
+++ b/lib/spack/spack/build_systems/sip.py
@@ -28,23 +28,24 @@ class SIPPackage(PackageBase):
The configure phase already adds a set of default flags. To see more
options, run ``python configure.py --help``.
"""
+
# Default phases
- phases = ['configure', 'build', 'install']
+ phases = ["configure", "build", "install"]
# To be used in UI queries that require to know which
# build-system class we are using
- build_system_class = 'SIPPackage'
+ build_system_class = "SIPPackage"
#: Name of private sip module to install alongside package
- sip_module = 'sip'
+ sip_module = "sip"
#: Callback names for install-time test
- install_time_test_callbacks = ['test']
+ install_time_test_callbacks = ["test"]
- extends('python')
+ extends("python")
- depends_on('qt')
- depends_on('py-sip')
+ depends_on("qt")
+ depends_on("py-sip")
@property
def import_modules(self):
@@ -67,24 +68,26 @@ class SIPPackage(PackageBase):
modules = []
root = os.path.join(
self.prefix,
- self.spec['python'].package.platlib,
+ self.spec["python"].package.platlib,
)
# Some Python libraries are packages: collections of modules
# distributed in directories containing __init__.py files
- for path in find(root, '__init__.py', recursive=True):
- modules.append(path.replace(root + os.sep, '', 1).replace(
- os.sep + '__init__.py', '').replace('/', '.'))
+ for path in find(root, "__init__.py", recursive=True):
+ modules.append(
+ path.replace(root + os.sep, "", 1)
+ .replace(os.sep + "__init__.py", "")
+ .replace("/", ".")
+ )
# Some Python libraries are modules: individual *.py files
# found in the site-packages directory
- for path in find(root, '*.py', recursive=False):
- modules.append(path.replace(root + os.sep, '', 1).replace(
- '.py', '').replace('/', '.'))
+ for path in find(root, "*.py", recursive=False):
+ modules.append(path.replace(root + os.sep, "", 1).replace(".py", "").replace("/", "."))
- modules = [mod for mod in modules if re.match('[a-zA-Z0-9._]+$', mod)]
+ modules = [mod for mod in modules if re.match("[a-zA-Z0-9._]+$", mod)]
- tty.debug('Detected the following modules: {0}'.format(modules))
+ tty.debug("Detected the following modules: {0}".format(modules))
return modules
@@ -94,7 +97,7 @@ class SIPPackage(PackageBase):
def configure_file(self):
"""Returns the name of the configure file to use."""
- return 'configure.py'
+ return "configure.py"
def configure(self, spec, prefix):
"""Configure the package."""
@@ -102,16 +105,22 @@ class SIPPackage(PackageBase):
args = self.configure_args()
- args.extend([
- '--verbose',
- '--confirm-license',
- '--qmake', spec['qt'].prefix.bin.qmake,
- '--sip', spec['py-sip'].prefix.bin.sip,
- '--sip-incdir', join_path(spec['py-sip'].prefix,
- spec['python'].package.include),
- '--bindir', prefix.bin,
- '--destdir', inspect.getmodule(self).python_platlib,
- ])
+ args.extend(
+ [
+ "--verbose",
+ "--confirm-license",
+ "--qmake",
+ spec["qt"].prefix.bin.qmake,
+ "--sip",
+ spec["py-sip"].prefix.bin.sip,
+ "--sip-incdir",
+ join_path(spec["py-sip"].prefix, spec["python"].package.include),
+ "--bindir",
+ prefix.bin,
+ "--destdir",
+ inspect.getmodule(self).python_platlib,
+ ]
+ )
self.python(configure, *args)
@@ -133,7 +142,7 @@ class SIPPackage(PackageBase):
"""Install the package."""
args = self.install_args()
- inspect.getmodule(self).make('install', parallel=False, *args)
+ inspect.getmodule(self).make("install", parallel=False, *args)
def install_args(self):
"""Arguments to pass to install."""
@@ -147,23 +156,25 @@ class SIPPackage(PackageBase):
# Make sure we are importing the installed modules,
# not the ones in the source directory
for module in self.import_modules:
- self.run_test(inspect.getmodule(self).python.path,
- ['-c', 'import {0}'.format(module)],
- purpose='checking import of {0}'.format(module),
- work_dir='spack-test')
+ self.run_test(
+ inspect.getmodule(self).python.path,
+ ["-c", "import {0}".format(module)],
+ purpose="checking import of {0}".format(module),
+ work_dir="spack-test",
+ )
- run_after('install')(PackageBase._run_default_install_time_test_callbacks)
+ run_after("install")(PackageBase._run_default_install_time_test_callbacks)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
- @run_after('install')
+ @run_after("install")
def extend_path_setup(self):
# See github issue #14121 and PR #15297
- module = self.spec['py-sip'].variants['module'].value
- if module != 'sip':
- module = module.split('.')[0]
+ module = self.spec["py-sip"].variants["module"].value
+ if module != "sip":
+ module = module.split(".")[0]
with working_dir(inspect.getmodule(self).python_platlib):
- with open(os.path.join(module, '__init__.py'), 'a') as f:
- f.write('from pkgutil import extend_path\n')
- f.write('__path__ = extend_path(__path__, __name__)\n')
+ with open(os.path.join(module, "__init__.py"), "a") as f:
+ f.write("from pkgutil import extend_path\n")
+ f.write("__path__ = extend_path(__path__, __name__)\n")
diff --git a/lib/spack/spack/build_systems/sourceforge.py b/lib/spack/spack/build_systems/sourceforge.py
index 38032f6b1b..d0107d6d2b 100644
--- a/lib/spack/spack/build_systems/sourceforge.py
+++ b/lib/spack/spack/build_systems/sourceforge.py
@@ -11,32 +11,31 @@ import spack.util.url
class SourceforgePackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for Sourceforge
- packages."""
+ packages."""
+
#: Path of the package in a Sourceforge mirror
sourceforge_mirror_path = None # type: Optional[str]
#: List of Sourceforge mirrors used by Spack
base_mirrors = [
- 'https://prdownloads.sourceforge.net/',
- 'https://freefr.dl.sourceforge.net/',
- 'https://netcologne.dl.sourceforge.net/',
- 'https://pilotfiber.dl.sourceforge.net/',
- 'https://downloads.sourceforge.net/',
- 'http://kent.dl.sourceforge.net/sourceforge/'
+ "https://prdownloads.sourceforge.net/",
+ "https://freefr.dl.sourceforge.net/",
+ "https://netcologne.dl.sourceforge.net/",
+ "https://pilotfiber.dl.sourceforge.net/",
+ "https://downloads.sourceforge.net/",
+ "http://kent.dl.sourceforge.net/sourceforge/",
]
@property
def urls(self):
self._ensure_sourceforge_mirror_path_is_set_or_raise()
return [
- spack.util.url.join(m, self.sourceforge_mirror_path,
- resolve_href=True)
+ spack.util.url.join(m, self.sourceforge_mirror_path, resolve_href=True)
for m in self.base_mirrors
]
def _ensure_sourceforge_mirror_path_is_set_or_raise(self):
if self.sourceforge_mirror_path is None:
cls_name = type(self).__name__
- msg = ('{0} must define a `sourceforge_mirror_path` attribute'
- ' [none defined]')
+ msg = "{0} must define a `sourceforge_mirror_path` attribute" " [none defined]"
raise AttributeError(msg.format(cls_name))
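A hypothetical use of this mixin together with another build system (names illustrative); the urls property then expands to one download location per entry in base_mirrors:

    from spack.package import *


    class SfExample(AutotoolsPackage, SourceforgePackage):
        """Hypothetical SourceForge-hosted project, used only as an illustration."""

        sourceforge_mirror_path = "example/example-1.0.tar.gz"
        # urls[0] -> https://prdownloads.sourceforge.net/example/example-1.0.tar.gz

        version("1.0", sha256="0" * 64)  # placeholder checksum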
diff --git a/lib/spack/spack/build_systems/sourceware.py b/lib/spack/spack/build_systems/sourceware.py
index a26ea345ee..ed18675ace 100644
--- a/lib/spack/spack/build_systems/sourceware.py
+++ b/lib/spack/spack/build_systems/sourceware.py
@@ -10,29 +10,28 @@ import spack.util.url
class SourcewarePackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for Sourceware.org
- packages."""
+ packages."""
+
#: Path of the package in a Sourceware mirror
sourceware_mirror_path = None # type: Optional[str]
#: List of Sourceware mirrors used by Spack
base_mirrors = [
- 'https://sourceware.org/pub/',
- 'https://mirrors.kernel.org/sourceware/',
- 'https://ftp.gwdg.de/pub/linux/sources.redhat.com/'
+ "https://sourceware.org/pub/",
+ "https://mirrors.kernel.org/sourceware/",
+ "https://ftp.gwdg.de/pub/linux/sources.redhat.com/",
]
@property
def urls(self):
self._ensure_sourceware_mirror_path_is_set_or_raise()
return [
- spack.util.url.join(m, self.sourceware_mirror_path,
- resolve_href=True)
+ spack.util.url.join(m, self.sourceware_mirror_path, resolve_href=True)
for m in self.base_mirrors
]
def _ensure_sourceware_mirror_path_is_set_or_raise(self):
if self.sourceware_mirror_path is None:
cls_name = type(self).__name__
- msg = ('{0} must define a `sourceware_mirror_path` attribute'
- ' [none defined]')
+ msg = "{0} must define a `sourceware_mirror_path` attribute" " [none defined]"
raise AttributeError(msg.format(cls_name))
diff --git a/lib/spack/spack/build_systems/waf.py b/lib/spack/spack/build_systems/waf.py
index 8a65f48ec4..3571ffd525 100644
--- a/lib/spack/spack/build_systems/waf.py
+++ b/lib/spack/spack/build_systems/waf.py
@@ -39,23 +39,24 @@ class WafPackage(PackageBase):
All of these functions are empty except for the ``configure_args``
function, which passes ``--prefix=/path/to/installation/prefix``.
"""
+
# Default phases
- phases = ['configure', 'build', 'install']
+ phases = ["configure", "build", "install"]
# To be used in UI queries that require to know which
# build-system class we are using
- build_system_class = 'WafPackage'
+ build_system_class = "WafPackage"
# Callback names for build-time test
- build_time_test_callbacks = ['build_test']
+ build_time_test_callbacks = ["build_test"]
# Callback names for install-time test
- install_time_test_callbacks = ['install_test']
+ install_time_test_callbacks = ["install_test"]
# Much like AutotoolsPackage does not require automake and autoconf
# to build, WafPackage does not require waf to build. It only requires
# python to run the waf build script.
- depends_on('python@2.5:', type='build')
+ depends_on("python@2.5:", type="build")
@property
def build_directory(self):
@@ -71,14 +72,14 @@ class WafPackage(PackageBase):
jobs = inspect.getmodule(self).make_jobs
with working_dir(self.build_directory):
- self.python('waf', '-j{0}'.format(jobs), *args, **kwargs)
+ self.python("waf", "-j{0}".format(jobs), *args, **kwargs)
def configure(self, spec, prefix):
"""Configures the project."""
- args = ['--prefix={0}'.format(self.prefix)]
+ args = ["--prefix={0}".format(self.prefix)]
args += self.configure_args()
- self.waf('configure', *args)
+ self.waf("configure", *args)
def configure_args(self):
"""Arguments to pass to configure."""
@@ -88,7 +89,7 @@ class WafPackage(PackageBase):
"""Executes the build."""
args = self.build_args()
- self.waf('build', *args)
+ self.waf("build", *args)
def build_args(self):
"""Arguments to pass to build."""
@@ -98,7 +99,7 @@ class WafPackage(PackageBase):
"""Installs the targets on the system."""
args = self.install_args()
- self.waf('install', *args)
+ self.waf("install", *args)
def install_args(self):
"""Arguments to pass to install."""
@@ -114,7 +115,7 @@ class WafPackage(PackageBase):
"""
pass
- run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+ run_after("build")(PackageBase._run_default_build_time_test_callbacks)
def install_test(self):
"""Run unit tests after install.
@@ -124,7 +125,7 @@ class WafPackage(PackageBase):
"""
pass
- run_after('install')(PackageBase._run_default_install_time_test_callbacks)
+ run_after("install")(PackageBase._run_default_install_time_test_callbacks)
# Check that self.prefix is there after installation
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
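A hypothetical WafPackage recipe; the options returned from configure_args are appended after the --prefix option that WafPackage.configure adds above (names and options illustrative):

    from spack.package import *


    class WafExample(WafPackage):
        """Hypothetical waf-based project, used only as an illustration."""

        url = "https://example.org/waf-example-1.0.tar.gz"

        version("1.0", sha256="0" * 64)  # placeholder checksum

        def configure_args(self):
            # Illustrative option forwarded to `python waf configure`.
            return ["--enable-feature"]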
diff --git a/lib/spack/spack/build_systems/xorg.py b/lib/spack/spack/build_systems/xorg.py
index 246e830254..bfa87cc9d7 100644
--- a/lib/spack/spack/build_systems/xorg.py
+++ b/lib/spack/spack/build_systems/xorg.py
@@ -11,7 +11,8 @@ import spack.util.url
class XorgPackage(spack.package_base.PackageBase):
"""Mixin that takes care of setting url and mirrors for x.org
- packages."""
+ packages."""
+
#: Path of the package in a x.org mirror
xorg_mirror_path = None # type: Optional[str]
@@ -20,25 +21,23 @@ class XorgPackage(spack.package_base.PackageBase):
# A good package to test with is `util-macros`, which had a "recent"
# release.
base_mirrors = [
- 'https://www.x.org/archive/individual/',
- 'https://mirrors.ircam.fr/pub/x.org/individual/',
- 'https://mirror.transip.net/xorg/individual/',
- 'ftp://ftp.freedesktop.org/pub/xorg/individual/',
- 'http://xorg.mirrors.pair.com/individual/'
+ "https://www.x.org/archive/individual/",
+ "https://mirrors.ircam.fr/pub/x.org/individual/",
+ "https://mirror.transip.net/xorg/individual/",
+ "ftp://ftp.freedesktop.org/pub/xorg/individual/",
+ "http://xorg.mirrors.pair.com/individual/",
]
@property
def urls(self):
self._ensure_xorg_mirror_path_is_set_or_raise()
return [
- spack.util.url.join(m, self.xorg_mirror_path,
- resolve_href=True)
+ spack.util.url.join(m, self.xorg_mirror_path, resolve_href=True)
for m in self.base_mirrors
]
def _ensure_xorg_mirror_path_is_set_or_raise(self):
if self.xorg_mirror_path is None:
cls_name = type(self).__name__
- msg = ('{0} must define a `xorg_mirror_path` attribute'
- ' [none defined]')
+ msg = "{0} must define a `xorg_mirror_path` attribute" " [none defined]"
raise AttributeError(msg.format(cls_name))
diff --git a/lib/spack/spack/caches.py b/lib/spack/spack/caches.py
index 8112a97ea3..9ae578e550 100644
--- a/lib/spack/spack/caches.py
+++ b/lib/spack/spack/caches.py
@@ -24,7 +24,7 @@ def misc_cache_location():
Currently the ``misc_cache`` stores indexes for virtual dependency
providers and for which packages provide which tags.
"""
- path = spack.config.get('config:misc_cache', spack.paths.default_misc_cache_path)
+ path = spack.config.get("config:misc_cache", spack.paths.default_misc_cache_path)
return spack.util.path.canonicalize_path(path)
@@ -43,7 +43,7 @@ def fetch_cache_location():
This prevents Spack from repeatedly fetching the same files when
building the same package different ways or multiple times.
"""
- path = spack.config.get('config:source_cache')
+ path = spack.config.get("config:source_cache")
if not path:
path = spack.paths.default_fetch_cache_path
path = spack.util.path.canonicalize_path(path)
@@ -75,9 +75,7 @@ class MirrorCache(object):
cosmetic_path = os.path.join(self.root, mirror_ref.cosmetic_path)
storage_path = os.path.join(self.root, mirror_ref.storage_path)
- relative_dst = os.path.relpath(
- storage_path,
- start=os.path.dirname(cosmetic_path))
+ relative_dst = os.path.relpath(storage_path, start=os.path.dirname(cosmetic_path))
if not os.path.exists(cosmetic_path):
if os.path.lexists(cosmetic_path):
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index 8467645fb5..06dc4741ba 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -38,14 +38,14 @@ from spack.error import SpackError
from spack.spec import Spec
JOB_RETRY_CONDITIONS = [
- 'always',
+ "always",
]
-TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
+TEMP_STORAGE_MIRROR_NAME = "ci_temporary_mirror"
SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
-spack_gpg = spack.main.SpackCommand('gpg')
-spack_compiler = spack.main.SpackCommand('compiler')
+spack_gpg = spack.main.SpackCommand("gpg")
+spack_compiler = spack.main.SpackCommand("compiler")
class TemporaryDirectory(object):
@@ -61,13 +61,9 @@ class TemporaryDirectory(object):
def _create_buildgroup(opener, headers, url, project, group_name, group_type):
- data = {
- "newbuildgroup": group_name,
- "project": project,
- "type": group_type
- }
+ data = {"newbuildgroup": group_name, "project": project, "type": group_type}
- enc_data = json.dumps(data).encode('utf-8')
+ enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers)
@@ -75,71 +71,64 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
response_code = response.getcode()
if response_code != 200 and response_code != 201:
- msg = 'Creating buildgroup failed (response code = {0}'.format(
- response_code)
+ msg = "Creating buildgroup failed (response code = {0}".format(response_code)
tty.warn(msg)
return None
response_text = response.read()
response_json = json.loads(response_text)
- build_group_id = response_json['id']
+ build_group_id = response_json["id"]
return build_group_id
-def _populate_buildgroup(job_names, group_name, project, site,
- credentials, cdash_url):
+def _populate_buildgroup(job_names, group_name, project, site, credentials, cdash_url):
url = "{0}/api/v1/buildgroup.php".format(cdash_url)
headers = {
- 'Authorization': 'Bearer {0}'.format(credentials),
- 'Content-Type': 'application/json',
+ "Authorization": "Bearer {0}".format(credentials),
+ "Content-Type": "application/json",
}
opener = build_opener(HTTPHandler)
- parent_group_id = _create_buildgroup(
- opener, headers, url, project, group_name, 'Daily')
+ parent_group_id = _create_buildgroup(opener, headers, url, project, group_name, "Daily")
group_id = _create_buildgroup(
- opener, headers, url, project, 'Latest {0}'.format(group_name),
- 'Latest')
+ opener, headers, url, project, "Latest {0}".format(group_name), "Latest"
+ )
if not parent_group_id or not group_id:
- msg = 'Failed to create or retrieve buildgroups for {0}'.format(
- group_name)
+ msg = "Failed to create or retrieve buildgroups for {0}".format(group_name)
tty.warn(msg)
return
data = {
- 'project': project,
- 'buildgroupid': group_id,
- 'dynamiclist': [{
- 'match': name,
- 'parentgroupid': parent_group_id,
- 'site': site
- } for name in job_names]
+ "project": project,
+ "buildgroupid": group_id,
+ "dynamiclist": [
+ {"match": name, "parentgroupid": parent_group_id, "site": site} for name in job_names
+ ],
}
- enc_data = json.dumps(data).encode('utf-8')
+ enc_data = json.dumps(data).encode("utf-8")
request = Request(url, data=enc_data, headers=headers)
- request.get_method = lambda: 'PUT'
+ request.get_method = lambda: "PUT"
response = opener.open(request)
response_code = response.getcode()
if response_code != 200:
- msg = 'Error response code ({0}) in _populate_buildgroup'.format(
- response_code)
+ msg = "Error response code ({0}) in _populate_buildgroup".format(response_code)
tty.warn(msg)
def _is_main_phase(phase_name):
- return True if phase_name == 'specs' else False
+ return True if phase_name == "specs" else False
def get_job_name(phase, strip_compiler, spec, osarch, build_group):
- """ Given the necessary parts, format the gitlab job name
+ """Given the necessary parts, format the gitlab job name
Arguments:
phase (str): Either 'specs' for the main phase, or the name of a
@@ -154,37 +143,37 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
Returns: The job name
"""
item_idx = 0
- format_str = ''
+ format_str = ""
format_args = []
if phase:
- format_str += '({{{0}}})'.format(item_idx)
+ format_str += "({{{0}}})".format(item_idx)
format_args.append(phase)
item_idx += 1
- format_str += ' {{{0}}}'.format(item_idx)
+ format_str += " {{{0}}}".format(item_idx)
format_args.append(spec.name)
item_idx += 1
- format_str += '/{{{0}}}'.format(item_idx)
+ format_str += "/{{{0}}}".format(item_idx)
format_args.append(spec.dag_hash(7))
item_idx += 1
- format_str += ' {{{0}}}'.format(item_idx)
+ format_str += " {{{0}}}".format(item_idx)
format_args.append(spec.version)
item_idx += 1
if _is_main_phase(phase) is True or strip_compiler is False:
- format_str += ' {{{0}}}'.format(item_idx)
+ format_str += " {{{0}}}".format(item_idx)
format_args.append(spec.compiler)
item_idx += 1
- format_str += ' {{{0}}}'.format(item_idx)
+ format_str += " {{{0}}}".format(item_idx)
format_args.append(osarch)
item_idx += 1
if build_group:
- format_str += ' {{{0}}}'.format(item_idx)
+ format_str += " {{{0}}}".format(item_idx)
format_args.append(build_group)
item_idx += 1
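Put together, these pieces yield job names of roughly the following shape for the main 'specs' phase when a build group is configured (all values illustrative):

    (specs) zlib/abcdefg 1.2.12 gcc@9.4.0 linux-ubuntu20.04-x86_64 my-build-group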
@@ -192,8 +181,9 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
def _get_cdash_build_name(spec, build_group):
- return '{0}@{1}%{2} arch={3} ({4})'.format(
- spec.name, spec.version, spec.compiler, spec.architecture, build_group)
+ return "{0}@{1}%{2} arch={3} ({4})".format(
+ spec.name, spec.version, spec.compiler, spec.architecture, build_group
+ )
def _remove_reserved_tags(tags):
@@ -203,26 +193,25 @@ def _remove_reserved_tags(tags):
def _get_spec_string(spec):
format_elements = [
- '{name}{@version}',
- '{%compiler}',
+ "{name}{@version}",
+ "{%compiler}",
]
if spec.architecture:
- format_elements.append(' {arch=architecture}')
+ format_elements.append(" {arch=architecture}")
- return spec.format(''.join(format_elements))
+ return spec.format("".join(format_elements))
def _format_root_spec(spec, main_phase, strip_compiler):
if main_phase is False and strip_compiler is True:
- return '{0}@{1} arch={2}'.format(
- spec.name, spec.version, spec.architecture)
+ return "{0}@{1} arch={2}".format(spec.name, spec.version, spec.architecture)
else:
return spec.dag_hash()
def _spec_deps_key(s):
- return '{0}/{1}'.format(s.name, s.dag_hash(7))
+ return "{0}/{1}".format(s.name, s.dag_hash(7))
def _add_dependency(spec_label, dep_label, deps):
@@ -233,24 +222,26 @@ def _add_dependency(spec_label, dep_label, deps):
deps[spec_label].add(dep_label)
-def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
- mirrors_to_check=None):
- spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
- mirrors_to_check=mirrors_to_check)
+def _get_spec_dependencies(
+ specs, deps, spec_labels, check_index_only=False, mirrors_to_check=None
+):
+ spec_deps_obj = _compute_spec_deps(
+ specs, check_index_only=check_index_only, mirrors_to_check=mirrors_to_check
+ )
if spec_deps_obj:
- dependencies = spec_deps_obj['dependencies']
- specs = spec_deps_obj['specs']
+ dependencies = spec_deps_obj["dependencies"]
+ specs = spec_deps_obj["specs"]
for entry in specs:
- spec_labels[entry['label']] = {
- 'spec': Spec(entry['spec']),
- 'rootSpec': entry['root_spec'],
- 'needs_rebuild': entry['needs_rebuild'],
+ spec_labels[entry["label"]] = {
+ "spec": Spec(entry["spec"]),
+ "rootSpec": entry["root_spec"],
+ "needs_rebuild": entry["needs_rebuild"],
}
for entry in dependencies:
- _add_dependency(entry['spec'], entry['depends'], deps)
+ _add_dependency(entry["spec"], entry["depends"], deps)
def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
@@ -303,8 +294,13 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
deps = {}
spec_labels = {}
- _get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
- mirrors_to_check=mirrors_to_check)
+ _get_spec_dependencies(
+ specs,
+ deps,
+ spec_labels,
+ check_index_only=check_index_only,
+ mirrors_to_check=mirrors_to_check,
+ )
# Save the original deps, as we need to return them at the end of the
# function. In the while loop below, the "dependencies" variable is
@@ -334,16 +330,17 @@ def _print_staging_summary(spec_labels, dependencies, stages):
if not stages:
return
- tty.msg(' Staging summary ([x] means a job needs rebuilding):')
+ tty.msg(" Staging summary ([x] means a job needs rebuilding):")
for stage_index, stage in enumerate(stages):
- tty.msg(' stage {0} ({1} jobs):'.format(stage_index, len(stage)))
+ tty.msg(" stage {0} ({1} jobs):".format(stage_index, len(stage)))
for job in sorted(stage):
- s = spec_labels[job]['spec']
- tty.msg(' [{1}] {0} -> {2}'.format(
- job,
- 'x' if spec_labels[job]['needs_rebuild'] else ' ',
- _get_spec_string(s)))
+ s = spec_labels[job]["spec"]
+ tty.msg(
+ " [{1}] {0} -> {2}".format(
+ job, "x" if spec_labels[job]["needs_rebuild"] else " ", _get_spec_string(s)
+ )
+ )
def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
@@ -405,48 +402,53 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
dependencies = []
def append_dep(s, d):
- dependencies.append({
- 'spec': s,
- 'depends': d,
- })
+ dependencies.append(
+ {
+ "spec": s,
+ "depends": d,
+ }
+ )
for spec in spec_list:
root_spec = spec
for s in spec.traverse(deptype=all):
if s.external:
- tty.msg('Will not stage external pkg: {0}'.format(s))
+ tty.msg("Will not stage external pkg: {0}".format(s))
continue
up_to_date_mirrors = bindist.get_mirrors_for_spec(
- spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)
+ spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only
+ )
skey = _spec_deps_key(s)
spec_labels[skey] = {
- 'spec': _get_spec_string(s),
- 'root': root_spec,
- 'needs_rebuild': not up_to_date_mirrors,
+ "spec": _get_spec_string(s),
+ "root": root_spec,
+ "needs_rebuild": not up_to_date_mirrors,
}
for d in s.dependencies(deptype=all):
dkey = _spec_deps_key(d)
if d.external:
- tty.msg('Will not stage external dep: {0}'.format(d))
+ tty.msg("Will not stage external dep: {0}".format(d))
continue
append_dep(skey, dkey)
for spec_label, spec_holder in spec_labels.items():
- specs.append({
- 'label': spec_label,
- 'spec': spec_holder['spec'],
- 'root_spec': spec_holder['root'],
- 'needs_rebuild': spec_holder['needs_rebuild'],
- })
+ specs.append(
+ {
+ "label": spec_label,
+ "spec": spec_holder["spec"],
+ "root_spec": spec_holder["root"],
+ "needs_rebuild": spec_holder["needs_rebuild"],
+ }
+ )
deps_json_obj = {
- 'specs': specs,
- 'dependencies': dependencies,
+ "specs": specs,
+ "dependencies": dependencies,
}
return deps_json_obj
@@ -459,20 +461,19 @@ def _spec_matches(spec, match_string):
def _copy_attributes(attrs_list, src_dict, dest_dict):
for runner_attr in attrs_list:
if runner_attr in src_dict:
- if runner_attr in dest_dict and runner_attr == 'tags':
+ if runner_attr in dest_dict and runner_attr == "tags":
# For 'tags', we combine the lists of tags, while
# avoiding duplicates
for tag in src_dict[runner_attr]:
if tag not in dest_dict[runner_attr]:
dest_dict[runner_attr].append(tag)
- elif runner_attr in dest_dict and runner_attr == 'variables':
+ elif runner_attr in dest_dict and runner_attr == "variables":
# For 'variables', we merge the dictionaries. Any conflicts
# (i.e. 'runner-attributes' has same variable key as the
# higher level) we resolve by keeping the more specific
# 'runner-attributes' version.
for src_key, src_val in src_dict[runner_attr].items():
- dest_dict[runner_attr][src_key] = copy.deepcopy(
- src_dict[runner_attr][src_key])
+ dest_dict[runner_attr][src_key] = copy.deepcopy(src_dict[runner_attr][src_key])
else:
dest_dict[runner_attr] = copy.deepcopy(src_dict[runner_attr])
@@ -480,24 +481,24 @@ def _copy_attributes(attrs_list, src_dict, dest_dict):
def _find_matching_config(spec, gitlab_ci):
runner_attributes = {}
overridable_attrs = [
- 'image',
- 'tags',
- 'variables',
- 'before_script',
- 'script',
- 'after_script',
+ "image",
+ "tags",
+ "variables",
+ "before_script",
+ "script",
+ "after_script",
]
_copy_attributes(overridable_attrs, gitlab_ci, runner_attributes)
- ci_mappings = gitlab_ci['mappings']
+ ci_mappings = gitlab_ci["mappings"]
for ci_mapping in ci_mappings:
- for match_string in ci_mapping['match']:
+ for match_string in ci_mapping["match"]:
if _spec_matches(spec, match_string):
- if 'runner-attributes' in ci_mapping:
- _copy_attributes(overridable_attrs,
- ci_mapping['runner-attributes'],
- runner_attributes)
+ if "runner-attributes" in ci_mapping:
+ _copy_attributes(
+ overridable_attrs, ci_mapping["runner-attributes"], runner_attributes
+ )
return runner_attributes
else:
return None
@@ -506,43 +507,50 @@ def _find_matching_config(spec, gitlab_ci):
def _pkg_name_from_spec_label(spec_label):
- return spec_label[:spec_label.index('/')]
-
-
-def _format_job_needs(phase_name, strip_compilers, dep_jobs,
- osname, build_group, prune_dag, stage_spec_dict,
- enable_artifacts_buildcache):
+ return spec_label[: spec_label.index("/")]
+
+
+def _format_job_needs(
+ phase_name,
+ strip_compilers,
+ dep_jobs,
+ osname,
+ build_group,
+ prune_dag,
+ stage_spec_dict,
+ enable_artifacts_buildcache,
+):
needs_list = []
for dep_job in dep_jobs:
dep_spec_key = _spec_deps_key(dep_job)
dep_spec_info = stage_spec_dict[dep_spec_key]
- if not prune_dag or dep_spec_info['needs_rebuild']:
- needs_list.append({
- 'job': get_job_name(phase_name,
- strip_compilers,
- dep_job,
- dep_job.architecture,
- build_group),
- 'artifacts': enable_artifacts_buildcache,
- })
+ if not prune_dag or dep_spec_info["needs_rebuild"]:
+ needs_list.append(
+ {
+ "job": get_job_name(
+ phase_name, strip_compilers, dep_job, dep_job.architecture, build_group
+ ),
+ "artifacts": enable_artifacts_buildcache,
+ }
+ )
return needs_list
def get_change_revisions():
"""If this is a git repo get the revisions to use when checking
for changed packages and spack core modules."""
- git_dir = os.path.join(spack.paths.prefix, '.git')
+ git_dir = os.path.join(spack.paths.prefix, ".git")
if os.path.exists(git_dir) and os.path.isdir(git_dir):
# TODO: This will only find changed packages from the last
# TODO: commit. While this may work for single merge commits
# TODO: when merging the topic branch into the base, it will
# TODO: require more thought outside of that narrow case.
- return 'HEAD^', 'HEAD'
+ return "HEAD^", "HEAD"
return None, None
-def get_stack_changed(env_path, rev1='HEAD^', rev2='HEAD'):
+def get_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
"""Given an environment manifest path and two revisions to compare, return
whether or not the stack was changed. Returns True if the environment
manifest changed between the provided revisions (or additionally if the
@@ -550,24 +558,29 @@ def get_stack_changed(env_path, rev1='HEAD^', rev2='HEAD'):
git = exe.which("git")
if git:
with fs.working_dir(spack.paths.prefix):
- git_log = git("diff", "--name-only", rev1, rev2,
- output=str, error=os.devnull,
- fail_on_error=False).strip()
- lines = [] if not git_log else re.split(r'\s+', git_log)
+ git_log = git(
+ "diff",
+ "--name-only",
+ rev1,
+ rev2,
+ output=str,
+ error=os.devnull,
+ fail_on_error=False,
+ ).strip()
+ lines = [] if not git_log else re.split(r"\s+", git_log)
for path in lines:
- if '.gitlab-ci.yml' in path or path in env_path:
- tty.debug('env represented by {0} changed'.format(
- env_path))
- tty.debug('touched file: {0}'.format(path))
+ if ".gitlab-ci.yml" in path or path in env_path:
+ tty.debug("env represented by {0} changed".format(env_path))
+ tty.debug("touched file: {0}".format(path))
return True
return False
-def compute_affected_packages(rev1='HEAD^', rev2='HEAD'):
+def compute_affected_packages(rev1="HEAD^", rev2="HEAD"):
"""Determine which packages were added, removed or changed
between rev1 and rev2, and return the names as a set"""
- return spack.repo.get_all_package_diffs('ARC', rev1=rev1, rev2=rev2)
+ return spack.repo.get_all_package_diffs("ARC", rev1=rev1, rev2=rev2)
def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True):
@@ -591,25 +604,32 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
"""
affected_specs = set()
all_concrete_specs = env.all_specs()
- tty.debug('All concrete environment specs:')
+ tty.debug("All concrete environment specs:")
for s in all_concrete_specs:
- tty.debug(' {0}/{1}'.format(s.name, s.dag_hash()[:7]))
+ tty.debug(" {0}/{1}".format(s.name, s.dag_hash()[:7]))
for pkg in affected_pkgs:
env_matches = [s for s in all_concrete_specs if s.name == pkg]
for match in env_matches:
affected_specs.add(match)
if dependencies:
- affected_specs.update(match.traverse(direction='children', root=False))
+ affected_specs.update(match.traverse(direction="children", root=False))
if dependents:
- affected_specs.update(match.traverse(direction='parents', root=False))
+ affected_specs.update(match.traverse(direction="parents", root=False))
return affected_specs
-def generate_gitlab_ci_yaml(env, print_summary, output_file,
- prune_dag=False, check_index_only=False,
- run_optimizer=False, use_dependencies=False,
- artifacts_root=None, remote_mirror_override=None):
- """ Generate a gitlab yaml file to run a dynamic child pipeline from
+def generate_gitlab_ci_yaml(
+ env,
+ print_summary,
+ output_file,
+ prune_dag=False,
+ check_index_only=False,
+ run_optimizer=False,
+ use_dependencies=False,
+ artifacts_root=None,
+ remote_mirror_override=None,
+):
+ """Generate a gitlab yaml file to run a dynamic child pipeline from
the spec matrix in the active environment.
Arguments:
@@ -647,46 +667,46 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
yaml_root = ev.config_dict(env.yaml)
- if 'gitlab-ci' not in yaml_root:
+ if "gitlab-ci" not in yaml_root:
tty.die('Environment yaml does not have "gitlab-ci" section')
- gitlab_ci = yaml_root['gitlab-ci']
+ gitlab_ci = yaml_root["gitlab-ci"]
build_group = None
enable_cdash_reporting = False
cdash_auth_token = None
- if 'cdash' in yaml_root:
+ if "cdash" in yaml_root:
enable_cdash_reporting = True
- ci_cdash = yaml_root['cdash']
- build_group = ci_cdash['build-group']
- cdash_url = ci_cdash['url']
- cdash_project = ci_cdash['project']
- cdash_site = ci_cdash['site']
+ ci_cdash = yaml_root["cdash"]
+ build_group = ci_cdash["build-group"]
+ cdash_url = ci_cdash["url"]
+ cdash_project = ci_cdash["project"]
+ cdash_site = ci_cdash["site"]
- if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
+ if "SPACK_CDASH_AUTH_TOKEN" in os.environ:
tty.verbose("Using CDash auth token from environment")
- cdash_auth_token = os.environ.get('SPACK_CDASH_AUTH_TOKEN')
+ cdash_auth_token = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
- prune_untouched_packages = os.environ.get('SPACK_PRUNE_UNTOUCHED', None)
+ prune_untouched_packages = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
if prune_untouched_packages:
# Requested to prune untouched packages, but assume we won't do that
# unless we're actually in a git repo.
prune_untouched_packages = False
rev1, rev2 = get_change_revisions()
- tty.debug('Got following revisions: rev1={0}, rev2={1}'.format(rev1, rev2))
+ tty.debug("Got following revisions: rev1={0}, rev2={1}".format(rev1, rev2))
if rev1 and rev2:
# If the stack file itself did not change, proceed with pruning
if not get_stack_changed(env.manifest_path, rev1, rev2):
prune_untouched_packages = True
affected_pkgs = compute_affected_packages(rev1, rev2)
- tty.debug('affected pkgs:')
+ tty.debug("affected pkgs:")
for p in affected_pkgs:
- tty.debug(' {0}'.format(p))
+ tty.debug(" {0}".format(p))
affected_specs = get_spec_filter_list(env, affected_pkgs)
- tty.debug('all affected specs:')
+ tty.debug("all affected specs:")
for s in affected_specs:
- tty.debug(' {0}'.format(s.name))
+ tty.debug(" {0}".format(s.name))
# Downstream jobs will "need" (depend on, for both scheduling and
# artifacts, which include spack.lock file) this pipeline generation
@@ -694,73 +714,79 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
# do not exist, then maybe this is just running in a shell, in which
# case, there is no expectation gitlab will ever run the generated
# pipeline and those environment variables do not matter.
- generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
- parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')
+ generate_job_name = os.environ.get("CI_JOB_NAME", "job-does-not-exist")
+ parent_pipeline_id = os.environ.get("CI_PIPELINE_ID", "pipeline-does-not-exist")
# Values: "spack_pull_request", "spack_protected_branch", or not set
- spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
+ spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
- spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)
+ spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
- if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
- tty.die('spack ci generate requires an env containing a mirror')
+ if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
+ tty.die("spack ci generate requires an env containing a mirror")
- ci_mirrors = yaml_root['mirrors']
+ ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
# Check for a list of "known broken" specs that we should not bother
# trying to build.
- broken_specs_url = ''
+ broken_specs_url = ""
known_broken_specs_encountered = []
- if 'broken-specs-url' in gitlab_ci:
- broken_specs_url = gitlab_ci['broken-specs-url']
+ if "broken-specs-url" in gitlab_ci:
+ broken_specs_url = gitlab_ci["broken-specs-url"]
enable_artifacts_buildcache = False
- if 'enable-artifacts-buildcache' in gitlab_ci:
- enable_artifacts_buildcache = gitlab_ci['enable-artifacts-buildcache']
+ if "enable-artifacts-buildcache" in gitlab_ci:
+ enable_artifacts_buildcache = gitlab_ci["enable-artifacts-buildcache"]
rebuild_index_enabled = True
- if 'rebuild-index' in gitlab_ci and gitlab_ci['rebuild-index'] is False:
+ if "rebuild-index" in gitlab_ci and gitlab_ci["rebuild-index"] is False:
rebuild_index_enabled = False
temp_storage_url_prefix = None
- if 'temporary-storage-url-prefix' in gitlab_ci:
- temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']
+ if "temporary-storage-url-prefix" in gitlab_ci:
+ temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
bootstrap_specs = []
phases = []
- if 'bootstrap' in gitlab_ci:
- for phase in gitlab_ci['bootstrap']:
+ if "bootstrap" in gitlab_ci:
+ for phase in gitlab_ci["bootstrap"]:
try:
- phase_name = phase.get('name')
- strip_compilers = phase.get('compiler-agnostic')
+ phase_name = phase.get("name")
+ strip_compilers = phase.get("compiler-agnostic")
except AttributeError:
phase_name = phase
strip_compilers = False
- phases.append({
- 'name': phase_name,
- 'strip-compilers': strip_compilers,
- })
+ phases.append(
+ {
+ "name": phase_name,
+ "strip-compilers": strip_compilers,
+ }
+ )
for bs in env.spec_lists[phase_name]:
- bootstrap_specs.append({
- 'spec': bs,
- 'phase-name': phase_name,
- 'strip-compilers': strip_compilers,
- })
-
- phases.append({
- 'name': 'specs',
- 'strip-compilers': False,
- })
+ bootstrap_specs.append(
+ {
+ "spec": bs,
+ "phase-name": phase_name,
+ "strip-compilers": strip_compilers,
+ }
+ )
+
+ phases.append(
+ {
+ "name": "specs",
+ "strip-compilers": False,
+ }
+ )
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
mirrors_to_check = None
if remote_mirror_override:
- if spack_pipeline_type == 'spack_protected_branch':
+ if spack_pipeline_type == "spack_protected_branch":
# Overriding the main mirror in this case might result
# in skipping jobs on a release pipeline because specs are
# up to date in develop. Eventually we want to notice and take
@@ -768,59 +794,47 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
# develop to the release, but until we have that, this makes
# sure we schedule a rebuild job if the spec isn't already in
# override mirror.
- mirrors_to_check = {
- 'override': remote_mirror_override
- }
+ mirrors_to_check = {"override": remote_mirror_override}
        # If we have a remote override and we want to generate the pipeline using
# --check-index-only, then the override mirror needs to be added to
# the configured mirrors when bindist.update() is run, or else we
        # won't fetch its index and include it in our local cache.
- spack.mirror.add(
- 'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
+ spack.mirror.add("ci_pr_mirror", remote_mirror_override, cfg.default_modify_scope())
pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir:
- proj_dir = os.environ.get('CI_PROJECT_DIR', os.getcwd())
- pipeline_artifacts_dir = os.path.join(proj_dir, 'jobs_scratch_dir')
+ proj_dir = os.environ.get("CI_PROJECT_DIR", os.getcwd())
+ pipeline_artifacts_dir = os.path.join(proj_dir, "jobs_scratch_dir")
pipeline_artifacts_dir = os.path.abspath(pipeline_artifacts_dir)
- concrete_env_dir = os.path.join(
- pipeline_artifacts_dir, 'concrete_environment')
+ concrete_env_dir = os.path.join(pipeline_artifacts_dir, "concrete_environment")
# Now that we've added the mirrors we know about, they should be properly
# reflected in the environment manifest file, so copy that into the
# concrete environment directory, along with the spack.lock file.
if not os.path.exists(concrete_env_dir):
os.makedirs(concrete_env_dir)
- shutil.copyfile(env.manifest_path,
- os.path.join(concrete_env_dir, 'spack.yaml'))
- shutil.copyfile(env.lock_path,
- os.path.join(concrete_env_dir, 'spack.lock'))
+ shutil.copyfile(env.manifest_path, os.path.join(concrete_env_dir, "spack.yaml"))
+ shutil.copyfile(env.lock_path, os.path.join(concrete_env_dir, "spack.lock"))
- job_log_dir = os.path.join(pipeline_artifacts_dir, 'logs')
- job_repro_dir = os.path.join(pipeline_artifacts_dir, 'reproduction')
- local_mirror_dir = os.path.join(pipeline_artifacts_dir, 'mirror')
- user_artifacts_dir = os.path.join(pipeline_artifacts_dir, 'user_data')
+ job_log_dir = os.path.join(pipeline_artifacts_dir, "logs")
+ job_repro_dir = os.path.join(pipeline_artifacts_dir, "reproduction")
+ local_mirror_dir = os.path.join(pipeline_artifacts_dir, "mirror")
+ user_artifacts_dir = os.path.join(pipeline_artifacts_dir, "user_data")
# We communicate relative paths to the downstream jobs to avoid issues in
# situations where the CI_PROJECT_DIR varies between the pipeline
# generation job and the rebuild jobs. This can happen when gitlab
# checks out the project into a runner-specific directory, for example,
# and different runners are picked for generate and rebuild jobs.
- ci_project_dir = os.environ.get('CI_PROJECT_DIR')
- rel_artifacts_root = os.path.relpath(
- pipeline_artifacts_dir, ci_project_dir)
- rel_concrete_env_dir = os.path.relpath(
- concrete_env_dir, ci_project_dir)
- rel_job_log_dir = os.path.relpath(
- job_log_dir, ci_project_dir)
- rel_job_repro_dir = os.path.relpath(
- job_repro_dir, ci_project_dir)
- rel_local_mirror_dir = os.path.relpath(
- local_mirror_dir, ci_project_dir)
- rel_user_artifacts_dir = os.path.relpath(
- user_artifacts_dir, ci_project_dir)
+ ci_project_dir = os.environ.get("CI_PROJECT_DIR")
+ rel_artifacts_root = os.path.relpath(pipeline_artifacts_dir, ci_project_dir)
+ rel_concrete_env_dir = os.path.relpath(concrete_env_dir, ci_project_dir)
+ rel_job_log_dir = os.path.relpath(job_log_dir, ci_project_dir)
+ rel_job_repro_dir = os.path.relpath(job_repro_dir, ci_project_dir)
+ rel_local_mirror_dir = os.path.relpath(local_mirror_dir, ci_project_dir)
+ rel_user_artifacts_dir = os.path.relpath(user_artifacts_dir, ci_project_dir)
# Speed up staging by first fetching binary indices from all mirrors
# (including the override mirror we may have just added above).
@@ -832,14 +846,15 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
staged_phases = {}
try:
for phase in phases:
- phase_name = phase['name']
- if phase_name == 'specs':
+ phase_name = phase["name"]
+ if phase_name == "specs":
                # Anything in the "specs" of the environment is already
# concretized by the block at the top of this method, so we
# only need to find the concrete versions, and then avoid
# re-concretizing them needlessly later on.
concrete_phase_specs = [
- concrete for abstract, concrete in env.concretized_specs()
+ concrete
+ for abstract, concrete in env.concretized_specs()
if abstract in env.spec_lists[phase_name]
]
else:
@@ -853,11 +868,12 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
check_index_only=check_index_only,
- mirrors_to_check=mirrors_to_check)
+ mirrors_to_check=mirrors_to_check,
+ )
finally:
# Clean up remote mirror override if enabled
if remote_mirror_override:
- spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
+ spack.mirror.remove("ci_pr_mirror", cfg.default_modify_scope())
all_job_names = []
output_object = {}
@@ -867,117 +883,110 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
stage_names = []
max_length_needs = 0
- max_needs_job = ''
+ max_needs_job = ""
# If this is configured, spack will fail "spack ci generate" if it
# generates any hash which exists under the broken specs url.
broken_spec_urls = None
if broken_specs_url:
- if broken_specs_url.startswith('http'):
+ if broken_specs_url.startswith("http"):
# To make checking each spec against the list faster, we require
# a url protocol that allows us to iterate the url in advance.
- tty.msg('Cannot use an http(s) url for broken specs, ignoring')
+ tty.msg("Cannot use an http(s) url for broken specs, ignoring")
else:
broken_spec_urls = web_util.list_url(broken_specs_url)
before_script, after_script = None, None
for phase in phases:
- phase_name = phase['name']
- strip_compilers = phase['strip-compilers']
+ phase_name = phase["name"]
+ strip_compilers = phase["strip-compilers"]
main_phase = _is_main_phase(phase_name)
spec_labels, dependencies, stages = staged_phases[phase_name]
for stage_jobs in stages:
- stage_name = 'stage-{0}'.format(stage_id)
+ stage_name = "stage-{0}".format(stage_id)
stage_names.append(stage_name)
stage_id += 1
for spec_label in stage_jobs:
spec_record = spec_labels[spec_label]
- root_spec = spec_record['rootSpec']
+ root_spec = spec_record["rootSpec"]
pkg_name = _pkg_name_from_spec_label(spec_label)
release_spec = root_spec[pkg_name]
release_spec_dag_hash = release_spec.dag_hash()
if prune_untouched_packages:
if release_spec not in affected_specs:
- tty.debug('Pruning {0}, untouched by change.'.format(
- release_spec.name))
- spec_record['needs_rebuild'] = False
+ tty.debug("Pruning {0}, untouched by change.".format(release_spec.name))
+ spec_record["needs_rebuild"] = False
continue
- runner_attribs = _find_matching_config(
- release_spec, gitlab_ci)
+ runner_attribs = _find_matching_config(release_spec, gitlab_ci)
if not runner_attribs:
- tty.warn('No match found for {0}, skipping it'.format(
- release_spec))
+ tty.warn("No match found for {0}, skipping it".format(release_spec))
continue
- tags = [tag for tag in runner_attribs['tags']]
+ tags = [tag for tag in runner_attribs["tags"]]
if spack_pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
tags = _remove_reserved_tags(tags)
- if spack_pipeline_type == 'spack_protected_branch':
- tags.extend(['aws', 'protected'])
- elif spack_pipeline_type == 'spack_pull_request':
- tags.extend(['public'])
+ if spack_pipeline_type == "spack_protected_branch":
+ tags.extend(["aws", "protected"])
+ elif spack_pipeline_type == "spack_pull_request":
+ tags.extend(["public"])
variables = {}
- if 'variables' in runner_attribs:
- variables.update(runner_attribs['variables'])
+ if "variables" in runner_attribs:
+ variables.update(runner_attribs["variables"])
image_name = None
image_entry = None
- if 'image' in runner_attribs:
- build_image = runner_attribs['image']
+ if "image" in runner_attribs:
+ build_image = runner_attribs["image"]
try:
- image_name = build_image.get('name')
- entrypoint = build_image.get('entrypoint')
+ image_name = build_image.get("name")
+ entrypoint = build_image.get("entrypoint")
image_entry = [p for p in entrypoint]
except AttributeError:
image_name = build_image
- job_script = ['spack env activate --without-view .']
+ job_script = ["spack env activate --without-view ."]
if artifacts_root:
- job_script.insert(0, 'cd {0}'.format(concrete_env_dir))
+ job_script.insert(0, "cd {0}".format(concrete_env_dir))
- job_script.extend([
- 'spack ci rebuild'
- ])
+ job_script.extend(["spack ci rebuild"])
- if 'script' in runner_attribs:
- job_script = [s for s in runner_attribs['script']]
+ if "script" in runner_attribs:
+ job_script = [s for s in runner_attribs["script"]]
before_script = None
- if 'before_script' in runner_attribs:
- before_script = [
- s for s in runner_attribs['before_script']
- ]
+ if "before_script" in runner_attribs:
+ before_script = [s for s in runner_attribs["before_script"]]
after_script = None
- if 'after_script' in runner_attribs:
- after_script = [s for s in runner_attribs['after_script']]
+ if "after_script" in runner_attribs:
+ after_script = [s for s in runner_attribs["after_script"]]
osname = str(release_spec.architecture)
- job_name = get_job_name(phase_name, strip_compilers,
- release_spec, osname, build_group)
+ job_name = get_job_name(
+ phase_name, strip_compilers, release_spec, osname, build_group
+ )
- compiler_action = 'NONE'
+ compiler_action = "NONE"
if len(phases) > 1:
- compiler_action = 'FIND_ANY'
+ compiler_action = "FIND_ANY"
if _is_main_phase(phase_name):
- compiler_action = 'INSTALL_MISSING'
+ compiler_action = "INSTALL_MISSING"
job_vars = {
- 'SPACK_ROOT_SPEC': _format_root_spec(
- root_spec, main_phase, strip_compilers),
- 'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
- 'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
- 'SPACK_COMPILER_ACTION': compiler_action
+ "SPACK_ROOT_SPEC": _format_root_spec(root_spec, main_phase, strip_compilers),
+ "SPACK_JOB_SPEC_DAG_HASH": release_spec_dag_hash,
+ "SPACK_JOB_SPEC_PKG_NAME": release_spec.name,
+ "SPACK_COMPILER_ACTION": compiler_action,
}
job_dependencies = []
@@ -985,26 +994,30 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
if enable_artifacts_buildcache:
# Get dependencies transitively, so they're all
# available in the artifacts buildcache.
- dep_jobs = [
- d for d in release_spec.traverse(deptype=all,
- root=False)
- ]
+ dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
else:
# In this case, "needs" is only used for scheduling
# purposes, so we only get the direct dependencies.
dep_jobs = []
for dep_label in dependencies[spec_label]:
dep_pkg = _pkg_name_from_spec_label(dep_label)
- dep_root = spec_labels[dep_label]['rootSpec']
+ dep_root = spec_labels[dep_label]["rootSpec"]
dep_jobs.append(dep_root[dep_pkg])
job_dependencies.extend(
- _format_job_needs(phase_name, strip_compilers,
- dep_jobs, osname, build_group,
- prune_dag, spec_labels,
- enable_artifacts_buildcache))
-
- rebuild_spec = spec_record['needs_rebuild']
+ _format_job_needs(
+ phase_name,
+ strip_compilers,
+ dep_jobs,
+ osname,
+ build_group,
+ prune_dag,
+ spec_labels,
+ enable_artifacts_buildcache,
+ )
+ )
+
+ rebuild_spec = spec_record["needs_rebuild"]
# This next section helps gitlab make sure the right
# bootstrapped compiler exists in the artifacts buildcache by
@@ -1014,20 +1027,16 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
# bootstrap spec lists, then we will add more dependencies to
                # the job (that compiler and maybe its dependencies as well).
if _is_main_phase(phase_name):
- spec_arch_family = (release_spec.architecture
- .target
- .microarchitecture
- .family)
- compiler_pkg_spec = compilers.pkg_spec_for_compiler(
- release_spec.compiler)
+ spec_arch_family = release_spec.architecture.target.microarchitecture.family
+ compiler_pkg_spec = compilers.pkg_spec_for_compiler(release_spec.compiler)
for bs in bootstrap_specs:
- c_spec = bs['spec']
+ c_spec = bs["spec"]
bs_arch = c_spec.architecture
- bs_arch_family = (bs_arch.target
- .microarchitecture
- .family)
- if (c_spec.satisfies(compiler_pkg_spec) and
- bs_arch_family == spec_arch_family):
+ bs_arch_family = bs_arch.target.microarchitecture.family
+ if (
+ c_spec.satisfies(compiler_pkg_spec)
+ and bs_arch_family == spec_arch_family
+ ):
# We found the bootstrap compiler this release spec
# should be built with, so for DAG scheduling
# purposes, we will at least add the compiler spec
@@ -1040,94 +1049,94 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
# dependencies, we artificially force the spec to
# be rebuilt if the compiler targeted to build it
# needs to be rebuilt.
- bs_specs, _, _ = staged_phases[bs['phase-name']]
+ bs_specs, _, _ = staged_phases[bs["phase-name"]]
c_spec_key = _spec_deps_key(c_spec)
- rbld_comp = bs_specs[c_spec_key]['needs_rebuild']
+ rbld_comp = bs_specs[c_spec_key]["needs_rebuild"]
rebuild_spec = rebuild_spec or rbld_comp
# Also update record so dependents do not fail to
# add this spec to their "needs"
- spec_record['needs_rebuild'] = rebuild_spec
+ spec_record["needs_rebuild"] = rebuild_spec
dep_jobs = [c_spec]
if enable_artifacts_buildcache:
- dep_jobs = [
- d for d in c_spec.traverse(deptype=all)
- ]
+ dep_jobs = [d for d in c_spec.traverse(deptype=all)]
job_dependencies.extend(
- _format_job_needs(bs['phase-name'],
- bs['strip-compilers'],
- dep_jobs,
- str(bs_arch),
- build_group,
- prune_dag,
- bs_specs,
- enable_artifacts_buildcache))
+ _format_job_needs(
+ bs["phase-name"],
+ bs["strip-compilers"],
+ dep_jobs,
+ str(bs_arch),
+ build_group,
+ prune_dag,
+ bs_specs,
+ enable_artifacts_buildcache,
+ )
+ )
else:
- debug_msg = ''.join([
- 'Considered compiler {0} for spec ',
- '{1}, but rejected it either because it was ',
- 'not the compiler required by the spec, or ',
- 'because the target arch families of the ',
- 'spec and the compiler did not match'
- ]).format(c_spec, release_spec)
+ debug_msg = "".join(
+ [
+ "Considered compiler {0} for spec ",
+ "{1}, but rejected it either because it was ",
+ "not the compiler required by the spec, or ",
+ "because the target arch families of the ",
+ "spec and the compiler did not match",
+ ]
+ ).format(c_spec, release_spec)
tty.debug(debug_msg)
if prune_dag and not rebuild_spec:
- tty.debug('Pruning {0}, does not need rebuild.'.format(
- release_spec.name))
+ tty.debug("Pruning {0}, does not need rebuild.".format(release_spec.name))
continue
- if (broken_spec_urls is not None and
- release_spec_dag_hash in broken_spec_urls):
- known_broken_specs_encountered.append('{0} ({1})'.format(
- release_spec, release_spec_dag_hash))
+ if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
+ known_broken_specs_encountered.append(
+ "{0} ({1})".format(release_spec, release_spec_dag_hash)
+ )
if artifacts_root:
- job_dependencies.append({
- 'job': generate_job_name,
- 'pipeline': '{0}'.format(parent_pipeline_id)
- })
+ job_dependencies.append(
+ {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
+ )
- job_vars['SPACK_SPEC_NEEDS_REBUILD'] = str(rebuild_spec)
+ job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
if enable_cdash_reporting:
- cdash_build_name = _get_cdash_build_name(
- release_spec, build_group)
+ cdash_build_name = _get_cdash_build_name(release_spec, build_group)
all_job_names.append(cdash_build_name)
- job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
+ job_vars["SPACK_CDASH_BUILD_NAME"] = cdash_build_name
variables.update(job_vars)
- artifact_paths = [
- rel_job_log_dir,
- rel_job_repro_dir,
- rel_user_artifacts_dir
- ]
+ artifact_paths = [rel_job_log_dir, rel_job_repro_dir, rel_user_artifacts_dir]
if enable_artifacts_buildcache:
- bc_root = os.path.join(
- local_mirror_dir, 'build_cache')
- artifact_paths.extend([os.path.join(bc_root, p) for p in [
- bindist.tarball_name(release_spec, '.spec.json'),
- bindist.tarball_directory_name(release_spec),
- ]])
+ bc_root = os.path.join(local_mirror_dir, "build_cache")
+ artifact_paths.extend(
+ [
+ os.path.join(bc_root, p)
+ for p in [
+ bindist.tarball_name(release_spec, ".spec.json"),
+ bindist.tarball_directory_name(release_spec),
+ ]
+ ]
+ )
job_object = {
- 'stage': stage_name,
- 'variables': variables,
- 'script': job_script,
- 'tags': tags,
- 'artifacts': {
- 'paths': artifact_paths,
- 'when': 'always',
+ "stage": stage_name,
+ "variables": variables,
+ "script": job_script,
+ "tags": tags,
+ "artifacts": {
+ "paths": artifact_paths,
+ "when": "always",
},
- 'needs': sorted(job_dependencies, key=lambda d: d['job']),
- 'retry': {
- 'max': 2,
- 'when': JOB_RETRY_CONDITIONS,
+ "needs": sorted(job_dependencies, key=lambda d: d["job"]),
+ "retry": {
+ "max": 2,
+ "when": JOB_RETRY_CONDITIONS,
},
- 'interruptible': True
+ "interruptible": True,
}
length_needs = len(job_dependencies)
@@ -1136,17 +1145,17 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
max_needs_job = job_name
if before_script:
- job_object['before_script'] = before_script
+ job_object["before_script"] = before_script
if after_script:
- job_object['after_script'] = after_script
+ job_object["after_script"] = after_script
if image_name:
- job_object['image'] = image_name
+ job_object["image"] = image_name
if image_entry is not None:
- job_object['image'] = {
- 'name': image_name,
- 'entrypoint': image_entry,
+ job_object["image"] = {
+ "name": image_name,
+ "entrypoint": image_entry,
}
output_object[job_name] = job_object
@@ -1154,115 +1163,106 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
if print_summary:
for phase in phases:
- phase_name = phase['name']
+ phase_name = phase["name"]
tty.msg('Stages for phase "{0}"'.format(phase_name))
phase_stages = staged_phases[phase_name]
_print_staging_summary(*phase_stages)
- tty.debug('{0} build jobs generated in {1} stages'.format(
- job_id, stage_id))
+ tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))
if job_id > 0:
- tty.debug('The max_needs_job is {0}, with {1} needs'.format(
- max_needs_job, max_length_needs))
+ tty.debug(
+ "The max_needs_job is {0}, with {1} needs".format(max_needs_job, max_length_needs)
+ )
# Use "all_job_names" to populate the build group for this set
if enable_cdash_reporting and cdash_auth_token:
try:
- _populate_buildgroup(all_job_names, build_group, cdash_project,
- cdash_site, cdash_auth_token, cdash_url)
+ _populate_buildgroup(
+ all_job_names, build_group, cdash_project, cdash_site, cdash_auth_token, cdash_url
+ )
except (SpackError, HTTPError, URLError) as err:
- tty.warn('Problem populating buildgroup: {0}'.format(err))
+ tty.warn("Problem populating buildgroup: {0}".format(err))
else:
- tty.warn('Unable to populate buildgroup without CDash credentials')
+ tty.warn("Unable to populate buildgroup without CDash credentials")
service_job_config = None
- if 'service-job-attributes' in gitlab_ci:
- service_job_config = gitlab_ci['service-job-attributes']
+ if "service-job-attributes" in gitlab_ci:
+ service_job_config = gitlab_ci["service-job-attributes"]
default_attrs = [
- 'image',
- 'tags',
- 'variables',
- 'before_script',
+ "image",
+ "tags",
+ "variables",
+ "before_script",
# 'script',
- 'after_script',
+ "after_script",
]
- service_job_retries = {
- 'max': 2,
- 'when': [
- 'runner_system_failure',
- 'stuck_or_timeout_failure'
- ]
- }
+ service_job_retries = {"max": 2, "when": ["runner_system_failure", "stuck_or_timeout_failure"]}
if job_id > 0:
if temp_storage_url_prefix:
# There were some rebuild jobs scheduled, so we will need to
# schedule a job to clean up the temporary storage location
# associated with this pipeline.
- stage_names.append('cleanup-temp-storage')
+ stage_names.append("cleanup-temp-storage")
cleanup_job = {}
if service_job_config:
- _copy_attributes(default_attrs,
- service_job_config,
- cleanup_job)
-
- if 'tags' in cleanup_job:
- service_tags = _remove_reserved_tags(cleanup_job['tags'])
- cleanup_job['tags'] = service_tags
-
- cleanup_job['stage'] = 'cleanup-temp-storage'
- cleanup_job['script'] = [
- 'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(
- temp_storage_url_prefix)
+ _copy_attributes(default_attrs, service_job_config, cleanup_job)
+
+ if "tags" in cleanup_job:
+ service_tags = _remove_reserved_tags(cleanup_job["tags"])
+ cleanup_job["tags"] = service_tags
+
+ cleanup_job["stage"] = "cleanup-temp-storage"
+ cleanup_job["script"] = [
+ "spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID".format(
+ temp_storage_url_prefix
+ )
]
- cleanup_job['when'] = 'always'
- cleanup_job['retry'] = service_job_retries
- cleanup_job['interruptible'] = True
+ cleanup_job["when"] = "always"
+ cleanup_job["retry"] = service_job_retries
+ cleanup_job["interruptible"] = True
- output_object['cleanup'] = cleanup_job
+ output_object["cleanup"] = cleanup_job
- if ('signing-job-attributes' in gitlab_ci and
- spack_pipeline_type == 'spack_protected_branch'):
+ if (
+ "signing-job-attributes" in gitlab_ci
+ and spack_pipeline_type == "spack_protected_branch"
+ ):
# External signing: generate a job to check and sign binary pkgs
- stage_names.append('stage-sign-pkgs')
- signing_job_config = gitlab_ci['signing-job-attributes']
+ stage_names.append("stage-sign-pkgs")
+ signing_job_config = gitlab_ci["signing-job-attributes"]
signing_job = {}
signing_job_attrs_to_copy = [
- 'image',
- 'tags',
- 'variables',
- 'before_script',
- 'script',
- 'after_script',
+ "image",
+ "tags",
+ "variables",
+ "before_script",
+ "script",
+ "after_script",
]
- _copy_attributes(signing_job_attrs_to_copy,
- signing_job_config,
- signing_job)
+ _copy_attributes(signing_job_attrs_to_copy, signing_job_config, signing_job)
signing_job_tags = []
- if 'tags' in signing_job:
- signing_job_tags = _remove_reserved_tags(signing_job['tags'])
+ if "tags" in signing_job:
+ signing_job_tags = _remove_reserved_tags(signing_job["tags"])
- for tag in ['aws', 'protected', 'notary']:
+ for tag in ["aws", "protected", "notary"]:
if tag not in signing_job_tags:
signing_job_tags.append(tag)
- signing_job['tags'] = signing_job_tags
+ signing_job["tags"] = signing_job_tags
- signing_job['stage'] = 'stage-sign-pkgs'
- signing_job['when'] = 'always'
- signing_job['retry'] = {
- 'max': 2,
- 'when': ['always']
- }
- signing_job['interruptible'] = True
+ signing_job["stage"] = "stage-sign-pkgs"
+ signing_job["when"] = "always"
+ signing_job["retry"] = {"max": 2, "when": ["always"]}
+ signing_job["interruptible"] = True
- output_object['sign-pkgs'] = signing_job
+ output_object["sign-pkgs"] = signing_job
if spack_buildcache_copy:
# Generate a job to copy the contents from wherever the builds are getting
@@ -1271,54 +1271,52 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
src_url = remote_mirror_override or remote_mirror_url
dest_url = spack_buildcache_copy
- stage_names.append('stage-copy-buildcache')
+ stage_names.append("stage-copy-buildcache")
copy_job = {
- 'stage': 'stage-copy-buildcache',
- 'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
- 'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
- 'when': 'on_success',
- 'interruptible': True,
- 'retry': service_job_retries,
- 'script': [
- '. ./share/spack/setup-env.sh',
- 'spack --version',
- 'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
- src_url, dest_url)
- ]
+ "stage": "stage-copy-buildcache",
+ "tags": ["spack", "public", "medium", "aws", "x86_64"],
+ "image": "ghcr.io/spack/python-aws-bash:0.0.1",
+ "when": "on_success",
+ "interruptible": True,
+ "retry": service_job_retries,
+ "script": [
+ ". ./share/spack/setup-env.sh",
+ "spack --version",
+ "aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}".format(
+ src_url, dest_url
+ ),
+ ],
}
- output_object['copy-mirror'] = copy_job
+ output_object["copy-mirror"] = copy_job
if rebuild_index_enabled:
# Add a final job to regenerate the index
- stage_names.append('stage-rebuild-index')
+ stage_names.append("stage-rebuild-index")
final_job = {}
if service_job_config:
- _copy_attributes(default_attrs,
- service_job_config,
- final_job)
+ _copy_attributes(default_attrs, service_job_config, final_job)
- if 'tags' in final_job:
- service_tags = _remove_reserved_tags(final_job['tags'])
- final_job['tags'] = service_tags
+ if "tags" in final_job:
+ service_tags = _remove_reserved_tags(final_job["tags"])
+ final_job["tags"] = service_tags
index_target_mirror = mirror_urls[0]
if remote_mirror_override:
index_target_mirror = remote_mirror_override
- final_job['stage'] = 'stage-rebuild-index'
- final_job['script'] = [
- 'spack buildcache update-index --keys -d {0}'.format(
- index_target_mirror)
+ final_job["stage"] = "stage-rebuild-index"
+ final_job["script"] = [
+ "spack buildcache update-index --keys -d {0}".format(index_target_mirror)
]
- final_job['when'] = 'always'
- final_job['retry'] = service_job_retries
- final_job['interruptible'] = True
+ final_job["when"] = "always"
+ final_job["retry"] = service_job_retries
+ final_job["interruptible"] = True
- output_object['rebuild-index'] = final_job
+ output_object["rebuild-index"] = final_job
- output_object['stages'] = stage_names
+ output_object["stages"] = stage_names
# Capture the version of spack used to generate the pipeline, transform it
# into a value that can be passed to "git checkout", and save it in a
@@ -1327,7 +1325,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
version_to_clone = None
v_match = re.match(r"^\d+\.\d+\.\d+$", spack_version)
if v_match:
- version_to_clone = 'v{0}'.format(v_match.group(0))
+ version_to_clone = "v{0}".format(v_match.group(0))
else:
v_match = re.match(r"^[^-]+-[^-]+-([a-f\d]+)$", spack_version)
if v_match:
@@ -1335,25 +1333,24 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
else:
version_to_clone = spack_version
- output_object['variables'] = {
- 'SPACK_ARTIFACTS_ROOT': rel_artifacts_root,
- 'SPACK_CONCRETE_ENV_DIR': rel_concrete_env_dir,
- 'SPACK_VERSION': spack_version,
- 'SPACK_CHECKOUT_VERSION': version_to_clone,
- 'SPACK_REMOTE_MIRROR_URL': remote_mirror_url,
- 'SPACK_JOB_LOG_DIR': rel_job_log_dir,
- 'SPACK_JOB_REPRO_DIR': rel_job_repro_dir,
- 'SPACK_LOCAL_MIRROR_DIR': rel_local_mirror_dir,
- 'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
+ output_object["variables"] = {
+ "SPACK_ARTIFACTS_ROOT": rel_artifacts_root,
+ "SPACK_CONCRETE_ENV_DIR": rel_concrete_env_dir,
+ "SPACK_VERSION": spack_version,
+ "SPACK_CHECKOUT_VERSION": version_to_clone,
+ "SPACK_REMOTE_MIRROR_URL": remote_mirror_url,
+ "SPACK_JOB_LOG_DIR": rel_job_log_dir,
+ "SPACK_JOB_REPRO_DIR": rel_job_repro_dir,
+ "SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
+ "SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
}
if remote_mirror_override:
- (output_object['variables']
- ['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override
+ (output_object["variables"]["SPACK_REMOTE_MIRROR_OVERRIDE"]) = remote_mirror_override
- spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
+ spack_stack_name = os.environ.get("SPACK_CI_STACK_NAME", None)
if spack_stack_name:
- output_object['variables']['SPACK_CI_STACK_NAME'] = spack_stack_name
+ output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
sorted_output = {}
for output_key, output_value in sorted(output_object.items()):
@@ -1362,52 +1359,53 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
# TODO(opadron): remove this or refactor
if run_optimizer:
import spack.ci_optimization as ci_opt
+
sorted_output = ci_opt.optimizer(sorted_output)
# TODO(opadron): remove this or refactor
if use_dependencies:
import spack.ci_needs_workaround as cinw
+
sorted_output = cinw.needs_to_dependencies(sorted_output)
else:
# No jobs were generated
- tty.debug('No specs to rebuild, generating no-op job')
+ tty.debug("No specs to rebuild, generating no-op job")
noop_job = {}
if service_job_config:
- _copy_attributes(default_attrs,
- service_job_config,
- noop_job)
+ _copy_attributes(default_attrs, service_job_config, noop_job)
- if 'script' not in noop_job:
- noop_job['script'] = [
+ if "script" not in noop_job:
+ noop_job["script"] = [
'echo "All specs already up to date, nothing to rebuild."',
]
- noop_job['retry'] = service_job_retries
+ noop_job["retry"] = service_job_retries
- sorted_output = {'no-specs-to-rebuild': noop_job}
+ sorted_output = {"no-specs-to-rebuild": noop_job}
if known_broken_specs_encountered:
error_msg = (
- 'Pipeline generation failed due to the presence of the '
- 'following specs that are known to be broken in develop:\n')
+ "Pipeline generation failed due to the presence of the "
+ "following specs that are known to be broken in develop:\n"
+ )
for broken_spec in known_broken_specs_encountered:
- error_msg += '* {0}\n'.format(broken_spec)
+ error_msg += "* {0}\n".format(broken_spec)
tty.die(error_msg)
- with open(output_file, 'w') as outf:
+ with open(output_file, "w") as outf:
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
def _url_encode_string(input_string):
- encoded_keyval = urlencode({'donotcare': input_string})
- eq_idx = encoded_keyval.find('=') + 1
+ encoded_keyval = urlencode({"donotcare": input_string})
+ eq_idx = encoded_keyval.find("=") + 1
encoded_value = encoded_keyval[eq_idx:]
return encoded_value
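# --- Editor's illustrative sketch (not part of the diff above) ---
# _url_encode_string() leans on urlencode() with a throwaway key and then
# slices off everything after the first '='. A minimal standalone version,
# assuming the Python 3 location of urlencode; the helper name below is made
# up for illustration only:
from urllib.parse import urlencode

def url_encode_value_sketch(input_string):
    # Encode under a dummy key, then keep only the percent-encoded value.
    encoded_keyval = urlencode({"donotcare": input_string})
    return encoded_keyval[encoded_keyval.find("=") + 1:]

print(url_encode_value_sketch("foo bar/baz"))  # prints: foo+bar%2Fbaz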
def import_signing_key(base64_signing_key):
- """ Given Base64-encoded gpg key, decode and import it to use for
+ """Given Base64-encoded gpg key, decode and import it to use for
signing packages.
Arguments:
@@ -1419,56 +1417,56 @@ def import_signing_key(base64_signing_key):
https://github.com/spack/spack-infrastructure/blob/main/gitlab-docker/files/gen-key
"""
if not base64_signing_key:
- tty.warn('No key found for signing/verifying packages')
+ tty.warn("No key found for signing/verifying packages")
return
- tty.debug('ci.import_signing_key() will attempt to import a key')
+ tty.debug("ci.import_signing_key() will attempt to import a key")
# This command has the side-effect of creating the directory referred
# to as GNUPGHOME in setup_environment()
- list_output = spack_gpg('list', output=str)
+ list_output = spack_gpg("list", output=str)
- tty.debug('spack gpg list:')
+ tty.debug("spack gpg list:")
tty.debug(list_output)
decoded_key = base64.b64decode(base64_signing_key)
if isinstance(decoded_key, bytes):
- decoded_key = decoded_key.decode('utf8')
+ decoded_key = decoded_key.decode("utf8")
with TemporaryDirectory() as tmpdir:
- sign_key_path = os.path.join(tmpdir, 'signing_key')
- with open(sign_key_path, 'w') as fd:
+ sign_key_path = os.path.join(tmpdir, "signing_key")
+ with open(sign_key_path, "w") as fd:
fd.write(decoded_key)
- key_import_output = spack_gpg('trust', sign_key_path, output=str)
- tty.debug('spack gpg trust {0}'.format(sign_key_path))
+ key_import_output = spack_gpg("trust", sign_key_path, output=str)
+ tty.debug("spack gpg trust {0}".format(sign_key_path))
tty.debug(key_import_output)
# Now print the keys we have for verifying and signing
- trusted_keys_output = spack_gpg('list', '--trusted', output=str)
- signing_keys_output = spack_gpg('list', '--signing', output=str)
+ trusted_keys_output = spack_gpg("list", "--trusted", output=str)
+ signing_keys_output = spack_gpg("list", "--signing", output=str)
- tty.debug('spack gpg list --trusted')
+ tty.debug("spack gpg list --trusted")
tty.debug(trusted_keys_output)
- tty.debug('spack gpg list --signing')
+ tty.debug("spack gpg list --signing")
tty.debug(signing_keys_output)
def can_sign_binaries():
- """ Utility method to determine if this spack instance is capable of
- signing binary packages. This is currently only possible if the
- spack gpg keystore contains exactly one secret key."""
+ """Utility method to determine if this spack instance is capable of
+ signing binary packages. This is currently only possible if the
+ spack gpg keystore contains exactly one secret key."""
return len(gpg_util.signing_keys()) == 1
def can_verify_binaries():
- """ Utility method to determin if this spack instance is capable (at
- least in theory) of verifying signed binaries."""
+    """Utility method to determine if this spack instance is capable (at
+ least in theory) of verifying signed binaries."""
return len(gpg_util.public_keys()) >= 1
def configure_compilers(compiler_action, scope=None):
- """ Depending on the compiler_action parameter, either turn on the
+ """Depending on the compiler_action parameter, either turn on the
install_missing_compilers config option, or find spack compilers,
or do nothing. This is used from rebuild jobs in bootstrapping
    pipelines, where in the bootstrapping phase we would pass
@@ -1483,30 +1481,30 @@ def configure_compilers(compiler_action, scope=None):
scope (spack.config.ConfigScope): Optional. The scope in which to look for
compilers, in case 'FIND_ANY' was provided.
"""
- if compiler_action == 'INSTALL_MISSING':
- tty.debug('Make sure bootstrapped compiler will be installed')
- config = cfg.get('config')
- config['install_missing_compilers'] = True
- cfg.set('config', config)
- elif compiler_action == 'FIND_ANY':
- tty.debug('Just find any available compiler')
- find_args = ['find']
+ if compiler_action == "INSTALL_MISSING":
+ tty.debug("Make sure bootstrapped compiler will be installed")
+ config = cfg.get("config")
+ config["install_missing_compilers"] = True
+ cfg.set("config", config)
+ elif compiler_action == "FIND_ANY":
+ tty.debug("Just find any available compiler")
+ find_args = ["find"]
if scope:
- find_args.extend(['--scope', scope])
+ find_args.extend(["--scope", scope])
output = spack_compiler(*find_args)
- tty.debug('spack compiler find')
+ tty.debug("spack compiler find")
tty.debug(output)
- output = spack_compiler('list')
- tty.debug('spack compiler list')
+ output = spack_compiler("list")
+ tty.debug("spack compiler list")
tty.debug(output)
else:
- tty.debug('No compiler action to be taken')
+ tty.debug("No compiler action to be taken")
return None
def get_concrete_specs(env, root_spec, job_name, compiler_action):
- """ Build a dictionary of concrete specs relevant to a particular
+ """Build a dictionary of concrete specs relevant to a particular
rebuild job. This includes the root spec and the spec to be
rebuilt (which could be the same).
@@ -1536,17 +1534,17 @@ def get_concrete_specs(env, root_spec, job_name, compiler_action):
"""
spec_map = {
- 'root': None,
+ "root": None,
}
- if compiler_action == 'FIND_ANY':
+ if compiler_action == "FIND_ANY":
# This corresponds to a bootstrapping phase where we need to
# rely on any available compiler to build the package (i.e. the
# compiler needed to be stripped from the spec when we generated
# the job), and thus we need to concretize the root spec again.
- tty.debug('About to concretize {0}'.format(root_spec))
+ tty.debug("About to concretize {0}".format(root_spec))
concrete_root = Spec(root_spec).concretized()
- tty.debug('Resulting concrete root: {0}'.format(concrete_root))
+ tty.debug("Resulting concrete root: {0}".format(concrete_root))
else:
# in this case, either we're relying on Spack to install missing
# compiler bootstrapped in a previous phase, or else we only had one
@@ -1557,7 +1555,7 @@ def get_concrete_specs(env, root_spec, job_name, compiler_action):
# going to ask spack to "install_missing_compilers".
concrete_root = env.specs_by_hash[root_spec]
- spec_map['root'] = concrete_root
+ spec_map["root"] = concrete_root
spec_map[job_name] = concrete_root[job_name]
return spec_map
@@ -1566,22 +1564,17 @@ def get_concrete_specs(env, root_spec, job_name, compiler_action):
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
"""Unchecked version of the public API, for easier mocking"""
unsigned = not sign_binaries
- tty.debug('Creating buildcache ({0})'.format(
- 'unsigned' if unsigned else 'signed'))
+ tty.debug("Creating buildcache ({0})".format("unsigned" if unsigned else "signed"))
hashes = env.all_hashes() if env else None
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
push_url = spack.mirror.push_url_from_mirror_url(mirror_url)
- spec_kwargs = {'include_root': True, 'include_dependencies': False}
- kwargs = {
- 'force': True,
- 'allow_root': True,
- 'unsigned': unsigned
- }
+ spec_kwargs = {"include_root": True, "include_dependencies": False}
+ kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
bindist.push(matches, push_url, spec_kwargs, **kwargs)
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
- """ Push one or more binary packages to the mirror.
+ """Push one or more binary packages to the mirror.
Arguments:
@@ -1608,17 +1601,16 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
# Exception
# BaseException
# object
- err_msg = 'Error msg: {0}'.format(inst)
- if any(x in err_msg for x in ['Access Denied', 'InvalidAccessKeyId']):
- tty.msg('Permission problem writing to {0}'.format(
- mirror_url))
+ err_msg = "Error msg: {0}".format(inst)
+ if any(x in err_msg for x in ["Access Denied", "InvalidAccessKeyId"]):
+ tty.msg("Permission problem writing to {0}".format(mirror_url))
tty.msg(err_msg)
else:
raise inst
def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
- """ Looks for spack-build-out.txt in the stage directory of the given
+ """Looks for spack-build-out.txt in the stage directory of the given
job_spec, and attempts to copy the file into the directory given
by job_log_dir.
@@ -1630,23 +1622,24 @@ def copy_stage_logs_to_artifacts(job_spec, job_log_dir):
try:
pkg_cls = spack.repo.path.get_pkg_class(job_spec.name)
job_pkg = pkg_cls(job_spec)
- tty.debug('job package: {0.fullname}'.format(job_pkg))
+ tty.debug("job package: {0.fullname}".format(job_pkg))
stage_dir = job_pkg.stage.path
- tty.debug('stage dir: {0}'.format(stage_dir))
- build_out_src = os.path.join(stage_dir, 'spack-build-out.txt')
- build_out_dst = os.path.join(
- job_log_dir, 'spack-build-out.txt')
- tty.debug('Copying build log ({0}) to artifacts ({1})'.format(
- build_out_src, build_out_dst))
+ tty.debug("stage dir: {0}".format(stage_dir))
+ build_out_src = os.path.join(stage_dir, "spack-build-out.txt")
+ build_out_dst = os.path.join(job_log_dir, "spack-build-out.txt")
+ tty.debug(
+ "Copying build log ({0}) to artifacts ({1})".format(build_out_src, build_out_dst)
+ )
shutil.copyfile(build_out_src, build_out_dst)
except Exception as inst:
- msg = ('Unable to copy build logs from stage to artifacts '
- 'due to exception: {0}').format(inst)
+ msg = (
+ "Unable to copy build logs from stage to artifacts " "due to exception: {0}"
+ ).format(inst)
tty.error(msg)
def download_and_extract_artifacts(url, work_dir):
- """ Look for gitlab artifacts.zip at the given url, and attempt to download
+ """Look for gitlab artifacts.zip at the given url, and attempt to download
and extract the contents into the given work_dir
Arguments:
@@ -1654,35 +1647,34 @@ def download_and_extract_artifacts(url, work_dir):
url (str): Complete url to artifacts.zip file
work_dir (str): Path to destination where artifacts should be extracted
"""
- tty.msg('Fetching artifacts from: {0}\n'.format(url))
+ tty.msg("Fetching artifacts from: {0}\n".format(url))
headers = {
- 'Content-Type': 'application/zip',
+ "Content-Type": "application/zip",
}
- token = os.environ.get('GITLAB_PRIVATE_TOKEN', None)
+ token = os.environ.get("GITLAB_PRIVATE_TOKEN", None)
if token:
- headers['PRIVATE-TOKEN'] = token
+ headers["PRIVATE-TOKEN"] = token
opener = build_opener(HTTPHandler)
request = Request(url, headers=headers)
- request.get_method = lambda: 'GET'
+ request.get_method = lambda: "GET"
response = opener.open(request)
response_code = response.getcode()
if response_code != 200:
- msg = 'Error response code ({0}) in reproduce_ci_job'.format(
- response_code)
+ msg = "Error response code ({0}) in reproduce_ci_job".format(response_code)
raise SpackError(msg)
- artifacts_zip_path = os.path.join(work_dir, 'artifacts.zip')
+ artifacts_zip_path = os.path.join(work_dir, "artifacts.zip")
if not os.path.exists(work_dir):
os.makedirs(work_dir)
- with open(artifacts_zip_path, 'wb') as out_file:
+ with open(artifacts_zip_path, "wb") as out_file:
shutil.copyfileobj(response, out_file)
zip_file = zipfile.ZipFile(artifacts_zip_path)
@@ -1693,24 +1685,22 @@ def download_and_extract_artifacts(url, work_dir):
def get_spack_info():
- """ If spack is running from a git repo, return the most recent git log
- entry, otherwise, return a string containing the spack version. """
+ """If spack is running from a git repo, return the most recent git log
+ entry, otherwise, return a string containing the spack version."""
git_path = os.path.join(spack.paths.prefix, ".git")
if os.path.exists(git_path):
git = exe.which("git")
if git:
with fs.working_dir(spack.paths.prefix):
- git_log = git("log", "-1",
- output=str, error=os.devnull,
- fail_on_error=False)
+ git_log = git("log", "-1", output=str, error=os.devnull, fail_on_error=False)
return git_log
- return 'no git repo, use spack {0}'.format(spack.spack_version)
+ return "no git repo, use spack {0}".format(spack.spack_version)
def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
- """ Look in the local spack clone to find the checkout_commit, and if
+ """Look in the local spack clone to find the checkout_commit, and if
provided, the merge_commit given as arguments. If those commits can
be found locally, then clone spack and attempt to recreate a merge
commit with the same parent commits as tested in gitlab. This looks
@@ -1729,12 +1719,12 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
"""
# figure out the path to the spack git version being used for the
# reproduction
- print('checkout_commit: {0}'.format(checkout_commit))
- print('merge_commit: {0}'.format(merge_commit))
+ print("checkout_commit: {0}".format(checkout_commit))
+ print("merge_commit: {0}".format(merge_commit))
dot_git_path = os.path.join(spack.paths.prefix, ".git")
if not os.path.exists(dot_git_path):
- tty.error('Unable to find the path to your local spack clone')
+ tty.error("Unable to find the path to your local spack clone")
return False
spack_git_path = spack.paths.prefix
@@ -1746,54 +1736,59 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
# Check if we can find the tested commits in your local spack repo
with fs.working_dir(spack_git_path):
- git("log", "-1", checkout_commit, output=str, error=os.devnull,
- fail_on_error=False)
+ git("log", "-1", checkout_commit, output=str, error=os.devnull, fail_on_error=False)
if git.returncode != 0:
- tty.error('Missing commit: {0}'.format(checkout_commit))
+ tty.error("Missing commit: {0}".format(checkout_commit))
return False
if merge_commit:
- git("log", "-1", merge_commit, output=str, error=os.devnull,
- fail_on_error=False)
+ git("log", "-1", merge_commit, output=str, error=os.devnull, fail_on_error=False)
if git.returncode != 0:
- tty.error('Missing commit: {0}'.format(merge_commit))
+ tty.error("Missing commit: {0}".format(merge_commit))
return False
# Next attempt to clone your local spack repo into the repro dir
with fs.working_dir(repro_dir):
- clone_out = git("clone", spack_git_path, "spack",
- output=str, error=os.devnull,
- fail_on_error=False)
+ clone_out = git(
+ "clone", spack_git_path, "spack", output=str, error=os.devnull, fail_on_error=False
+ )
if git.returncode != 0:
- tty.error('Unable to clone your local spack repo:')
+ tty.error("Unable to clone your local spack repo:")
tty.msg(clone_out)
return False
# Finally, attempt to put the cloned repo into the same state used during
# the pipeline build job
- repro_spack_path = os.path.join(repro_dir, 'spack')
+ repro_spack_path = os.path.join(repro_dir, "spack")
with fs.working_dir(repro_spack_path):
- co_out = git("checkout", checkout_commit,
- output=str, error=os.devnull,
- fail_on_error=False)
+ co_out = git(
+ "checkout", checkout_commit, output=str, error=os.devnull, fail_on_error=False
+ )
if git.returncode != 0:
- tty.error('Unable to checkout {0}'.format(checkout_commit))
+ tty.error("Unable to checkout {0}".format(checkout_commit))
tty.msg(co_out)
return False
if merge_commit:
- merge_out = git("-c", "user.name=cirepro", "-c",
- "user.email=user@email.org", "merge",
- "--no-edit", merge_commit,
- output=str, error=os.devnull,
- fail_on_error=False)
+ merge_out = git(
+ "-c",
+ "user.name=cirepro",
+ "-c",
+ "user.email=user@email.org",
+ "merge",
+ "--no-edit",
+ merge_commit,
+ output=str,
+ error=os.devnull,
+ fail_on_error=False,
+ )
if git.returncode != 0:
- tty.error('Unable to merge {0}'.format(merge_commit))
+ tty.error("Unable to merge {0}".format(merge_commit))
tty.msg(merge_out)
return False
@@ -1801,31 +1796,30 @@ def setup_spack_repro_version(repro_dir, checkout_commit, merge_commit=None):
def reproduce_ci_job(url, work_dir):
- """ Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
- attempt to setup an environment in which the failure can be reproduced
- locally. This entails the following:
-
- First download and extract artifacts. Then look through those artifacts
- to glean some information needed for the reproduer (e.g. one of the
- artifacts contains information about the version of spack tested by
- gitlab, another is the generated pipeline yaml containing details
- of the job like the docker image used to run it). The output of this
- function is a set of printed instructions for running docker and then
- commands to run to reproduce the build once inside the container.
+ """Given a url to gitlab artifacts.zip from a failed 'spack ci rebuild' job,
+    attempt to set up an environment in which the failure can be reproduced
+ locally. This entails the following:
+
+ First download and extract artifacts. Then look through those artifacts
+    to glean some information needed for the reproducer (e.g. one of the
+ artifacts contains information about the version of spack tested by
+ gitlab, another is the generated pipeline yaml containing details
+ of the job like the docker image used to run it). The output of this
+ function is a set of printed instructions for running docker and then
+ commands to run to reproduce the build once inside the container.
"""
download_and_extract_artifacts(url, work_dir)
- lock_file = fs.find(work_dir, 'spack.lock')[0]
+ lock_file = fs.find(work_dir, "spack.lock")[0]
concrete_env_dir = os.path.dirname(lock_file)
- tty.debug('Concrete environment directory: {0}'.format(
- concrete_env_dir))
+ tty.debug("Concrete environment directory: {0}".format(concrete_env_dir))
- yaml_files = fs.find(work_dir, ['*.yaml', '*.yml'])
+ yaml_files = fs.find(work_dir, ["*.yaml", "*.yml"])
- tty.debug('yaml files:')
+ tty.debug("yaml files:")
for yaml_file in yaml_files:
- tty.debug(' {0}'.format(yaml_file))
+ tty.debug(" {0}".format(yaml_file))
pipeline_yaml = None
@@ -1836,14 +1830,14 @@ def reproduce_ci_job(url, work_dir):
for yf in yaml_files:
with open(yf) as y_fd:
yaml_obj = syaml.load(y_fd)
- if 'variables' in yaml_obj and 'stages' in yaml_obj:
+ if "variables" in yaml_obj and "stages" in yaml_obj:
pipeline_yaml = yaml_obj
if pipeline_yaml:
- tty.debug('\n{0} is likely your pipeline file'.format(yf))
+ tty.debug("\n{0} is likely your pipeline file".format(yf))
# Find the install script in the unzipped artifacts and make it executable
- install_script = fs.find(work_dir, 'install.sh')[0]
+ install_script = fs.find(work_dir, "install.sh")[0]
st = os.stat(install_script)
os.chmod(install_script, st.st_mode | stat.S_IEXEC)
@@ -1851,40 +1845,40 @@ def reproduce_ci_job(url, work_dir):
# during `spack ci rebuild` to make reproduction easier. E.g. the job
# name is written here so we can easily find the configuration of the
# job from the generated pipeline file.
- repro_file = fs.find(work_dir, 'repro.json')[0]
+ repro_file = fs.find(work_dir, "repro.json")[0]
repro_details = None
with open(repro_file) as fd:
repro_details = json.load(fd)
repro_dir = os.path.dirname(repro_file)
- rel_repro_dir = repro_dir.replace(work_dir, '').lstrip(os.path.sep)
+ rel_repro_dir = repro_dir.replace(work_dir, "").lstrip(os.path.sep)
# Find the spack info text file that should contain the git log
# of the HEAD commit used during the CI build
- spack_info_file = fs.find(work_dir, 'spack_info.txt')[0]
+ spack_info_file = fs.find(work_dir, "spack_info.txt")[0]
with open(spack_info_file) as fd:
spack_info = fd.read()
# Access the specific job configuration
- job_name = repro_details['job_name']
+ job_name = repro_details["job_name"]
job_yaml = None
if job_name in pipeline_yaml:
job_yaml = pipeline_yaml[job_name]
if job_yaml:
- tty.debug('Found job:')
+ tty.debug("Found job:")
tty.debug(job_yaml)
job_image = None
setup_result = False
- if 'image' in job_yaml:
- job_image_elt = job_yaml['image']
- if 'name' in job_image_elt:
- job_image = job_image_elt['name']
+ if "image" in job_yaml:
+ job_image_elt = job_yaml["image"]
+ if "name" in job_image_elt:
+ job_image = job_image_elt["name"]
else:
job_image = job_image_elt
- tty.msg('Job ran with the following image: {0}'.format(job_image))
+ tty.msg("Job ran with the following image: {0}".format(job_image))
        # Because this job was run with a docker image, we will try
# to print a "docker run" command that bind-mounts the directory where
@@ -1893,9 +1887,9 @@ def reproduce_ci_job(url, work_dir):
# Destination of bind-mounted reproduction directory. It makes for a
# more faithful reproducer if everything appears to run in the same
# absolute path used during the CI build.
- mount_as_dir = '/work'
+ mount_as_dir = "/work"
if repro_details:
- mount_as_dir = repro_details['ci_project_dir']
+ mount_as_dir = repro_details["ci_project_dir"]
mounted_repro_dir = os.path.join(mount_as_dir, rel_repro_dir)
# We will also try to clone spack from your local checkout and
@@ -1927,8 +1921,7 @@ def reproduce_ci_job(url, work_dir):
setup_result = False
if commit_1:
if commit_2:
- setup_result = setup_spack_repro_version(
- work_dir, commit_2, merge_commit=commit_1)
+ setup_result = setup_spack_repro_version(work_dir, commit_2, merge_commit=commit_1)
else:
setup_result = setup_spack_repro_version(work_dir, commit_1)
@@ -1943,8 +1936,10 @@ def reproduce_ci_job(url, work_dir):
Alternatively, you can also manually clone spack if you know the version
you want to test.
"""
- tty.error('Failed to automatically setup the tested version of spack '
- 'in your local reproduction directory.')
+ tty.error(
+        "Failed to automatically set up the tested version of spack "
+ "in your local reproduction directory."
+ )
print(setup_msg)
    # In cases where the CI build was run on a shell runner, it might be useful
@@ -1952,37 +1947,41 @@ def reproduce_ci_job(url, work_dir):
# runner was used. But in that case in general, we cannot do nearly as
# much to set up the reproducer.
job_tags = None
- if 'tags' in job_yaml:
- job_tags = job_yaml['tags']
- tty.msg('Job ran with the following tags: {0}'.format(job_tags))
+ if "tags" in job_yaml:
+ job_tags = job_yaml["tags"]
+ tty.msg("Job ran with the following tags: {0}".format(job_tags))
inst_list = []
# Finally, print out some instructions to reproduce the build
if job_image:
- inst_list.append('\nRun the following command:\n\n')
- inst_list.append(' $ docker run --rm -v {0}:{1} -ti {2}\n'.format(
- work_dir, mount_as_dir, job_image))
- inst_list.append('\nOnce inside the container:\n\n')
+ inst_list.append("\nRun the following command:\n\n")
+ inst_list.append(
+ " $ docker run --rm -v {0}:{1} -ti {2}\n".format(work_dir, mount_as_dir, job_image)
+ )
+ inst_list.append("\nOnce inside the container:\n\n")
else:
- inst_list.append('\nOnce on the tagged runner:\n\n')
+ inst_list.append("\nOnce on the tagged runner:\n\n")
if not setup_result:
- inst_list.append(' - Clone spack and acquire tested commit\n')
- inst_list.append('{0}'.format(spack_info))
- spack_root = '<spack-clone-path>'
+ inst_list.append(" - Clone spack and acquire tested commit\n")
+ inst_list.append("{0}".format(spack_info))
+ spack_root = "<spack-clone-path>"
else:
- spack_root = '{0}/spack'.format(mount_as_dir)
+ spack_root = "{0}/spack".format(mount_as_dir)
- inst_list.append(' - Activate the environment\n\n')
- inst_list.append(' $ source {0}/share/spack/setup-env.sh\n'.format(
- spack_root))
+ inst_list.append(" - Activate the environment\n\n")
+ inst_list.append(" $ source {0}/share/spack/setup-env.sh\n".format(spack_root))
+ inst_list.append(
+ " $ spack env activate --without-view {0}\n\n".format(
+ mounted_repro_dir if job_image else repro_dir
+ )
+ )
+ inst_list.append(" - Run the install script\n\n")
inst_list.append(
- ' $ spack env activate --without-view {0}\n\n'.format(
- mounted_repro_dir if job_image else repro_dir))
- inst_list.append(' - Run the install script\n\n')
- inst_list.append(' $ {0}\n'.format(
- os.path.join(mounted_repro_dir, 'install.sh')
- if job_image else install_script))
-
- print(''.join(inst_list))
+ " $ {0}\n".format(
+ os.path.join(mounted_repro_dir, "install.sh") if job_image else install_script
+ )
+ )
+
+ print("".join(inst_list))
diff --git a/lib/spack/spack/ci_needs_workaround.py b/lib/spack/spack/ci_needs_workaround.py
index ae4cf33cd3..16f18db0a0 100644
--- a/lib/spack/spack/ci_needs_workaround.py
+++ b/lib/spack/spack/ci_needs_workaround.py
@@ -6,32 +6,29 @@
from llnl.util.compat import Mapping
get_job_name = lambda needs_entry: (
- needs_entry.get('job') if (
- isinstance(needs_entry, Mapping) and
- needs_entry.get('artifacts', True))
-
- else
-
- needs_entry if isinstance(needs_entry, str)
-
- else None)
+ needs_entry.get("job")
+ if (isinstance(needs_entry, Mapping) and needs_entry.get("artifacts", True))
+ else needs_entry
+ if isinstance(needs_entry, str)
+ else None
+)
def convert_job(job_entry):
if not isinstance(job_entry, Mapping):
return job_entry
- needs = job_entry.get('needs')
+ needs = job_entry.get("needs")
if needs is None:
return job_entry
new_job = {}
new_job.update(job_entry)
- del new_job['needs']
+ del new_job["needs"]
- new_job['dependencies'] = list(filter(
- (lambda x: x is not None),
- (get_job_name(needs_entry) for needs_entry in needs)))
+ new_job["dependencies"] = list(
+ filter((lambda x: x is not None), (get_job_name(needs_entry) for needs_entry in needs))
+ )
return new_job
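
The nested conditional expression in get_job_name is dense; the following standalone sketch walks the same needs -> dependencies rewrite that convert_job performs, using plain dicts in place of llnl.util.compat.Mapping. The sample job and the helper name needs_to_dependencies are invented for illustration, not part of the patch.

# Illustrative sketch only (not part of the patch): the rewrite convert_job applies,
# shown on an invented sample job.
job = {
    "script": ["spack install"],
    "needs": [
        "build-gcc",                                # plain string entry, kept as-is
        {"job": "build-cmake", "artifacts": True},  # mapping entry with artifacts, kept
        {"job": "build-perl", "artifacts": False},  # artifacts disabled, dropped
    ],
}

def needs_to_dependencies(job):
    names = []
    for entry in job.get("needs", []):
        if isinstance(entry, dict) and entry.get("artifacts", True):
            names.append(entry.get("job"))
        elif isinstance(entry, str):
            names.append(entry)
    new_job = {key: val for key, val in job.items() if key != "needs"}
    new_job["dependencies"] = [name for name in names if name is not None]
    return new_job

print(needs_to_dependencies(job)["dependencies"])  # ['build-gcc', 'build-cmake']

Entries whose artifacts are disabled yield None and are filtered out, which is exactly what the lambda plus filter() combination above does.
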
diff --git a/lib/spack/spack/ci_optimization.py b/lib/spack/spack/ci_optimization.py
index c50b2bfb1b..f4f05f0acb 100644
--- a/lib/spack/spack/ci_optimization.py
+++ b/lib/spack/spack/ci_optimization.py
@@ -15,9 +15,8 @@ import spack.util.spack_yaml as syaml
def sort_yaml_obj(obj):
if isinstance(obj, Mapping):
return syaml.syaml_dict(
- (k, sort_yaml_obj(v))
- for k, v in
- sorted(obj.items(), key=(lambda item: str(item[0]))))
+ (k, sort_yaml_obj(v)) for k, v in sorted(obj.items(), key=(lambda item: str(item[0])))
+ )
if isinstance(obj, Sequence) and not isinstance(obj, str):
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)
@@ -43,25 +42,17 @@ def matches(obj, proto):
if not isinstance(proto, Mapping):
return False
- return all(
- (key in obj and matches(obj[key], val))
- for key, val in proto.items()
- )
+ return all((key in obj and matches(obj[key], val)) for key, val in proto.items())
- if (isinstance(obj, Sequence) and
- not isinstance(obj, str)):
+ if isinstance(obj, Sequence) and not isinstance(obj, str):
- if not (isinstance(proto, Sequence) and
- not isinstance(proto, str)):
+ if not (isinstance(proto, Sequence) and not isinstance(proto, str)):
return False
if len(obj) != len(proto):
return False
- return all(
- matches(obj[index], val)
- for index, val in enumerate(proto)
- )
+ return all(matches(obj[index], val) for index, val in enumerate(proto))
return obj == proto
@@ -85,8 +76,7 @@ def subkeys(obj, proto):
Otherwise, obj is returned.
"""
- if not (isinstance(obj, Mapping) and
- isinstance(proto, Mapping)):
+ if not (isinstance(obj, Mapping) and isinstance(proto, Mapping)):
return obj
new_obj = {}
@@ -95,8 +85,7 @@ def subkeys(obj, proto):
new_obj[key] = value
continue
- if (matches(value, proto[key]) and
- matches(proto[key], value)):
+ if matches(value, proto[key]) and matches(proto[key], value):
continue
if isinstance(value, Mapping):
@@ -124,19 +113,19 @@ def add_extends(yaml, key):
Otherwise, yaml is left unchanged.
"""
- has_key = ('extends' in yaml)
- extends = yaml.get('extends')
+ has_key = "extends" in yaml
+ extends = yaml.get("extends")
if has_key and not isinstance(extends, (str, Sequence)):
return
if extends is None:
- yaml['extends'] = key
+ yaml["extends"] = key
return
if isinstance(extends, str):
if extends != key:
- yaml['extends'] = [extends, key]
+ yaml["extends"] = [extends, key]
return
if key not in extends:
@@ -164,11 +153,11 @@ def common_subobject(yaml, sub):
if not match_list:
return yaml, None
- common_prefix = '.c'
+ common_prefix = ".c"
common_index = 0
while True:
- common_key = ''.join((common_prefix, str(common_index)))
+ common_key = "".join((common_prefix, str(common_index)))
if common_key not in yaml:
break
common_index += 1
@@ -195,22 +184,18 @@ def print_delta(name, old, new, applied=None):
reldelta = (reldelta // 10, reldelta % 10)
if applied is None:
- applied = (new <= old)
-
- print('\n'.join((
- '{0} {1}:',
- ' before: {2: 10d}',
- ' after : {3: 10d}',
- ' delta : {4:+10d} ({5:=+3d}.{6}%)',
- )).format(
- name,
- ('+' if applied else 'x'),
- old,
- new,
- delta,
- reldelta[0],
- reldelta[1]
- ))
+ applied = new <= old
+
+ print(
+ "\n".join(
+ (
+ "{0} {1}:",
+ " before: {2: 10d}",
+ " after : {3: 10d}",
+ " delta : {4:+10d} ({5:=+3d}.{6}%)",
+ )
+ ).format(name, ("+" if applied else "x"), old, new, delta, reldelta[0], reldelta[1])
+ )
def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
@@ -243,13 +228,11 @@ def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
# pass was not applied
return (yaml, new_yaml, False, other_results)
- pre_size = len(syaml.dump_config(
- sort_yaml_obj(yaml), default_flow_style=True))
- post_size = len(syaml.dump_config(
- sort_yaml_obj(new_yaml), default_flow_style=True))
+ pre_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
+ post_size = len(syaml.dump_config(sort_yaml_obj(new_yaml), default_flow_style=True))
# pass makes the size worse: not applying
- applied = (post_size <= pre_size)
+ applied = post_size <= pre_size
if applied:
yaml, new_yaml = new_yaml, yaml
@@ -297,69 +280,62 @@ def build_histogram(iterator, key):
buckets[value_hash] += 1
values[value_hash] = val
- return [(h, buckets[h], float(buckets[h]) / num_objects, values[h])
- for h in sorted(buckets.keys(), key=lambda k: -buckets[k])]
+ return [
+ (h, buckets[h], float(buckets[h]) / num_objects, values[h])
+ for h in sorted(buckets.keys(), key=lambda k: -buckets[k])
+ ]
def optimizer(yaml):
- original_size = len(syaml.dump_config(
- sort_yaml_obj(yaml), default_flow_style=True))
+ original_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
# try factoring out commonly repeated portions
common_job = {
- 'variables': {
- 'SPACK_COMPILER_ACTION': 'NONE'
- },
-
- 'after_script': ['rm -rf "./spack"'],
-
- 'artifacts': {
- 'paths': ['jobs_scratch_dir', 'cdash_report'],
- 'when': 'always'
- },
+ "variables": {"SPACK_COMPILER_ACTION": "NONE"},
+ "after_script": ['rm -rf "./spack"'],
+ "artifacts": {"paths": ["jobs_scratch_dir", "cdash_report"], "when": "always"},
}
# look for a list of tags that appear frequently
- _, count, proportion, tags = next(iter(
- build_histogram(yaml.values(), 'tags')),
- (None,) * 4)
+ _, count, proportion, tags = next(iter(build_histogram(yaml.values(), "tags")), (None,) * 4)
    # If a list of tags is found, and more than one job uses it,
# *and* the jobs that do use it represent at least 70% of all jobs, then
# add the list to the prototype object.
if tags and count > 1 and proportion >= 0.70:
- common_job['tags'] = tags
+ common_job["tags"] = tags
# apply common object factorization
yaml, other, applied, rest = try_optimization_pass(
- 'general common object factorization',
- yaml, common_subobject, common_job)
+ "general common object factorization", yaml, common_subobject, common_job
+ )
# look for a common script, and try factoring that out
- _, count, proportion, script = next(iter(
- build_histogram(yaml.values(), 'script')),
- (None,) * 4)
+ _, count, proportion, script = next(
+ iter(build_histogram(yaml.values(), "script")), (None,) * 4
+ )
if script and count > 1 and proportion >= 0.70:
yaml, other, applied, rest = try_optimization_pass(
- 'script factorization',
- yaml, common_subobject, {'script': script})
+ "script factorization", yaml, common_subobject, {"script": script}
+ )
# look for a common before_script, and try factoring that out
- _, count, proportion, script = next(iter(
- build_histogram(yaml.values(), 'before_script')),
- (None,) * 4)
+ _, count, proportion, script = next(
+ iter(build_histogram(yaml.values(), "before_script")), (None,) * 4
+ )
if script and count > 1 and proportion >= 0.70:
yaml, other, applied, rest = try_optimization_pass(
- 'before_script factorization',
- yaml, common_subobject, {'before_script': script})
+ "before_script factorization", yaml, common_subobject, {"before_script": script}
+ )
# Look specifically for the SPACK_ROOT_SPEC environment variables.
# Try to factor them out.
- h = build_histogram((
- getattr(val, 'get', lambda *args: {})('variables')
- for val in yaml.values()), 'SPACK_ROOT_SPEC')
+ h = build_histogram(
+ (getattr(val, "get", lambda *args: {})("variables") for val in yaml.values()),
+ "SPACK_ROOT_SPEC",
+ )
# In this case, we try to factor out *all* instances of the SPACK_ROOT_SPEC
# environment variable; not just the one that appears with the greatest
@@ -374,15 +350,15 @@ def optimizer(yaml):
counter += 1
yaml, other, applied, rest = try_optimization_pass(
- 'SPACK_ROOT_SPEC factorization ({count})'.format(count=counter),
+ "SPACK_ROOT_SPEC factorization ({count})".format(count=counter),
yaml,
common_subobject,
- {'variables': {'SPACK_ROOT_SPEC': spec}})
+ {"variables": {"SPACK_ROOT_SPEC": spec}},
+ )
- new_size = len(syaml.dump_config(
- sort_yaml_obj(yaml), default_flow_style=True))
+ new_size = len(syaml.dump_config(sort_yaml_obj(yaml), default_flow_style=True))
- print('\n')
- print_delta('overall summary', original_size, new_size)
- print('\n')
+ print("\n")
+ print_delta("overall summary", original_size, new_size)
+ print("\n")
return yaml
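
The factorization passes above hinge on a frequency test: a key's value is hoisted into the common prototype job only when it repeats across enough jobs. Below is a simplified, self-contained version of that test; the job definitions are invented, plain dicts stand in for syaml objects, and canonical JSON serves as the value hash instead of build_histogram's internals.

# Illustrative sketch only (not part of the patch).
import json
from collections import Counter

jobs = {
    "job-a": {"tags": ["large"], "script": ["make"]},
    "job-b": {"tags": ["large"], "script": ["make check"]},
    "job-c": {"tags": ["large"], "script": ["make install"]},
    "job-d": {"tags": ["small"], "script": ["make"]},
}

def most_common_value(objs, key):
    # Hash each value by its canonical JSON form so unhashable lists/dicts can be counted.
    counts = Counter(json.dumps(obj.get(key), sort_keys=True) for obj in objs if key in obj)
    if not counts:
        return None, 0, 0.0
    encoded, count = counts.most_common(1)[0]
    return json.loads(encoded), count, count / len(objs)

objs = list(jobs.values())
tags, count, proportion = most_common_value(objs, "tags")
if tags and count > 1 and proportion >= 0.70:   # same 70% threshold used by optimizer()
    print("factor out tags:", tags)             # factor out tags: ['large']

Here three of the four jobs share the same tag list, so the 0.70 threshold is met and the value would be moved into the shared prototype job.
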
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 608db2776b..49e5c70019 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -38,7 +38,7 @@ import spack.util.string
python_list = list
# Patterns to ignore in the commands directory when looking for commands.
-ignore_files = r'^\.|^__init__.py$|^#'
+ignore_files = r"^\.|^__init__.py$|^#"
SETUP_PARSER = "setup_parser"
DESCRIPTION = "description"
@@ -59,7 +59,7 @@ def require_python_name(pname):
def cmd_name(python_name):
"""Convert module name (with ``_``) to command name (with ``-``)."""
- return python_name.replace('_', '-')
+ return python_name.replace("_", "-")
def require_cmd_name(cname):
@@ -90,7 +90,7 @@ def all_commands():
for path in command_paths:
for file in os.listdir(path):
if file.endswith(".py") and not re.search(ignore_files, file):
- cmd = re.sub(r'.py$', '', file)
+ cmd = re.sub(r".py$", "", file)
_all_commands.append(cmd_name(cmd))
_all_commands.sort()
@@ -102,7 +102,7 @@ def remove_options(parser, *options):
"""Remove some options from a parser."""
for option in options:
for action in parser._actions:
- if vars(action)['option_strings'][0] == option:
+ if vars(action)["option_strings"][0] == option:
parser._handle_conflict_resolve(None, [(option, action)])
break
@@ -120,10 +120,8 @@ def get_module(cmd_name):
try:
# Try to import the command from the built-in directory
module_name = "%s.%s" % (__name__, pname)
- module = __import__(module_name,
- fromlist=[pname, SETUP_PARSER, DESCRIPTION],
- level=0)
- tty.debug('Imported {0} from built-in commands'.format(pname))
+ module = __import__(module_name, fromlist=[pname, SETUP_PARSER, DESCRIPTION], level=0)
+ tty.debug("Imported {0} from built-in commands".format(pname))
except ImportError:
module = spack.extensions.get_module(cmd_name)
@@ -131,8 +129,10 @@ def get_module(cmd_name):
attr_setdefault(module, DESCRIPTION, "")
if not hasattr(module, pname):
- tty.die("Command module %s (%s) must define function '%s'." %
- (module.__name__, module.__file__, pname))
+ tty.die(
+ "Command module %s (%s) must define function '%s'."
+ % (module.__name__, module.__file__, pname)
+ )
return module
@@ -161,8 +161,9 @@ class _UnquotedFlags(object):
flags_arg_pattern = re.compile(
r'^({0})=([^\'"].*)$'.format(
- '|'.join(spack.spec.FlagMap.valid_compiler_flags()),
- ))
+ "|".join(spack.spec.FlagMap.valid_compiler_flags()),
+ )
+ )
def __init__(self, all_unquoted_flag_pairs):
# type: (List[Tuple[re.Match, str]]) -> None
@@ -186,33 +187,37 @@ class _UnquotedFlags(object):
def report(self):
# type: () -> str
single_errors = [
- '({0}) {1} {2} => {3}'.format(
- i + 1, match.group(0), next_arg,
+ "({0}) {1} {2} => {3}".format(
+ i + 1,
+ match.group(0),
+ next_arg,
'{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg),
)
for i, (match, next_arg) in enumerate(self._flag_pairs)
]
- return dedent("""\
+ return dedent(
+ """\
Some compiler or linker flags were provided without quoting their arguments,
which now causes spack to try to parse the *next* argument as a spec component
such as a variant instead of an additional compiler or linker flag. If the
intent was to set multiple flags, try quoting them together as described below.
Possible flag quotation errors (with the correctly-quoted version after the =>):
- {0}""").format('\n'.join(single_errors))
+ {0}"""
+ ).format("\n".join(single_errors))
def parse_specs(args, **kwargs):
"""Convenience function for parsing arguments from specs. Handles common
- exceptions and dies if there are errors.
+ exceptions and dies if there are errors.
"""
- concretize = kwargs.get('concretize', False)
- normalize = kwargs.get('normalize', False)
- tests = kwargs.get('tests', False)
+ concretize = kwargs.get("concretize", False)
+ normalize = kwargs.get("normalize", False)
+ tests = kwargs.get("tests", False)
sargs = args
if not isinstance(args, six.string_types):
- sargs = ' '.join(args)
+ sargs = " ".join(args)
unquoted_flags = _UnquotedFlags.extract(sargs)
try:
@@ -230,7 +235,7 @@ def parse_specs(args, **kwargs):
if e.long_message:
msg += e.long_message
if unquoted_flags:
- msg += '\n\n'
+ msg += "\n\n"
msg += unquoted_flags.report()
raise spack.error.SpackError(msg)
@@ -265,8 +270,7 @@ def disambiguate_spec(spec, env, local=False, installed=True, first=False):
return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
-def disambiguate_spec_from_hashes(spec, hashes, local=False,
- installed=True, first=False):
+def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True, first=False):
"""Given a spec and a list of hashes, get concrete spec the spec refers to.
Arguments:
@@ -278,11 +282,9 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False,
See ``spack.database.Database._query`` for details.
"""
if local:
- matching_specs = spack.store.db.query_local(spec, hashes=hashes,
- installed=installed)
+ matching_specs = spack.store.db.query_local(spec, hashes=hashes, installed=installed)
else:
- matching_specs = spack.store.db.query(spec, hashes=hashes,
- installed=installed)
+ matching_specs = spack.store.db.query(spec, hashes=hashes, installed=installed)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
@@ -290,11 +292,12 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False,
return matching_specs[0]
elif len(matching_specs) > 1:
- format_string = '{name}{@version}{%compiler}{arch=architecture}'
- args = ["%s matches multiple packages." % spec,
- "Matching packages:"]
- args += [colorize(" @K{%s} " % s.dag_hash(7)) +
- s.cformat(format_string) for s in matching_specs]
+ format_string = "{name}{@version}{%compiler}{arch=architecture}"
+ args = ["%s matches multiple packages." % spec, "Matching packages:"]
+ args += [
+ colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(format_string)
+ for s in matching_specs
+ ]
args += ["Use a more specific spec."]
tty.die(*args)
@@ -305,8 +308,8 @@ def gray_hash(spec, length):
if not length:
# default to maximum hash length
length = 32
- h = spec.dag_hash(length) if spec.concrete else '-' * length
- return colorize('@K{%s}' % h)
+ h = spec.dag_hash(length) if spec.concrete else "-" * length
+ return colorize("@K{%s}" % h)
def display_specs_as_json(specs, deps=False):
@@ -334,8 +337,8 @@ def display_specs_as_json(specs, deps=False):
def iter_groups(specs, indent, all_headers):
"""Break a list of specs into groups indexed by arch/compiler."""
# Make a dict with specs keyed by architecture and compiler.
- index = index_by(specs, ('architecture', 'compiler'))
- ispace = indent * ' '
+ index = index_by(specs, ("architecture", "compiler"))
+ ispace = indent * " "
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
@@ -344,16 +347,17 @@ def iter_groups(specs, indent, all_headers):
header = "%s{%s} / %s{%s}" % (
spack.spec.architecture_color,
- architecture if architecture else 'no arch',
+ architecture if architecture else "no arch",
spack.spec.compiler_color,
- compiler if compiler else 'no compiler')
+ compiler if compiler else "no compiler",
+ )
# Sometimes we want to display specs that are not yet concretized.
# If they don't have a compiler / architecture attached to them,
# then skip the header
if all_headers or (architecture is not None or compiler is not None):
sys.stdout.write(ispace)
- tty.hline(colorize(header), char='-')
+ tty.hline(colorize(header), char="-")
specs = index[(architecture, compiler)]
specs.sort()
@@ -394,6 +398,7 @@ def display_specs(specs, args=None, **kwargs):
output (typing.IO): A file object to write to. Default is ``sys.stdout``
"""
+
def get_arg(name, default=None):
"""Prefer kwargs, then args, then default."""
if name in kwargs:
@@ -403,47 +408,47 @@ def display_specs(specs, args=None, **kwargs):
else:
return default
- paths = get_arg('paths', False)
- deps = get_arg('deps', False)
- hashes = get_arg('long', False)
- namespace = get_arg('namespace', False)
- flags = get_arg('show_flags', False)
- full_compiler = get_arg('show_full_compiler', False)
- variants = get_arg('variants', False)
- groups = get_arg('groups', True)
- all_headers = get_arg('all_headers', False)
- output = get_arg('output', sys.stdout)
-
- decorator = get_arg('decorator', None)
+ paths = get_arg("paths", False)
+ deps = get_arg("deps", False)
+ hashes = get_arg("long", False)
+ namespace = get_arg("namespace", False)
+ flags = get_arg("show_flags", False)
+ full_compiler = get_arg("show_full_compiler", False)
+ variants = get_arg("variants", False)
+ groups = get_arg("groups", True)
+ all_headers = get_arg("all_headers", False)
+ output = get_arg("output", sys.stdout)
+
+ decorator = get_arg("decorator", None)
if decorator is None:
decorator = lambda s, f: f
- indent = get_arg('indent', 0)
+ indent = get_arg("indent", 0)
hlen = 7
- if get_arg('very_long', False):
+ if get_arg("very_long", False):
hashes = True
hlen = None
- format_string = get_arg('format', None)
+ format_string = get_arg("format", None)
if format_string is None:
- nfmt = '{fullname}' if namespace else '{name}'
- ffmt = ''
+ nfmt = "{fullname}" if namespace else "{name}"
+ ffmt = ""
if full_compiler or flags:
- ffmt += '{%compiler.name}'
+ ffmt += "{%compiler.name}"
if full_compiler:
- ffmt += '{@compiler.version}'
- ffmt += ' {compiler_flags}'
- vfmt = '{variants}' if variants else ''
- format_string = nfmt + '{@version}' + ffmt + vfmt
+ ffmt += "{@compiler.version}"
+ ffmt += " {compiler_flags}"
+ vfmt = "{variants}" if variants else ""
+ format_string = nfmt + "{@version}" + ffmt + vfmt
- transform = {'package': decorator, 'fullpackage': decorator}
+ transform = {"package": decorator, "fullpackage": decorator}
def fmt(s, depth=0):
"""Formatter function for all output specs"""
string = ""
if hashes:
- string += gray_hash(s, hlen) + ' '
+ string += gray_hash(s, hlen) + " "
string += depth * " "
string += s.cformat(format_string, transform=transform)
return string
@@ -457,35 +462,35 @@ def display_specs(specs, args=None, **kwargs):
if deps:
for depth, dep in spec.traverse(root=False, depth=True):
formatted.append((fmt(dep, depth), dep))
- formatted.append(('', None)) # mark newlines
+ formatted.append(("", None)) # mark newlines
# unless any of these are set, we can just colify and be done.
if not any((deps, paths)):
colify((f[0] for f in formatted), indent=indent, output=output)
- return ''
+ return ""
# otherwise, we'll print specs one by one
max_width = max(len(f[0]) for f in formatted)
path_fmt = "%%-%ds%%s" % (max_width + 2)
- out = ''
+ out = ""
# getting lots of prefixes requires DB lookups. Ensure
# all spec.prefix calls are in one transaction.
with spack.store.db.read_transaction():
for string, spec in formatted:
if not string:
# print newline from above
- out += '\n'
+ out += "\n"
continue
if paths:
- out += path_fmt % (string, spec.prefix) + '\n'
+ out += path_fmt % (string, spec.prefix) + "\n"
else:
- out += string + '\n'
+ out += string + "\n"
return out
- out = ''
+ out = ""
if groups:
for specs in iter_groups(specs, indent, all_headers):
output.write(format_list(specs))
@@ -499,7 +504,7 @@ def display_specs(specs, args=None, **kwargs):
def filter_loaded_specs(specs):
"""Filter a list of specs returning only those that are
currently loaded."""
- hashes = os.environ.get(uenv.spack_loaded_hashes_var, '').split(':')
+ hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
return [x for x in specs if x.dag_hash() in hashes]
@@ -514,8 +519,7 @@ def print_how_many_pkgs(specs, pkg_type=""):
category, e.g. if pkg_type is "installed" then the message
would be "3 installed packages"
"""
- tty.msg("%s" % spack.util.string.plural(
- len(specs), pkg_type + " package"))
+ tty.msg("%s" % spack.util.string.plural(len(specs), pkg_type + " package"))
def spack_is_git_repo():
@@ -524,7 +528,7 @@ def spack_is_git_repo():
def is_git_repo(path):
- dotgit_path = join_path(path, '.git')
+ dotgit_path = join_path(path, ".git")
if os.path.isdir(dotgit_path):
# we are in a regular git repo
return True
@@ -541,18 +545,20 @@ def is_git_repo(path):
class PythonNameError(spack.error.SpackError):
"""Exception class thrown for impermissible python names"""
+
def __init__(self, name):
self.name = name
- super(PythonNameError, self).__init__(
- '{0} is not a permissible Python name.'.format(name))
+ super(PythonNameError, self).__init__("{0} is not a permissible Python name.".format(name))
class CommandNameError(spack.error.SpackError):
"""Exception class thrown for impermissible command names"""
+
def __init__(self, name):
self.name = name
super(CommandNameError, self).__init__(
- '{0} is not a permissible Spack command name.'.format(name))
+ "{0} is not a permissible Spack command name.".format(name)
+ )
########################################
@@ -563,7 +569,7 @@ def extant_file(f):
Argparse type for files that exist.
"""
if not os.path.isfile(f):
- raise argparse.ArgumentTypeError('%s does not exist' % f)
+ raise argparse.ArgumentTypeError("%s does not exist" % f)
return f
@@ -585,11 +591,12 @@ def require_active_env(cmd_name):
return env
else:
tty.die(
- '`spack %s` requires an environment' % cmd_name,
- 'activate an environment first:',
- ' spack env activate ENV',
- 'or use:',
- ' spack -e ENV %s ...' % cmd_name)
+ "`spack %s` requires an environment" % cmd_name,
+ "activate an environment first:",
+ " spack env activate ENV",
+ "or use:",
+ " spack -e ENV %s ..." % cmd_name,
+ )
def find_environment(args):
@@ -632,4 +639,4 @@ def find_environment(args):
if ev.is_env_dir(env):
return ev.Environment(env)
- raise ev.SpackEnvironmentError('no environment in %s' % env)
+ raise ev.SpackEnvironmentError("no environment in %s" % env)
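
The _UnquotedFlags helper above exists because an unquoted flag assignment such as cflags=-O3 -g makes the parser treat -g as a separate spec token. A toy reproduction of the detection follows; the hard-coded VALID_FLAGS list is an assumed stand-in for spack.spec.FlagMap.valid_compiler_flags(), and the argument list is invented.

# Illustrative sketch only (not part of the patch).
import re

VALID_FLAGS = ["cflags", "cxxflags", "fflags", "cppflags", "ldflags", "ldlibs"]  # assumed list
pattern = re.compile(r'^({0})=([^\'"].*)$'.format("|".join(VALID_FLAGS)))

args = ["hdf5", "cflags=-O3", "-g"]  # the user meant: hdf5 cflags="-O3 -g"
for arg, next_arg in zip(args, args[1:]):
    match = pattern.match(arg)
    if match:
        suggestion = '{0}="{1} {2}"'.format(match.group(1), match.group(2), next_arg)
        print("possible unquoted flags:", arg, next_arg, "=>", suggestion)

Running this prints the same kind of corrected-quoting hint that report() assembles for the error message.
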
diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py
index 65c0d8d0be..bfb0de5528 100644
--- a/lib/spack/spack/cmd/activate.py
+++ b/lib/spack/spack/cmd/activate.py
@@ -17,18 +17,17 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-f', '--force', action='store_true',
- help="activate without first activating dependencies")
- subparser.add_argument(
- '-v', '--view', metavar='VIEW', type=str,
- help="the view to operate on")
- arguments.add_common_arguments(subparser, ['installed_spec'])
+ "-f", "--force", action="store_true", help="activate without first activating dependencies"
+ )
+ subparser.add_argument("-v", "--view", metavar="VIEW", type=str, help="the view to operate on")
+ arguments.add_common_arguments(subparser, ["installed_spec"])
def activate(parser, args):
- tty.warn("spack activate is deprecated in favor of "
- "environments and will be removed in v0.19.0")
+ tty.warn(
+ "spack activate is deprecated in favor of " "environments and will be removed in v0.19.0"
+ )
specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
diff --git a/lib/spack/spack/cmd/add.py b/lib/spack/spack/cmd/add.py
index 96f8420649..39c9c9e535 100644
--- a/lib/spack/spack/cmd/add.py
+++ b/lib/spack/spack/cmd/add.py
@@ -8,26 +8,29 @@ import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
-description = 'add a spec to an environment'
+description = "add a spec to an environment"
section = "environments"
level = "long"
def setup_parser(subparser):
- subparser.add_argument('-l', '--list-name',
- dest='list_name', default='specs',
- help="name of the list to add specs to")
- arguments.add_common_arguments(subparser, ['specs'])
+ subparser.add_argument(
+ "-l",
+ "--list-name",
+ dest="list_name",
+ default="specs",
+ help="name of the list to add specs to",
+ )
+ arguments.add_common_arguments(subparser, ["specs"])
def add(parser, args):
- env = spack.cmd.require_active_env(cmd_name='add')
+ env = spack.cmd.require_active_env(cmd_name="add")
with env.write_transaction():
for spec in spack.cmd.parse_specs(args.specs):
if not env.add(spec, args.list_name):
- tty.msg("Package {0} was already added to {1}"
- .format(spec.name, env.name))
+ tty.msg("Package {0} was already added to {1}".format(spec.name, env.name))
else:
- tty.msg('Adding %s to environment %s' % (spec, env.name))
+ tty.msg("Adding %s to environment %s" % (spec, env.name))
env.write()
diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py
index 289f13de5c..af1a31c308 100644
--- a/lib/spack/spack/cmd/arch.py
+++ b/lib/spack/spack/cmd/arch.py
@@ -21,30 +21,32 @@ level = "short"
def setup_parser(subparser):
subparser.add_argument(
- '-g', '--generic-target', action='store_true',
- help='show the best generic target'
+ "-g", "--generic-target", action="store_true", help="show the best generic target"
)
subparser.add_argument(
- '--known-targets', action='store_true',
- help='show a list of all known targets and exit'
+ "--known-targets", action="store_true", help="show a list of all known targets and exit"
)
parts = subparser.add_mutually_exclusive_group()
parts2 = subparser.add_mutually_exclusive_group()
parts.add_argument(
- '-p', '--platform', action='store_true', default=False,
- help='print only the platform')
+ "-p", "--platform", action="store_true", default=False, help="print only the platform"
+ )
parts.add_argument(
- '-o', '--operating-system', action='store_true', default=False,
- help='print only the operating system')
+ "-o",
+ "--operating-system",
+ action="store_true",
+ default=False,
+ help="print only the operating system",
+ )
parts.add_argument(
- '-t', '--target', action='store_true', default=False,
- help='print only the target')
+ "-t", "--target", action="store_true", default=False, help="print only the target"
+ )
parts2.add_argument(
- '-f', '--frontend', action='store_true', default=False,
- help='print frontend')
+ "-f", "--frontend", action="store_true", default=False, help="print frontend"
+ )
parts2.add_argument(
- '-b', '--backend', action='store_true', default=False,
- help='print backend')
+ "-b", "--backend", action="store_true", default=False, help="print backend"
+ )
def display_targets(targets):
@@ -56,11 +58,11 @@ def display_targets(targets):
def display_target_group(header, target_group):
print(header)
colify.colify(target_group, indent=4)
- print('')
+ print("")
- generic_architectures = by_vendor.pop('generic', None)
+ generic_architectures = by_vendor.pop("generic", None)
if generic_architectures:
- header = color.colorize(r'@*B{Generic architectures (families)}')
+ header = color.colorize(r"@*B{Generic architectures (families)}")
group = sorted(generic_architectures, key=lambda x: str(x))
display_target_group(header, group)
@@ -70,9 +72,9 @@ def display_targets(targets):
by_family[str(t.family)].append(t)
for family, group in by_family.items():
- vendor = color.colorize(r'@*B{' + vendor + r'}')
- family = color.colorize(r'@*B{' + family + r'}')
- header = ' - '.join([vendor, family])
+ vendor = color.colorize(r"@*B{" + vendor + r"}")
+ family = color.colorize(r"@*B{" + family + r"}")
+ header = " - ".join([vendor, family])
group = sorted(group, key=lambda x: len(x.ancestors))
display_target_group(header, group)
@@ -86,18 +88,16 @@ def arch(parser, args):
display_targets(archspec.cpu.TARGETS)
return
- os_args, target_args = 'default_os', 'default_target'
+ os_args, target_args = "default_os", "default_target"
if args.frontend:
- os_args, target_args = 'frontend', 'frontend'
+ os_args, target_args = "frontend", "frontend"
elif args.backend:
- os_args, target_args = 'backend', 'backend'
+ os_args, target_args = "backend", "backend"
host_platform = spack.platforms.host()
host_os = host_platform.operating_system(os_args)
host_target = host_platform.target(target_args)
- architecture = spack.spec.ArchSpec(
- (str(host_platform), str(host_os), str(host_target))
- )
+ architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
if args.platform:
print(architecture.platform)
diff --git a/lib/spack/spack/cmd/audit.py b/lib/spack/spack/cmd/audit.py
index 2fb352fac0..8773fc760d 100644
--- a/lib/spack/spack/cmd/audit.py
+++ b/lib/spack/spack/cmd/audit.py
@@ -15,32 +15,30 @@ level = "short"
def setup_parser(subparser):
# Top level flags, valid for every audit class
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subcommand')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subcommand")
# Audit configuration files
- sp.add_parser('configs', help='audit configuration files')
+ sp.add_parser("configs", help="audit configuration files")
# Https and other linting
- https_parser = sp.add_parser('packages-https', help='check https in packages')
+ https_parser = sp.add_parser("packages-https", help="check https in packages")
https_parser.add_argument(
- '--all',
- action='store_true',
- default=False,
- dest='check_all',
- help="audit all packages"
+ "--all", action="store_true", default=False, dest="check_all", help="audit all packages"
)
# Audit package recipes
- pkg_parser = sp.add_parser('packages', help='audit package recipes')
+ pkg_parser = sp.add_parser("packages", help="audit package recipes")
for group in [pkg_parser, https_parser]:
group.add_argument(
- 'name', metavar='PKG', nargs='*',
- help='package to be analyzed (if none all packages will be processed)',
+ "name",
+ metavar="PKG",
+ nargs="*",
+ help="package to be analyzed (if none all packages will be processed)",
)
# List all checks
- sp.add_parser('list', help='list available checks and exits')
+ sp.add_parser("list", help="list available checks and exits")
def configs(parser, args):
@@ -67,23 +65,23 @@ def packages_https(parser, args):
def list(parser, args):
for subcommand, check_tags in spack.audit.GROUPS.items():
- print(cl.colorize('@*b{' + subcommand + '}:'))
+ print(cl.colorize("@*b{" + subcommand + "}:"))
for tag in check_tags:
audit_obj = spack.audit.CALLBACKS[tag]
- print(' ' + audit_obj.description)
+ print(" " + audit_obj.description)
if args.verbose:
for idx, fn in enumerate(audit_obj.callbacks):
- print(' {0}. '.format(idx + 1) + fn.__doc__)
+ print(" {0}. ".format(idx + 1) + fn.__doc__)
print()
print()
def audit(parser, args):
subcommands = {
- 'configs': configs,
- 'packages': packages,
- 'packages-https': packages_https,
- 'list': list
+ "configs": configs,
+ "packages": packages,
+ "packages-https": packages_https,
+ "list": list,
}
subcommands[args.subcommand](parser, args)
@@ -91,15 +89,15 @@ def audit(parser, args):
def _process_reports(reports):
for check, errors in reports:
if errors:
- msg = '{0}: {1} issue{2} found'.format(
- check, len(errors), '' if len(errors) == 1 else 's'
+ msg = "{0}: {1} issue{2} found".format(
+ check, len(errors), "" if len(errors) == 1 else "s"
)
- header = '@*b{' + msg + '}'
+ header = "@*b{" + msg + "}"
print(cl.colorize(header))
for idx, error in enumerate(errors):
- print(str(idx + 1) + '. ' + str(error))
+ print(str(idx + 1) + ". " + str(error))
raise SystemExit(1)
else:
- msg = '{0}: 0 issues found.'.format(check)
- header = '@*b{' + msg + '}'
+ msg = "{0}: 0 issues found.".format(check)
+ header = "@*b{" + msg + "}"
print(cl.colorize(header))
diff --git a/lib/spack/spack/cmd/blame.py b/lib/spack/spack/cmd/blame.py
index 9f32be5790..23ac147580 100644
--- a/lib/spack/spack/cmd/blame.py
+++ b/lib/spack/spack/cmd/blame.py
@@ -26,39 +26,61 @@ level = "long"
def setup_parser(subparser):
view_group = subparser.add_mutually_exclusive_group()
view_group.add_argument(
- '-t', '--time', dest='view', action='store_const', const='time',
- default='time', help='sort by last modification date (default)')
+ "-t",
+ "--time",
+ dest="view",
+ action="store_const",
+ const="time",
+ default="time",
+ help="sort by last modification date (default)",
+ )
view_group.add_argument(
- '-p', '--percent', dest='view', action='store_const', const='percent',
- help='sort by percent of code')
+ "-p",
+ "--percent",
+ dest="view",
+ action="store_const",
+ const="percent",
+ help="sort by percent of code",
+ )
view_group.add_argument(
- '-g', '--git', dest='view', action='store_const', const='git',
- help='show git blame output instead of summary')
+ "-g",
+ "--git",
+ dest="view",
+ action="store_const",
+ const="git",
+ help="show git blame output instead of summary",
+ )
subparser.add_argument(
- "--json", action="store_true", default=False,
- help="output blame as machine-readable json records")
+ "--json",
+ action="store_true",
+ default=False,
+ help="output blame as machine-readable json records",
+ )
subparser.add_argument(
- 'package_or_file', help='name of package to show contributions for, '
- 'or path to a file in the spack repo')
+ "package_or_file",
+ help="name of package to show contributions for, " "or path to a file in the spack repo",
+ )
def print_table(rows, last_mod, total_lines, emails):
"""
Given a set of rows with authors and lines, print a table.
"""
- table = [['LAST_COMMIT', 'LINES', '%', 'AUTHOR', 'EMAIL']]
+ table = [["LAST_COMMIT", "LINES", "%", "AUTHOR", "EMAIL"]]
for author, nlines in rows:
- table += [[
- pretty_date(last_mod[author]),
- nlines,
- round(nlines / float(total_lines) * 100, 1),
- author,
- emails[author]]]
-
- table += [[''] * 5]
- table += [[pretty_date(max(last_mod.values())), total_lines, '100.0'] +
- [''] * 3]
+ table += [
+ [
+ pretty_date(last_mod[author]),
+ nlines,
+ round(nlines / float(total_lines) * 100, 1),
+ author,
+ emails[author],
+ ]
+ ]
+
+ table += [[""] * 5]
+ table += [[pretty_date(max(last_mod.values())), total_lines, "100.0"] + [""] * 3]
colify_table(table)
@@ -70,17 +92,22 @@ def dump_json(rows, last_mod, total_lines, emails):
result = {}
authors = []
for author, nlines in rows:
- authors.append({
- "last_commit": pretty_date(last_mod[author]),
- "lines": nlines,
- "percentage": round(nlines / float(total_lines) * 100, 1),
- "author": author,
- "email": emails[author]
- })
-
- result['authors'] = authors
- result["totals"] = {"last_commit": pretty_date(max(last_mod.values())),
- "lines": total_lines, "percentage": "100.0"}
+ authors.append(
+ {
+ "last_commit": pretty_date(last_mod[author]),
+ "lines": nlines,
+ "percentage": round(nlines / float(total_lines) * 100, 1),
+ "author": author,
+ "email": emails[author],
+ }
+ )
+
+ result["authors"] = authors
+ result["totals"] = {
+ "last_commit": pretty_date(max(last_mod.values())),
+ "lines": total_lines,
+ "percentage": "100.0",
+ }
sjson.dump(result, sys.stdout)
@@ -89,7 +116,7 @@ def blame(parser, args):
# make sure this is a git repo
if not spack_is_git_repo():
tty.die("This spack is not a git clone. Can't use 'spack blame'")
- git = which('git', required=True)
+ git = which("git", required=True)
# Get name of file to blame
blame_file = None
@@ -100,16 +127,16 @@ def blame(parser, args):
if not blame_file:
pkg_cls = spack.repo.path.get_pkg_class(args.package_or_file)
- blame_file = pkg_cls.module.__file__.rstrip('c') # .pyc -> .py
+ blame_file = pkg_cls.module.__file__.rstrip("c") # .pyc -> .py
# get git blame for the package
with working_dir(spack.paths.prefix):
- if args.view == 'git':
- git('blame', blame_file)
+ if args.view == "git":
+ git("blame", blame_file)
return
else:
- output = git('blame', '--line-porcelain', blame_file, output=str)
- lines = output.split('\n')
+ output = git("blame", "--line-porcelain", blame_file, output=str)
+ lines = output.split("\n")
# Histogram authors
counts = {}
@@ -117,28 +144,27 @@ def blame(parser, args):
last_mod = {}
total_lines = 0
for line in lines:
- match = re.match(r'^author (.*)', line)
+ match = re.match(r"^author (.*)", line)
if match:
author = match.group(1)
- match = re.match(r'^author-mail (.*)', line)
+ match = re.match(r"^author-mail (.*)", line)
if match:
email = match.group(1)
- match = re.match(r'^author-time (.*)', line)
+ match = re.match(r"^author-time (.*)", line)
if match:
mod = int(match.group(1))
last_mod[author] = max(last_mod.setdefault(author, 0), mod)
# ignore comments
- if re.match(r'^\t[^#]', line):
+ if re.match(r"^\t[^#]", line):
counts[author] = counts.setdefault(author, 0) + 1
emails.setdefault(author, email)
total_lines += 1
- if args.view == 'time':
- rows = sorted(
- counts.items(), key=lambda t: last_mod[t[0]], reverse=True)
+ if args.view == "time":
+ rows = sorted(counts.items(), key=lambda t: last_mod[t[0]], reverse=True)
else: # args.view == 'percent'
rows = sorted(counts.items(), key=lambda t: t[1], reverse=True)
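
For reference, the --line-porcelain format parsed above emits one author/author-mail/author-time header block per blamed line, followed by the source line itself prefixed with a tab. A self-contained sketch of the tallying loop, run against an invented two-line sample:

# Illustrative sketch only (not part of the patch); the porcelain text is invented.
import re

porcelain = (
    "abc123 1 1 1\n"
    "author Alice\n"
    "author-mail <alice@example.com>\n"
    "author-time 1650000000\n"
    "\tprint('hello')\n"
    "def456 2 2 1\n"
    "author Bob\n"
    "author-mail <bob@example.com>\n"
    "author-time 1660000000\n"
    "\t# comment lines are skipped by the ^\\t[^#] check\n"
)

counts, author = {}, None
for line in porcelain.split("\n"):
    match = re.match(r"^author (.*)", line)
    if match:
        author = match.group(1)   # 'author-mail' does not match: no space after 'author'
    if re.match(r"^\t[^#]", line):
        counts[author] = counts.setdefault(author, 0) + 1

print(counts)  # {'Alice': 1}

Only Alice's blamed line counts here, since Bob's line is a comment and is excluded, mirroring the "ignore comments" branch above.
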
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index c4048b02fa..9a782b64ea 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -28,34 +28,34 @@ level = "long"
# Tarball to be downloaded if binary packages are requested in a local mirror
-BINARY_TARBALL = 'https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz'
+BINARY_TARBALL = "https://github.com/spack/spack-bootstrap-mirrors/releases/download/v0.2/bootstrap-buildcache.tar.gz"
#: Subdirectory where to create the mirror
-LOCAL_MIRROR_DIR = 'bootstrap_cache'
+LOCAL_MIRROR_DIR = "bootstrap_cache"
# Metadata for a generated binary mirror
BINARY_METADATA = {
- 'type': 'buildcache',
- 'description': ('Buildcache copied from a public tarball available on Github.'
- 'The sha256 checksum of binaries is checked before installation.'),
- 'info': {
- 'url': os.path.join('..', '..', LOCAL_MIRROR_DIR),
- 'homepage': 'https://github.com/spack/spack-bootstrap-mirrors',
- 'releases': 'https://github.com/spack/spack-bootstrap-mirrors/releases',
- 'tarball': BINARY_TARBALL
- }
+ "type": "buildcache",
+ "description": (
+ "Buildcache copied from a public tarball available on Github."
+ "The sha256 checksum of binaries is checked before installation."
+ ),
+ "info": {
+ "url": os.path.join("..", "..", LOCAL_MIRROR_DIR),
+ "homepage": "https://github.com/spack/spack-bootstrap-mirrors",
+ "releases": "https://github.com/spack/spack-bootstrap-mirrors/releases",
+ "tarball": BINARY_TARBALL,
+ },
}
-CLINGO_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json'
-GNUPG_JSON = '$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json'
+CLINGO_JSON = "$spack/share/spack/bootstrap/github-actions-v0.2/clingo.json"
+GNUPG_JSON = "$spack/share/spack/bootstrap/github-actions-v0.2/gnupg.json"
# Metadata for a generated source mirror
SOURCE_METADATA = {
- 'type': 'install',
- 'description': 'Mirror with software needed to bootstrap Spack',
- 'info': {
- 'url': os.path.join('..', '..', LOCAL_MIRROR_DIR)
- }
+ "type": "install",
+ "description": "Mirror with software needed to bootstrap Spack",
+ "info": {"url": os.path.join("..", "..", LOCAL_MIRROR_DIR)},
}
@@ -63,109 +63,83 @@ def _add_scope_option(parser):
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
- help="configuration scope to read/modify"
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
+ help="configuration scope to read/modify",
)
def setup_parser(subparser):
- sp = subparser.add_subparsers(dest='subcommand')
+ sp = subparser.add_subparsers(dest="subcommand")
- status = sp.add_parser('status', help='get the status of Spack')
+ status = sp.add_parser("status", help="get the status of Spack")
status.add_argument(
- '--optional', action='store_true', default=False,
- help='show the status of rarely used optional dependencies'
+ "--optional",
+ action="store_true",
+ default=False,
+ help="show the status of rarely used optional dependencies",
)
status.add_argument(
- '--dev', action='store_true', default=False,
- help='show the status of dependencies needed to develop Spack'
+ "--dev",
+ action="store_true",
+ default=False,
+ help="show the status of dependencies needed to develop Spack",
)
- enable = sp.add_parser('enable', help='enable bootstrapping')
+ enable = sp.add_parser("enable", help="enable bootstrapping")
_add_scope_option(enable)
- disable = sp.add_parser('disable', help='disable bootstrapping')
+ disable = sp.add_parser("disable", help="disable bootstrapping")
_add_scope_option(disable)
- reset = sp.add_parser(
- 'reset', help='reset bootstrapping configuration to Spack defaults'
- )
- spack.cmd.common.arguments.add_common_arguments(
- reset, ['yes_to_all']
- )
+ reset = sp.add_parser("reset", help="reset bootstrapping configuration to Spack defaults")
+ spack.cmd.common.arguments.add_common_arguments(reset, ["yes_to_all"])
- root = sp.add_parser(
- 'root', help='get/set the root bootstrap directory'
- )
+ root = sp.add_parser("root", help="get/set the root bootstrap directory")
_add_scope_option(root)
root.add_argument(
- 'path', nargs='?', default=None,
- help='set the bootstrap directory to this value'
+ "path", nargs="?", default=None, help="set the bootstrap directory to this value"
)
- list = sp.add_parser(
- 'list', help='list all the sources of software to bootstrap Spack'
- )
+ list = sp.add_parser("list", help="list all the sources of software to bootstrap Spack")
_add_scope_option(list)
- trust = sp.add_parser(
- 'trust', help='trust a bootstrapping source'
- )
+ trust = sp.add_parser("trust", help="trust a bootstrapping source")
_add_scope_option(trust)
- trust.add_argument(
- 'name', help='name of the source to be trusted'
- )
+ trust.add_argument("name", help="name of the source to be trusted")
- untrust = sp.add_parser(
- 'untrust', help='untrust a bootstrapping source'
- )
+ untrust = sp.add_parser("untrust", help="untrust a bootstrapping source")
_add_scope_option(untrust)
- untrust.add_argument(
- 'name', help='name of the source to be untrusted'
- )
+ untrust.add_argument("name", help="name of the source to be untrusted")
- add = sp.add_parser(
- 'add', help='add a new source for bootstrapping'
- )
+ add = sp.add_parser("add", help="add a new source for bootstrapping")
_add_scope_option(add)
add.add_argument(
- '--trust', action='store_true',
- help='trust the source immediately upon addition')
- add.add_argument(
- 'name', help='name of the new source of software'
- )
- add.add_argument(
- 'metadata_dir', help='directory where to find metadata files'
+ "--trust", action="store_true", help="trust the source immediately upon addition"
)
+ add.add_argument("name", help="name of the new source of software")
+ add.add_argument("metadata_dir", help="directory where to find metadata files")
- remove = sp.add_parser(
- 'remove', help='remove a bootstrapping source'
- )
- remove.add_argument(
- 'name', help='name of the source to be removed'
- )
+ remove = sp.add_parser("remove", help="remove a bootstrapping source")
+ remove.add_argument("name", help="name of the source to be removed")
- mirror = sp.add_parser(
- 'mirror', help='create a local mirror to bootstrap Spack'
- )
- mirror.add_argument(
- '--binary-packages', action='store_true',
- help='download public binaries in the mirror'
- )
+ mirror = sp.add_parser("mirror", help="create a local mirror to bootstrap Spack")
mirror.add_argument(
- '--dev', action='store_true',
- help='download dev dependencies too'
+ "--binary-packages", action="store_true", help="download public binaries in the mirror"
)
+ mirror.add_argument("--dev", action="store_true", help="download dev dependencies too")
mirror.add_argument(
- metavar='DIRECTORY', dest='root_dir',
- help='root directory in which to create the mirror and metadata'
+ metavar="DIRECTORY",
+ dest="root_dir",
+ help="root directory in which to create the mirror and metadata",
)
def _enable_or_disable(args):
# Set to True if we called "enable", otherwise set to false
- value = args.subcommand == 'enable'
- spack.config.set('bootstrap:enable', value, scope=args.scope)
+ value = args.subcommand == "enable"
+ spack.config.set("bootstrap:enable", value, scope=args.scope)
def _reset(args):
@@ -173,38 +147,35 @@ def _reset(args):
msg = [
"Bootstrapping configuration is being reset to Spack's defaults. "
"Current configuration will be lost.\n",
- "Do you want to continue?"
+ "Do you want to continue?",
]
- ok_to_continue = llnl.util.tty.get_yes_or_no(
- ''.join(msg), default=True
- )
+ ok_to_continue = llnl.util.tty.get_yes_or_no("".join(msg), default=True)
if not ok_to_continue:
- raise RuntimeError('Aborting')
+ raise RuntimeError("Aborting")
for scope in spack.config.config.file_scopes:
# The default scope should stay untouched
- if scope.name == 'defaults':
+ if scope.name == "defaults":
continue
# If we are in an env scope we can't delete a file, but the best we
# can do is nullify the corresponding configuration
- if (scope.name.startswith('env') and
- spack.config.get('bootstrap', scope=scope.name)):
- spack.config.set('bootstrap', {}, scope=scope.name)
+ if scope.name.startswith("env") and spack.config.get("bootstrap", scope=scope.name):
+ spack.config.set("bootstrap", {}, scope=scope.name)
continue
# If we are outside of an env scope delete the bootstrap.yaml file
- bootstrap_yaml = os.path.join(scope.path, 'bootstrap.yaml')
- backup_file = bootstrap_yaml + '.bkp'
+ bootstrap_yaml = os.path.join(scope.path, "bootstrap.yaml")
+ backup_file = bootstrap_yaml + ".bkp"
if os.path.exists(bootstrap_yaml):
shutil.move(bootstrap_yaml, backup_file)
def _root(args):
if args.path:
- spack.config.set('bootstrap:root', args.path, scope=args.scope)
+ spack.config.set("bootstrap:root", args.path, scope=args.scope)
- root = spack.config.get('bootstrap:root', default=None, scope=args.scope)
+ root = spack.config.get("bootstrap:root", default=None, scope=args.scope)
if root:
root = spack.util.path.canonicalize_path(root)
print(root)
@@ -213,9 +184,7 @@ def _root(args):
def _list(args):
sources = spack.bootstrap.bootstrapping_sources(scope=args.scope)
if not sources:
- llnl.util.tty.msg(
- "No method available for bootstrapping Spack's dependencies"
- )
+ llnl.util.tty.msg("No method available for bootstrapping Spack's dependencies")
return
def _print_method(source, trusted):
@@ -231,51 +200,53 @@ def _list(args):
elif trusted is False:
trust_str = "@*r{UNTRUSTED}"
- fmt("Name", source['name'] + ' ' + trust_str)
+ fmt("Name", source["name"] + " " + trust_str)
print()
- fmt(" Type", source['type'])
+ fmt(" Type", source["type"])
print()
- info_lines = ['\n']
- for key, value in source.get('info', {}).items():
- info_lines.append(' ' * 4 + '@*{{{0}}}: {1}\n'.format(key, value))
+ info_lines = ["\n"]
+ for key, value in source.get("info", {}).items():
+ info_lines.append(" " * 4 + "@*{{{0}}}: {1}\n".format(key, value))
if len(info_lines) > 1:
- fmt(" Info", ''.join(info_lines))
+ fmt(" Info", "".join(info_lines))
- description_lines = ['\n']
- for line in source['description'].split('\n'):
- description_lines.append(' ' * 4 + line + '\n')
+ description_lines = ["\n"]
+ for line in source["description"].split("\n"):
+ description_lines.append(" " * 4 + line + "\n")
- fmt(" Description", ''.join(description_lines))
+ fmt(" Description", "".join(description_lines))
- trusted = spack.config.get('bootstrap:trusted', {})
+ trusted = spack.config.get("bootstrap:trusted", {})
for s in sources:
- _print_method(s, trusted.get(s['name'], None))
+ _print_method(s, trusted.get(s["name"], None))
def _write_trust_state(args, value):
name = args.name
- sources = spack.config.get('bootstrap:sources')
+ sources = spack.config.get("bootstrap:sources")
- matches = [s for s in sources if s['name'] == name]
+ matches = [s for s in sources if s["name"] == name]
if not matches:
- names = [s['name'] for s in sources]
- msg = ('there is no bootstrapping method named "{0}". Valid '
- 'method names are: {1}'.format(name, ', '.join(names)))
+ names = [s["name"] for s in sources]
+ msg = (
+ 'there is no bootstrapping method named "{0}". Valid '
+ "method names are: {1}".format(name, ", ".join(names))
+ )
raise RuntimeError(msg)
if len(matches) > 1:
- msg = ('there is more than one bootstrapping method named "{0}". '
- 'Please delete all methods but one from bootstrap.yaml '
- 'before proceeding').format(name)
+ msg = (
+ 'there is more than one bootstrapping method named "{0}". '
+ "Please delete all methods but one from bootstrap.yaml "
+ "before proceeding"
+ ).format(name)
raise RuntimeError(msg)
# Setting the scope explicitly is needed to not copy over to a new scope
# the entire default configuration for bootstrap.yaml
- scope = args.scope or spack.config.default_modify_scope('bootstrap')
- spack.config.add(
- 'bootstrap:trusted:{0}:{1}'.format(name, str(value)), scope=scope
- )
+ scope = args.scope or spack.config.default_modify_scope("bootstrap")
+ spack.config.add("bootstrap:trusted:{0}:{1}".format(name, str(value)), scope=scope)
def _trust(args):
@@ -291,11 +262,11 @@ def _untrust(args):
def _status(args):
- sections = ['core', 'buildcache']
+ sections = ["core", "buildcache"]
if args.optional:
- sections.append('optional')
+ sections.append("optional")
if args.dev:
- sections.append('develop')
+ sections.append("develop")
header = "@*b{{Spack v{0} - {1}}}".format(
spack.spack_version, spack.bootstrap.spec_for_current_python()
@@ -312,9 +283,11 @@ def _status(args):
if status_msg:
print(llnl.util.tty.color.colorize(status_msg))
print()
- legend = ('Spack will take care of bootstrapping any missing dependency marked'
- ' as [@*y{B}]. Dependencies marked as [@*y{-}] are instead required'
- ' to be found on the system.')
+ legend = (
+ "Spack will take care of bootstrapping any missing dependency marked"
+ " as [@*y{B}]. Dependencies marked as [@*y{-}] are instead required"
+ " to be found on the system."
+ )
if missing:
print(llnl.util.tty.color.colorize(legend))
print()
@@ -322,7 +295,7 @@ def _status(args):
def _add(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
- names = [s['name'] for s in initial_sources]
+ names = [s["name"] for s in initial_sources]
# If the name is already used error out
if args.name in names:
@@ -332,21 +305,17 @@ def _add(args):
# Check that the metadata file exists
metadata_dir = spack.util.path.canonicalize_path(args.metadata_dir)
if not os.path.exists(metadata_dir) or not os.path.isdir(metadata_dir):
- raise RuntimeError(
- 'the directory "{0}" does not exist'.format(args.metadata_dir)
- )
+ raise RuntimeError('the directory "{0}" does not exist'.format(args.metadata_dir))
- file = os.path.join(metadata_dir, 'metadata.yaml')
+ file = os.path.join(metadata_dir, "metadata.yaml")
if not os.path.exists(file):
raise RuntimeError('the file "{0}" does not exist'.format(file))
# Insert the new source as the highest priority one
- write_scope = args.scope or spack.config.default_modify_scope(section='bootstrap')
- sources = spack.config.get('bootstrap:sources', scope=write_scope) or []
- sources = [
- {'name': args.name, 'metadata': args.metadata_dir}
- ] + sources
- spack.config.set('bootstrap:sources', sources, scope=write_scope)
+ write_scope = args.scope or spack.config.default_modify_scope(section="bootstrap")
+ sources = spack.config.get("bootstrap:sources", scope=write_scope) or []
+ sources = [{"name": args.name, "metadata": args.metadata_dir}] + sources
+ spack.config.set("bootstrap:sources", sources, scope=write_scope)
msg = 'New bootstrapping source "{0}" added in the "{1}" configuration scope'
llnl.util.tty.msg(msg.format(args.name, write_scope))
@@ -356,37 +325,39 @@ def _add(args):
def _remove(args):
initial_sources = spack.bootstrap.bootstrapping_sources()
- names = [s['name'] for s in initial_sources]
+ names = [s["name"] for s in initial_sources]
if args.name not in names:
- msg = ('cannot find any bootstrapping source named "{0}". '
- 'Run `spack bootstrap list` to see available sources.')
+ msg = (
+ 'cannot find any bootstrapping source named "{0}". '
+ "Run `spack bootstrap list` to see available sources."
+ )
raise RuntimeError(msg.format(args.name))
for current_scope in spack.config.scopes():
- sources = spack.config.get('bootstrap:sources', scope=current_scope) or []
- if args.name in [s['name'] for s in sources]:
- sources = [s for s in sources if s['name'] != args.name]
- spack.config.set('bootstrap:sources', sources, scope=current_scope)
- msg = ('Removed the bootstrapping source named "{0}" from the '
- '"{1}" configuration scope.')
+ sources = spack.config.get("bootstrap:sources", scope=current_scope) or []
+ if args.name in [s["name"] for s in sources]:
+ sources = [s for s in sources if s["name"] != args.name]
+ spack.config.set("bootstrap:sources", sources, scope=current_scope)
+ msg = (
+ 'Removed the bootstrapping source named "{0}" from the '
+ '"{1}" configuration scope.'
+ )
llnl.util.tty.msg(msg.format(args.name, current_scope))
- trusted = spack.config.get('bootstrap:trusted', scope=current_scope) or []
+ trusted = spack.config.get("bootstrap:trusted", scope=current_scope) or []
if args.name in trusted:
trusted.pop(args.name)
- spack.config.set('bootstrap:trusted', trusted, scope=current_scope)
+ spack.config.set("bootstrap:trusted", trusted, scope=current_scope)
msg = 'Deleting information on "{0}" from list of trusted sources'
llnl.util.tty.msg(msg.format(args.name))
def _mirror(args):
- mirror_dir = spack.util.path.canonicalize_path(
- os.path.join(args.root_dir, LOCAL_MIRROR_DIR)
- )
+ mirror_dir = spack.util.path.canonicalize_path(os.path.join(args.root_dir, LOCAL_MIRROR_DIR))
# TODO: Here we are adding gnuconfig manually, but this can be fixed
# TODO: as soon as we have an option to add to a mirror all the possible
# TODO: dependencies of a spec
- root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ['gnuconfig']
+ root_specs = spack.bootstrap.all_root_specs(development=args.dev) + ["gnuconfig"]
for spec_str in root_specs:
msg = 'Adding "{0}" and dependencies to the mirror at {1}'
llnl.util.tty.msg(msg.format(spec_str, mirror_dir))
@@ -405,48 +376,46 @@ def _mirror(args):
stage.create()
stage.fetch()
stage.expand_archive()
- build_cache_dir = os.path.join(stage.source_path, 'build_cache')
+ build_cache_dir = os.path.join(stage.source_path, "build_cache")
shutil.move(build_cache_dir, mirror_dir)
llnl.util.tty.set_msg_enabled(True)
def write_metadata(subdir, metadata):
- metadata_rel_dir = os.path.join('metadata', subdir)
- metadata_yaml = os.path.join(
- args.root_dir, metadata_rel_dir, 'metadata.yaml'
- )
+ metadata_rel_dir = os.path.join("metadata", subdir)
+ metadata_yaml = os.path.join(args.root_dir, metadata_rel_dir, "metadata.yaml")
llnl.util.filesystem.mkdirp(os.path.dirname(metadata_yaml))
- with open(metadata_yaml, mode='w') as f:
+ with open(metadata_yaml, mode="w") as f:
spack.util.spack_yaml.dump(metadata, stream=f)
return os.path.dirname(metadata_yaml), metadata_rel_dir
- instructions = ('\nTo register the mirror on the platform where it\'s supposed '
- 'to be used, move "{0}" to its final location and run the '
- 'following command(s):\n\n').format(args.root_dir)
- cmd = ' % spack bootstrap add --trust {0} <final-path>/{1}\n'
- _, rel_directory = write_metadata(subdir='sources', metadata=SOURCE_METADATA)
- instructions += cmd.format('local-sources', rel_directory)
+ instructions = (
+ "\nTo register the mirror on the platform where it's supposed "
+ 'to be used, move "{0}" to its final location and run the '
+ "following command(s):\n\n"
+ ).format(args.root_dir)
+ cmd = " % spack bootstrap add --trust {0} <final-path>/{1}\n"
+ _, rel_directory = write_metadata(subdir="sources", metadata=SOURCE_METADATA)
+ instructions += cmd.format("local-sources", rel_directory)
if args.binary_packages:
- abs_directory, rel_directory = write_metadata(
- subdir='binaries', metadata=BINARY_METADATA
- )
+ abs_directory, rel_directory = write_metadata(subdir="binaries", metadata=BINARY_METADATA)
shutil.copy(spack.util.path.canonicalize_path(CLINGO_JSON), abs_directory)
shutil.copy(spack.util.path.canonicalize_path(GNUPG_JSON), abs_directory)
- instructions += cmd.format('local-binaries', rel_directory)
+ instructions += cmd.format("local-binaries", rel_directory)
print(instructions)
def bootstrap(parser, args):
callbacks = {
- 'status': _status,
- 'enable': _enable_or_disable,
- 'disable': _enable_or_disable,
- 'reset': _reset,
- 'root': _root,
- 'list': _list,
- 'trust': _trust,
- 'untrust': _untrust,
- 'add': _add,
- 'remove': _remove,
- 'mirror': _mirror
+ "status": _status,
+ "enable": _enable_or_disable,
+ "disable": _enable_or_disable,
+ "reset": _reset,
+ "root": _root,
+ "list": _list,
+ "trust": _trust,
+ "untrust": _untrust,
+ "add": _add,
+ "remove": _remove,
+ "mirror": _mirror,
}
callbacks[args.subcommand](args)
diff --git a/lib/spack/spack/cmd/build_env.py b/lib/spack/spack/cmd/build_env.py
index 7ad37b6a34..eecc23d006 100644
--- a/lib/spack/spack/cmd/build_env.py
+++ b/lib/spack/spack/cmd/build_env.py
@@ -4,8 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
-description = "run a command in a spec's install environment, " \
- "or dump its environment to screen or file"
+description = (
+ "run a command in a spec's install environment, " "or dump its environment to screen or file"
+)
section = "build"
level = "long"
@@ -13,4 +14,4 @@ setup_parser = env_utility.setup_parser
def build_env(parser, args):
- env_utility.emulate_env_utility('build-env', 'build', args)
+ env_utility.emulate_env_utility("build-env", "build", args)
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index b66d5a5699..1da0a51bc9 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -38,226 +38,278 @@ level = "long"
def setup_parser(subparser):
setup_parser.parser = subparser
- subparsers = subparser.add_subparsers(help='buildcache sub-commands')
-
- create = subparsers.add_parser('create', help=create_fn.__doc__)
- create.add_argument('-r', '--rel', action='store_true',
- help="make all rpaths relative" +
- " before creating tarballs.")
- create.add_argument('-f', '--force', action='store_true',
- help="overwrite tarball if it exists.")
- create.add_argument('-u', '--unsigned', action='store_true',
- help="create unsigned buildcache" +
- " tarballs for testing")
- create.add_argument('-a', '--allow-root', action='store_true',
- help="allow install root string in binary files " +
- "after RPATH substitution")
- create.add_argument('-k', '--key', metavar='key',
- type=str, default=None,
- help="Key for signing.")
+ subparsers = subparser.add_subparsers(help="buildcache sub-commands")
+
+ create = subparsers.add_parser("create", help=create_fn.__doc__)
+ create.add_argument(
+ "-r",
+ "--rel",
+ action="store_true",
+ help="make all rpaths relative" + " before creating tarballs.",
+ )
+ create.add_argument(
+ "-f", "--force", action="store_true", help="overwrite tarball if it exists."
+ )
+ create.add_argument(
+ "-u",
+ "--unsigned",
+ action="store_true",
+ help="create unsigned buildcache" + " tarballs for testing",
+ )
+ create.add_argument(
+ "-a",
+ "--allow-root",
+ action="store_true",
+ help="allow install root string in binary files " + "after RPATH substitution",
+ )
+ create.add_argument(
+ "-k", "--key", metavar="key", type=str, default=None, help="Key for signing."
+ )
output = create.add_mutually_exclusive_group(required=True)
- output.add_argument('-d', '--directory',
- metavar='directory',
- type=str,
- help="local directory where " +
- "buildcaches will be written.")
- output.add_argument('-m', '--mirror-name',
- metavar='mirror-name',
- type=str,
- help="name of the mirror where " +
- "buildcaches will be written.")
- output.add_argument('--mirror-url',
- metavar='mirror-url',
- type=str,
- help="URL of the mirror where " +
- "buildcaches will be written.")
- create.add_argument('--rebuild-index', action='store_true',
- default=False, help="Regenerate buildcache index " +
- "after building package(s)")
- create.add_argument('--spec-file', default=None,
- help=('Create buildcache entry for spec from json or ' +
- 'yaml file'))
- create.add_argument('--only', default='package,dependencies',
- dest='things_to_install',
- choices=['package', 'dependencies'],
- help=('Select the buildcache mode. the default is to'
- ' build a cache for the package along with all'
- ' its dependencies. Alternatively, one can'
- ' decide to build a cache for only the package'
- ' or only the dependencies'))
- arguments.add_common_arguments(create, ['specs'])
+ output.add_argument(
+ "-d",
+ "--directory",
+ metavar="directory",
+ type=str,
+ help="local directory where " + "buildcaches will be written.",
+ )
+ output.add_argument(
+ "-m",
+ "--mirror-name",
+ metavar="mirror-name",
+ type=str,
+ help="name of the mirror where " + "buildcaches will be written.",
+ )
+ output.add_argument(
+ "--mirror-url",
+ metavar="mirror-url",
+ type=str,
+ help="URL of the mirror where " + "buildcaches will be written.",
+ )
+ create.add_argument(
+ "--rebuild-index",
+ action="store_true",
+ default=False,
+ help="Regenerate buildcache index " + "after building package(s)",
+ )
+ create.add_argument(
+ "--spec-file",
+ default=None,
+ help=("Create buildcache entry for spec from json or " + "yaml file"),
+ )
+ create.add_argument(
+ "--only",
+ default="package,dependencies",
+ dest="things_to_install",
+ choices=["package", "dependencies"],
+ help=(
+ "Select the buildcache mode. the default is to"
+ " build a cache for the package along with all"
+ " its dependencies. Alternatively, one can"
+ " decide to build a cache for only the package"
+ " or only the dependencies"
+ ),
+ )
+ arguments.add_common_arguments(create, ["specs"])
create.set_defaults(func=create_fn)
- install = subparsers.add_parser('install', help=install_fn.__doc__)
- install.add_argument('-f', '--force', action='store_true',
- help="overwrite install directory if it exists.")
- install.add_argument('-m', '--multiple', action='store_true',
- help="allow all matching packages ")
- install.add_argument('-a', '--allow-root', action='store_true',
- help="allow install root string in binary files " +
- "after RPATH substitution")
- install.add_argument('-u', '--unsigned', action='store_true',
- help="install unsigned buildcache" +
- " tarballs for testing")
- install.add_argument('-o', '--otherarch', action='store_true',
- help="install specs from other architectures" +
- " instead of default platform and OS")
-
- arguments.add_common_arguments(install, ['specs'])
+ install = subparsers.add_parser("install", help=install_fn.__doc__)
+ install.add_argument(
+ "-f", "--force", action="store_true", help="overwrite install directory if it exists."
+ )
+ install.add_argument(
+ "-m", "--multiple", action="store_true", help="allow all matching packages "
+ )
+ install.add_argument(
+ "-a",
+ "--allow-root",
+ action="store_true",
+ help="allow install root string in binary files " + "after RPATH substitution",
+ )
+ install.add_argument(
+ "-u",
+ "--unsigned",
+ action="store_true",
+ help="install unsigned buildcache" + " tarballs for testing",
+ )
+ install.add_argument(
+ "-o",
+ "--otherarch",
+ action="store_true",
+ help="install specs from other architectures" + " instead of default platform and OS",
+ )
+
+ arguments.add_common_arguments(install, ["specs"])
install.set_defaults(func=install_fn)
- listcache = subparsers.add_parser('list', help=list_fn.__doc__)
- arguments.add_common_arguments(listcache, ['long', 'very_long'])
- listcache.add_argument('-v', '--variants',
- action='store_true',
- dest='variants',
- help='show variants in output (can be long)')
- listcache.add_argument('-a', '--allarch', action='store_true',
- help="list specs for all available architectures" +
- " instead of default platform and OS")
- arguments.add_common_arguments(listcache, ['specs'])
+ listcache = subparsers.add_parser("list", help=list_fn.__doc__)
+ arguments.add_common_arguments(listcache, ["long", "very_long"])
+ listcache.add_argument(
+ "-v",
+ "--variants",
+ action="store_true",
+ dest="variants",
+ help="show variants in output (can be long)",
+ )
+ listcache.add_argument(
+ "-a",
+ "--allarch",
+ action="store_true",
+ help="list specs for all available architectures" + " instead of default platform and OS",
+ )
+ arguments.add_common_arguments(listcache, ["specs"])
listcache.set_defaults(func=list_fn)
- keys = subparsers.add_parser('keys', help=keys_fn.__doc__)
+ keys = subparsers.add_parser("keys", help=keys_fn.__doc__)
keys.add_argument(
- '-i', '--install', action='store_true',
- help="install Keys pulled from mirror")
- keys.add_argument(
- '-t', '--trust', action='store_true',
- help="trust all downloaded keys")
- keys.add_argument('-f', '--force', action='store_true',
- help="force new download of keys")
+ "-i", "--install", action="store_true", help="install Keys pulled from mirror"
+ )
+ keys.add_argument("-t", "--trust", action="store_true", help="trust all downloaded keys")
+ keys.add_argument("-f", "--force", action="store_true", help="force new download of keys")
keys.set_defaults(func=keys_fn)
- preview = subparsers.add_parser('preview', help=preview_fn.__doc__)
- arguments.add_common_arguments(preview, ['installed_specs'])
+ preview = subparsers.add_parser("preview", help=preview_fn.__doc__)
+ arguments.add_common_arguments(preview, ["installed_specs"])
preview.set_defaults(func=preview_fn)
# Check if binaries need to be rebuilt on remote mirror
- check = subparsers.add_parser('check', help=check_fn.__doc__)
+ check = subparsers.add_parser("check", help=check_fn.__doc__)
check.add_argument(
- '-m', '--mirror-url', default=None,
- help='Override any configured mirrors with this mirror url')
+ "-m",
+ "--mirror-url",
+ default=None,
+ help="Override any configured mirrors with this mirror url",
+ )
check.add_argument(
- '-o', '--output-file', default=None,
- help='File where rebuild info should be written')
+ "-o", "--output-file", default=None, help="File where rebuild info should be written"
+ )
# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
check.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
- help="configuration scope containing mirrors to check")
+ help="configuration scope containing mirrors to check",
+ )
check.add_argument(
- '-s', '--spec', default=None,
- help='Check single spec instead of release specs file')
+ "-s", "--spec", default=None, help="Check single spec instead of release specs file"
+ )
check.add_argument(
- '--spec-file', default=None,
- help=('Check single spec from json or yaml file instead of release ' +
- 'specs file'))
+ "--spec-file",
+ default=None,
+ help=("Check single spec from json or yaml file instead of release " + "specs file"),
+ )
check.set_defaults(func=check_fn)
# Download tarball and specfile
- download = subparsers.add_parser('download', help=download_fn.__doc__)
+ download = subparsers.add_parser("download", help=download_fn.__doc__)
download.add_argument(
- '-s', '--spec', default=None,
- help="Download built tarball for spec from mirror")
+ "-s", "--spec", default=None, help="Download built tarball for spec from mirror"
+ )
download.add_argument(
- '--spec-file', default=None,
- help=("Download built tarball for spec (from json or yaml file) " +
- "from mirror"))
+ "--spec-file",
+ default=None,
+ help=("Download built tarball for spec (from json or yaml file) " + "from mirror"),
+ )
download.add_argument(
- '-p', '--path', default=None,
- help="Path to directory where tarball should be downloaded")
+ "-p", "--path", default=None, help="Path to directory where tarball should be downloaded"
+ )
download.set_defaults(func=download_fn)
# Get buildcache name
- getbuildcachename = subparsers.add_parser('get-buildcache-name',
- help=get_buildcache_name_fn.__doc__)
+ getbuildcachename = subparsers.add_parser(
+ "get-buildcache-name", help=get_buildcache_name_fn.__doc__
+ )
getbuildcachename.add_argument(
- '-s', '--spec', default=None,
- help='Spec string for which buildcache name is desired')
+ "-s", "--spec", default=None, help="Spec string for which buildcache name is desired"
+ )
getbuildcachename.add_argument(
- '--spec-file', default=None,
- help=('Path to spec json or yaml file for which buildcache name is ' +
- 'desired'))
+ "--spec-file",
+ default=None,
+ help=("Path to spec json or yaml file for which buildcache name is " + "desired"),
+ )
getbuildcachename.set_defaults(func=get_buildcache_name_fn)
# Given the root spec, save the yaml of the dependent spec to a file
- savespecfile = subparsers.add_parser('save-specfile',
- help=save_specfile_fn.__doc__)
- savespecfile.add_argument(
- '--root-spec', default=None,
- help='Root spec of dependent spec')
+ savespecfile = subparsers.add_parser("save-specfile", help=save_specfile_fn.__doc__)
+ savespecfile.add_argument("--root-spec", default=None, help="Root spec of dependent spec")
savespecfile.add_argument(
- '--root-specfile', default=None,
- help='Path to json or yaml file containing root spec of dependent spec')
+ "--root-specfile",
+ default=None,
+ help="Path to json or yaml file containing root spec of dependent spec",
+ )
savespecfile.add_argument(
- '-s', '--specs', default=None,
- help='List of dependent specs for which saved yaml is desired')
+ "-s",
+ "--specs",
+ default=None,
+ help="List of dependent specs for which saved yaml is desired",
+ )
savespecfile.add_argument(
- '--specfile-dir', default=None,
- help='Path to directory where spec yamls should be saved')
+ "--specfile-dir", default=None, help="Path to directory where spec yamls should be saved"
+ )
savespecfile.set_defaults(func=save_specfile_fn)
# Copy buildcache from some directory to another mirror url
- copy = subparsers.add_parser('copy', help=copy_fn.__doc__)
+ copy = subparsers.add_parser("copy", help=copy_fn.__doc__)
copy.add_argument(
- '--base-dir', default=None,
- help='Path to mirror directory (root of existing buildcache)')
+ "--base-dir", default=None, help="Path to mirror directory (root of existing buildcache)"
+ )
copy.add_argument(
- '--spec-file', default=None,
- help=('Path to spec json or yaml file representing buildcache entry to' +
- ' copy'))
- copy.add_argument(
- '--destination-url', default=None,
- help='Destination mirror url')
+ "--spec-file",
+ default=None,
+ help=("Path to spec json or yaml file representing buildcache entry to" + " copy"),
+ )
+ copy.add_argument("--destination-url", default=None, help="Destination mirror url")
copy.set_defaults(func=copy_fn)
# Sync buildcache entries from one mirror to another
- sync = subparsers.add_parser('sync', help=sync_fn.__doc__)
+ sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
source = sync.add_mutually_exclusive_group(required=True)
- source.add_argument('--src-directory',
- metavar='DIRECTORY',
- type=str,
- help="Source mirror as a local file path")
- source.add_argument('--src-mirror-name',
- metavar='MIRROR_NAME',
- type=str,
- help="Name of the source mirror")
- source.add_argument('--src-mirror-url',
- metavar='MIRROR_URL',
- type=str,
- help="URL of the source mirror")
+ source.add_argument(
+ "--src-directory", metavar="DIRECTORY", type=str, help="Source mirror as a local file path"
+ )
+ source.add_argument(
+ "--src-mirror-name", metavar="MIRROR_NAME", type=str, help="Name of the source mirror"
+ )
+ source.add_argument(
+ "--src-mirror-url", metavar="MIRROR_URL", type=str, help="URL of the source mirror"
+ )
dest = sync.add_mutually_exclusive_group(required=True)
- dest.add_argument('--dest-directory',
- metavar='DIRECTORY',
- type=str,
- help="Destination mirror as a local file path")
- dest.add_argument('--dest-mirror-name',
- metavar='MIRROR_NAME',
- type=str,
- help="Name of the destination mirror")
- dest.add_argument('--dest-mirror-url',
- metavar='MIRROR_URL',
- type=str,
- help="URL of the destination mirror")
+ dest.add_argument(
+ "--dest-directory",
+ metavar="DIRECTORY",
+ type=str,
+ help="Destination mirror as a local file path",
+ )
+ dest.add_argument(
+ "--dest-mirror-name",
+ metavar="MIRROR_NAME",
+ type=str,
+ help="Name of the destination mirror",
+ )
+ dest.add_argument(
+ "--dest-mirror-url", metavar="MIRROR_URL", type=str, help="URL of the destination mirror"
+ )
sync.set_defaults(func=sync_fn)
# Update buildcache index without copying any additional packages
- update_index = subparsers.add_parser(
- 'update-index', help=update_index_fn.__doc__)
- update_index.add_argument(
- '-d', '--mirror-url', default=None, help='Destination mirror url')
+ update_index = subparsers.add_parser("update-index", help=update_index_fn.__doc__)
+ update_index.add_argument("-d", "--mirror-url", default=None, help="Destination mirror url")
update_index.add_argument(
- '-k', '--keys', default=False, action='store_true',
- help='If provided, key index will be updated as well as package index')
+ "-k",
+ "--keys",
+ default=False,
+ action="store_true",
+ help="If provided, key index will be updated as well as package index",
+ )
update_index.set_defaults(func=update_index_fn)
@@ -277,17 +329,19 @@ def _matching_specs(args):
if env:
return [env.specs_by_hash[h] for h in env.concretized_order]
- tty.die("build cache file creation requires at least one" +
- " installed package spec, an active environment," +
- " or else a path to a json or yaml file containing a spec" +
- " to install")
+ tty.die(
+ "build cache file creation requires at least one"
+ + " installed package spec, an active environment,"
+ + " or else a path to a json or yaml file containing a spec"
+ + " to install"
+ )
def _concrete_spec_from_args(args):
spec_str, specfile_path = args.spec, args.spec_file
if not spec_str and not specfile_path:
- tty.error('must provide either spec string or path to YAML or JSON specfile')
+ tty.error("must provide either spec string or path to YAML or JSON specfile")
sys.exit(1)
if spec_str:
@@ -296,7 +350,7 @@ def _concrete_spec_from_args(args):
spec = spack.store.find(constraints)[0]
spec.concretize()
except SpecError as spec_error:
- tty.error('Unable to concretize spec {0}'.format(spec_str))
+ tty.error("Unable to concretize spec {0}".format(spec_str))
tty.debug(spec_error)
sys.exit(1)
@@ -318,19 +372,19 @@ def create_fn(args):
matches = _matching_specs(args)
- msg = 'Pushing binary packages to {0}/build_cache'.format(push_url)
+ msg = "Pushing binary packages to {0}/build_cache".format(push_url)
tty.msg(msg)
specs_kwargs = {
- 'include_root': 'package' in args.things_to_install,
- 'include_dependencies': 'dependencies' in args.things_to_install
+ "include_root": "package" in args.things_to_install,
+ "include_dependencies": "dependencies" in args.things_to_install,
}
kwargs = {
- 'key': args.key,
- 'force': args.force,
- 'relative': args.rel,
- 'unsigned': args.unsigned,
- 'allow_root': args.allow_root,
- 'regenerate_index': args.rebuild_index
+ "key": args.key,
+ "force": args.force,
+ "relative": args.rel,
+ "unsigned": args.unsigned,
+ "allow_root": args.allow_root,
+ "regenerate_index": args.rebuild_index,
}
bindist.push(matches, push_url, specs_kwargs, **kwargs)
@@ -344,10 +398,7 @@ def install_fn(args):
matches = spack.store.find(args.specs, multiple=args.multiple, query_fn=query)
for match in matches:
bindist.install_single_spec(
- match,
- allow_root=args.allow_root,
- unsigned=args.unsigned,
- force=args.force
+ match, allow_root=args.allow_root, unsigned=args.unsigned, force=args.force
)
@@ -367,10 +418,12 @@ def list_fn(args):
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
if sys.stdout.isatty():
builds = len(specs)
- tty.msg("%s." % plural(builds, 'cached build'))
+ tty.msg("%s." % plural(builds, "cached build"))
if not builds and not args.allarch:
- tty.msg("You can query all available architectures with:",
- "spack buildcache list --allarch")
+ tty.msg(
+ "You can query all available architectures with:",
+ "spack buildcache list --allarch",
+ )
display_specs(specs, args, all_headers=True)
@@ -403,29 +456,28 @@ def check_fn(args):
if args.spec or args.spec_file:
specs = [_concrete_spec_from_args(args)]
else:
- env = spack.cmd.require_active_env(cmd_name='buildcache')
+ env = spack.cmd.require_active_env(cmd_name="buildcache")
env.concretize()
specs = env.all_specs()
if not specs:
- tty.msg('No specs provided, exiting.')
+ tty.msg("No specs provided, exiting.")
sys.exit(0)
for spec in specs:
spec.concretize()
# Next see if there are any configured binary mirrors
- configured_mirrors = spack.config.get('mirrors', scope=args.scope)
+ configured_mirrors = spack.config.get("mirrors", scope=args.scope)
if args.mirror_url:
- configured_mirrors = {'additionalMirrorUrl': args.mirror_url}
+ configured_mirrors = {"additionalMirrorUrl": args.mirror_url}
if not configured_mirrors:
- tty.msg('No mirrors provided, exiting.')
+ tty.msg("No mirrors provided, exiting.")
sys.exit(0)
- sys.exit(bindist.check_specs_against_mirrors(
- configured_mirrors, specs, args.output_file))
+ sys.exit(bindist.check_specs_against_mirrors(configured_mirrors, specs, args.output_file))
def download_fn(args):
@@ -434,11 +486,11 @@ def download_fn(args):
a non-zero exit code indicates that the command failed to download at
least one of the required buildcache components."""
if not args.spec and not args.spec_file:
- tty.msg('No specs provided, exiting.')
+ tty.msg("No specs provided, exiting.")
sys.exit(0)
if not args.path:
- tty.msg('No download path provided, exiting')
+ tty.msg("No download path provided, exiting")
sys.exit(0)
spec = _concrete_spec_from_args(args)
@@ -451,8 +503,8 @@ def download_fn(args):
def get_buildcache_name_fn(args):
"""Get name (prefix) of buildcache entries for this spec"""
spec = _concrete_spec_from_args(args)
- buildcache_name = bindist.tarball_name(spec, '')
- print('{0}'.format(buildcache_name))
+ buildcache_name = bindist.tarball_name(spec, "")
+ print("{0}".format(buildcache_name))
def save_specfile_fn(args):
@@ -464,28 +516,29 @@ def save_specfile_fn(args):
non-zero.
"""
if not args.root_spec and not args.root_specfile:
- tty.msg('No root spec provided, exiting.')
+ tty.msg("No root spec provided, exiting.")
sys.exit(1)
if not args.specs:
- tty.msg('No dependent specs provided, exiting.')
+ tty.msg("No dependent specs provided, exiting.")
sys.exit(1)
if not args.specfile_dir:
- tty.msg('No yaml directory provided, exiting.')
+ tty.msg("No yaml directory provided, exiting.")
sys.exit(1)
if args.root_specfile:
with open(args.root_specfile) as fd:
root_spec_as_json = fd.read()
- spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
+ spec_format = "yaml" if args.root_specfile.endswith("yaml") else "json"
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
- spec_format = 'json'
+ spec_format = "json"
save_dependency_specfiles(
- root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
+ root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format
+ )
sys.exit(0)
@@ -496,56 +549,55 @@ def copy_fn(args):
The specific buildcache entry to be copied from one location to the
other is identified using the '--spec-file' argument."""
# TODO: Remove after v0.18.0 release
- msg = ('"spack buildcache copy" is deprecated and will be removed from '
- 'Spack starting in v0.19.0')
+ msg = (
+ '"spack buildcache copy" is deprecated and will be removed from '
+ "Spack starting in v0.19.0"
+ )
warnings.warn(msg)
if not args.spec_file:
- tty.msg('No spec yaml provided, exiting.')
+ tty.msg("No spec yaml provided, exiting.")
sys.exit(1)
if not args.base_dir:
- tty.msg('No base directory provided, exiting.')
+ tty.msg("No base directory provided, exiting.")
sys.exit(1)
if not args.destination_url:
- tty.msg('No destination mirror url provided, exiting.')
+ tty.msg("No destination mirror url provided, exiting.")
sys.exit(1)
dest_url = args.destination_url
- if dest_url[0:7] != 'file://' and dest_url[0] != '/':
- tty.msg('Only urls beginning with "file://" or "/" are supported ' +
- 'by buildcache copy.')
+ if dest_url[0:7] != "file://" and dest_url[0] != "/":
+ tty.msg('Only urls beginning with "file://" or "/" are supported ' + "by buildcache copy.")
sys.exit(1)
try:
- with open(args.spec_file, 'r') as fd:
+ with open(args.spec_file, "r") as fd:
spec = Spec.from_yaml(fd.read())
except Exception as e:
tty.debug(e)
- tty.error('Unable to concretize spec from yaml {0}'.format(
- args.spec_file))
+ tty.error("Unable to concretize spec from yaml {0}".format(args.spec_file))
sys.exit(1)
dest_root_path = dest_url
- if dest_url[0:7] == 'file://':
+ if dest_url[0:7] == "file://":
dest_root_path = dest_url[7:]
build_cache_dir = bindist.build_cache_relative_path()
- tarball_rel_path = os.path.join(
- build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
+ tarball_rel_path = os.path.join(build_cache_dir, bindist.tarball_path_name(spec, ".spack"))
tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)
- specfile_rel_path = os.path.join(
- build_cache_dir, bindist.tarball_name(spec, '.spec.json'))
+ specfile_rel_path = os.path.join(build_cache_dir, bindist.tarball_name(spec, ".spec.json"))
specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)
specfile_rel_path_yaml = os.path.join(
- build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
+ build_cache_dir, bindist.tarball_name(spec, ".spec.yaml")
+ )
specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
@@ -553,18 +605,18 @@ def copy_fn(args):
os.makedirs(os.path.dirname(tarball_dest_path))
# Now copy the specfile and tarball files to the destination mirror
- tty.msg('Copying {0}'.format(tarball_rel_path))
+ tty.msg("Copying {0}".format(tarball_rel_path))
shutil.copyfile(tarball_src_path, tarball_dest_path)
- tty.msg('Copying {0}'.format(specfile_rel_path))
+ tty.msg("Copying {0}".format(specfile_rel_path))
shutil.copyfile(specfile_src_path, specfile_dest_path)
- tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
+ tty.msg("Copying {0}".format(specfile_rel_path_yaml))
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
def sync_fn(args):
- """ Syncs binaries (and associated metadata) from one mirror to another.
+ """Syncs binaries (and associated metadata) from one mirror to another.
Requires an active environment in order to know which specs to sync.
Args:
@@ -575,25 +627,21 @@ def sync_fn(args):
source_location = None
if args.src_directory:
source_location = args.src_directory
- scheme = url_util.parse(source_location, scheme='<missing>').scheme
- if scheme != '<missing>':
- raise ValueError(
- '"--src-directory" expected a local path; got a URL, instead')
+ scheme = url_util.parse(source_location, scheme="<missing>").scheme
+ if scheme != "<missing>":
+ raise ValueError('"--src-directory" expected a local path; got a URL, instead')
# Ensure that the mirror lookup does not mistake this for named mirror
- source_location = 'file://' + source_location
+ source_location = "file://" + source_location
elif args.src_mirror_name:
source_location = args.src_mirror_name
result = spack.mirror.MirrorCollection().lookup(source_location)
if result.name == "<unnamed>":
- raise ValueError(
- 'no configured mirror named "{name}"'.format(
- name=source_location))
+ raise ValueError('no configured mirror named "{name}"'.format(name=source_location))
elif args.src_mirror_url:
source_location = args.src_mirror_url
- scheme = url_util.parse(source_location, scheme='<missing>').scheme
- if scheme == '<missing>':
- raise ValueError(
- '"{url}" is not a valid URL'.format(url=source_location))
+ scheme = url_util.parse(source_location, scheme="<missing>").scheme
+ if scheme == "<missing>":
+ raise ValueError('"{url}" is not a valid URL'.format(url=source_location))
src_mirror = spack.mirror.MirrorCollection().lookup(source_location)
src_mirror_url = url_util.format(src_mirror.fetch_url)
@@ -602,51 +650,48 @@ def sync_fn(args):
dest_location = None
if args.dest_directory:
dest_location = args.dest_directory
- scheme = url_util.parse(dest_location, scheme='<missing>').scheme
- if scheme != '<missing>':
- raise ValueError(
- '"--dest-directory" expected a local path; got a URL, instead')
+ scheme = url_util.parse(dest_location, scheme="<missing>").scheme
+ if scheme != "<missing>":
+ raise ValueError('"--dest-directory" expected a local path; got a URL, instead')
# Ensure that the mirror lookup does not mistake this for named mirror
- dest_location = 'file://' + dest_location
+ dest_location = "file://" + dest_location
elif args.dest_mirror_name:
dest_location = args.dest_mirror_name
result = spack.mirror.MirrorCollection().lookup(dest_location)
if result.name == "<unnamed>":
- raise ValueError(
- 'no configured mirror named "{name}"'.format(
- name=dest_location))
+ raise ValueError('no configured mirror named "{name}"'.format(name=dest_location))
elif args.dest_mirror_url:
dest_location = args.dest_mirror_url
- scheme = url_util.parse(dest_location, scheme='<missing>').scheme
- if scheme == '<missing>':
- raise ValueError(
- '"{url}" is not a valid URL'.format(url=dest_location))
+ scheme = url_util.parse(dest_location, scheme="<missing>").scheme
+ if scheme == "<missing>":
+ raise ValueError('"{url}" is not a valid URL'.format(url=dest_location))
dest_mirror = spack.mirror.MirrorCollection().lookup(dest_location)
dest_mirror_url = url_util.format(dest_mirror.fetch_url)
# Get the active environment
- env = spack.cmd.require_active_env(cmd_name='buildcache sync')
+ env = spack.cmd.require_active_env(cmd_name="buildcache sync")
- tty.msg('Syncing environment buildcache files from {0} to {1}'.format(
- src_mirror_url, dest_mirror_url))
+ tty.msg(
+ "Syncing environment buildcache files from {0} to {1}".format(
+ src_mirror_url, dest_mirror_url
+ )
+ )
build_cache_dir = bindist.build_cache_relative_path()
buildcache_rel_paths = []
- tty.debug('Syncing the following specs:')
+ tty.debug("Syncing the following specs:")
for s in env.all_specs():
- tty.debug(' {0}{1}: {2}'.format(
- '* ' if s in env.roots() else ' ', s.name, s.dag_hash()))
-
- buildcache_rel_paths.extend([
- os.path.join(
- build_cache_dir, bindist.tarball_path_name(s, '.spack')),
- os.path.join(
- build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
- os.path.join(
- build_cache_dir, bindist.tarball_name(s, '.spec.json')),
- ])
+ tty.debug(" {0}{1}: {2}".format("* " if s in env.roots() else " ", s.name, s.dag_hash()))
+
+ buildcache_rel_paths.extend(
+ [
+ os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")),
+ os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
+ os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")),
+ ]
+ )
tmpdir = tempfile.mkdtemp()
@@ -656,23 +701,18 @@ def sync_fn(args):
local_path = os.path.join(tmpdir, rel_path)
dest_url = url_util.join(dest_mirror_url, rel_path)
- tty.debug('Copying {0} to {1} via {2}'.format(
- src_url, dest_url, local_path))
+ tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path))
- stage = Stage(src_url,
- name="temporary_file",
- path=os.path.dirname(local_path),
- keep=True)
+ stage = Stage(
+ src_url, name="temporary_file", path=os.path.dirname(local_path), keep=True
+ )
try:
stage.create()
stage.fetch()
- web_util.push_to_url(
- local_path,
- dest_url,
- keep_original=True)
+ web_util.push_to_url(local_path, dest_url, keep_original=True)
except fs.FetchError as e:
- tty.debug('spack buildcache unable to sync {0}'.format(rel_path))
+ tty.debug("spack buildcache unable to sync {0}".format(rel_path))
tty.debug(e)
finally:
stage.destroy()
@@ -684,20 +724,19 @@ def update_index(mirror_url, update_keys=False):
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
outdir = url_util.format(mirror.push_url)
- bindist.generate_package_index(
- url_util.join(outdir, bindist.build_cache_relative_path()))
+ bindist.generate_package_index(url_util.join(outdir, bindist.build_cache_relative_path()))
if update_keys:
- keys_url = url_util.join(outdir,
- bindist.build_cache_relative_path(),
- bindist.build_cache_keys_relative_path())
+ keys_url = url_util.join(
+ outdir, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
+ )
bindist.generate_key_index(keys_url)
def update_index_fn(args):
"""Update a buildcache index."""
- outdir = 'file://.'
+ outdir = "file://."
if args.mirror_url:
outdir = args.mirror_url
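
# Aside: the buildcache parser above follows the common argparse pattern of one sub-parser
# per action, a mutually exclusive group for the output target, and set_defaults(func=...)
# to bind each handler. A small self-contained sketch of that pattern (command and flag
# names are illustrative, not the actual spack CLI):

import argparse

def create_fn(args):
    target = args.directory or args.mirror_url
    print("would push {0} package(s) to {1}".format(len(args.specs), target))

def list_fn(args):
    print("would list cached builds (allarch={0})".format(args.allarch))

def setup_parser():
    parser = argparse.ArgumentParser(prog="buildcache-demo")
    subparsers = parser.add_subparsers(dest="subcommand", required=True)

    create = subparsers.add_parser("create", help="push packages to a cache")
    create.add_argument("-f", "--force", action="store_true", help="overwrite existing tarballs")
    # Exactly one output location must be chosen, as with -d / -m / --mirror-url above.
    output = create.add_mutually_exclusive_group(required=True)
    output.add_argument("-d", "--directory", help="local directory to write to")
    output.add_argument("--mirror-url", help="URL of the destination mirror")
    create.add_argument("specs", nargs="+", help="specs to push")
    create.set_defaults(func=create_fn)

    listcache = subparsers.add_parser("list", help="list cached builds")
    listcache.add_argument("-a", "--allarch", action="store_true", help="list all architectures")
    listcache.set_defaults(func=list_fn)
    return parser

if __name__ == "__main__":
    args = setup_parser().parse_args(["create", "-d", "/tmp/cache", "zlib", "openssl"])
    args.func(args)  # dispatch to the handler bound via set_defaults
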
diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py
index 0892719114..dd1bca28c9 100644
--- a/lib/spack/spack/cmd/cd.py
+++ b/lib/spack/spack/cmd/cd.py
@@ -13,14 +13,13 @@ level = "long"
def setup_parser(subparser):
"""This is for decoration -- spack cd is used through spack's
- shell support. This allows spack cd to print a descriptive
- help message when called with -h."""
+ shell support. This allows spack cd to print a descriptive
+ help message when called with -h."""
spack.cmd.location.setup_parser(subparser)
def cd(parser, args):
spec = " ".join(args.spec) if args.spec else "SPEC"
spack.cmd.common.shell_init_instructions(
- "spack cd",
- "cd `spack location --install-dir %s`" % spec
+ "spack cd", "cd `spack location --install-dir %s`" % spec
)
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 89509cca29..3cfdcabff9 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -26,29 +26,44 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '--keep-stage', action='store_true', default=False,
- help="don't clean up staging area when command completes")
+ "--keep-stage",
+ action="store_true",
+ default=False,
+ help="don't clean up staging area when command completes",
+ )
sp = subparser.add_mutually_exclusive_group()
sp.add_argument(
- '-b', '--batch', action='store_true', default=False,
- help="don't ask which versions to checksum")
+ "-b",
+ "--batch",
+ action="store_true",
+ default=False,
+ help="don't ask which versions to checksum",
+ )
sp.add_argument(
- '-l', '--latest', action='store_true', default=False,
- help="checksum the latest available version only")
+ "-l",
+ "--latest",
+ action="store_true",
+ default=False,
+ help="checksum the latest available version only",
+ )
sp.add_argument(
- '-p', '--preferred', action='store_true', default=False,
- help="checksum the preferred version only")
- arguments.add_common_arguments(subparser, ['package'])
+ "-p",
+ "--preferred",
+ action="store_true",
+ default=False,
+ help="checksum the preferred version only",
+ )
+ arguments.add_common_arguments(subparser, ["package"])
subparser.add_argument(
- 'versions', nargs=argparse.REMAINDER,
- help='versions to generate checksums for')
+ "versions", nargs=argparse.REMAINDER, help="versions to generate checksums for"
+ )
def checksum(parser, args):
# Did the user pass 'package@version' string?
- if len(args.versions) == 0 and '@' in args.package:
- args.versions = [args.package.split('@')[1]]
- args.package = args.package.split('@')[0]
+ if len(args.versions) == 0 and "@" in args.package:
+ args.versions = [args.package.split("@")[1]]
+ args.package = args.package.split("@")[0]
# Make sure the user provided a package and not a URL
if not valid_fully_qualified_module_name(args.package):
@@ -68,8 +83,10 @@ def checksum(parser, args):
for version in versions:
version = ver(version)
if not isinstance(version, VersionBase):
- tty.die("Cannot generate checksums for version lists or "
- "version ranges. Use unambiguous versions.")
+ tty.die(
+ "Cannot generate checksums for version lists or "
+ "version ranges. Use unambiguous versions."
+ )
url = pkg.find_valid_url_for_version(version)
if url is not None:
url_dict[version] = url
@@ -87,9 +104,13 @@ def checksum(parser, args):
tty.die("Could not find any versions for {0}".format(pkg.name))
version_lines = spack.stage.get_checksums_for_versions(
- url_dict, pkg.name, keep_stage=args.keep_stage,
+ url_dict,
+ pkg.name,
+ keep_stage=args.keep_stage,
batch=(args.batch or len(args.versions) > 0 or len(url_dict) == 1),
- latest=args.latest, fetch_options=pkg.fetch_options)
+ latest=args.latest,
+ fetch_options=pkg.fetch_options,
+ )
print()
print(version_lines)
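
# Aside: the checksum command above accepts either an explicit version list or a single
# 'package@version' string and splits the latter on '@'. A tiny illustrative sketch of that
# splitting (the helper name is made up, not part of Spack):

def split_package_arg(package_arg, versions):
    """Split 'pkg@1.2.3' into ('pkg', ['1.2.3']); leave explicit version lists alone."""
    if not versions and "@" in package_arg:
        name, _, version = package_arg.partition("@")
        return name, [version]
    return package_arg, versions

print(split_package_arg("zlib@1.2.13", []))   # ('zlib', ['1.2.13'])
print(split_package_arg("zlib", ["1.2.13"]))  # ('zlib', ['1.2.13'])
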
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
index fc30c1c1bf..4944f61b2a 100644
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -30,7 +30,7 @@ description = "manage continuous integration pipelines"
section = "build"
level = "long"
-CI_REBUILD_INSTALL_BASE_ARGS = ['spack', '-d', '-v']
+CI_REBUILD_INSTALL_BASE_ARGS = ["spack", "-d", "-v"]
INSTALL_FAIL_CODE = 1
@@ -42,45 +42,67 @@ def get_env_var(variable_name):
def setup_parser(subparser):
setup_parser.parser = subparser
- subparsers = subparser.add_subparsers(help='CI sub-commands')
+ subparsers = subparser.add_subparsers(help="CI sub-commands")
# Dynamic generation of the jobs yaml from a spack environment
- generate = subparsers.add_parser('generate', help=ci_generate.__doc__)
+ generate = subparsers.add_parser("generate", help=ci_generate.__doc__)
generate.add_argument(
- '--output-file', default=None,
- help="Path to file where generated jobs file should be " +
- "written. The default is .gitlab-ci.yml in the root of the " +
- "repository.")
+ "--output-file",
+ default=None,
+ help="Path to file where generated jobs file should be "
+ + "written. The default is .gitlab-ci.yml in the root of the "
+ + "repository.",
+ )
generate.add_argument(
- '--copy-to', default=None,
- help="Absolute path of additional location where generated jobs " +
- "yaml file should be copied. Default is not to copy.")
+ "--copy-to",
+ default=None,
+ help="Absolute path of additional location where generated jobs "
+ + "yaml file should be copied. Default is not to copy.",
+ )
generate.add_argument(
- '--optimize', action='store_true', default=False,
+ "--optimize",
+ action="store_true",
+ default=False,
help="(Experimental) run the generated document through a series of "
- "optimization passes designed to reduce the size of the "
- "generated file.")
+ "optimization passes designed to reduce the size of the "
+ "generated file.",
+ )
generate.add_argument(
- '--dependencies', action='store_true', default=False,
- help="(Experimental) disable DAG scheduling; use "
- ' "plain" dependencies.')
+ "--dependencies",
+ action="store_true",
+ default=False,
+ help="(Experimental) disable DAG scheduling; use " ' "plain" dependencies.',
+ )
generate.add_argument(
- '--buildcache-destination', default=None,
- help="Override the mirror configured in the environment (spack.yaml) " +
- "in order to push binaries from the generated pipeline to a " +
- "different location.")
+ "--buildcache-destination",
+ default=None,
+ help="Override the mirror configured in the environment (spack.yaml) "
+ + "in order to push binaries from the generated pipeline to a "
+ + "different location.",
+ )
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
- '--prune-dag', action='store_true', dest='prune_dag',
- default=True, help="""Do not generate jobs for specs already up to
-date on the mirror""")
+ "--prune-dag",
+ action="store_true",
+ dest="prune_dag",
+ default=True,
+ help="""Do not generate jobs for specs already up to
+date on the mirror""",
+ )
prune_group.add_argument(
- '--no-prune-dag', action='store_false', dest='prune_dag',
- default=True, help="""Generate jobs for specs already up to date
-on the mirror""")
+ "--no-prune-dag",
+ action="store_false",
+ dest="prune_dag",
+ default=True,
+ help="""Generate jobs for specs already up to date
+on the mirror""",
+ )
generate.add_argument(
- '--check-index-only', action='store_true', dest='index_only',
- default=False, help="""Spack always check specs against configured
+ "--check-index-only",
+ action="store_true",
+ dest="index_only",
+ default=False,
+ help="""Spack always check specs against configured
binary mirrors when generating the pipeline, regardless of whether or not
DAG pruning is enabled. This flag controls whether it might attempt to
fetch remote spec files directly (ensuring no spec is rebuilt if it
@@ -88,42 +110,47 @@ is present on the mirror), or whether it should reduce pipeline generation time
by assuming all remote buildcache indices are up to date and only use those
to determine whether a given spec is up to date on mirrors. In the latter
case, specs might be needlessly rebuilt if remote buildcache indices are out
-of date.""")
+of date.""",
+ )
generate.add_argument(
- '--artifacts-root', default=None,
+ "--artifacts-root",
+ default=None,
help="""Path to root of artifacts directory. If provided, concrete
environment files (spack.yaml, spack.lock) will be generated under this
path and their location sent to generated child jobs via the custom job
-variable SPACK_CONCRETE_ENVIRONMENT_PATH.""")
+variable SPACK_CONCRETE_ENVIRONMENT_PATH.""",
+ )
generate.set_defaults(func=ci_generate)
# Rebuild the buildcache index associated with the mirror in the
# active, gitlab-enabled environment.
- index = subparsers.add_parser('rebuild-index', help=ci_reindex.__doc__)
+ index = subparsers.add_parser("rebuild-index", help=ci_reindex.__doc__)
index.set_defaults(func=ci_reindex)
# Handle steps of a ci build/rebuild
- rebuild = subparsers.add_parser('rebuild', help=ci_rebuild.__doc__)
+ rebuild = subparsers.add_parser("rebuild", help=ci_rebuild.__doc__)
rebuild.set_defaults(func=ci_rebuild)
# Facilitate reproduction of a failed CI build job
- reproduce = subparsers.add_parser('reproduce-build',
- help=ci_reproduce.__doc__)
- reproduce.add_argument('job_url', help='Url of job artifacts bundle')
- reproduce.add_argument('--working-dir', help="Where to unpack artifacts",
- default=os.path.join(os.getcwd(), 'ci_reproduction'))
+ reproduce = subparsers.add_parser("reproduce-build", help=ci_reproduce.__doc__)
+ reproduce.add_argument("job_url", help="Url of job artifacts bundle")
+ reproduce.add_argument(
+ "--working-dir",
+ help="Where to unpack artifacts",
+ default=os.path.join(os.getcwd(), "ci_reproduction"),
+ )
reproduce.set_defaults(func=ci_reproduce)
def ci_generate(args):
"""Generate jobs file from a spack environment file containing CI info.
- Before invoking this command, you can set the environment variable
- SPACK_CDASH_AUTH_TOKEN to contain the CDash authorization token
- for creating a build group for the generated workload and registering
- all generated jobs under that build group. If this environment
- variable is not set, no build group will be created on CDash."""
- env = spack.cmd.require_active_env(cmd_name='ci generate')
+ Before invoking this command, you can set the environment variable
+ SPACK_CDASH_AUTH_TOKEN to contain the CDash authorization token
+ for creating a build group for the generated workload and registering
+ all generated jobs under that build group. If this environment
+ variable is not set, no build group will be created on CDash."""
+ env = spack.cmd.require_active_env(cmd_name="ci generate")
output_file = args.output_file
copy_yaml_to = args.copy_to
@@ -144,10 +171,16 @@ def ci_generate(args):
# Generate the jobs
spack_ci.generate_gitlab_ci_yaml(
- env, True, output_file, prune_dag=prune_dag,
- check_index_only=index_only, run_optimizer=run_optimizer,
- use_dependencies=use_dependencies, artifacts_root=artifacts_root,
- remote_mirror_override=buildcache_destination)
+ env,
+ True,
+ output_file,
+ prune_dag=prune_dag,
+ check_index_only=index_only,
+ run_optimizer=run_optimizer,
+ use_dependencies=use_dependencies,
+ artifacts_root=artifacts_root,
+ remote_mirror_override=buildcache_destination,
+ )
if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)
@@ -158,14 +191,14 @@ def ci_generate(args):
def ci_reindex(args):
"""Rebuild the buildcache index associated with the mirror in the
- active, gitlab-enabled environment. """
- env = spack.cmd.require_active_env(cmd_name='ci rebuild-index')
+ active, gitlab-enabled environment."""
+ env = spack.cmd.require_active_env(cmd_name="ci rebuild-index")
yaml_root = ev.config_dict(env.yaml)
- if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
- tty.die('spack ci rebuild-index requires an env containing a mirror')
+ if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
+ tty.die("spack ci rebuild-index requires an env containing a mirror")
- ci_mirrors = yaml_root['mirrors']
+ ci_mirrors = yaml_root["mirrors"]
mirror_urls = [url for url in ci_mirrors.values()]
remote_mirror_url = mirror_urls[0]
@@ -174,85 +207,90 @@ def ci_reindex(args):
def ci_rebuild(args):
"""Check a single spec against the remote mirror, and rebuild it from
- source if the mirror does not contain the hash. """
- env = spack.cmd.require_active_env(cmd_name='ci rebuild')
+ source if the mirror does not contain the hash."""
+ env = spack.cmd.require_active_env(cmd_name="ci rebuild")
# Make sure the environment is "gitlab-enabled", or else there's nothing
# to do.
yaml_root = ev.config_dict(env.yaml)
gitlab_ci = None
- if 'gitlab-ci' in yaml_root:
- gitlab_ci = yaml_root['gitlab-ci']
+ if "gitlab-ci" in yaml_root:
+ gitlab_ci = yaml_root["gitlab-ci"]
if not gitlab_ci:
- tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')
+ tty.die("spack ci rebuild requires an env containing gitlab-ci cfg")
- tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
- os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))
+ tty.msg(
+ "SPACK_BUILDCACHE_DESTINATION={0}".format(
+ os.environ.get("SPACK_BUILDCACHE_DESTINATION", None)
+ )
+ )
# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
- pipeline_artifacts_dir = get_env_var('SPACK_ARTIFACTS_ROOT')
- job_log_dir = get_env_var('SPACK_JOB_LOG_DIR')
- repro_dir = get_env_var('SPACK_JOB_REPRO_DIR')
- local_mirror_dir = get_env_var('SPACK_LOCAL_MIRROR_DIR')
- concrete_env_dir = get_env_var('SPACK_CONCRETE_ENV_DIR')
- ci_pipeline_id = get_env_var('CI_PIPELINE_ID')
- ci_job_name = get_env_var('CI_JOB_NAME')
- signing_key = get_env_var('SPACK_SIGNING_KEY')
- root_spec = get_env_var('SPACK_ROOT_SPEC')
- job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
- compiler_action = get_env_var('SPACK_COMPILER_ACTION')
- cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
- spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
- remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
- remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
+ pipeline_artifacts_dir = get_env_var("SPACK_ARTIFACTS_ROOT")
+ job_log_dir = get_env_var("SPACK_JOB_LOG_DIR")
+ repro_dir = get_env_var("SPACK_JOB_REPRO_DIR")
+ local_mirror_dir = get_env_var("SPACK_LOCAL_MIRROR_DIR")
+ concrete_env_dir = get_env_var("SPACK_CONCRETE_ENV_DIR")
+ ci_pipeline_id = get_env_var("CI_PIPELINE_ID")
+ ci_job_name = get_env_var("CI_JOB_NAME")
+ signing_key = get_env_var("SPACK_SIGNING_KEY")
+ root_spec = get_env_var("SPACK_ROOT_SPEC")
+ job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
+ compiler_action = get_env_var("SPACK_COMPILER_ACTION")
+ cdash_build_name = get_env_var("SPACK_CDASH_BUILD_NAME")
+ spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
+ remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
+ remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
# Construct absolute paths relative to current $CI_PROJECT_DIR
- ci_project_dir = get_env_var('CI_PROJECT_DIR')
- pipeline_artifacts_dir = os.path.join(
- ci_project_dir, pipeline_artifacts_dir)
+ ci_project_dir = get_env_var("CI_PROJECT_DIR")
+ pipeline_artifacts_dir = os.path.join(ci_project_dir, pipeline_artifacts_dir)
job_log_dir = os.path.join(ci_project_dir, job_log_dir)
repro_dir = os.path.join(ci_project_dir, repro_dir)
local_mirror_dir = os.path.join(ci_project_dir, local_mirror_dir)
concrete_env_dir = os.path.join(ci_project_dir, concrete_env_dir)
# Debug print some of the key environment variables we should have received
- tty.debug('pipeline_artifacts_dir = {0}'.format(pipeline_artifacts_dir))
- tty.debug('root_spec = {0}'.format(root_spec))
- tty.debug('remote_mirror_url = {0}'.format(remote_mirror_url))
- tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
- tty.debug('compiler_action = {0}'.format(compiler_action))
+ tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
+ tty.debug("root_spec = {0}".format(root_spec))
+ tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
+ tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
+ tty.debug("compiler_action = {0}".format(compiler_action))
# Query the environment manifest to find out whether we're reporting to a
# CDash instance, and if so, gather some information from the manifest to
# support that task.
enable_cdash = False
- if 'cdash' in yaml_root:
+ if "cdash" in yaml_root:
enable_cdash = True
- ci_cdash = yaml_root['cdash']
- job_spec_buildgroup = ci_cdash['build-group']
- cdash_base_url = ci_cdash['url']
- cdash_project = ci_cdash['project']
- proj_enc = urlencode({'project': cdash_project})
- eq_idx = proj_enc.find('=') + 1
+ ci_cdash = yaml_root["cdash"]
+ job_spec_buildgroup = ci_cdash["build-group"]
+ cdash_base_url = ci_cdash["url"]
+ cdash_project = ci_cdash["project"]
+ proj_enc = urlencode({"project": cdash_project})
+ eq_idx = proj_enc.find("=") + 1
cdash_project_enc = proj_enc[eq_idx:]
- cdash_site = ci_cdash['site']
- tty.debug('cdash_base_url = {0}'.format(cdash_base_url))
- tty.debug('cdash_project = {0}'.format(cdash_project))
- tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
- tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
- tty.debug('cdash_site = {0}'.format(cdash_site))
- tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))
+ cdash_site = ci_cdash["site"]
+ tty.debug("cdash_base_url = {0}".format(cdash_base_url))
+ tty.debug("cdash_project = {0}".format(cdash_project))
+ tty.debug("cdash_project_enc = {0}".format(cdash_project_enc))
+ tty.debug("cdash_build_name = {0}".format(cdash_build_name))
+ tty.debug("cdash_site = {0}".format(cdash_site))
+ tty.debug("job_spec_buildgroup = {0}".format(job_spec_buildgroup))
# Is this a pipeline run on a spack PR or a merge to develop? It might
# be neither, e.g. a pipeline run on some environment repository.
- spack_is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'
- spack_is_develop_pipeline = spack_pipeline_type == 'spack_protected_branch'
+ spack_is_pr_pipeline = spack_pipeline_type == "spack_pull_request"
+ spack_is_develop_pipeline = spack_pipeline_type == "spack_protected_branch"
- tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
- spack_is_pr_pipeline, spack_is_develop_pipeline))
+ tty.debug(
+ "Pipeline type - PR: {0}, develop: {1}".format(
+ spack_is_pr_pipeline, spack_is_develop_pipeline
+ )
+ )
# If no override url exists, then just push binary package to the
# normal remote mirror url.
@@ -265,16 +303,16 @@ def ci_rebuild(args):
pipeline_mirror_url = None
temp_storage_url_prefix = None
- if 'temporary-storage-url-prefix' in gitlab_ci:
- temp_storage_url_prefix = gitlab_ci['temporary-storage-url-prefix']
- pipeline_mirror_url = url_util.join(
- temp_storage_url_prefix, ci_pipeline_id)
+ if "temporary-storage-url-prefix" in gitlab_ci:
+ temp_storage_url_prefix = gitlab_ci["temporary-storage-url-prefix"]
+ pipeline_mirror_url = url_util.join(temp_storage_url_prefix, ci_pipeline_id)
enable_artifacts_mirror = False
- if 'enable-artifacts-buildcache' in gitlab_ci:
- enable_artifacts_mirror = gitlab_ci['enable-artifacts-buildcache']
- if (enable_artifacts_mirror or (spack_is_pr_pipeline and
- not enable_artifacts_mirror and not temp_storage_url_prefix)):
+ if "enable-artifacts-buildcache" in gitlab_ci:
+ enable_artifacts_mirror = gitlab_ci["enable-artifacts-buildcache"]
+ if enable_artifacts_mirror or (
+ spack_is_pr_pipeline and not enable_artifacts_mirror and not temp_storage_url_prefix
+ ):
# If you explicitly enabled the artifacts buildcache feature, or
# if this is a PR pipeline but you did not enable either of the
# per-pipeline temporary storage features, we force the use of
@@ -282,18 +320,16 @@ def ci_rebuild(args):
# dependencies from previous stages available since we do not
# allow pushing binaries to the remote mirror during PR pipelines.
enable_artifacts_mirror = True
- pipeline_mirror_url = 'file://' + local_mirror_dir
- mirror_msg = 'artifact buildcache enabled, mirror url: {0}'.format(
- pipeline_mirror_url)
+ pipeline_mirror_url = "file://" + local_mirror_dir
+ mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
tty.debug(mirror_msg)
# Whatever form of root_spec we got, use it to get a map giving us concrete
# specs for this job and all of its dependencies.
- spec_map = spack_ci.get_concrete_specs(
- env, root_spec, job_spec_pkg_name, compiler_action)
+ spec_map = spack_ci.get_concrete_specs(env, root_spec, job_spec_pkg_name, compiler_action)
job_spec = spec_map[job_spec_pkg_name]
- job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
+ job_spec_json_file = "{0}.json".format(job_spec_pkg_name)
job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
# To provide logs, cdash reports, etc for developer download/perusal,
@@ -301,7 +337,7 @@ def ci_rebuild(args):
# jobs that "need" this job will get those artifacts too. So here we
# need to clean out the artifacts we may have got from upstream jobs.
- cdash_report_dir = os.path.join(pipeline_artifacts_dir, 'cdash_report')
+ cdash_report_dir = os.path.join(pipeline_artifacts_dir, "cdash_report")
if os.path.exists(cdash_report_dir):
shutil.rmtree(cdash_report_dir)
@@ -323,10 +359,7 @@ def ci_rebuild(args):
# Try to cast a slightly wider net too, and hopefully get the generated
# pipeline yaml. If we miss it, the user will still be able to go to the
# pipeline generation job and get it from there.
- target_dirs = [
- concrete_env_dir,
- pipeline_artifacts_dir
- ]
+ target_dirs = [concrete_env_dir, pipeline_artifacts_dir]
for dir_to_list in target_dirs:
for file_name in os.listdir(dir_to_list):
@@ -350,61 +383,58 @@ def ci_rebuild(args):
# Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
- tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
- with open(job_spec_json_path, 'w') as fd:
+ tty.debug("job concrete spec path: {0}".format(job_spec_json_path))
+ with open(job_spec_json_path, "w") as fd:
fd.write(job_spec.to_json(hash=ht.dag_hash))
# Write the concrete root spec json into the reproduction directory
- root_spec_json_path = os.path.join(repro_dir, 'root.json')
- with open(root_spec_json_path, 'w') as fd:
- fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
+ root_spec_json_path = os.path.join(repro_dir, "root.json")
+ with open(root_spec_json_path, "w") as fd:
+ fd.write(spec_map["root"].to_json(hash=ht.dag_hash))
# Write some other details to aid in reproduction into an artifact
- repro_file = os.path.join(repro_dir, 'repro.json')
+ repro_file = os.path.join(repro_dir, "repro.json")
repro_details = {
- 'job_name': ci_job_name,
- 'job_spec_json': job_spec_json_file,
- 'root_spec_json': 'root.json',
- 'ci_project_dir': ci_project_dir
+ "job_name": ci_job_name,
+ "job_spec_json": job_spec_json_file,
+ "root_spec_json": "root.json",
+ "ci_project_dir": ci_project_dir,
}
- with open(repro_file, 'w') as fd:
+ with open(repro_file, "w") as fd:
fd.write(json.dumps(repro_details))
# Write information about spack into an artifact in the repro dir
spack_info = spack_ci.get_spack_info()
- spack_info_file = os.path.join(repro_dir, 'spack_info.txt')
- with open(spack_info_file, 'wb') as fd:
- fd.write(b'\n')
- fd.write(spack_info.encode('utf8'))
- fd.write(b'\n')
+ spack_info_file = os.path.join(repro_dir, "spack_info.txt")
+ with open(spack_info_file, "wb") as fd:
+ fd.write(b"\n")
+ fd.write(spack_info.encode("utf8"))
+ fd.write(b"\n")
# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# built for this spec.
if pipeline_mirror_url:
- spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
- pipeline_mirror_url,
- cfg.default_modify_scope())
+ spack.mirror.add(
+ spack_ci.TEMP_STORAGE_MIRROR_NAME, pipeline_mirror_url, cfg.default_modify_scope()
+ )
# Check configured mirrors for a built spec with a matching hash
mirrors_to_check = None
- if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
+ if remote_mirror_override and spack_pipeline_type == "spack_protected_branch":
# Passing "mirrors_to_check" below means we *only* look in the override
# mirror to see if we should skip building, which is what we want.
- mirrors_to_check = {
- 'override': remote_mirror_override
- }
+ mirrors_to_check = {"override": remote_mirror_override}
# Adding this mirror to the list of configured mirrors means dependencies
# could be installed from either the override mirror or any other configured
# mirror (e.g. remote_mirror_url which is defined in the environment or
# pipeline_mirror_url), which is also what we want.
- spack.mirror.add('mirror_override',
- remote_mirror_override,
- cfg.default_modify_scope())
+ spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
matches = bindist.get_mirrors_for_spec(
- job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
+ job_spec, mirrors_to_check=mirrors_to_check, index_only=False
+ )
if matches:
# Got a hash match on at least one configured mirror. All
@@ -413,21 +443,15 @@ def ci_rebuild(args):
# of the matches and download the buildcache files from there to
# the artifacts, so they're available to be used by dependent
# jobs in subsequent stages.
- tty.msg('No need to rebuild {0}, found hash match at: '.format(
- job_spec_pkg_name))
+ tty.msg("No need to rebuild {0}, found hash match at: ".format(job_spec_pkg_name))
for match in matches:
- tty.msg(' {0}'.format(match['mirror_url']))
+ tty.msg(" {0}".format(match["mirror_url"]))
if enable_artifacts_mirror:
- matching_mirror = matches[0]['mirror_url']
- build_cache_dir = os.path.join(local_mirror_dir, 'build_cache')
- tty.debug('Getting {0} buildcache from {1}'.format(
- job_spec_pkg_name, matching_mirror))
- tty.debug('Downloading to {0}'.format(build_cache_dir))
- bindist.download_single_spec(
- job_spec,
- build_cache_dir,
- mirror_url=matching_mirror
- )
+ matching_mirror = matches[0]["mirror_url"]
+ build_cache_dir = os.path.join(local_mirror_dir, "build_cache")
+ tty.debug("Getting {0} buildcache from {1}".format(job_spec_pkg_name, matching_mirror))
+ tty.debug("Downloading to {0}".format(build_cache_dir))
+ bindist.download_single_spec(job_spec, build_cache_dir, mirror_url=matching_mirror)
# Now we are done and successful
sys.exit(0)
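
The reproduction artifacts written above are plain files dropped into the job's repro directory. A minimal standalone sketch of that pattern, with made-up values standing in for the real job name, spec files and CI project directory (none of these values come from an actual pipeline):

import json
import os
import tempfile

repro_dir = tempfile.mkdtemp()

# Illustrative values only; the real job fills these in from the CI environment.
repro_details = {
    "job_name": "example-job",
    "job_spec_json": "example.spec.json",
    "root_spec_json": "root.json",
    "ci_project_dir": "/builds/example/project",
}

with open(os.path.join(repro_dir, "repro.json"), "w") as fd:
    fd.write(json.dumps(repro_details))

# Free-form text artifacts are written as bytes, padded with newlines.
with open(os.path.join(repro_dir, "spack_info.txt"), "wb") as fd:
    fd.write(b"\n")
    fd.write("spack version information goes here".encode("utf8"))
    fd.write(b"\n")
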
@@ -437,109 +461,114 @@ def ci_rebuild(args):
# Start with spack arguments
install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
- config = cfg.get('config')
- if not config['verify_ssl']:
- install_args.append('-k')
+ config = cfg.get("config")
+ if not config["verify_ssl"]:
+ install_args.append("-k")
- install_args.extend([
- 'install',
- '--keep-stage',
- ])
+ install_args.extend(
+ [
+ "install",
+ "--keep-stage",
+ ]
+ )
can_verify = spack_ci.can_verify_binaries()
verify_binaries = can_verify and spack_is_pr_pipeline is False
if not verify_binaries:
- install_args.append('--no-check-signature')
+ install_args.append("--no-check-signature")
if enable_cdash:
# Add additional arguments to `spack install` for CDash reporting.
- cdash_upload_url = '{0}/submit.php?project={1}'.format(
- cdash_base_url, cdash_project_enc)
-
- install_args.extend([
- '--cdash-upload-url', cdash_upload_url,
- '--cdash-build', cdash_build_name,
- '--cdash-site', cdash_site,
- '--cdash-track', job_spec_buildgroup,
- ])
+ cdash_upload_url = "{0}/submit.php?project={1}".format(cdash_base_url, cdash_project_enc)
+
+ install_args.extend(
+ [
+ "--cdash-upload-url",
+ cdash_upload_url,
+ "--cdash-build",
+ cdash_build_name,
+ "--cdash-site",
+ cdash_site,
+ "--cdash-track",
+ job_spec_buildgroup,
+ ]
+ )
# A compiler action of 'FIND_ANY' means we are building a bootstrap
# compiler or one of its deps.
# TODO: when compilers are dependencies, we should include --no-add
- if compiler_action != 'FIND_ANY':
- install_args.append('--no-add')
+ if compiler_action != "FIND_ANY":
+ install_args.append("--no-add")
# TODO: once we have the concrete spec registry, use the DAG hash
# to identify the spec to install, rather than the concrete spec
# json file.
- install_args.extend(['-f', job_spec_json_path])
+ install_args.extend(["-f", job_spec_json_path])
- tty.debug('Installing {0} from source'.format(job_spec.name))
- tty.debug('spack install arguments: {0}'.format(
- install_args))
+ tty.debug("Installing {0} from source".format(job_spec.name))
+ tty.debug("spack install arguments: {0}".format(install_args))
# Write the install command to a shell script
- with open('install.sh', 'w') as fd:
- fd.write('#!/bin/bash\n\n')
- fd.write('\n# spack install command\n')
- fd.write(' '.join(['"{0}"'.format(i) for i in install_args]))
- fd.write('\n')
+ with open("install.sh", "w") as fd:
+ fd.write("#!/bin/bash\n\n")
+ fd.write("\n# spack install command\n")
+ fd.write(" ".join(['"{0}"'.format(i) for i in install_args]))
+ fd.write("\n")
- st = os.stat('install.sh')
- os.chmod('install.sh', st.st_mode | stat.S_IEXEC)
+ st = os.stat("install.sh")
+ os.chmod("install.sh", st.st_mode | stat.S_IEXEC)
- install_copy_path = os.path.join(repro_dir, 'install.sh')
- shutil.copyfile('install.sh', install_copy_path)
+ install_copy_path = os.path.join(repro_dir, "install.sh")
+ shutil.copyfile("install.sh", install_copy_path)
# Run the generated install.sh shell script as if it were being run in
# a login shell.
try:
- install_process = subprocess.Popen(['bash', '-l', './install.sh'])
+ install_process = subprocess.Popen(["bash", "-l", "./install.sh"])
install_process.wait()
install_exit_code = install_process.returncode
except (ValueError, subprocess.CalledProcessError, OSError) as inst:
- tty.error('Encountered error running install script')
+ tty.error("Encountered error running install script")
tty.error(inst)
# Now do the post-install tasks
- tty.debug('spack install exited {0}'.format(install_exit_code))
+ tty.debug("spack install exited {0}".format(install_exit_code))
# If a spec fails to build in a spack develop pipeline, we add it to a
# list of known broken hashes. This allows spack PR pipelines to
# avoid wasting compute cycles attempting to build those hashes.
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
- tty.debug('Install failed on develop')
- if 'broken-specs-url' in gitlab_ci:
- broken_specs_url = gitlab_ci['broken-specs-url']
+ tty.debug("Install failed on develop")
+ if "broken-specs-url" in gitlab_ci:
+ broken_specs_url = gitlab_ci["broken-specs-url"]
dev_fail_hash = job_spec.dag_hash()
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
- tty.msg('Reporting broken develop build as: {0}'.format(
- broken_spec_path))
+ tty.msg("Reporting broken develop build as: {0}".format(broken_spec_path))
tmpdir = tempfile.mkdtemp()
- empty_file_path = os.path.join(tmpdir, 'empty.txt')
+ empty_file_path = os.path.join(tmpdir, "empty.txt")
broken_spec_details = {
- 'broken-spec': {
- 'job-url': get_env_var('CI_JOB_URL'),
- 'pipeline-url': get_env_var('CI_PIPELINE_URL'),
- 'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
+ "broken-spec": {
+ "job-url": get_env_var("CI_JOB_URL"),
+ "pipeline-url": get_env_var("CI_PIPELINE_URL"),
+ "concrete-spec-dict": job_spec.to_dict(hash=ht.dag_hash),
}
}
try:
- with open(empty_file_path, 'w') as efd:
+ with open(empty_file_path, "w") as efd:
efd.write(syaml.dump(broken_spec_details))
web_util.push_to_url(
empty_file_path,
broken_spec_path,
keep_original=False,
- extra_args={'ContentType': 'text/plain'})
+ extra_args={"ContentType": "text/plain"},
+ )
except Exception as err:
# If we got some kind of S3 (access denied or other connection
# error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return
- msg = 'Error writing to broken specs list {0}: {1}'.format(
- broken_spec_path, err)
+ msg = "Error writing to broken specs list {0}: {1}".format(broken_spec_path, err)
tty.warn(msg)
finally:
shutil.rmtree(tmpdir)
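
Reporting a broken develop build above boils down to: create a throwaway temporary directory, serialize the details into a file there, push that file to the broken-specs location, and always remove the temporary directory in a finally block. A minimal standalone sketch of the same shape, with json in place of syaml and a local copy standing in for the real upload (the helper name and arguments are illustrative):

import json
import os
import shutil
import tempfile

def report_broken_spec(dest_dir, spec_hash, details):
    # dest_dir stands in for the remote broken-specs URL.
    tmpdir = tempfile.mkdtemp()
    try:
        local_path = os.path.join(tmpdir, "empty.txt")
        with open(local_path, "w") as fd:
            fd.write(json.dumps(details))
        # Stand-in for the actual push to the remote location.
        shutil.copyfile(local_path, os.path.join(dest_dir, spec_hash))
    except Exception as err:
        print("Error writing to broken specs list: {0}".format(err))
    finally:
        shutil.rmtree(tmpdir)
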
@@ -574,32 +603,33 @@ def ci_rebuild(args):
# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.
- if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
- broken_specs_url = gitlab_ci['broken-specs-url']
+ if spack_is_develop_pipeline and "broken-specs-url" in gitlab_ci:
+ broken_specs_url = gitlab_ci["broken-specs-url"]
just_built_hash = job_spec.dag_hash()
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
if web_util.url_exists(broken_spec_path):
- tty.msg('Removing {0} from the list of broken specs'.format(
- broken_spec_path))
+ tty.msg("Removing {0} from the list of broken specs".format(broken_spec_path))
try:
web_util.remove_url(broken_spec_path)
except Exception as err:
# If we got some kind of S3 (access denied or other connection
# error), the first non boto-specific class in the exception
# hierarchy is Exception. Just print a warning and return
- msg = 'Error removing {0} from broken specs list: {1}'.format(
- broken_spec_path, err)
+ msg = "Error removing {0} from broken specs list: {1}".format(
+ broken_spec_path, err
+ )
tty.warn(msg)
else:
- tty.debug('spack install exited non-zero, will not create buildcache')
+ tty.debug("spack install exited non-zero, will not create buildcache")
- api_root_url = get_env_var('CI_API_V4_URL')
- ci_project_id = get_env_var('CI_PROJECT_ID')
- ci_job_id = get_env_var('CI_JOB_ID')
+ api_root_url = get_env_var("CI_API_V4_URL")
+ ci_project_id = get_env_var("CI_PROJECT_ID")
+ ci_job_id = get_env_var("CI_JOB_ID")
- repro_job_url = '{0}/projects/{1}/jobs/{2}/artifacts'.format(
- api_root_url, ci_project_id, ci_job_id)
+ repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
+ api_root_url, ci_project_id, ci_job_id
+ )
# Control characters cause this to be printed in blue so it stands out
reproduce_msg = """
@@ -614,7 +644,9 @@ If this project does not have public pipelines, you will need to first:
... then follow the printed instructions.\033[0;0m
-""".format(repro_job_url)
+""".format(
+ repro_job_url
+ )
print(reproduce_msg)
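
The hunks above assemble a spack install command, write it into install.sh, mark the script executable, and run it through a login shell so the job environment matches an interactive one. A minimal standalone sketch of that write/chmod/run pattern; the command list is a placeholder, not a real spack invocation:

import os
import stat
import subprocess

install_args = ["echo", "pretend install step"]  # placeholder command

with open("install.sh", "w") as fd:
    fd.write("#!/bin/bash\n\n")
    fd.write(" ".join('"{0}"'.format(arg) for arg in install_args))
    fd.write("\n")

# Mark the script executable, as the CI job does.
st = os.stat("install.sh")
os.chmod("install.sh", st.st_mode | stat.S_IEXEC)

# Run it as if in a login shell and capture the exit code.
proc = subprocess.Popen(["bash", "-l", "./install.sh"])
proc.wait()
print("exit code:", proc.returncode)
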
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py
index 71fee4f3ff..05d9f6cd73 100644
--- a/lib/spack/spack/cmd/clean.py
+++ b/lib/spack/spack/cmd/clean.py
@@ -27,94 +27,116 @@ level = "long"
class AllClean(argparse.Action):
"""Activates flags -s -d -f -m and -p simultaneously"""
+
def __call__(self, parser, namespace, values, option_string=None):
- parser.parse_args(['-sdfmp'], namespace=namespace)
+ parser.parse_args(["-sdfmp"], namespace=namespace)
def setup_parser(subparser):
subparser.add_argument(
- '-s', '--stage', action='store_true',
- help="remove all temporary build stages (default)")
+ "-s", "--stage", action="store_true", help="remove all temporary build stages (default)"
+ )
subparser.add_argument(
- '-d', '--downloads', action='store_true',
- help="remove cached downloads")
+ "-d", "--downloads", action="store_true", help="remove cached downloads"
+ )
subparser.add_argument(
- '-f', '--failures', action='store_true',
- help="force removal of all install failure tracking markers")
+ "-f",
+ "--failures",
+ action="store_true",
+ help="force removal of all install failure tracking markers",
+ )
subparser.add_argument(
- '-m', '--misc-cache', action='store_true',
- help="remove long-lived caches, like the virtual package index")
+ "-m",
+ "--misc-cache",
+ action="store_true",
+ help="remove long-lived caches, like the virtual package index",
+ )
subparser.add_argument(
- '-p', '--python-cache', action='store_true',
- help="remove .pyc, .pyo files and __pycache__ folders")
+ "-p",
+ "--python-cache",
+ action="store_true",
+ help="remove .pyc, .pyo files and __pycache__ folders",
+ )
subparser.add_argument(
- '-b', '--bootstrap', action='store_true',
- help="remove software and configuration needed to bootstrap Spack")
+ "-b",
+ "--bootstrap",
+ action="store_true",
+ help="remove software and configuration needed to bootstrap Spack",
+ )
subparser.add_argument(
- '-a', '--all', action=AllClean,
+ "-a",
+ "--all",
+ action=AllClean,
help="equivalent to -sdfmp (does not include --bootstrap)",
- nargs=0
+ nargs=0,
)
- arguments.add_common_arguments(subparser, ['specs'])
+ arguments.add_common_arguments(subparser, ["specs"])
def remove_python_cache():
for directory in [lib_path, var_path]:
for root, dirs, files in os.walk(directory):
for f in files:
- if f.endswith('.pyc') or f.endswith('.pyo'):
+ if f.endswith(".pyc") or f.endswith(".pyo"):
fname = os.path.join(root, f)
- tty.debug('Removing {0}'.format(fname))
+ tty.debug("Removing {0}".format(fname))
os.remove(fname)
for d in dirs:
- if d == '__pycache__':
+ if d == "__pycache__":
dname = os.path.join(root, d)
- tty.debug('Removing {0}'.format(dname))
+ tty.debug("Removing {0}".format(dname))
shutil.rmtree(dname)
def clean(parser, args):
# If nothing was set, activate the default
- if not any([args.specs, args.stage, args.downloads, args.failures,
- args.misc_cache, args.python_cache, args.bootstrap]):
+ if not any(
+ [
+ args.specs,
+ args.stage,
+ args.downloads,
+ args.failures,
+ args.misc_cache,
+ args.python_cache,
+ args.bootstrap,
+ ]
+ ):
args.stage = True
# Then do the cleaning falling through the cases
if args.specs:
specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
- msg = 'Cleaning build stage [{0}]'
+ msg = "Cleaning build stage [{0}]"
tty.msg(msg.format(spec.short_spec))
spec.package.do_clean()
if args.stage:
- tty.msg('Removing all temporary build stages')
+ tty.msg("Removing all temporary build stages")
spack.stage.purge()
# Temp directory where buildcaches are extracted
- extract_tmp = os.path.join(spack.store.layout.root, '.tmp')
+ extract_tmp = os.path.join(spack.store.layout.root, ".tmp")
if os.path.exists(extract_tmp):
- tty.debug('Removing {0}'.format(extract_tmp))
+ tty.debug("Removing {0}".format(extract_tmp))
shutil.rmtree(extract_tmp)
if args.downloads:
- tty.msg('Removing cached downloads')
+ tty.msg("Removing cached downloads")
spack.caches.fetch_cache.destroy()
if args.failures:
- tty.msg('Removing install failure marks')
+ tty.msg("Removing install failure marks")
spack.installer.clear_failures()
if args.misc_cache:
- tty.msg('Removing cached information on repositories')
+ tty.msg("Removing cached information on repositories")
spack.caches.misc_cache.destroy()
if args.python_cache:
- tty.msg('Removing python cache files')
+ tty.msg("Removing python cache files")
remove_python_cache()
if args.bootstrap:
- bootstrap_prefix = spack.util.path.canonicalize_path(
- spack.config.get('bootstrap:root')
- )
+ bootstrap_prefix = spack.util.path.canonicalize_path(spack.config.get("bootstrap:root"))
msg = 'Removing bootstrapped software and configuration in "{0}"'
tty.msg(msg.format(bootstrap_prefix))
llnl.util.filesystem.remove_directory_contents(bootstrap_prefix)
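
The AllClean action above works by re-parsing a combined short-option string into the same namespace, so a single -a turns on several store_true flags at once. A small self-contained sketch of that trick, with the flag set shortened for brevity:

import argparse

class AllClean(argparse.Action):
    """Expand a single -a into the -s, -d and -p flags."""

    def __call__(self, parser, namespace, values, option_string=None):
        # Re-parse the combined short options into the caller's namespace.
        parser.parse_args(["-sdp"], namespace=namespace)

parser = argparse.ArgumentParser()
parser.add_argument("-s", "--stage", action="store_true")
parser.add_argument("-d", "--downloads", action="store_true")
parser.add_argument("-p", "--python-cache", action="store_true")
parser.add_argument("-a", "--all", action=AllClean, nargs=0)

args = parser.parse_args(["-a"])
print(args.stage, args.downloads, args.python_cache)  # True True True
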
diff --git a/lib/spack/spack/cmd/clone.py b/lib/spack/spack/cmd/clone.py
index abc9293bb0..349bf1b2f7 100644
--- a/lib/spack/spack/cmd/clone.py
+++ b/lib/spack/spack/cmd/clone.py
@@ -11,7 +11,7 @@ from llnl.util.filesystem import mkdirp, working_dir
import spack.paths
from spack.util.executable import ProcessError, which
-_SPACK_UPSTREAM = 'https://github.com/spack/spack'
+_SPACK_UPSTREAM = "https://github.com/spack/spack"
description = "create a new installation of spack in another prefix"
section = "admin"
@@ -20,34 +20,34 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-r', '--remote', action='store', dest='remote',
- help="name of the remote to clone from", default='origin')
- subparser.add_argument(
- 'prefix',
- help="name of prefix where we should install spack")
+ "-r",
+ "--remote",
+ action="store",
+ dest="remote",
+ help="name of the remote to clone from",
+ default="origin",
+ )
+ subparser.add_argument("prefix", help="name of prefix where we should install spack")
def get_origin_info(remote):
- git_dir = os.path.join(spack.paths.prefix, '.git')
- git = which('git', required=True)
+ git_dir = os.path.join(spack.paths.prefix, ".git")
+ git = which("git", required=True)
try:
- branch = git('symbolic-ref', '--short', 'HEAD', output=str)
+ branch = git("symbolic-ref", "--short", "HEAD", output=str)
except ProcessError:
- branch = 'develop'
- tty.warn('No branch found; using default branch: %s' % branch)
- if remote == 'origin' and \
- branch not in ('master', 'develop'):
- branch = 'develop'
- tty.warn('Unknown branch found; using default branch: %s' % branch)
+ branch = "develop"
+ tty.warn("No branch found; using default branch: %s" % branch)
+ if remote == "origin" and branch not in ("master", "develop"):
+ branch = "develop"
+ tty.warn("Unknown branch found; using default branch: %s" % branch)
try:
origin_url = git(
- '--git-dir=%s' % git_dir,
- 'config', '--get', 'remote.%s.url' % remote,
- output=str)
+ "--git-dir=%s" % git_dir, "config", "--get", "remote.%s.url" % remote, output=str
+ )
except ProcessError:
origin_url = _SPACK_UPSTREAM
- tty.warn('No git repository found; '
- 'using default upstream URL: %s' % origin_url)
+ tty.warn("No git repository found; " "using default upstream URL: %s" % origin_url)
return (origin_url.strip(), branch.strip())
@@ -62,27 +62,27 @@ def clone(parser, args):
mkdirp(prefix)
- if os.path.exists(os.path.join(prefix, '.git')):
+ if os.path.exists(os.path.join(prefix, ".git")):
tty.die("There already seems to be a git repository in %s" % prefix)
files_in_the_way = os.listdir(prefix)
if files_in_the_way:
- tty.die("There are already files there! "
- "Delete these files before boostrapping spack.",
- *files_in_the_way)
+ tty.die(
+            "There are already files there! " "Delete these files before bootstrapping spack.",
+ *files_in_the_way
+ )
- tty.msg("Installing:",
- "%s/bin/spack" % prefix,
- "%s/lib/spack/..." % prefix)
+ tty.msg("Installing:", "%s/bin/spack" % prefix, "%s/lib/spack/..." % prefix)
with working_dir(prefix):
- git = which('git', required=True)
- git('init', '--shared', '-q')
- git('remote', 'add', 'origin', origin_url)
- git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
- '-n', '-q')
- git('reset', '--hard', 'origin/%s' % branch, '-q')
- git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
-
- tty.msg("Successfully created a new spack in %s" % prefix,
- "Run %s/bin/spack to use this installation." % prefix)
+ git = which("git", required=True)
+ git("init", "--shared", "-q")
+ git("remote", "add", "origin", origin_url)
+ git("fetch", "origin", "%s:refs/remotes/origin/%s" % (branch, branch), "-n", "-q")
+ git("reset", "--hard", "origin/%s" % branch, "-q")
+ git("checkout", "-B", branch, "origin/%s" % branch, "-q")
+
+ tty.msg(
+ "Successfully created a new spack in %s" % prefix,
+ "Run %s/bin/spack to use this installation." % prefix,
+ )
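
get_origin_info above asks git for the current branch and the remote URL, falling back to defaults when either call fails. A standalone sketch of the same fallback logic using subprocess directly rather than Spack's executable wrapper (defaults mirror the ones in the diff):

import subprocess

def current_branch(default="develop"):
    # Ask git for the current branch; fall back to a default if that fails.
    try:
        out = subprocess.check_output(
            ["git", "symbolic-ref", "--short", "HEAD"], stderr=subprocess.DEVNULL
        )
        return out.decode("utf-8").strip()
    except (subprocess.CalledProcessError, OSError):
        return default

def origin_url(remote="origin", default="https://github.com/spack/spack"):
    # Ask git for the remote URL; fall back to the upstream URL if that fails.
    try:
        out = subprocess.check_output(
            ["git", "config", "--get", "remote.%s.url" % remote],
            stderr=subprocess.DEVNULL,
        )
        return out.decode("utf-8").strip()
    except (subprocess.CalledProcessError, OSError):
        return default

print(current_branch(), origin_url())
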
diff --git a/lib/spack/spack/cmd/commands.py b/lib/spack/spack/cmd/commands.py
index 25653b58ae..3890b2330b 100644
--- a/lib/spack/spack/cmd/commands.py
+++ b/lib/spack/spack/cmd/commands.py
@@ -37,13 +37,11 @@ formatters = {}
#: standard arguments for updating completion scripts
#: we iterate through these when called with --update-completion
update_completion_args = {
- "bash": {
+ "bash": {
"aliases": True,
"format": "bash",
- "header": os.path.join(
- spack.paths.share_path, "bash", "spack-completion.in"),
- "update": os.path.join(
- spack.paths.share_path, "spack-completion.bash"),
+ "header": os.path.join(spack.paths.share_path, "bash", "spack-completion.in"),
+ "update": os.path.join(spack.paths.share_path, "spack-completion.bash"),
},
}
@@ -56,68 +54,88 @@ def formatter(func):
def setup_parser(subparser):
subparser.add_argument(
- "--update-completion", action='store_true', default=False,
- help="regenerate spack's tab completion scripts")
+ "--update-completion",
+ action="store_true",
+ default=False,
+ help="regenerate spack's tab completion scripts",
+ )
subparser.add_argument(
- '-a', '--aliases', action='store_true', default=False,
- help='include command aliases')
+ "-a", "--aliases", action="store_true", default=False, help="include command aliases"
+ )
subparser.add_argument(
- '--format', default='names', choices=formatters,
- help='format to be used to print the output (default: names)')
+ "--format",
+ default="names",
+ choices=formatters,
+ help="format to be used to print the output (default: names)",
+ )
subparser.add_argument(
- '--header', metavar='FILE', default=None, action='store',
- help='prepend contents of FILE to the output (useful for rst format)')
+ "--header",
+ metavar="FILE",
+ default=None,
+ action="store",
+ help="prepend contents of FILE to the output (useful for rst format)",
+ )
subparser.add_argument(
- '--update', metavar='FILE', default=None, action='store',
- help='write output to the specified file, if any command is newer')
+ "--update",
+ metavar="FILE",
+ default=None,
+ action="store",
+ help="write output to the specified file, if any command is newer",
+ )
subparser.add_argument(
- 'rst_files', nargs=argparse.REMAINDER,
- help='list of rst files to search for `_cmd-spack-<cmd>` cross-refs')
+ "rst_files",
+ nargs=argparse.REMAINDER,
+ help="list of rst files to search for `_cmd-spack-<cmd>` cross-refs",
+ )
class SpackArgparseRstWriter(ArgparseRstWriter):
"""RST writer tailored for spack documentation."""
- def __init__(self, prog, out=None, aliases=False,
- documented_commands=[],
- rst_levels=['-', '-', '^', '~', ':', '`']):
+ def __init__(
+ self,
+ prog,
+ out=None,
+ aliases=False,
+ documented_commands=[],
+ rst_levels=["-", "-", "^", "~", ":", "`"],
+ ):
out = sys.stdout if out is None else out
- super(SpackArgparseRstWriter, self).__init__(
- prog, out, aliases, rst_levels)
+ super(SpackArgparseRstWriter, self).__init__(prog, out, aliases, rst_levels)
self.documented = documented_commands
def usage(self, *args):
string = super(SpackArgparseRstWriter, self).usage(*args)
- cmd = self.parser.prog.replace(' ', '-')
+ cmd = self.parser.prog.replace(" ", "-")
if cmd in self.documented:
- string += '\n:ref:`More documentation <cmd-{0}>`\n'.format(cmd)
+ string += "\n:ref:`More documentation <cmd-{0}>`\n".format(cmd)
return string
class SubcommandWriter(ArgparseWriter):
def format(self, cmd):
- return ' ' * self.level + cmd.prog + '\n'
+ return " " * self.level + cmd.prog + "\n"
_positional_to_subroutine = {
- 'package': '_all_packages',
- 'spec': '_all_packages',
- 'filter': '_all_packages',
- 'installed': '_installed_packages',
- 'compiler': '_installed_compilers',
- 'section': '_config_sections',
- 'env': '_environments',
- 'extendable': '_extensions',
- 'keys': '_keys',
- 'help_command': '_subcommands',
- 'mirror': '_mirrors',
- 'virtual': '_providers',
- 'namespace': '_repos',
- 'hash': '_all_resource_hashes',
- 'pytest': '_unit_tests',
+ "package": "_all_packages",
+ "spec": "_all_packages",
+ "filter": "_all_packages",
+ "installed": "_installed_packages",
+ "compiler": "_installed_compilers",
+ "section": "_config_sections",
+ "env": "_environments",
+ "extendable": "_extensions",
+ "keys": "_keys",
+ "help_command": "_subcommands",
+ "mirror": "_mirrors",
+ "virtual": "_providers",
+ "namespace": "_repos",
+ "hash": "_all_resource_hashes",
+ "pytest": "_unit_tests",
}
@@ -133,7 +151,9 @@ class BashCompletionWriter(ArgparseCompletionWriter):
else
{1}
fi
-""".format(self.optionals(optionals), self.positionals(positionals))
+""".format(
+ self.optionals(optionals), self.positionals(positionals)
+ )
elif subcommands:
return """
if $list_options
@@ -142,11 +162,15 @@ class BashCompletionWriter(ArgparseCompletionWriter):
else
{1}
fi
-""".format(self.optionals(optionals), self.subcommands(subcommands))
+""".format(
+ self.optionals(optionals), self.subcommands(subcommands)
+ )
else:
return """
{0}
-""".format(self.optionals(optionals))
+""".format(
+ self.optionals(optionals)
+ )
def positionals(self, positionals):
# If match found, return function name
@@ -159,10 +183,10 @@ class BashCompletionWriter(ArgparseCompletionWriter):
return 'SPACK_COMPREPLY=""'
def optionals(self, optionals):
- return 'SPACK_COMPREPLY="{0}"'.format(' '.join(optionals))
+ return 'SPACK_COMPREPLY="{0}"'.format(" ".join(optionals))
def subcommands(self, subcommands):
- return 'SPACK_COMPREPLY="{0}"'.format(' '.join(subcommands))
+ return 'SPACK_COMPREPLY="{0}"'.format(" ".join(subcommands))
@formatter
@@ -174,16 +198,16 @@ def subcommands(args, out):
def rst_index(out):
- out.write('\n')
+ out.write("\n")
index = spack.main.index_commands()
- sections = index['long']
+ sections = index["long"]
dmax = max(len(section_descriptions.get(s, s)) for s in sections) + 2
cmax = max(len(c) for _, c in sections.items()) + 60
- row = "%s %s\n" % ('=' * dmax, '=' * cmax)
- line = '%%-%ds %%s\n' % dmax
+ row = "%s %s\n" % ("=" * dmax, "=" * cmax)
+ line = "%%-%ds %%s\n" % dmax
out.write(row)
out.write(line % (" Category ", " Commands "))
@@ -192,10 +216,10 @@ def rst_index(out):
description = section_descriptions.get(section, section)
for i, cmd in enumerate(sorted(commands)):
- description = description.capitalize() if i == 0 else ''
- ref = ':ref:`%s <spack-%s>`' % (cmd, cmd)
- comma = ',' if i != len(commands) - 1 else ''
- bar = '| ' if i % 8 == 0 else ' '
+ description = description.capitalize() if i == 0 else ""
+ ref = ":ref:`%s <spack-%s>`" % (cmd, cmd)
+ comma = "," if i != len(commands) - 1 else ""
+ bar = "| " if i % 8 == 0 else " "
out.write(line % (description, bar + ref + comma))
out.write(row)
@@ -211,17 +235,16 @@ def rst(args, out):
for filename in args.rst_files:
with open(filename) as f:
for line in f:
- match = re.match(r'\.\. _cmd-(spack-.*):', line)
+ match = re.match(r"\.\. _cmd-(spack-.*):", line)
if match:
documented_commands.add(match.group(1).strip())
# print an index to each command
rst_index(out)
- out.write('\n')
+ out.write("\n")
# print sections for each command and subcommand
- writer = SpackArgparseRstWriter(
- parser.prog, out, args.aliases, documented_commands)
+ writer = SpackArgparseRstWriter(parser.prog, out, args.aliases, documented_commands)
writer.write(parser)
@@ -264,8 +287,8 @@ def _commands(parser, args):
tty.die("No such file: '%s'" % args.header)
if args.update:
- tty.msg('Updating file: %s' % args.update)
- with open(args.update, 'w') as f:
+ tty.msg("Updating file: %s" % args.update)
+ with open(args.update, "w") as f:
prepend_header(args, f)
formatter(args, f)
@@ -292,9 +315,7 @@ def update_completion(parser, args):
def commands(parser, args):
if args.update_completion:
- if args.format != 'names' or any([
- args.aliases, args.update, args.header
- ]):
+ if args.format != "names" or any([args.aliases, args.update, args.header]):
tty.die("--update-completion can only be specified alone.")
# this runs the command multiple times with different arguments
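
rst_index above emits a reStructuredText "simple table" by sizing the border rows to the widest cell in each column. A minimal sketch of that layout logic; the helper name and sample rows are illustrative, not Spack's actual command index:

import sys

def rst_table(out, rows, headers=("Category", "Commands")):
    # Column widths are driven by the widest cell (or header) in each column.
    left_width = max(len(headers[0]), max(len(left) for left, _ in rows)) + 2
    right_width = max(len(headers[1]), max(len(right) for _, right in rows)) + 2
    border = "%s %s\n" % ("=" * left_width, "=" * right_width)
    line = "%%-%ds %%s\n" % left_width

    out.write(border)
    out.write(line % headers)
    out.write(border)
    for left, right in rows:
        out.write(line % (left, right))
    out.write(border)

rst_table(sys.stdout, [("Build", ":ref:`install <spack-install>`"),
                       ("Query", ":ref:`find <spack-find>`")])
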
diff --git a/lib/spack/spack/cmd/common/__init__.py b/lib/spack/spack/cmd/common/__init__.py
index f4d7a57dd3..b49caec944 100644
--- a/lib/spack/spack/cmd/common/__init__.py
+++ b/lib/spack/spack/cmd/common/__init__.py
@@ -38,8 +38,9 @@ def shell_init_instructions(cmd, equivalent):
color.colorize("@*c{For Windows batch:}"),
" source %s/spack_cmd.bat" % spack.paths.share_path,
"",
- "Or, if you do not want to use shell support, run " + (
- "one of these" if shell_specific else "this") + " instead:",
+ "Or, if you do not want to use shell support, run "
+ + ("one of these" if shell_specific else "this")
+ + " instead:",
"",
]
@@ -48,7 +49,7 @@ def shell_init_instructions(cmd, equivalent):
equivalent.format(sh_arg="--sh ") + " # bash/zsh/sh",
equivalent.format(sh_arg="--csh ") + " # csh/tcsh",
equivalent.format(sh_arg="--fish") + " # fish",
- equivalent.format(sh_arg="--bat ") + " # batch"
+ equivalent.format(sh_arg="--bat ") + " # batch",
]
else:
msg += [" " + equivalent]
@@ -60,5 +61,5 @@ def shell_init_instructions(cmd, equivalent):
"without any path components (such as `bin/spack`).",
]
- msg += ['']
+ msg += [""]
tty.error(*msg)
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
index d55b4fbb91..c9f15cfa98 100644
--- a/lib/spack/spack/cmd/common/arguments.py
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -15,7 +15,7 @@ import spack.spec
import spack.store
from spack.util.pattern import Args
-__all__ = ['add_common_arguments']
+__all__ = ["add_common_arguments"]
#: dictionary of argument-generating functions, keyed by name
_arguments = {}
@@ -58,6 +58,7 @@ class ConstraintAction(argparse.Action):
To obtain the specs from a command the function must be called.
"""
+
def __call__(self, parser, namespace, values, option_string=None):
# Query specs from command line
self.values = values
@@ -71,7 +72,7 @@ class ConstraintAction(argparse.Action):
# only its installed packages.
env = ev.active_environment()
if env:
- kwargs['hashes'] = set(env.all_hashes())
+ kwargs["hashes"] = set(env.all_hashes())
# return everything for an empty query.
if not qspecs:
@@ -93,27 +94,28 @@ class SetParallelJobs(argparse.Action):
The value is is set in the command line configuration scope so that
it can be retrieved using the spack.config API.
"""
+
def __call__(self, parser, namespace, jobs, option_string):
# Jobs is a single integer, type conversion is already applied
# see https://docs.python.org/3/library/argparse.html#action-classes
if jobs < 1:
- msg = 'invalid value for argument "{0}" '\
- '[expected a positive integer, got "{1}"]'
+ msg = 'invalid value for argument "{0}" ' '[expected a positive integer, got "{1}"]'
raise ValueError(msg.format(option_string, jobs))
- spack.config.set('config:build_jobs', jobs, scope='command_line')
+ spack.config.set("config:build_jobs", jobs, scope="command_line")
- setattr(namespace, 'jobs', jobs)
+ setattr(namespace, "jobs", jobs)
class DeptypeAction(argparse.Action):
"""Creates a tuple of valid dependency types from a deptype argument."""
+
def __call__(self, parser, namespace, values, option_string=None):
deptype = dep.all_deptypes
if values:
- deptype = tuple(x.strip() for x in values.split(','))
- if deptype == ('all',):
- deptype = 'all'
+ deptype = tuple(x.strip() for x in values.split(","))
+ if deptype == ("all",):
+ deptype = "all"
deptype = dep.canonical_deptype(deptype)
setattr(namespace, self.dest, deptype)
@@ -123,21 +125,22 @@ class DeptypeAction(argparse.Action):
@arg
def constraint():
return Args(
- 'constraint', nargs=argparse.REMAINDER, action=ConstraintAction,
- help='constraint to select a subset of installed packages',
- metavar='installed_specs')
+ "constraint",
+ nargs=argparse.REMAINDER,
+ action=ConstraintAction,
+ help="constraint to select a subset of installed packages",
+ metavar="installed_specs",
+ )
@arg
def package():
- return Args('package', help='package name')
+ return Args("package", help="package name")
@arg
def packages():
- return Args(
- 'packages', nargs='+', help='one or more package names',
- metavar='package')
+ return Args("packages", nargs="+", help="one or more package names", metavar="package")
# Specs must use `nargs=argparse.REMAINDER` because a single spec can
@@ -145,181 +148,209 @@ def packages():
# are a collection of optional flags.
@arg
def spec():
- return Args('spec', nargs=argparse.REMAINDER, help='package spec')
+ return Args("spec", nargs=argparse.REMAINDER, help="package spec")
@arg
def specs():
- return Args(
- 'specs', nargs=argparse.REMAINDER, help='one or more package specs')
+ return Args("specs", nargs=argparse.REMAINDER, help="one or more package specs")
@arg
def installed_spec():
return Args(
- 'spec', nargs=argparse.REMAINDER, help='installed package spec',
- metavar='installed_spec')
+ "spec", nargs=argparse.REMAINDER, help="installed package spec", metavar="installed_spec"
+ )
@arg
def installed_specs():
return Args(
- 'specs', nargs=argparse.REMAINDER,
- help='one or more installed package specs', metavar='installed_specs')
+ "specs",
+ nargs=argparse.REMAINDER,
+ help="one or more installed package specs",
+ metavar="installed_specs",
+ )
@arg
def yes_to_all():
return Args(
- '-y', '--yes-to-all', action='store_true', dest='yes_to_all',
- help='assume "yes" is the answer to every confirmation request')
+ "-y",
+ "--yes-to-all",
+ action="store_true",
+ dest="yes_to_all",
+ help='assume "yes" is the answer to every confirmation request',
+ )
@arg
def recurse_dependencies():
return Args(
- '-r', '--dependencies', action='store_true',
- dest='recurse_dependencies',
- help='recursively traverse spec dependencies')
+ "-r",
+ "--dependencies",
+ action="store_true",
+ dest="recurse_dependencies",
+ help="recursively traverse spec dependencies",
+ )
@arg
def recurse_dependents():
return Args(
- '-R', '--dependents', action='store_true', dest='dependents',
- help='also uninstall any packages that depend on the ones given '
- 'via command line')
+ "-R",
+ "--dependents",
+ action="store_true",
+ dest="dependents",
+ help="also uninstall any packages that depend on the ones given " "via command line",
+ )
@arg
def clean():
return Args(
- '--clean',
- action='store_false',
- default=spack.config.get('config:dirty'),
- dest='dirty',
- help='unset harmful variables in the build environment (default)')
+ "--clean",
+ action="store_false",
+ default=spack.config.get("config:dirty"),
+ dest="dirty",
+ help="unset harmful variables in the build environment (default)",
+ )
@arg
def deptype():
return Args(
- '--deptype', action=DeptypeAction, default=dep.all_deptypes,
+ "--deptype",
+ action=DeptypeAction,
+ default=dep.all_deptypes,
help="comma-separated list of deptypes to traverse\ndefault=%s"
- % ','.join(dep.all_deptypes))
+ % ",".join(dep.all_deptypes),
+ )
@arg
def dirty():
return Args(
- '--dirty',
- action='store_true',
- default=spack.config.get('config:dirty'),
- dest='dirty',
- help="preserve user environment in spack's build environment (danger!)"
+ "--dirty",
+ action="store_true",
+ default=spack.config.get("config:dirty"),
+ dest="dirty",
+ help="preserve user environment in spack's build environment (danger!)",
)
@arg
def long():
return Args(
- '-l', '--long', action='store_true',
- help='show dependency hashes as well as versions')
+ "-l", "--long", action="store_true", help="show dependency hashes as well as versions"
+ )
@arg
def very_long():
return Args(
- '-L', '--very-long', action='store_true',
- help='show full dependency hashes as well as versions')
+ "-L",
+ "--very-long",
+ action="store_true",
+ help="show full dependency hashes as well as versions",
+ )
@arg
def tags():
return Args(
- '-t', '--tag', action='append', dest='tags', metavar='TAG',
- help='filter a package query by tag (multiple use allowed)')
+ "-t",
+ "--tag",
+ action="append",
+ dest="tags",
+ metavar="TAG",
+ help="filter a package query by tag (multiple use allowed)",
+ )
@arg
def jobs():
return Args(
- '-j', '--jobs', action=SetParallelJobs, type=int, dest='jobs',
- help='explicitly set number of parallel jobs')
+ "-j",
+ "--jobs",
+ action=SetParallelJobs,
+ type=int,
+ dest="jobs",
+ help="explicitly set number of parallel jobs",
+ )
@arg
def install_status():
return Args(
- '-I', '--install-status', action='store_true', default=False,
- help='show install status of packages. packages can be: '
- 'installed [+], missing and needed by an installed package [-], '
- 'installed in and upstream instance [^], '
- 'or not installed (no annotation)')
+ "-I",
+ "--install-status",
+ action="store_true",
+ default=False,
+ help="show install status of packages. packages can be: "
+ "installed [+], missing and needed by an installed package [-], "
+        "installed in an upstream instance [^], "
+ "or not installed (no annotation)",
+ )
@arg
def no_checksum():
return Args(
- '-n', '--no-checksum', action='store_true', default=False,
- help="do not use checksums to verify downloaded files (unsafe)")
+ "-n",
+ "--no-checksum",
+ action="store_true",
+ default=False,
+ help="do not use checksums to verify downloaded files (unsafe)",
+ )
@arg
def deprecated():
return Args(
- '--deprecated', action='store_true', default=False,
- help='fetch deprecated versions without warning')
+ "--deprecated",
+ action="store_true",
+ default=False,
+ help="fetch deprecated versions without warning",
+ )
def add_cdash_args(subparser, add_help):
cdash_help = {}
if add_help:
- cdash_help['upload-url'] = "CDash URL where reports will be uploaded"
- cdash_help['build'] = """The name of the build that will be reported to CDash.
+ cdash_help["upload-url"] = "CDash URL where reports will be uploaded"
+ cdash_help[
+ "build"
+ ] = """The name of the build that will be reported to CDash.
Defaults to spec of the package to operate on."""
- cdash_help['site'] = """The site name that will be reported to CDash.
+ cdash_help[
+ "site"
+ ] = """The site name that will be reported to CDash.
Defaults to current system hostname."""
- cdash_help['track'] = """Results will be reported to this group on CDash.
+ cdash_help[
+ "track"
+ ] = """Results will be reported to this group on CDash.
Defaults to Experimental."""
- cdash_help['buildstamp'] = """Instead of letting the CDash reporter prepare the
+ cdash_help[
+ "buildstamp"
+ ] = """Instead of letting the CDash reporter prepare the
buildstamp which, when combined with build name, site and project,
uniquely identifies the build, provide this argument to identify
the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
else:
- cdash_help['upload-url'] = argparse.SUPPRESS
- cdash_help['build'] = argparse.SUPPRESS
- cdash_help['site'] = argparse.SUPPRESS
- cdash_help['track'] = argparse.SUPPRESS
- cdash_help['buildstamp'] = argparse.SUPPRESS
+ cdash_help["upload-url"] = argparse.SUPPRESS
+ cdash_help["build"] = argparse.SUPPRESS
+ cdash_help["site"] = argparse.SUPPRESS
+ cdash_help["track"] = argparse.SUPPRESS
+ cdash_help["buildstamp"] = argparse.SUPPRESS
- subparser.add_argument(
- '--cdash-upload-url',
- default=None,
- help=cdash_help['upload-url']
- )
- subparser.add_argument(
- '--cdash-build',
- default=None,
- help=cdash_help['build']
- )
- subparser.add_argument(
- '--cdash-site',
- default=None,
- help=cdash_help['site']
- )
+ subparser.add_argument("--cdash-upload-url", default=None, help=cdash_help["upload-url"])
+ subparser.add_argument("--cdash-build", default=None, help=cdash_help["build"])
+ subparser.add_argument("--cdash-site", default=None, help=cdash_help["site"])
cdash_subgroup = subparser.add_mutually_exclusive_group()
- cdash_subgroup.add_argument(
- '--cdash-track',
- default='Experimental',
- help=cdash_help['track']
- )
- cdash_subgroup.add_argument(
- '--cdash-buildstamp',
- default=None,
- help=cdash_help['buildstamp']
- )
+ cdash_subgroup.add_argument("--cdash-track", default="Experimental", help=cdash_help["track"])
+ cdash_subgroup.add_argument("--cdash-buildstamp", default=None, help=cdash_help["buildstamp"])
class ConfigSetAction(argparse.Action):
@@ -329,14 +360,10 @@ class ConfigSetAction(argparse.Action):
``dest`` to some Spack configuration path (like ``concretizer:reuse``)
and the ``const`` will be stored there using ``spack.config.set()``
"""
- def __init__(self,
- option_strings,
- dest,
- const,
- default=None,
- required=False,
- help=None,
- metavar=None):
+
+ def __init__(
+ self, option_strings, dest, const, default=None, required=False, help=None, metavar=None
+ ):
# save the config option we're supposed to set
self.config_path = dest
@@ -351,7 +378,7 @@ class ConfigSetAction(argparse.Action):
const=const,
default=default,
required=required,
- help=help
+ help=help,
)
def __call__(self, parser, namespace, values, option_string):
@@ -376,31 +403,37 @@ def add_concretizer_args(subparser):
"""
subgroup = subparser.add_argument_group("concretizer arguments")
subgroup.add_argument(
- '-U', '--fresh', action=ConfigSetAction, dest="concretizer:reuse",
- const=False, default=None,
- help='do not reuse installed deps; build newest configuration'
+ "-U",
+ "--fresh",
+ action=ConfigSetAction,
+ dest="concretizer:reuse",
+ const=False,
+ default=None,
+ help="do not reuse installed deps; build newest configuration",
)
subgroup.add_argument(
- '--reuse', action=ConfigSetAction, dest="concretizer:reuse",
- const=True, default=None,
- help='reuse installed dependencies/buildcaches when possible'
+ "--reuse",
+ action=ConfigSetAction,
+ dest="concretizer:reuse",
+ const=True,
+ default=None,
+ help="reuse installed dependencies/buildcaches when possible",
)
def add_s3_connection_args(subparser, add_help):
subparser.add_argument(
- '--s3-access-key-id',
- help="ID string to use to connect to this S3 mirror")
+ "--s3-access-key-id", help="ID string to use to connect to this S3 mirror"
+ )
subparser.add_argument(
- '--s3-access-key-secret',
- help="Secret string to use to connect to this S3 mirror")
+ "--s3-access-key-secret", help="Secret string to use to connect to this S3 mirror"
+ )
subparser.add_argument(
- '--s3-access-token',
- help="Access Token to use to connect to this S3 mirror")
+ "--s3-access-token", help="Access Token to use to connect to this S3 mirror"
+ )
subparser.add_argument(
- '--s3-profile',
- help="S3 profile name to use to connect to this S3 mirror",
- default=None)
+ "--s3-profile", help="S3 profile name to use to connect to this S3 mirror", default=None
+ )
subparser.add_argument(
- '--s3-endpoint-url',
- help="Endpoint URL to use to connect to this S3 mirror")
+ "--s3-endpoint-url", help="Endpoint URL to use to connect to this S3 mirror"
+ )
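
ConfigSetAction above stores a fixed constant under a configuration path whenever its flag appears, which is how --fresh and --reuse can both write to concretizer:reuse. A minimal sketch of that idea with a plain dict standing in for spack.config; the names and the dest-derivation detail are illustrative, not the exact Spack implementation:

import argparse

CONFIG = {}  # stand-in for a real configuration store

class ConfigSetAction(argparse.Action):
    """Store a fixed constant under a colon-separated config path."""

    def __init__(self, option_strings, dest, const, default=None, required=False,
                 help=None, metavar=None):
        self.config_path = dest  # e.g. "concretizer:reuse"
        # argparse needs a valid attribute name, so derive one from the flag.
        dest = option_strings[-1].lstrip("-").replace("-", "_")
        super(ConfigSetAction, self).__init__(
            option_strings, dest, nargs=0, const=const,
            default=default, required=required, help=help, metavar=metavar,
        )

    def __call__(self, parser, namespace, values, option_string):
        CONFIG[self.config_path] = self.const
        setattr(namespace, self.dest, self.const)

parser = argparse.ArgumentParser()
parser.add_argument("--reuse", action=ConfigSetAction,
                    dest="concretizer:reuse", const=True, default=None)
parser.add_argument("-U", "--fresh", action=ConfigSetAction,
                    dest="concretizer:reuse", const=False, default=None)

parser.parse_args(["--reuse"])
print(CONFIG)  # {'concretizer:reuse': True}
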
diff --git a/lib/spack/spack/cmd/common/env_utility.py b/lib/spack/spack/cmd/common/env_utility.py
index 2081263eab..16f61ba1c0 100644
--- a/lib/spack/spack/cmd/common/env_utility.py
+++ b/lib/spack/spack/cmd/common/env_utility.py
@@ -17,26 +17,25 @@ from spack.util.environment import dump_environment, pickle_environment
def setup_parser(subparser):
- arguments.add_common_arguments(subparser, ['clean', 'dirty'])
+ arguments.add_common_arguments(subparser, ["clean", "dirty"])
arguments.add_concretizer_args(subparser)
+ subparser.add_argument("--dump", metavar="FILE", help="dump a source-able environment to FILE")
subparser.add_argument(
- '--dump', metavar="FILE",
- help="dump a source-able environment to FILE"
+ "--pickle", metavar="FILE", help="dump a pickled source-able environment to FILE"
)
subparser.add_argument(
- '--pickle', metavar="FILE",
- help="dump a pickled source-able environment to FILE"
+ "spec",
+ nargs=argparse.REMAINDER,
+ metavar="spec [--] [cmd]...",
+ help="specs of package environment to emulate",
+ )
+ subparser.epilog = (
+ "If a command is not specified, the environment will be printed "
+ "to standard output (cf /usr/bin/env) unless --dump and/or --pickle "
+ "are specified.\n\nIf a command is specified and spec is "
+ "multi-word, then the -- separator is obligatory."
)
- subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER,
- metavar='spec [--] [cmd]...',
- help="specs of package environment to emulate")
- subparser.epilog\
- = 'If a command is not specified, the environment will be printed ' \
- 'to standard output (cf /usr/bin/env) unless --dump and/or --pickle ' \
- 'are specified.\n\nIf a command is specified and spec is ' \
- 'multi-word, then the -- separator is obligatory.'
def emulate_env_utility(cmd_name, context, args):
@@ -47,11 +46,11 @@ def emulate_env_utility(cmd_name, context, args):
# caller put a '--' between the spec and the command to be
# executed. If there is no '--', assume that the spec is the
# first argument.
- sep = '--'
+ sep = "--"
if sep in args.spec:
s = args.spec.index(sep)
spec = args.spec[:s]
- cmd = args.spec[s + 1:]
+ cmd = args.spec[s + 1 :]
else:
spec = args.spec[0]
cmd = args.spec[1:]
@@ -75,8 +74,7 @@ def emulate_env_utility(cmd_name, context, args):
if args.pickle:
# Dump a source-able environment to a pickle file.
- tty.msg(
- "Pickling a source-able environment to {0}".format(args.pickle))
+ tty.msg("Pickling a source-able environment to {0}".format(args.pickle))
pickle_environment(args.pickle)
if cmd:
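
emulate_env_utility above splits its REMAINDER arguments on a literal "--": everything before it is the spec, everything after is the command to run; without a separator, the first token alone is taken as the spec. A tiny standalone sketch of that split (the sample arguments are made up):

def split_spec_and_command(remainder):
    sep = "--"
    if sep in remainder:
        s = remainder.index(sep)
        # Multi-word spec: keep it as a list of tokens.
        return remainder[:s], remainder[s + 1 :]
    # No separator: first token is the spec, the rest is the command.
    return remainder[0], remainder[1:]

print(split_spec_and_command(["zlib", "%gcc", "--", "make", "-j4"]))
print(split_spec_and_command(["zlib", "printenv"]))
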
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index e8d5301f31..75c5f05d0f 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -25,53 +25,65 @@ level = "long"
def setup_parser(subparser):
- sp = subparser.add_subparsers(
- metavar='SUBCOMMAND', dest='compiler_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Find
find_parser = sp.add_parser(
- 'find', aliases=['add'],
- help='search the system for compilers to add to Spack configuration')
- find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
+ "find",
+ aliases=["add"],
+ help="search the system for compilers to add to Spack configuration",
+ )
+ find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
find_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
- default=spack.config.default_modify_scope('compilers'),
- help="configuration scope to modify")
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
+ default=spack.config.default_modify_scope("compilers"),
+ help="configuration scope to modify",
+ )
# Remove
- remove_parser = sp.add_parser(
- 'remove', aliases=['rm'], help='remove compiler by spec')
+ remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove compiler by spec")
remove_parser.add_argument(
- '-a', '--all', action='store_true',
- help='remove ALL compilers that match spec')
- remove_parser.add_argument('compiler_spec')
+ "-a", "--all", action="store_true", help="remove ALL compilers that match spec"
+ )
+ remove_parser.add_argument("compiler_spec")
remove_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
- default=spack.config.default_modify_scope('compilers'),
- help="configuration scope to modify")
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
+ default=spack.config.default_modify_scope("compilers"),
+ help="configuration scope to modify",
+ )
# List
- list_parser = sp.add_parser('list', help='list available compilers')
+ list_parser = sp.add_parser("list", help="list available compilers")
list_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_list_scope(),
- help="configuration scope to read from")
+ help="configuration scope to read from",
+ )
# Info
- info_parser = sp.add_parser('info', help='show compiler paths')
- info_parser.add_argument('compiler_spec')
+ info_parser = sp.add_parser("info", help="show compiler paths")
+ info_parser.add_argument("compiler_spec")
info_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_list_scope(),
- help="configuration scope to read from")
+ help="configuration scope to read from",
+ )
def compiler_find(args):
"""Search either $PATH or a list of paths OR MODULES for compilers and
- add them to Spack's configuration.
+ add them to Spack's configuration.
"""
# None signals spack.compiler.find_compilers to use its default logic
@@ -81,14 +93,12 @@ def compiler_find(args):
# in any other configuration.
new_compilers = spack.compilers.find_new_compilers(paths, scope=None)
if new_compilers:
- spack.compilers.add_compilers_to_config(
- new_compilers, scope=args.scope, init_config=False
- )
+ spack.compilers.add_compilers_to_config(new_compilers, scope=args.scope, init_config=False)
n = len(new_compilers)
- s = 's' if n > 1 else ''
+ s = "s" if n > 1 else ""
config = spack.config.config
- filename = config.get_config_filename(args.scope, 'compilers')
+ filename = config.get_config_filename(args.scope, "compilers")
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
else:
@@ -109,8 +119,7 @@ def compiler_remove(args):
sys.exit(1)
for compiler in compilers:
- spack.compilers.remove_compiler_from_config(
- compiler.spec, scope=args.scope)
+ spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec)
@@ -125,17 +134,17 @@ def compiler_info(args):
for c in compilers:
print(str(c.spec) + ":")
print("\tpaths:")
- for cpath in ['cc', 'cxx', 'f77', 'fc']:
+ for cpath in ["cc", "cxx", "f77", "fc"]:
print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
if c.flags:
print("\tflags:")
for flag, flag_value in iteritems(c.flags):
print("\t\t%s = %s" % (flag, flag_value))
if len(c.environment) != 0:
- if len(c.environment.get('set', {})) != 0:
+ if len(c.environment.get("set", {})) != 0:
print("\tenvironment:")
print("\t set:")
- for key, value in iteritems(c.environment['set']):
+ for key, value in iteritems(c.environment["set"]):
print("\t %s = %s" % (key, value))
if c.extra_rpaths:
print("\tExtra rpaths:")
@@ -168,12 +177,9 @@ def compiler_list(args):
# convert them to '' (in which case it still evaluates to False but is a
# string type). Tuples produced by this are guaranteed to be comparable in
# Python 3
- convert_str = (
- lambda tuple_container:
- tuple(str(x) if x else '' for x in tuple_container))
+ convert_str = lambda tuple_container: tuple(str(x) if x else "" for x in tuple_container)
- index_str_keys = list(
- (convert_str(x), y) for x, y in index.items())
+ index_str_keys = list((convert_str(x), y) for x, y in index.items())
ordered_sections = sorted(index_str_keys, key=lambda item: item[0])
for i, (key, compilers) in enumerate(ordered_sections):
if i >= 1:
@@ -183,15 +189,17 @@ def compiler_list(args):
if target:
os_str += "-%s" % target
cname = "%s{%s} %s" % (spack.spec.compiler_color, name, os_str)
- tty.hline(colorize(cname), char='-')
+ tty.hline(colorize(cname), char="-")
colify(reversed(sorted(c.spec for c in compilers)))
def compiler(parser, args):
- action = {'add': compiler_find,
- 'find': compiler_find,
- 'remove': compiler_remove,
- 'rm': compiler_remove,
- 'info': compiler_info,
- 'list': compiler_list}
+ action = {
+ "add": compiler_find,
+ "find": compiler_find,
+ "remove": compiler_remove,
+ "rm": compiler_remove,
+ "info": compiler_info,
+ "list": compiler_list,
+ }
action[args.compiler_command](args)
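
The compiler command above dispatches to its handlers through a plain dict keyed by the subcommand name, with aliases mapped to the same function. A small self-contained sketch of that subparser-plus-dispatch pattern; the handler bodies are placeholders:

import argparse

def compiler_find(args):
    print("finding compilers in", args.add_paths or "$PATH")

def compiler_list(args):
    print("listing compilers")

parser = argparse.ArgumentParser(prog="compiler")
sp = parser.add_subparsers(metavar="SUBCOMMAND", dest="compiler_command")

find_parser = sp.add_parser("find", aliases=["add"])
find_parser.add_argument("add_paths", nargs=argparse.REMAINDER)
sp.add_parser("list")

args = parser.parse_args(["find", "/usr/bin"])

# Aliases map onto the same handler as the canonical name.
action = {
    "find": compiler_find,
    "add": compiler_find,
    "list": compiler_list,
}
action[args.compiler_command](args)
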
diff --git a/lib/spack/spack/cmd/compilers.py b/lib/spack/spack/cmd/compilers.py
index 409f0b142b..f61b612502 100644
--- a/lib/spack/spack/cmd/compilers.py
+++ b/lib/spack/spack/cmd/compilers.py
@@ -16,8 +16,11 @@ def setup_parser(subparser):
scopes_metavar = spack.config.scopes_metavar
subparser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
- help="configuration scope to read/modify")
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
+ help="configuration scope to read/modify",
+ )
def compilers(parser, args):
diff --git a/lib/spack/spack/cmd/concretize.py b/lib/spack/spack/cmd/concretize.py
index cbdc53907f..efb12f1672 100644
--- a/lib/spack/spack/cmd/concretize.py
+++ b/lib/spack/spack/cmd/concretize.py
@@ -7,34 +7,36 @@ import spack.cmd
import spack.cmd.common.arguments
import spack.environment as ev
-description = 'concretize an environment and write a lockfile'
+description = "concretize an environment and write a lockfile"
section = "environments"
level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-f', '--force', action='store_true',
- help="Re-concretize even if already concretized.")
+ "-f", "--force", action="store_true", help="Re-concretize even if already concretized."
+ )
subparser.add_argument(
- '--test', default=None,
- choices=['root', 'all'],
+ "--test",
+ default=None,
+ choices=["root", "all"],
help="""Concretize with test dependencies. When 'root' is chosen, test
dependencies are only added for the environment's root specs. When 'all' is
-chosen, test dependencies are enabled for all packages in the environment.""")
+chosen, test dependencies are enabled for all packages in the environment.""",
+ )
subparser.add_argument(
- '-q', '--quiet', action='store_true',
- help="Don't print concretized specs")
+ "-q", "--quiet", action="store_true", help="Don't print concretized specs"
+ )
spack.cmd.common.arguments.add_concretizer_args(subparser)
def concretize(parser, args):
- env = spack.cmd.require_active_env(cmd_name='concretize')
+ env = spack.cmd.require_active_env(cmd_name="concretize")
- if args.test == 'all':
+ if args.test == "all":
tests = True
- elif args.test == 'root':
+ elif args.test == "root":
tests = [spec.name for spec in env.user_specs]
else:
tests = False
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index c6eca2fd8a..f36dffd40a 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -32,89 +32,92 @@ def setup_parser(subparser):
# User can only choose one
subparser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
- help="configuration scope to read/modify")
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
+ help="configuration scope to read/modify",
+ )
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="config_command")
- get_parser = sp.add_parser('get', help='print configuration values')
- get_parser.add_argument('section',
- help="configuration section to print. "
- "options: %(choices)s",
- nargs='?',
- metavar='section',
- choices=spack.config.section_schemas)
+ get_parser = sp.add_parser("get", help="print configuration values")
+ get_parser.add_argument(
+ "section",
+ help="configuration section to print. " "options: %(choices)s",
+ nargs="?",
+ metavar="section",
+ choices=spack.config.section_schemas,
+ )
blame_parser = sp.add_parser(
- 'blame', help='print configuration annotated with source file:line')
- blame_parser.add_argument('section',
- help="configuration section to print. "
- "options: %(choices)s",
- metavar='section',
- choices=spack.config.section_schemas)
-
- edit_parser = sp.add_parser('edit', help='edit configuration file')
- edit_parser.add_argument('section',
- help="configuration section to edit. "
- "options: %(choices)s",
- metavar='section',
- nargs='?',
- choices=spack.config.section_schemas)
+ "blame", help="print configuration annotated with source file:line"
+ )
+ blame_parser.add_argument(
+ "section",
+ help="configuration section to print. " "options: %(choices)s",
+ metavar="section",
+ choices=spack.config.section_schemas,
+ )
+
+ edit_parser = sp.add_parser("edit", help="edit configuration file")
+ edit_parser.add_argument(
+ "section",
+ help="configuration section to edit. " "options: %(choices)s",
+ metavar="section",
+ nargs="?",
+ choices=spack.config.section_schemas,
+ )
edit_parser.add_argument(
- '--print-file', action='store_true',
- help="print the file name that would be edited")
+ "--print-file", action="store_true", help="print the file name that would be edited"
+ )
- sp.add_parser('list', help='list configuration sections')
+ sp.add_parser("list", help="list configuration sections")
- add_parser = sp.add_parser('add', help='add configuration parameters')
- add_parser.add_argument(
- 'path', nargs='?',
- help="colon-separated path to config that should be added,"
- " e.g. 'config:default:true'")
+ add_parser = sp.add_parser("add", help="add configuration parameters")
add_parser.add_argument(
- '-f', '--file',
- help="file from which to set all config values"
+ "path",
+ nargs="?",
+ help="colon-separated path to config that should be added," " e.g. 'config:default:true'",
)
+ add_parser.add_argument("-f", "--file", help="file from which to set all config values")
prefer_upstream_parser = sp.add_parser(
- 'prefer-upstream',
- help='set package preferences from upstream')
+ "prefer-upstream", help="set package preferences from upstream"
+ )
prefer_upstream_parser.add_argument(
- '--local', action='store_true', default=False,
- help="Set packages preferences based on local installs, rather "
- "than upstream."
+ "--local",
+ action="store_true",
+ default=False,
+        help="Set package preferences based on local installs, rather " "than upstream.",
)
- remove_parser = sp.add_parser('remove', aliases=['rm'],
- help='remove configuration parameters')
+ remove_parser = sp.add_parser("remove", aliases=["rm"], help="remove configuration parameters")
remove_parser.add_argument(
- 'path',
+ "path",
help="colon-separated path to config that should be removed,"
- " e.g. 'config:default:true'")
+ " e.g. 'config:default:true'",
+ )
# Make the add parser available later
setup_parser.add_parser = add_parser
- update = sp.add_parser(
- 'update', help='update configuration files to the latest format'
- )
- spack.cmd.common.arguments.add_common_arguments(update, ['yes_to_all'])
- update.add_argument('section', help='section to update')
+ update = sp.add_parser("update", help="update configuration files to the latest format")
+ spack.cmd.common.arguments.add_common_arguments(update, ["yes_to_all"])
+ update.add_argument("section", help="section to update")
revert = sp.add_parser(
- 'revert',
- help='revert configuration files to their state before update'
+ "revert", help="revert configuration files to their state before update"
)
- spack.cmd.common.arguments.add_common_arguments(revert, ['yes_to_all'])
- revert.add_argument('section', help='section to update')
+ spack.cmd.common.arguments.add_common_arguments(revert, ["yes_to_all"])
+ revert.add_argument("section", help="section to update")
def _get_scope_and_section(args):
"""Extract config scope and section from arguments."""
scope = args.scope
- section = getattr(args, 'section', None)
- path = getattr(args, 'path', None)
+ section = getattr(args, "section", None)
+ path = getattr(args, "path", None)
# w/no args and an active environment, point to env manifest
if not section:
@@ -128,7 +131,7 @@ def _get_scope_and_section(args):
# special handling for commands that take value instead of section
if path:
- section = path[:path.find(':')] if ':' in path else path
+ section = path[: path.find(":")] if ":" in path else path
if not scope:
scope = spack.config.default_modify_scope(section)
@@ -146,17 +149,16 @@ def config_get(args):
if section is not None:
spack.config.config.print_section(section)
- elif scope and scope.startswith('env:'):
+ elif scope and scope.startswith("env:"):
config_file = spack.config.config.get_config_filename(scope, section)
if os.path.exists(config_file):
with open(config_file) as f:
print(f.read())
else:
- tty.die('environment has no %s file' % ev.manifest_name)
+ tty.die("environment has no %s file" % ev.manifest_name)
else:
- tty.die('`spack config get` requires a section argument '
- 'or an active environment.')
+ tty.die("`spack config get` requires a section argument " "or an active environment.")
def config_blame(args):
@@ -179,8 +181,7 @@ def config_edit(args):
# If we aren't editing a spack.yaml file, get config path from scope.
scope, section = _get_scope_and_section(args)
if not scope and not section:
- tty.die('`spack config edit` requires a section argument '
- 'or an active environment.')
+ tty.die("`spack config edit` requires a section argument " "or an active environment.")
config_file = spack.config.config.get_config_filename(scope, section)
if args.print_file:
@@ -194,7 +195,7 @@ def config_list(args):
Used primarily for shell tab completion scripts.
"""
- print(' '.join(list(spack.config.section_schemas)))
+ print(" ".join(list(spack.config.section_schemas)))
def config_add(args):
@@ -221,11 +222,11 @@ def config_remove(args):
This is a stateful operation that edits the config files."""
scope, _ = _get_scope_and_section(args)
- path, _, value = args.path.rpartition(':')
+ path, _, value = args.path.rpartition(":")
existing = spack.config.get(path, scope=scope)
if not isinstance(existing, (list, dict)):
- path, _, value = path.rpartition(':')
+ path, _, value = path.rpartition(":")
existing = spack.config.get(path, scope=scope)
value = syaml.load(value)
@@ -238,7 +239,7 @@ def config_remove(args):
existing.pop(value, None)
else:
# This should be impossible to reach
- raise spack.config.ConfigError('Config has nested non-dict values')
+ raise spack.config.ConfigError("Config has nested non-dict values")
spack.config.set(path, existing, scope)
@@ -256,31 +257,33 @@ def config_update(args):
cannot_overwrite, skip_system_scope = [], False
for scope in updates:
- cfg_file = spack.config.config.get_config_filename(
- scope.name, args.section
- )
+ cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
scope_dir = scope.path
can_be_updated = _can_update_config_file(scope_dir, cfg_file)
if not can_be_updated:
- if scope.name == 'system':
+ if scope.name == "system":
skip_system_scope = True
- msg = ('Not enough permissions to write to "system" scope. '
- 'Skipping update at that location [cfg={0}]')
+ msg = (
+ 'Not enough permissions to write to "system" scope. '
+ "Skipping update at that location [cfg={0}]"
+ )
tty.warn(msg.format(cfg_file))
continue
cannot_overwrite.append((scope, cfg_file))
if cannot_overwrite:
- msg = 'Detected permission issues with the following scopes:\n\n'
+ msg = "Detected permission issues with the following scopes:\n\n"
for scope, cfg_file in cannot_overwrite:
- msg += '\t[scope={0}, cfg={1}]\n'.format(scope.name, cfg_file)
- msg += ('\nEither ensure that you have sufficient permissions to '
- 'modify these files or do not include these scopes in the '
- 'update.')
+ msg += "\t[scope={0}, cfg={1}]\n".format(scope.name, cfg_file)
+ msg += (
+ "\nEither ensure that you have sufficient permissions to "
+ "modify these files or do not include these scopes in the "
+ "update."
+ )
tty.die(msg)
if skip_system_scope:
- updates = [x for x in updates if x.name != 'system']
+ updates = [x for x in updates if x.name != "system"]
# Report if there are no updates to be done
if not updates:
@@ -290,40 +293,38 @@ def config_update(args):
proceed = True
if not args.yes_to_all:
- msg = ('The following configuration files are going to be updated to'
- ' the latest schema format:\n\n')
+ msg = (
+ "The following configuration files are going to be updated to"
+ " the latest schema format:\n\n"
+ )
for scope in updates:
- cfg_file = spack.config.config.get_config_filename(
- scope.name, args.section
- )
- msg += '\t[scope={0}, file={1}]\n'.format(scope.name, cfg_file)
- msg += ('\nIf the configuration files are updated, versions of Spack '
- 'that are older than this version may not be able to read '
- 'them. Spack stores backups of the updated files which can '
- 'be retrieved with "spack config revert"')
+ cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
+ msg += "\t[scope={0}, file={1}]\n".format(scope.name, cfg_file)
+ msg += (
+ "\nIf the configuration files are updated, versions of Spack "
+ "that are older than this version may not be able to read "
+ "them. Spack stores backups of the updated files which can "
+ 'be retrieved with "spack config revert"'
+ )
tty.msg(msg)
- proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)
+ proceed = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not proceed:
- tty.die('Operation aborted.')
+ tty.die("Operation aborted.")
# Get a function to update the format
update_fn = spack.config.ensure_latest_format_fn(args.section)
for scope in updates:
- cfg_file = spack.config.config.get_config_filename(
- scope.name, args.section
- )
+ cfg_file = spack.config.config.get_config_filename(scope.name, args.section)
with open(cfg_file) as f:
data = syaml.load_config(f) or {}
data = data.pop(args.section, {})
update_fn(data)
# Make a backup copy and rewrite the file
- bkp_file = cfg_file + '.bkp'
+ bkp_file = cfg_file + ".bkp"
shutil.copy(cfg_file, bkp_file)
- spack.config.config.update_config(
- args.section, data, scope=scope.name, force=True
- )
+ spack.config.config.update_config(args.section, data, scope=scope.name, force=True)
msg = 'File "{0}" updated [backup={1}]'
tty.msg(msg.format(cfg_file, bkp_file))
@@ -336,16 +337,14 @@ def _can_revert_update(scope_dir, cfg_file, bkp_file):
def config_revert(args):
- scopes = [args.scope] if args.scope else [
- x.name for x in spack.config.config.file_scopes
- ]
+ scopes = [args.scope] if args.scope else [x.name for x in spack.config.config.file_scopes]
# Search for backup files in the configuration scopes
- Entry = collections.namedtuple('Entry', ['scope', 'cfg', 'bkp'])
+ Entry = collections.namedtuple("Entry", ["scope", "cfg", "bkp"])
to_be_restored, cannot_overwrite = [], []
for scope in scopes:
cfg_file = spack.config.config.get_config_filename(scope, args.section)
- bkp_file = cfg_file + '.bkp'
+ bkp_file = cfg_file + ".bkp"
         # If the backup file doesn't exist, move to the next scope
if not os.path.exists(bkp_file):
@@ -364,25 +363,26 @@ def config_revert(args):
# Report errors if we can't revert a configuration
if cannot_overwrite:
- msg = 'Detected permission issues with the following scopes:\n\n'
+ msg = "Detected permission issues with the following scopes:\n\n"
for e in cannot_overwrite:
- msg += '\t[scope={0.scope}, cfg={0.cfg}, bkp={0.bkp}]\n'.format(e)
- msg += ('\nEither ensure to have the right permissions before retrying'
- ' or be more specific on the scope to revert.')
+ msg += "\t[scope={0.scope}, cfg={0.cfg}, bkp={0.bkp}]\n".format(e)
+ msg += (
+ "\nEither ensure to have the right permissions before retrying"
+ " or be more specific on the scope to revert."
+ )
tty.die(msg)
proceed = True
if not args.yes_to_all:
- msg = ('The following scopes will be restored from the corresponding'
- ' backup files:\n')
+ msg = "The following scopes will be restored from the corresponding" " backup files:\n"
for entry in to_be_restored:
- msg += '\t[scope={0.scope}, bkp={0.bkp}]\n'.format(entry)
- msg += 'This operation cannot be undone.'
+ msg += "\t[scope={0.scope}, bkp={0.bkp}]\n".format(entry)
+ msg += "This operation cannot be undone."
tty.msg(msg)
- proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)
+ proceed = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not proceed:
- tty.die('Operation aborted.')
+ tty.die("Operation aborted.")
for _, cfg_file, bkp_file in to_be_restored:
shutil.copy(bkp_file, cfg_file)
@@ -397,7 +397,7 @@ def config_prefer_upstream(args):
scope = args.scope
if scope is None:
- scope = spack.config.default_modify_scope('packages')
+ scope = spack.config.default_modify_scope("packages")
all_specs = set(spack.store.db.query(installed=True))
local_specs = set(spack.store.db.query_local(installed=True))
@@ -408,58 +408,61 @@ def config_prefer_upstream(args):
pkgs = {}
for spec in pref_specs:
# Collect all the upstream compilers and versions for this package.
- pkg = pkgs.get(spec.name, {
- 'version': [],
- 'compiler': [],
- })
+ pkg = pkgs.get(
+ spec.name,
+ {
+ "version": [],
+ "compiler": [],
+ },
+ )
pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version.
- existing_variants = pkg.get('variants',
- None if not pkg['version'] else '')
+ existing_variants = pkg.get("variants", None if not pkg["version"] else "")
version = spec.version.string
- if version not in pkg['version']:
- pkg['version'].append(version)
+ if version not in pkg["version"]:
+ pkg["version"].append(version)
compiler = str(spec.compiler)
- if compiler not in pkg['compiler']:
- pkg['compiler'].append(compiler)
+ if compiler not in pkg["compiler"]:
+ pkg["compiler"].append(compiler)
# Get and list all the variants that differ from the default.
variants = []
for var_name, variant in spec.variants.items():
- if (var_name in ['patches']
- or var_name not in spec.package.variants):
+ if var_name in ["patches"] or var_name not in spec.package.variants:
continue
variant_desc, _ = spec.package.variants[var_name]
if variant.value != variant_desc.default:
variants.append(str(variant))
variants.sort()
- variants = ' '.join(variants)
+ variants = " ".join(variants)
if spec.name not in conflicting_variants:
# Only specify the variants if there's a single variant
# set across all versions/compilers.
if existing_variants is not None and existing_variants != variants:
conflicting_variants.add(spec.name)
- pkg.pop('variants', None)
+ pkg.pop("variants", None)
elif variants:
- pkg['variants'] = variants
+ pkg["variants"] = variants
if conflicting_variants:
tty.warn(
"The following packages have multiple conflicting upstream "
"specs. You may have to specify, by "
"concretized hash, which spec you want when building "
- "packages that depend on them:\n - {0}"
- .format("\n - ".join(sorted(conflicting_variants))))
+ "packages that depend on them:\n - {0}".format(
+ "\n - ".join(sorted(conflicting_variants))
+ )
+ )
# Simply write the config to the specified file.
- existing = spack.config.get('packages', scope=scope)
+ existing = spack.config.get("packages", scope=scope)
new = spack.config.merge_yaml(existing, pkgs)
- spack.config.set('packages', new, scope)
+ spack.config.set("packages", new, scope)
config_file = spack.config.config.get_config_filename(scope, section)
tty.msg("Updated config at {0}".format(config_file))
@@ -467,15 +470,15 @@ def config_prefer_upstream(args):
def config(parser, args):
action = {
- 'get': config_get,
- 'blame': config_blame,
- 'edit': config_edit,
- 'list': config_list,
- 'add': config_add,
- 'rm': config_remove,
- 'remove': config_remove,
- 'update': config_update,
- 'revert': config_revert,
- 'prefer-upstream': config_prefer_upstream,
+ "get": config_get,
+ "blame": config_blame,
+ "edit": config_edit,
+ "list": config_list,
+ "add": config_add,
+ "rm": config_remove,
+ "remove": config_remove,
+ "update": config_update,
+ "revert": config_revert,
+ "prefer-upstream": config_prefer_upstream,
}
action[args.config_command](args)
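
The reformatted config() entry point keeps the command's dispatch pattern: subcommand names map to handler functions in a plain dict, and the selected handler receives the parsed arguments. A minimal, self-contained sketch of that pattern (the handlers and names below are illustrative, not Spack's):

    import argparse

    def do_get(args):
        print("get section:", args.section)

    def do_list(args):
        print("list configuration sections")

    def main(argv=None):
        parser = argparse.ArgumentParser(prog="config")
        sp = parser.add_subparsers(metavar="SUBCOMMAND", dest="config_command")

        get_parser = sp.add_parser("get", help="print configuration section")
        get_parser.add_argument("section", nargs="?")
        sp.add_parser("list", help="list configuration sections")

        args = parser.parse_args(argv)
        # Dispatch exactly as config() does: look the subcommand up in a dict.
        action = {"get": do_get, "list": do_list}
        action[args.config_command](args)

    main(["get", "packages"])
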
diff --git a/lib/spack/spack/cmd/containerize.py b/lib/spack/spack/cmd/containerize.py
index d3b717ab47..ffd0c7093f 100644
--- a/lib/spack/spack/cmd/containerize.py
+++ b/lib/spack/spack/cmd/containerize.py
@@ -10,37 +10,38 @@ import llnl.util.tty
import spack.container
import spack.container.images
-description = ("creates recipes to build images for different"
- " container runtimes")
+description = "creates recipes to build images for different" " container runtimes"
section = "container"
level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '--list-os', action='store_true', default=False,
- help='list all the OS that can be used in the bootstrap phase and exit'
+ "--list-os",
+ action="store_true",
+ default=False,
+ help="list all the OS that can be used in the bootstrap phase and exit",
)
subparser.add_argument(
- '--last-stage',
- choices=('bootstrap', 'build', 'final'),
- default='final',
- help='last stage in the container recipe'
+ "--last-stage",
+ choices=("bootstrap", "build", "final"),
+ default="final",
+ help="last stage in the container recipe",
)
def containerize(parser, args):
if args.list_os:
possible_os = spack.container.images.all_bootstrap_os()
- msg = 'The following operating systems can be used to bootstrap Spack:'
- msg += '\n{0}'.format(' '.join(possible_os))
+ msg = "The following operating systems can be used to bootstrap Spack:"
+ msg += "\n{0}".format(" ".join(possible_os))
llnl.util.tty.msg(msg)
return
config_dir = args.env_dir or os.getcwd()
- config_file = os.path.abspath(os.path.join(config_dir, 'spack.yaml'))
+ config_file = os.path.abspath(os.path.join(config_dir, "spack.yaml"))
if not os.path.exists(config_file):
- msg = 'file not found: {0}'
+ msg = "file not found: {0}"
raise ValueError(msg.format(config_file))
config = spack.container.validate(config_file)
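
Before validating, containerize resolves its input the same way on both sides of this diff: spack.yaml is looked up under --env-dir (or the working directory) and the command fails early if the file is missing. A small sketch of just that lookup, with the validation step left out:

    import os

    def resolve_config_file(env_dir=None):
        # Default to the working directory, then require a spack.yaml
        # to exist before doing anything else (as the command above does).
        config_dir = env_dir or os.getcwd()
        config_file = os.path.abspath(os.path.join(config_dir, "spack.yaml"))
        if not os.path.exists(config_file):
            raise ValueError("file not found: {0}".format(config_file))
        return config_file
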
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index b2a8e17a28..ea2f6d2ed9 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -84,7 +84,7 @@ class BundlePackageTemplate(object):
Provides the default values to be used for a bundle package file template.
"""
- base_class_name = 'BundlePackage'
+ base_class_name = "BundlePackage"
dependencies = """\
# FIXME: Add dependencies if required.
@@ -94,29 +94,32 @@ class BundlePackageTemplate(object):
body_def = " # There is no need for install() since there is no code."
def __init__(self, name, versions):
-        self.name       = name
+        self.name = name
         self.class_name = mod_to_class(name)
-        self.versions   = versions
+        self.versions = versions
def write(self, pkg_path):
"""Writes the new package file."""
# Write out a template for the file
with open(pkg_path, "w") as pkg_file:
- pkg_file.write(package_template.format(
- name=self.name,
- class_name=self.class_name,
- base_class_name=self.base_class_name,
- url_def=self.url_def,
- versions=self.versions,
- dependencies=self.dependencies,
- body_def=self.body_def))
+ pkg_file.write(
+ package_template.format(
+ name=self.name,
+ class_name=self.class_name,
+ base_class_name=self.base_class_name,
+ url_def=self.url_def,
+ versions=self.versions,
+ dependencies=self.dependencies,
+ body_def=self.body_def,
+ )
+ )
class PackageTemplate(BundlePackageTemplate):
"""Provides the default values to be used for the package file template"""
- base_class_name = 'Package'
+ base_class_name = "Package"
body_def = """\
def install(self, spec, prefix):
@@ -136,7 +139,7 @@ class AutotoolsPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for Autotools-based packages
that *do* come with a ``configure`` script"""
- base_class_name = 'AutotoolsPackage'
+ base_class_name = "AutotoolsPackage"
body_def = """\
def configure_args(self):
@@ -150,7 +153,7 @@ class AutoreconfPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for Autotools-based packages
that *do not* come with a ``configure`` script"""
- base_class_name = 'AutotoolsPackage'
+ base_class_name = "AutotoolsPackage"
dependencies = """\
depends_on('autoconf', type='build')
@@ -176,7 +179,7 @@ class AutoreconfPackageTemplate(PackageTemplate):
class CMakePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for CMake-based packages"""
- base_class_name = 'CMakePackage'
+ base_class_name = "CMakePackage"
body_def = """\
def cmake_args(self):
@@ -190,7 +193,7 @@ class CMakePackageTemplate(PackageTemplate):
class LuaPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for LuaRocks-based packages"""
- base_class_name = 'LuaPackage'
+ base_class_name = "LuaPackage"
body_def = """\
def luarocks_args(self):
@@ -201,17 +204,17 @@ class LuaPackageTemplate(PackageTemplate):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name lua-lpeg`, don't rename it lua-lua-lpeg
- if not name.startswith('lua-'):
+ if not name.startswith("lua-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to lua-{0}".format(name))
- name = 'lua-{0}'.format(name)
+ name = "lua-{0}".format(name)
super(LuaPackageTemplate, self).__init__(name, url, *args, **kwargs)
class MesonPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for meson-based packages"""
- base_class_name = 'MesonPackage'
+ base_class_name = "MesonPackage"
body_def = """\
def meson_args(self):
@@ -223,7 +226,7 @@ class MesonPackageTemplate(PackageTemplate):
class QMakePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for QMake-based packages"""
- base_class_name = 'QMakePackage'
+ base_class_name = "QMakePackage"
body_def = """\
def qmake_args(self):
@@ -235,7 +238,7 @@ class QMakePackageTemplate(PackageTemplate):
class MavenPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for Maven-based packages"""
- base_class_name = 'MavenPackage'
+ base_class_name = "MavenPackage"
body_def = """\
def build(self, spec, prefix):
@@ -246,7 +249,7 @@ class MavenPackageTemplate(PackageTemplate):
class SconsPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for SCons-based packages"""
- base_class_name = 'SConsPackage'
+ base_class_name = "SConsPackage"
body_def = """\
def build_args(self, spec, prefix):
@@ -259,7 +262,7 @@ class SconsPackageTemplate(PackageTemplate):
class WafPackageTemplate(PackageTemplate):
"""Provides appropriate override for Waf-based packages"""
- base_class_name = 'WafPackage'
+ base_class_name = "WafPackage"
body_def = """\
# FIXME: Override configure_args(), build_args(),
@@ -281,7 +284,8 @@ class BazelPackageTemplate(PackageTemplate):
class RacketPackageTemplate(PackageTemplate):
"""Provides approriate overrides for Racket extensions"""
- base_class_name = 'RacketPackage'
+
+ base_class_name = "RacketPackage"
url_line = """\
# FIXME: set the proper location from which to fetch your package
@@ -307,17 +311,18 @@ class RacketPackageTemplate(PackageTemplate):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name rkt-scribble`, don't rename it rkt-rkt-scribble
- if not name.startswith('rkt-'):
+ if not name.startswith("rkt-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to rkt-{0}".format(name))
- name = 'rkt-{0}'.format(name)
+ name = "rkt-{0}".format(name)
self.body_def = self.body_def.format(name[4:])
super(RacketPackageTemplate, self).__init__(name, url, *args, **kwargs)
class PythonPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for python extensions"""
- base_class_name = 'PythonPackage'
+
+ base_class_name = "PythonPackage"
dependencies = """\
# FIXME: Only add the python/pip/wheel dependencies if you need specific versions
@@ -351,10 +356,10 @@ class PythonPackageTemplate(PackageTemplate):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
- if not name.startswith('py-'):
+ if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
- name = 'py-{0}'.format(name)
+ name = "py-{0}".format(name)
# Simple PyPI URLs:
# https://<hostname>/packages/<type>/<first character of project>/<project>/<download file>
@@ -377,38 +382,39 @@ class PythonPackageTemplate(PackageTemplate):
# https://files.pythonhosted.org/packages/cp35.cp36.cp37.cp38.cp39/s/shiboken2/shiboken2-5.15.2-5.15.2-cp35.cp36.cp37.cp38.cp39-abi3-manylinux1_x86_64.whl
# https://files.pythonhosted.org/packages/f4/99/ad2ef1aeeb395ee2319bb981ea08dbbae878d30dd28ebf27e401430ae77a/azureml_core-1.36.0.post2-py3-none-any.whl#sha256=60bcad10b4380d78a8280deb7365de2c2cd66527aacdcb4a173f613876cbe739
- match = re.search(
- r'(?:pypi|pythonhosted)[^/]+/packages' + '/([^/#]+)' * 4,
- url
- )
+ match = re.search(r"(?:pypi|pythonhosted)[^/]+/packages" + "/([^/#]+)" * 4, url)
if match:
# PyPI URLs for wheels are too complicated, ignore them for now
# https://www.python.org/dev/peps/pep-0427/#file-name-convention
- if not match.group(4).endswith('.whl'):
+ if not match.group(4).endswith(".whl"):
if len(match.group(2)) == 1:
# Simple PyPI URL
- url = '/'.join(match.group(3, 4))
+ url = "/".join(match.group(3, 4))
else:
# PyPI URL containing hash
# Project name doesn't necessarily match download name, but it
# usually does, so this is the best we can do
project = parse_name(url)
- url = '/'.join([project, match.group(4)])
+ url = "/".join([project, match.group(4)])
self.url_line = ' pypi = "{url}"'
else:
# Add a reminder about spack preferring PyPI URLs
- self.url_line = '''
+ self.url_line = (
+ """
# FIXME: ensure the package is not available through PyPI. If it is,
# re-run `spack create --force` with the PyPI URL.
-''' + self.url_line
+"""
+ + self.url_line
+ )
super(PythonPackageTemplate, self).__init__(name, url, *args, **kwargs)
class RPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for R extensions"""
- base_class_name = 'RPackage'
+
+ base_class_name = "RPackage"
dependencies = """\
# FIXME: Add dependencies if required.
@@ -423,30 +429,23 @@ class RPackageTemplate(PackageTemplate):
def __init__(self, name, url, *args, **kwargs):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
- if not name.startswith('r-'):
+ if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to r-{0}".format(name))
- name = 'r-{0}'.format(name)
+ name = "r-{0}".format(name)
r_name = parse_name(url)
- cran = re.search(
- r'(?:r-project|rstudio)[^/]+/src' + '/([^/]+)' * 2,
- url
- )
+ cran = re.search(r"(?:r-project|rstudio)[^/]+/src" + "/([^/]+)" * 2, url)
if cran:
url = r_name
self.url_line = ' cran = "{url}"'
- bioc = re.search(
- r'(?:bioconductor)[^/]+/packages' + '/([^/]+)' * 5,
- url
- )
+ bioc = re.search(r"(?:bioconductor)[^/]+/packages" + "/([^/]+)" * 5, url)
if bioc:
- self.url_line = ' url = "{0}"\n'\
- ' bioc = "{1}"'.format(url, r_name)
+ self.url_line = ' url = "{0}"\n' ' bioc = "{1}"'.format(url, r_name)
super(RPackageTemplate, self).__init__(name, url, *args, **kwargs)
@@ -454,7 +453,8 @@ class RPackageTemplate(PackageTemplate):
class PerlmakePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for Perl extensions
that come with a Makefile.PL"""
- base_class_name = 'PerlPackage'
+
+ base_class_name = "PerlPackage"
dependencies = """\
# FIXME: Add dependencies if required:
@@ -469,10 +469,10 @@ class PerlmakePackageTemplate(PackageTemplate):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
- if not name.startswith('perl-'):
+ if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
- name = 'perl-{0}'.format(name)
+ name = "perl-{0}".format(name)
super(PerlmakePackageTemplate, self).__init__(name, *args, **kwargs)
@@ -480,6 +480,7 @@ class PerlmakePackageTemplate(PackageTemplate):
class PerlbuildPackageTemplate(PerlmakePackageTemplate):
"""Provides appropriate overrides for Perl extensions
that come with a Build.PL instead of a Makefile.PL"""
+
dependencies = """\
depends_on('perl-module-build', type='build')
@@ -490,7 +491,7 @@ class PerlbuildPackageTemplate(PerlmakePackageTemplate):
class OctavePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for octave packages"""
- base_class_name = 'OctavePackage'
+ base_class_name = "OctavePackage"
dependencies = """\
extends('octave')
@@ -501,10 +502,10 @@ class OctavePackageTemplate(PackageTemplate):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
- if not name.startswith('octave-'):
+ if not name.startswith("octave-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to octave-{0}".format(name))
- name = 'octave-{0}'.format(name)
+ name = "octave-{0}".format(name)
super(OctavePackageTemplate, self).__init__(name, *args, **kwargs)
@@ -512,7 +513,7 @@ class OctavePackageTemplate(PackageTemplate):
class RubyPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for Ruby packages"""
- base_class_name = 'RubyPackage'
+ base_class_name = "RubyPackage"
dependencies = """\
# FIXME: Add dependencies if required. Only add the ruby dependency
@@ -529,10 +530,10 @@ class RubyPackageTemplate(PackageTemplate):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name ruby-numpy`, don't rename it
# ruby-ruby-numpy
- if not name.startswith('ruby-'):
+ if not name.startswith("ruby-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to ruby-{0}".format(name))
- name = 'ruby-{0}'.format(name)
+ name = "ruby-{0}".format(name)
super(RubyPackageTemplate, self).__init__(name, *args, **kwargs)
@@ -540,7 +541,7 @@ class RubyPackageTemplate(PackageTemplate):
class MakefilePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for Makefile packages"""
- base_class_name = 'MakefilePackage'
+ base_class_name = "MakefilePackage"
body_def = """\
def edit(self, spec, prefix):
@@ -553,7 +554,7 @@ class MakefilePackageTemplate(PackageTemplate):
class IntelPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for licensed Intel software"""
- base_class_name = 'IntelPackage'
+ base_class_name = "IntelPackage"
body_def = """\
# FIXME: Override `setup_environment` if necessary."""
@@ -562,7 +563,7 @@ class IntelPackageTemplate(PackageTemplate):
class SIPPackageTemplate(PackageTemplate):
"""Provides appropriate overrides for SIP packages."""
- base_class_name = 'SIPPackage'
+ base_class_name = "SIPPackage"
body_def = """\
def configure_args(self, spec, prefix):
@@ -573,70 +574,78 @@ class SIPPackageTemplate(PackageTemplate):
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
- if not name.startswith('py-'):
+ if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
- name = 'py-{0}'.format(name)
+ name = "py-{0}".format(name)
super(SIPPackageTemplate, self).__init__(name, *args, **kwargs)
templates = {
- 'autotools': AutotoolsPackageTemplate,
- 'autoreconf': AutoreconfPackageTemplate,
- 'cmake': CMakePackageTemplate,
- 'bundle': BundlePackageTemplate,
- 'qmake': QMakePackageTemplate,
- 'maven': MavenPackageTemplate,
- 'scons': SconsPackageTemplate,
- 'waf': WafPackageTemplate,
- 'bazel': BazelPackageTemplate,
- 'python': PythonPackageTemplate,
- 'r': RPackageTemplate,
- 'racket': RacketPackageTemplate,
- 'perlmake': PerlmakePackageTemplate,
- 'perlbuild': PerlbuildPackageTemplate,
- 'octave': OctavePackageTemplate,
- 'ruby': RubyPackageTemplate,
- 'makefile': MakefilePackageTemplate,
- 'intel': IntelPackageTemplate,
- 'meson': MesonPackageTemplate,
- 'lua': LuaPackageTemplate,
- 'sip': SIPPackageTemplate,
- 'generic': PackageTemplate,
+ "autotools": AutotoolsPackageTemplate,
+ "autoreconf": AutoreconfPackageTemplate,
+ "cmake": CMakePackageTemplate,
+ "bundle": BundlePackageTemplate,
+ "qmake": QMakePackageTemplate,
+ "maven": MavenPackageTemplate,
+ "scons": SconsPackageTemplate,
+ "waf": WafPackageTemplate,
+ "bazel": BazelPackageTemplate,
+ "python": PythonPackageTemplate,
+ "r": RPackageTemplate,
+ "racket": RacketPackageTemplate,
+ "perlmake": PerlmakePackageTemplate,
+ "perlbuild": PerlbuildPackageTemplate,
+ "octave": OctavePackageTemplate,
+ "ruby": RubyPackageTemplate,
+ "makefile": MakefilePackageTemplate,
+ "intel": IntelPackageTemplate,
+ "meson": MesonPackageTemplate,
+ "lua": LuaPackageTemplate,
+ "sip": SIPPackageTemplate,
+ "generic": PackageTemplate,
}
def setup_parser(subparser):
+ subparser.add_argument("url", nargs="?", help="url of package archive")
subparser.add_argument(
- 'url', nargs='?',
- help="url of package archive")
- subparser.add_argument(
- '--keep-stage', action='store_true',
- help="don't clean up staging area when command completes")
+ "--keep-stage",
+ action="store_true",
+ help="don't clean up staging area when command completes",
+ )
+ subparser.add_argument("-n", "--name", help="name of the package to create")
subparser.add_argument(
- '-n', '--name',
- help="name of the package to create")
- subparser.add_argument(
- '-t', '--template', metavar='TEMPLATE',
+ "-t",
+ "--template",
+ metavar="TEMPLATE",
choices=sorted(templates.keys()),
- help="build system template to use. options: %(choices)s")
+ help="build system template to use. options: %(choices)s",
+ )
subparser.add_argument(
- '-r', '--repo',
- help="path to a repository where the package should be created")
+ "-r", "--repo", help="path to a repository where the package should be created"
+ )
subparser.add_argument(
- '-N', '--namespace',
+ "-N",
+ "--namespace",
help="specify a namespace for the package. must be the namespace of "
- "a repository registered with Spack")
+ "a repository registered with Spack",
+ )
subparser.add_argument(
- '-f', '--force', action='store_true',
- help="overwrite any existing package file with the same name")
+ "-f",
+ "--force",
+ action="store_true",
+ help="overwrite any existing package file with the same name",
+ )
subparser.add_argument(
- '--skip-editor', action='store_true',
- help="skip the edit session for the package (e.g., automation)")
+ "--skip-editor",
+ action="store_true",
+ help="skip the edit session for the package (e.g., automation)",
+ )
subparser.add_argument(
- '-b', '--batch', action='store_true',
- help="don't ask which versions to checksum")
+ "-b", "--batch", action="store_true", help="don't ask which versions to checksum"
+ )
class BuildSystemGuesser:
@@ -647,7 +656,7 @@ class BuildSystemGuesser:
def __init__(self):
"""Sets the default build system."""
- self.build_system = 'generic'
+ self.build_system = "generic"
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
@@ -657,17 +666,17 @@ class BuildSystemGuesser:
# Most octave extensions are hosted on Octave-Forge:
# https://octave.sourceforge.net/index.html
# They all have the same base URL.
- if 'downloads.sourceforge.net/octave/' in url:
- self.build_system = 'octave'
+ if "downloads.sourceforge.net/octave/" in url:
+ self.build_system = "octave"
return
- if url.endswith('.gem'):
- self.build_system = 'ruby'
+ if url.endswith(".gem"):
+ self.build_system = "ruby"
return
- if url.endswith('.whl') or '.whl#' in url:
- self.build_system = 'python'
+ if url.endswith(".whl") or ".whl#" in url:
+ self.build_system = "python"
return
- if url.endswith('.rock'):
- self.build_system = 'lua'
+ if url.endswith(".rock"):
+ self.build_system = "lua"
return
# A list of clues that give us an idea of the build system a package
@@ -676,46 +685,44 @@ class BuildSystemGuesser:
# NOTE: Order is important here. If a package supports multiple
# build systems, we choose the first match in this list.
clues = [
- (r'/CMakeLists\.txt$', 'cmake'),
- (r'/NAMESPACE$', 'r'),
- (r'/configure$', 'autotools'),
- (r'/configure\.(in|ac)$', 'autoreconf'),
- (r'/Makefile\.am$', 'autoreconf'),
- (r'/pom\.xml$', 'maven'),
- (r'/SConstruct$', 'scons'),
- (r'/waf$', 'waf'),
- (r'/pyproject.toml', 'python'),
- (r'/setup\.(py|cfg)$', 'python'),
- (r'/WORKSPACE$', 'bazel'),
- (r'/Build\.PL$', 'perlbuild'),
- (r'/Makefile\.PL$', 'perlmake'),
- (r'/.*\.gemspec$', 'ruby'),
- (r'/Rakefile$', 'ruby'),
- (r'/setup\.rb$', 'ruby'),
- (r'/.*\.pro$', 'qmake'),
- (r'/.*\.rockspec$', 'lua'),
- (r'/(GNU)?[Mm]akefile$', 'makefile'),
- (r'/DESCRIPTION$', 'octave'),
- (r'/meson\.build$', 'meson'),
- (r'/configure\.py$', 'sip'),
+ (r"/CMakeLists\.txt$", "cmake"),
+ (r"/NAMESPACE$", "r"),
+ (r"/configure$", "autotools"),
+ (r"/configure\.(in|ac)$", "autoreconf"),
+ (r"/Makefile\.am$", "autoreconf"),
+ (r"/pom\.xml$", "maven"),
+ (r"/SConstruct$", "scons"),
+ (r"/waf$", "waf"),
+ (r"/pyproject.toml", "python"),
+ (r"/setup\.(py|cfg)$", "python"),
+ (r"/WORKSPACE$", "bazel"),
+ (r"/Build\.PL$", "perlbuild"),
+ (r"/Makefile\.PL$", "perlmake"),
+ (r"/.*\.gemspec$", "ruby"),
+ (r"/Rakefile$", "ruby"),
+ (r"/setup\.rb$", "ruby"),
+ (r"/.*\.pro$", "qmake"),
+ (r"/.*\.rockspec$", "lua"),
+ (r"/(GNU)?[Mm]akefile$", "makefile"),
+ (r"/DESCRIPTION$", "octave"),
+ (r"/meson\.build$", "meson"),
+ (r"/configure\.py$", "sip"),
]
# Peek inside the compressed file.
- if (stage.archive_file.endswith('.zip') or
- '.zip#' in stage.archive_file):
+ if stage.archive_file.endswith(".zip") or ".zip#" in stage.archive_file:
try:
- unzip = which('unzip')
- output = unzip('-lq', stage.archive_file, output=str)
+ unzip = which("unzip")
+ output = unzip("-lq", stage.archive_file, output=str)
except ProcessError:
- output = ''
+ output = ""
else:
try:
- tar = which('tar')
- output = tar('--exclude=*/*/*', '-tf',
- stage.archive_file, output=str)
+ tar = which("tar")
+ output = tar("--exclude=*/*/*", "-tf", stage.archive_file, output=str)
except ProcessError:
- output = ''
- lines = output.split('\n')
+ output = ""
+ lines = output.split("\n")
# Determine the build system based on the files contained
# in the archive.
@@ -740,7 +747,7 @@ def get_name(args):
"""
# Default package name
- name = 'example'
+ name = "example"
if args.name is not None:
# Use a user-supplied name if one is present
@@ -754,14 +761,16 @@ def get_name(args):
try:
name = parse_name(args.url)
if name != args.url:
- desc = 'URL'
+ desc = "URL"
else:
- desc = 'package name'
+ desc = "package name"
tty.msg("This looks like a {0} for {1}".format(desc, name))
except UndetectableNameError:
- tty.die("Couldn't guess a name for this package.",
- " Please report this bug. In the meantime, try running:",
- " `spack create --name <name> <url>`")
+ tty.die(
+ "Couldn't guess a name for this package.",
+ " Please report this bug. In the meantime, try running:",
+ " `spack create --name <name> <url>`",
+ )
name = simplify_name(name)
@@ -784,7 +793,7 @@ def get_url(args):
"""
# Default URL
- url = 'https://www.example.com/example-1.2.3.tar.gz'
+ url = "https://www.example.com/example-1.2.3.tar.gz"
if args.url:
# Use a user-supplied URL if one is present
@@ -824,12 +833,12 @@ def get_versions(args, name):
valid_url = True
try:
spack.util.url.require_url_format(args.url)
- if args.url.startswith('file://'):
+ if args.url.startswith("file://"):
valid_url = False # No point in spidering these
except (ValueError, TypeError):
valid_url = False
- if args.url is not None and args.template != 'bundle' and valid_url:
+ if args.url is not None and args.template != "bundle" and valid_url:
# Find available versions
try:
url_dict = spack.util.web.find_versions_of_archive(args.url)
@@ -844,9 +853,12 @@ def get_versions(args, name):
url_dict = {version: args.url}
versions = spack.stage.get_checksums_for_versions(
- url_dict, name, first_stage_function=guesser,
+ url_dict,
+ name,
+ first_stage_function=guesser,
keep_stage=args.keep_stage,
- batch=(args.batch or len(url_dict) == 1))
+ batch=(args.batch or len(url_dict) == 1),
+ )
else:
versions = unhashed_versions
@@ -869,7 +881,7 @@ def get_build_system(args, guesser):
str: The name of the build system template to use
"""
# Default template
- template = 'generic'
+ template = "generic"
if args.template is not None:
# Use a user-supplied template if one is present
@@ -878,9 +890,8 @@ def get_build_system(args, guesser):
elif args.url is not None:
# Use whatever build system the guesser detected
template = guesser.build_system
- if template == 'generic':
- tty.warn("Unable to detect a build system. "
- "Using a generic package template.")
+ if template == "generic":
+ tty.warn("Unable to detect a build system. " "Using a generic package template.")
else:
msg = "This package looks like it uses the {0} build system"
tty.msg(msg.format(template))
@@ -903,8 +914,7 @@ def get_repository(args, name):
spec = Spec(name)
# Figure out namespace for spec
if spec.namespace and args.namespace and spec.namespace != args.namespace:
- tty.die("Namespaces '{0}' and '{1}' do not match.".format(
- spec.namespace, args.namespace))
+ tty.die("Namespaces '{0}' and '{1}' do not match.".format(spec.namespace, args.namespace))
if not spec.namespace and args.namespace:
spec.namespace = args.namespace
@@ -914,8 +924,10 @@ def get_repository(args, name):
if repo_path is not None:
repo = spack.repo.Repo(repo_path)
if spec.namespace and spec.namespace != repo.namespace:
- tty.die("Can't create package with namespace {0} in repo with "
- "namespace {1}".format(spec.namespace, repo.namespace))
+ tty.die(
+ "Can't create package with namespace {0} in repo with "
+ "namespace {1}".format(spec.namespace, repo.namespace)
+ )
else:
if spec.namespace:
repo = spack.repo.path.get_repo(spec.namespace, None)
@@ -939,10 +951,10 @@ def create(parser, args):
build_system = get_build_system(args, guesser)
# Create the package template object
- constr_args = {'name': name, 'versions': versions}
+ constr_args = {"name": name, "versions": versions}
package_class = templates[build_system]
if package_class != BundlePackageTemplate:
- constr_args['url'] = url
+ constr_args["url"] = url
package = package_class(**constr_args)
tty.msg("Created template for {0} package".format(package.name))
@@ -950,8 +962,10 @@ def create(parser, args):
repo = get_repository(args, name)
pkg_path = repo.filename_for_package_name(package.name)
if os.path.exists(pkg_path) and not args.force:
- tty.die('{0} already exists.'.format(pkg_path),
- ' Try running `spack create --force` to overwrite it.')
+ tty.die(
+ "{0} already exists.".format(pkg_path),
+ " Try running `spack create --force` to overwrite it.",
+ )
else:
mkdirp(os.path.dirname(pkg_path))
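
Most of the churn in create.py is quoting, but the logic worth keeping in mind is BuildSystemGuesser: the archive listing is matched against an ordered clue table, the first regex that hits decides the template, and anything unmatched falls back to "generic". A toy version under that assumption, using a small subset of the clue table:

    import re

    # Illustrative subset of the clue table; order matters, first match wins.
    clues = [
        (r"/CMakeLists\.txt$", "cmake"),
        (r"/configure$", "autotools"),
        (r"/setup\.(py|cfg)$", "python"),
        (r"/(GNU)?[Mm]akefile$", "makefile"),
    ]

    def guess_build_system(archive_listing):
        # archive_listing: one path per line, e.g. the output of `tar -tf`.
        lines = archive_listing.split("\n")
        for pattern, build_system in clues:
            if any(re.search(pattern, line) for line in lines):
                return build_system
        return "generic"

    print(guess_build_system("pkg-1.0/\npkg-1.0/CMakeLists.txt\n"))  # cmake
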
diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py
index 3d5b549400..d68341037f 100644
--- a/lib/spack/spack/cmd/deactivate.py
+++ b/lib/spack/spack/cmd/deactivate.py
@@ -19,22 +19,27 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-f', '--force', action='store_true',
- help="run deactivation even if spec is NOT currently activated")
+ "-f",
+ "--force",
+ action="store_true",
+ help="run deactivation even if spec is NOT currently activated",
+ )
+ subparser.add_argument("-v", "--view", metavar="VIEW", type=str, help="the view to operate on")
subparser.add_argument(
- '-v', '--view', metavar='VIEW', type=str,
- help="the view to operate on")
- subparser.add_argument(
- '-a', '--all', action='store_true',
+ "-a",
+ "--all",
+ action="store_true",
help="deactivate all extensions of an extendable package, or "
- "deactivate an extension AND its dependencies")
- arguments.add_common_arguments(subparser, ['installed_spec'])
+ "deactivate an extension AND its dependencies",
+ )
+ arguments.add_common_arguments(subparser, ["installed_spec"])
def deactivate(parser, args):
- tty.warn("spack deactivate is deprecated in favor of "
- "environments and will be removed in v0.19.0")
+ tty.warn(
+ "spack deactivate is deprecated in favor of " "environments and will be removed in v0.19.0"
+ )
specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
@@ -56,8 +61,7 @@ def deactivate(parser, args):
if args.all:
if pkg.extendable:
tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
- ext_pkgs = spack.store.db.activated_extensions_for(
- spec, view.extensions_layout)
+ ext_pkgs = spack.store.db.activated_extensions_for(spec, view.extensions_layout)
for ext_pkg in ext_pkgs:
ext_pkg.spec.normalize()
@@ -65,12 +69,10 @@ def deactivate(parser, args):
ext_pkg.do_deactivate(view, force=True)
elif pkg.is_extension:
- if not args.force and \
- not spec.package.is_activated(view):
+ if not args.force and not spec.package.is_activated(view):
tty.die("%s is not activated." % pkg.spec.short_spec)
- tty.msg("Deactivating %s and all dependencies." %
- pkg.spec.short_spec)
+ tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
nodes_in_topological_order = spack.graph.topological_sort(spec)
for espec in reversed(nodes_in_topological_order):
@@ -80,17 +82,15 @@ def deactivate(parser, args):
epkg.do_deactivate(view, force=args.force)
else:
- tty.die(
- "spack deactivate --all requires an extendable package "
- "or an extension.")
+ tty.die("spack deactivate --all requires an extendable package " "or an extension.")
else:
if not pkg.is_extension:
- tty.die("spack deactivate requires an extension.",
- "Did you mean 'spack deactivate --all'?")
+ tty.die(
+ "spack deactivate requires an extension.", "Did you mean 'spack deactivate --all'?"
+ )
- if not args.force and \
- not spec.package.is_activated(view):
+ if not args.force and not spec.package.is_activated(view):
tty.die("Package %s is not activated." % specs[0].short_spec)
spec.package.do_deactivate(view, force=args.force)
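
When --all is given, deactivation walks the graph dependents-first: the spec's nodes are topologically sorted and then processed in reverse, so nothing is deactivated while something else still depends on it. A toy illustration of that ordering (the package names are hypothetical):

    # Build a dependencies-first order, then reverse it so dependents
    # are deactivated before the packages they depend on.
    graph = {"py-numpy": ["python"], "python": []}

    def visit(node, order, seen):
        if node in seen:
            return
        seen.add(node)
        for dep in graph[node]:
            visit(dep, order, seen)
        order.append(node)

    order, seen = [], set()
    for node in graph:
        visit(node, order, seen)

    for node in reversed(order):
        print("deactivate", node)  # py-numpy first, python last
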
diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py
index aa4ed8432d..f593e3d80c 100644
--- a/lib/spack/spack/cmd/debug.py
+++ b/lib/spack/spack/cmd/debug.py
@@ -26,32 +26,29 @@ level = "long"
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='debug_command')
- sp.add_parser('create-db-tarball',
- help="create a tarball of Spack's installation metadata")
- sp.add_parser('report', help='print information useful for bug reports')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="debug_command")
+ sp.add_parser("create-db-tarball", help="create a tarball of Spack's installation metadata")
+ sp.add_parser("report", help="print information useful for bug reports")
def _debug_tarball_suffix():
now = datetime.now()
- suffix = now.strftime('%Y-%m-%d-%H%M%S')
+ suffix = now.strftime("%Y-%m-%d-%H%M%S")
- git = which('git')
+ git = which("git")
if not git:
- return 'nobranch-nogit-%s' % suffix
+ return "nobranch-nogit-%s" % suffix
with working_dir(spack.paths.prefix):
- if not os.path.isdir('.git'):
- return 'nobranch.nogit.%s' % suffix
+ if not os.path.isdir(".git"):
+ return "nobranch.nogit.%s" % suffix
# Get symbolic branch name and strip any special chars (mainly '/')
- symbolic = git(
- 'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip()
- symbolic = re.sub(r'[^\w.-]', '-', symbolic)
+ symbolic = git("rev-parse", "--abbrev-ref", "--short", "HEAD", output=str).strip()
+ symbolic = re.sub(r"[^\w.-]", "-", symbolic)
# Get the commit hash too.
- commit = git(
- 'rev-parse', '--short', 'HEAD', output=str).strip()
+ commit = git("rev-parse", "--short", "HEAD", output=str).strip()
if symbolic == commit:
return "nobranch.%s.%s" % (commit, suffix)
@@ -60,48 +57,46 @@ def _debug_tarball_suffix():
def create_db_tarball(args):
- tar = which('tar')
+ tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(str(spack.store.root))
transform_args = []
- if 'GNU' in tar('--version', output=str):
- transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)]
+ if "GNU" in tar("--version", output=str):
+ transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
else:
- transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)]
+ transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(str(spack.store.root))
with working_dir(wd):
files = [spack.store.db._index_path]
- files += glob('%s/*/*/*/.spack/spec.json' % base)
- files += glob('%s/*/*/*/.spack/spec.yaml' % base)
+ files += glob("%s/*/*/*/.spack/spec.json" % base)
+ files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
- args = ['-czf', tarball_path]
+ args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
- tty.msg('Created %s' % tarball_name)
+ tty.msg("Created %s" % tarball_name)
def report(args):
host_platform = spack.platforms.host()
- host_os = host_platform.operating_system('frontend')
- host_target = host_platform.target('frontend')
- architecture = spack.spec.ArchSpec(
- (str(host_platform), str(host_os), str(host_target))
- )
- print('* **Spack:**', get_version())
- print('* **Python:**', platform.python_version())
- print('* **Platform:**', architecture)
- print('* **Concretizer:**', spack.config.get('config:concretizer'))
+ host_os = host_platform.operating_system("frontend")
+ host_target = host_platform.target("frontend")
+ architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
+ print("* **Spack:**", get_version())
+ print("* **Python:**", platform.python_version())
+ print("* **Platform:**", architecture)
+ print("* **Concretizer:**", spack.config.get("config:concretizer"))
def debug(parser, args):
action = {
- 'create-db-tarball': create_db_tarball,
- 'report': report,
+ "create-db-tarball": create_db_tarball,
+ "report": report,
}
action[args.debug_command](args)
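
_debug_tarball_suffix() builds the tarball name from the current branch, the short commit hash, and a timestamp, falling back to a "nobranch" prefix when git or a checkout is unavailable. A rough equivalent using subprocess instead of Spack's executable wrappers (that substitution is an assumption of this sketch):

    import re
    import subprocess
    from datetime import datetime

    def debug_tarball_suffix():
        suffix = datetime.now().strftime("%Y-%m-%d-%H%M%S")
        try:
            branch = subprocess.check_output(
                ["git", "rev-parse", "--abbrev-ref", "--short", "HEAD"], text=True
            ).strip()
            commit = subprocess.check_output(
                ["git", "rev-parse", "--short", "HEAD"], text=True
            ).strip()
        except (OSError, subprocess.CalledProcessError):
            return "nobranch-nogit-%s" % suffix
        # Strip special characters (mainly '/') from the branch name.
        branch = re.sub(r"[^\w.-]", "-", branch)
        if branch == commit:  # detached HEAD: no symbolic branch name
            return "nobranch.%s.%s" % (commit, suffix)
        return "%s.%s.%s" % (branch, commit, suffix)
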
diff --git a/lib/spack/spack/cmd/dependencies.py b/lib/spack/spack/cmd/dependencies.py
index b273c74297..bffbacfc28 100644
--- a/lib/spack/spack/cmd/dependencies.py
+++ b/lib/spack/spack/cmd/dependencies.py
@@ -22,17 +22,30 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-i', '--installed', action='store_true', default=False,
+ "-i",
+ "--installed",
+ action="store_true",
+ default=False,
help="List installed dependencies of an installed spec, "
- "instead of possible dependencies of a package.")
+ "instead of possible dependencies of a package.",
+ )
subparser.add_argument(
- '-t', '--transitive', action='store_true', default=False,
- help="show all transitive dependencies")
- arguments.add_common_arguments(subparser, ['deptype'])
+ "-t",
+ "--transitive",
+ action="store_true",
+ default=False,
+ help="show all transitive dependencies",
+ )
+ arguments.add_common_arguments(subparser, ["deptype"])
subparser.add_argument(
- '-V', '--no-expand-virtuals', action='store_false', default=True,
- dest="expand_virtuals", help="do not expand virtual dependencies")
- arguments.add_common_arguments(subparser, ['spec'])
+ "-V",
+ "--no-expand-virtuals",
+ action="store_false",
+ default=True,
+ dest="expand_virtuals",
+ help="do not expand virtual dependencies",
+ )
+ arguments.add_common_arguments(subparser, ["spec"])
def dependencies(parser, args):
@@ -44,12 +57,12 @@ def dependencies(parser, args):
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env)
- format_string = '{name}{@version}{%compiler}{/hash:7}'
+ format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
- tty.msg(
- "Dependencies of %s" % spec.format(format_string, color=True))
+ tty.msg("Dependencies of %s" % spec.format(format_string, color=True))
deps = spack.store.db.installed_relatives(
- spec, 'children', args.transitive, deptype=args.deptype)
+ spec, "children", args.transitive, deptype=args.deptype
+ )
if deps:
spack.cmd.display_specs(deps, long=True)
else:
@@ -61,7 +74,7 @@ def dependencies(parser, args):
spec,
transitive=args.transitive,
expand_virtuals=args.expand_virtuals,
- deptype=args.deptype
+ deptype=args.deptype,
)
if spec.name in dependencies:
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index cbaac206dc..e931ad7606 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -21,22 +21,30 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-i', '--installed', action='store_true', default=False,
+ "-i",
+ "--installed",
+ action="store_true",
+ default=False,
help="List installed dependents of an installed spec, "
- "instead of possible dependents of a package.")
+ "instead of possible dependents of a package.",
+ )
subparser.add_argument(
- '-t', '--transitive', action='store_true', default=False,
- help="Show all transitive dependents.")
- arguments.add_common_arguments(subparser, ['spec'])
+ "-t",
+ "--transitive",
+ action="store_true",
+ default=False,
+ help="Show all transitive dependents.",
+ )
+ arguments.add_common_arguments(subparser, ["spec"])
def inverted_dependencies():
"""Iterate through all packages and return a dictionary mapping package
- names to possible dependencies.
+ names to possible dependencies.
- Virtual packages are included as sources, so that you can query
- dependents of, e.g., `mpi`, but virtuals are not included as
- actual dependents.
+ Virtual packages are included as sources, so that you can query
+ dependents of, e.g., `mpi`, but virtuals are not included as
+ actual dependents.
"""
dag = {}
for pkg_cls in spack.repo.path.all_package_classes():
@@ -85,11 +93,10 @@ def dependents(parser, args):
env = ev.active_environment()
spec = spack.cmd.disambiguate_spec(specs[0], env)
- format_string = '{name}{@version}{%compiler}{/hash:7}'
+ format_string = "{name}{@version}{%compiler}{/hash:7}"
if sys.stdout.isatty():
tty.msg("Dependents of %s" % spec.cformat(format_string))
- deps = spack.store.db.installed_relatives(
- spec, 'parents', args.transitive)
+ deps = spack.store.db.installed_relatives(spec, "parents", args.transitive)
if deps:
spack.cmd.display_specs(deps, long=True)
else:
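
inverted_dependencies() turns the per-package dependency lists into the reverse mapping, so the command can answer "who depends on X" directly. A toy version of that inversion, with hypothetical package names:

    # Flip "package -> dependencies" into "dependency -> dependents".
    dependencies = {
        "py-scipy": ["python", "py-numpy"],
        "py-numpy": ["python", "openblas"],
    }

    dag = {}
    for pkg, deps in dependencies.items():
        for dep in deps:
            dag.setdefault(dep, set()).add(pkg)

    print(sorted(dag["python"]))  # ['py-numpy', 'py-scipy']
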
diff --git a/lib/spack/spack/cmd/deprecate.py b/lib/spack/spack/cmd/deprecate.py
index d9f7bd4b6e..cc59475a62 100644
--- a/lib/spack/spack/cmd/deprecate.py
+++ b/lib/spack/spack/cmd/deprecate.py
@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-'''Deprecate one Spack install in favor of another
+"""Deprecate one Spack install in favor of another
Spack packages of different configurations cannot be installed to the same
location. However, in some circumstances (e.g. security patches) old
@@ -12,7 +12,7 @@ place.
It is up to the user to ensure binary compatibility between the deprecated
installation and its deprecator.
-'''
+"""
from __future__ import print_function
import argparse
@@ -34,40 +34,66 @@ level = "long"
# Arguments for display_specs when we find ambiguity
display_args = {
- 'long': True,
- 'show_flags': True,
- 'variants': True,
- 'indent': 4,
+ "long": True,
+ "show_flags": True,
+ "variants": True,
+ "indent": 4,
}
def setup_parser(sp):
setup_parser.parser = sp
- arguments.add_common_arguments(sp, ['yes_to_all'])
+ arguments.add_common_arguments(sp, ["yes_to_all"])
deps = sp.add_mutually_exclusive_group()
- deps.add_argument('-d', '--dependencies', action='store_true',
- default=True, dest='dependencies',
- help='Deprecate dependencies (default)')
- deps.add_argument('-D', '--no-dependencies', action='store_false',
- default=True, dest='dependencies',
- help='Do not deprecate dependencies')
+ deps.add_argument(
+ "-d",
+ "--dependencies",
+ action="store_true",
+ default=True,
+ dest="dependencies",
+ help="Deprecate dependencies (default)",
+ )
+ deps.add_argument(
+ "-D",
+ "--no-dependencies",
+ action="store_false",
+ default=True,
+ dest="dependencies",
+ help="Do not deprecate dependencies",
+ )
install = sp.add_mutually_exclusive_group()
- install.add_argument('-i', '--install-deprecator', action='store_true',
- default=False, dest='install',
- help='Concretize and install deprecator spec')
- install.add_argument('-I', '--no-install-deprecator',
- action='store_false', default=False, dest='install',
- help='Deprecator spec must already be installed (default)')
-
- sp.add_argument('-l', '--link-type', type=str,
- default='soft', choices=['soft', 'hard'],
- help="Type of filesystem link to use for deprecation (default soft)")
-
- sp.add_argument('specs', nargs=argparse.REMAINDER,
- help="spec to deprecate and spec to use as deprecator")
+ install.add_argument(
+ "-i",
+ "--install-deprecator",
+ action="store_true",
+ default=False,
+ dest="install",
+ help="Concretize and install deprecator spec",
+ )
+ install.add_argument(
+ "-I",
+ "--no-install-deprecator",
+ action="store_false",
+ default=False,
+ dest="install",
+ help="Deprecator spec must already be installed (default)",
+ )
+
+ sp.add_argument(
+ "-l",
+ "--link-type",
+ type=str,
+ default="soft",
+ choices=["soft", "hard"],
+ help="Type of filesystem link to use for deprecation (default soft)",
+ )
+
+ sp.add_argument(
+ "specs", nargs=argparse.REMAINDER, help="spec to deprecate and spec to use as deprecator"
+ )
def deprecate(parser, args):
@@ -76,11 +102,10 @@ def deprecate(parser, args):
specs = spack.cmd.parse_specs(args.specs)
if len(specs) != 2:
- raise SpackError('spack deprecate requires exactly two specs')
+ raise SpackError("spack deprecate requires exactly two specs")
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
- deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True,
- installed=install_query)
+ deprecate = spack.cmd.disambiguate_spec(specs[0], env, local=True, installed=install_query)
if args.install:
deprecator = specs[1].concretized()
@@ -91,9 +116,11 @@ def deprecate(parser, args):
all_deprecate = []
all_deprecators = []
- generator = deprecate.traverse(
- order='post', type='link', root=True
- ) if args.dependencies else [deprecate]
+ generator = (
+ deprecate.traverse(order="post", type="link", root=True)
+ if args.dependencies
+ else [deprecate]
+ )
for spec in generator:
all_deprecate.append(spec)
all_deprecators.append(deprecator[spec.name])
@@ -101,7 +128,7 @@ def deprecate(parser, args):
# that matches the name of a dep of the spec
if not args.yes_to_all:
- tty.msg('The following packages will be deprecated:\n')
+ tty.msg("The following packages will be deprecated:\n")
spack.cmd.display_specs(all_deprecate, **display_args)
tty.msg("In favor of (respectively):\n")
spack.cmd.display_specs(all_deprecators, **display_args)
@@ -115,16 +142,16 @@ def deprecate(parser, args):
already_deprecated.append(spec)
already_deprecated_for.append(deprecated_for)
- tty.msg('The following packages are already deprecated:\n')
+ tty.msg("The following packages are already deprecated:\n")
spack.cmd.display_specs(already_deprecated, **display_args)
- tty.msg('In favor of (respectively):\n')
+ tty.msg("In favor of (respectively):\n")
spack.cmd.display_specs(already_deprecated_for, **display_args)
- answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
+ answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not answer:
- tty.die('Will not deprecate any packages.')
+ tty.die("Will not deprecate any packages.")
- link_fn = os.link if args.link_type == 'hard' else symlink
+ link_fn = os.link if args.link_type == "hard" else symlink
for dcate, dcator in zip(all_deprecate, all_deprecators):
dcate.package.do_deprecate(dcator, link_fn)
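
The deprecate command builds two parallel lists, the specs to deprecate and their replacements, then links each pair using either a hard link or a symlink depending on --link-type. A schematic of that final step (the spec objects and Spack's symlink wrapper are assumed; os.symlink stands in here):

    import os

    def run_deprecations(all_deprecate, all_deprecators, link_type, symlink=os.symlink):
        # Hard links via os.link, otherwise symlinks, as selected above.
        link_fn = os.link if link_type == "hard" else symlink
        for dcate, dcator in zip(all_deprecate, all_deprecators):
            dcate.package.do_deprecate(dcator, link_fn)
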
diff --git a/lib/spack/spack/cmd/dev_build.py b/lib/spack/spack/cmd/dev_build.py
index 50fd0d0cee..db71744442 100644
--- a/lib/spack/spack/cmd/dev_build.py
+++ b/lib/spack/spack/cmd/dev_build.py
@@ -19,45 +19,75 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(subparser, ['jobs'])
+ arguments.add_common_arguments(subparser, ["jobs"])
subparser.add_argument(
- '-d', '--source-path', dest='source_path', default=None,
- help="path to source directory. defaults to the current directory")
+ "-d",
+ "--source-path",
+ dest="source_path",
+ default=None,
+ help="path to source directory. defaults to the current directory",
+ )
subparser.add_argument(
- '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
- help="don't try to install dependencies of requested packages")
- arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
+ "-i",
+ "--ignore-dependencies",
+ action="store_true",
+ dest="ignore_deps",
+ help="don't try to install dependencies of requested packages",
+ )
+ arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
subparser.add_argument(
- '--keep-prefix', action='store_true',
- help="do not remove the install prefix if installation fails")
+ "--keep-prefix",
+ action="store_true",
+ help="do not remove the install prefix if installation fails",
+ )
subparser.add_argument(
- '--skip-patch', action='store_true',
- help="skip patching for the developer build")
+ "--skip-patch", action="store_true", help="skip patching for the developer build"
+ )
subparser.add_argument(
- '-q', '--quiet', action='store_true', dest='quiet',
- help="do not display verbose build output while installing")
+ "-q",
+ "--quiet",
+ action="store_true",
+ dest="quiet",
+ help="do not display verbose build output while installing",
+ )
subparser.add_argument(
- '--drop-in', type=str, dest='shell', default=None,
- help="drop into a build environment in a new shell, e.g. bash, zsh")
+ "--drop-in",
+ type=str,
+ dest="shell",
+ default=None,
+ help="drop into a build environment in a new shell, e.g. bash, zsh",
+ )
subparser.add_argument(
- '--test', default=None,
- choices=['root', 'all'],
+ "--test",
+ default=None,
+ choices=["root", "all"],
help="""If 'root' is chosen, run package tests during
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
-packages. If neither are chosen, don't run tests for any packages.""")
- arguments.add_common_arguments(subparser, ['spec'])
+packages. If neither are chosen, don't run tests for any packages.""",
+ )
+ arguments.add_common_arguments(subparser, ["spec"])
stop_group = subparser.add_mutually_exclusive_group()
stop_group.add_argument(
- '-b', '--before', type=str, dest='before', default=None,
- help="phase to stop before when installing (default None)")
+ "-b",
+ "--before",
+ type=str,
+ dest="before",
+ default=None,
+ help="phase to stop before when installing (default None)",
+ )
stop_group.add_argument(
- '-u', '--until', type=str, dest='until', default=None,
- help="phase to stop after when installing (default None)")
+ "-u",
+ "--until",
+ type=str,
+ dest="until",
+ default=None,
+ help="phase to stop after when installing (default None)",
+ )
cd_group = subparser.add_mutually_exclusive_group()
- arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
+ arguments.add_common_arguments(cd_group, ["clean", "dirty"])
spack.cmd.common.arguments.add_concretizer_args(subparser)
@@ -72,13 +102,16 @@ def dev_build(self, args):
spec = specs[0]
if not spack.repo.path.exists(spec.name):
- tty.die("No package for '{0}' was found.".format(spec.name),
- " Use `spack create` to create a new package")
+ tty.die(
+ "No package for '{0}' was found.".format(spec.name),
+ " Use `spack create` to create a new package",
+ )
if not spec.versions.concrete:
tty.die(
"spack dev-build spec must have a single, concrete version. "
- "Did you forget a package version number?")
+ "Did you forget a package version number?"
+ )
source_path = args.source_path
if source_path is None:
@@ -86,7 +119,7 @@ def dev_build(self, args):
source_path = os.path.abspath(source_path)
# Forces the build to run out of the source directory.
- spec.constrain('dev_path=%s' % source_path)
+ spec.constrain("dev_path=%s" % source_path)
spec.concretize()
if spec.installed:
@@ -96,15 +129,15 @@ def dev_build(self, args):
# disable checksumming if requested
if args.no_checksum:
- spack.config.set('config:checksum', False, scope='command_line')
+ spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
- spack.config.set('config:deprecated', True, scope='command_line')
+ spack.config.set("config:deprecated", True, scope="command_line")
tests = False
- if args.test == 'all':
+ if args.test == "all":
tests = True
- elif args.test == 'root':
+ elif args.test == "root":
tests = [spec.name for spec in specs]
spec.package.do_install(
@@ -116,7 +149,8 @@ def dev_build(self, args):
dirty=args.dirty,
stop_before=args.before,
skip_patch=args.skip_patch,
- stop_at=args.until)
+ stop_at=args.until,
+ )
# drop into the build environment of the package?
if args.shell is not None:
diff --git a/lib/spack/spack/cmd/develop.py b/lib/spack/spack/cmd/develop.py
index 736bbc361d..d95a50cb17 100644
--- a/lib/spack/spack/cmd/develop.py
+++ b/lib/spack/spack/cmd/develop.py
@@ -17,26 +17,33 @@ level = "long"
def setup_parser(subparser):
- subparser.add_argument(
- '-p', '--path', help='Source location of package')
+ subparser.add_argument("-p", "--path", help="Source location of package")
clone_group = subparser.add_mutually_exclusive_group()
clone_group.add_argument(
- '--no-clone', action='store_false', dest='clone', default=None,
- help='Do not clone. The package already exists at the source path')
+ "--no-clone",
+ action="store_false",
+ dest="clone",
+ default=None,
+ help="Do not clone. The package already exists at the source path",
+ )
clone_group.add_argument(
- '--clone', action='store_true', dest='clone', default=None,
- help='Clone the package even if the path already exists')
+ "--clone",
+ action="store_true",
+ dest="clone",
+ default=None,
+ help="Clone the package even if the path already exists",
+ )
subparser.add_argument(
- '-f', '--force',
- help='Remove any files or directories that block cloning source code')
+ "-f", "--force", help="Remove any files or directories that block cloning source code"
+ )
- arguments.add_common_arguments(subparser, ['spec'])
+ arguments.add_common_arguments(subparser, ["spec"])
def develop(parser, args):
- env = spack.cmd.require_active_env(cmd_name='develop')
+ env = spack.cmd.require_active_env(cmd_name="develop")
if not args.spec:
if args.clone is False:
@@ -44,17 +51,16 @@ def develop(parser, args):
# download all dev specs
for name, entry in env.dev_specs.items():
- path = entry.get('path', name)
- abspath = path if os.path.isabs(path) else os.path.join(
- env.path, path)
+ path = entry.get("path", name)
+ abspath = path if os.path.isabs(path) else os.path.join(env.path, path)
if os.path.exists(abspath):
- msg = "Skipping developer download of %s" % entry['spec']
+ msg = "Skipping developer download of %s" % entry["spec"]
msg += " because its path already exists."
tty.msg(msg)
continue
- spec = spack.spec.Spec(entry['spec'])
+ spec = spack.spec.Spec(entry["spec"])
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
pkg_cls(spec).stage.steal_source(abspath)
diff --git a/lib/spack/spack/cmd/diff.py b/lib/spack/spack/cmd/diff.py
index be69e3645b..0b5318b37d 100644
--- a/lib/spack/spack/cmd/diff.py
+++ b/lib/spack/spack/cmd/diff.py
@@ -22,27 +22,27 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(
- subparser, ['specs'])
+ arguments.add_common_arguments(subparser, ["specs"])
subparser.add_argument(
- '--json',
- action='store_true',
+ "--json",
+ action="store_true",
default=False,
- dest='dump_json',
- help="Dump json output instead of pretty printing."
+ dest="dump_json",
+ help="Dump json output instead of pretty printing.",
)
subparser.add_argument(
- '--first',
- action='store_true',
+ "--first",
+ action="store_true",
default=False,
- dest='load_first',
- help="load the first match if multiple packages match the spec"
+ dest="load_first",
+ help="load the first match if multiple packages match the spec",
)
subparser.add_argument(
- '-a', '--attribute',
- action='append',
- help="select the attributes to show (defaults to all)"
+ "-a",
+ "--attribute",
+ action="append",
+ help="select the attributes to show (defaults to all)",
)
@@ -70,12 +70,24 @@ def compare_specs(a, b, to_string=False, color=None):
# get facts for specs, making sure to include build dependencies of concrete
# specs and to descend into dependency hashes so we include all facts.
- a_facts = set(t for t in setup.spec_clauses(
- a, body=True, expand_hashes=True, concrete_build_deps=True,
- ))
- b_facts = set(t for t in setup.spec_clauses(
- b, body=True, expand_hashes=True, concrete_build_deps=True,
- ))
+ a_facts = set(
+ t
+ for t in setup.spec_clauses(
+ a,
+ body=True,
+ expand_hashes=True,
+ concrete_build_deps=True,
+ )
+ )
+ b_facts = set(
+ t
+ for t in setup.spec_clauses(
+ b,
+ body=True,
+ expand_hashes=True,
+ concrete_build_deps=True,
+ )
+ )
# We want to present them to the user as simple key: values
intersect = sorted(a_facts.intersection(b_facts))
@@ -120,8 +132,8 @@ def print_difference(c, attributes="all", out=None):
# Default to standard out unless another stream is provided
out = out or sys.stdout
- A = c['b_not_a']
- B = c['a_not_b']
+ A = c["b_not_a"]
+ B = c["a_not_b"]
cprint("@R{--- %s}" % c["a_name"]) # bright red
cprint("@G{+++ %s}" % c["b_name"]) # bright green
@@ -186,8 +198,10 @@ def diff(parser, args):
if len(args.specs) != 2:
tty.die("You must provide two specs to diff.")
- specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
- for spec in spack.cmd.parse_specs(args.specs)]
+ specs = [
+ spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
+ for spec in spack.cmd.parse_specs(args.specs)
+ ]
# Calculate the comparison (c)
color = False if args.dump_json else get_color_when()
diff --git a/lib/spack/spack/cmd/docs.py b/lib/spack/spack/cmd/docs.py
index 781b298a1f..418d351920 100644
--- a/lib/spack/spack/cmd/docs.py
+++ b/lib/spack/spack/cmd/docs.py
@@ -5,10 +5,10 @@
import webbrowser
-description = 'open spack documentation in a web browser'
-section = 'help'
-level = 'short'
+description = "open spack documentation in a web browser"
+section = "help"
+level = "short"
def docs(parser, args):
- webbrowser.open('https://spack.readthedocs.io')
+ webbrowser.open("https://spack.readthedocs.io")
diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py
index e5800fe948..af35652ae6 100644
--- a/lib/spack/spack/cmd/edit.py
+++ b/lib/spack/spack/cmd/edit.py
@@ -43,8 +43,10 @@ def edit_package(name, repo_path, namespace):
if not os.access(path, os.R_OK):
tty.die("Insufficient permissions on '%s'!" % path)
else:
- tty.die("No package for '{0}' was found.".format(spec.name),
- " Use `spack create` to create a new package")
+ tty.die(
+ "No package for '{0}' was found.".format(spec.name),
+ " Use `spack create` to create a new package",
+ )
editor(path)
@@ -55,36 +57,51 @@ def setup_parser(subparser):
# Various types of Spack files that can be edited
# Edits package files by default
excl_args.add_argument(
- '-b', '--build-system', dest='path', action='store_const',
+ "-b",
+ "--build-system",
+ dest="path",
+ action="store_const",
const=spack.paths.build_systems_path,
- help="Edit the build system with the supplied name.")
+ help="Edit the build system with the supplied name.",
+ )
excl_args.add_argument(
- '-c', '--command', dest='path', action='store_const',
+ "-c",
+ "--command",
+ dest="path",
+ action="store_const",
const=spack.paths.command_path,
- help="edit the command with the supplied name")
+ help="edit the command with the supplied name",
+ )
excl_args.add_argument(
- '-d', '--docs', dest='path', action='store_const',
- const=os.path.join(spack.paths.lib_path, 'docs'),
- help="edit the docs with the supplied name")
+ "-d",
+ "--docs",
+ dest="path",
+ action="store_const",
+ const=os.path.join(spack.paths.lib_path, "docs"),
+ help="edit the docs with the supplied name",
+ )
excl_args.add_argument(
- '-t', '--test', dest='path', action='store_const',
+ "-t",
+ "--test",
+ dest="path",
+ action="store_const",
const=spack.paths.test_path,
- help="edit the test with the supplied name")
+ help="edit the test with the supplied name",
+ )
excl_args.add_argument(
- '-m', '--module', dest='path', action='store_const',
+ "-m",
+ "--module",
+ dest="path",
+ action="store_const",
const=spack.paths.module_path,
- help="edit the main spack module with the supplied name")
+ help="edit the main spack module with the supplied name",
+ )
# Options for editing packages
- excl_args.add_argument(
- '-r', '--repo', default=None,
- help="path to repo to edit package in")
- excl_args.add_argument(
- '-N', '--namespace', default=None,
- help="namespace of package to edit")
+ excl_args.add_argument("-r", "--repo", default=None, help="path to repo to edit package in")
+ excl_args.add_argument("-N", "--namespace", default=None, help="namespace of package to edit")
- subparser.add_argument(
- 'package', nargs='?', default=None, help="package name")
+ subparser.add_argument("package", nargs="?", default=None, help="package name")
def edit(parser, args):
@@ -103,19 +120,17 @@ def edit(parser, args):
path = os.path.join(path, name)
if not os.path.exists(path):
- files = glob.glob(path + '*')
- exclude_list = ['.pyc', '~'] # exclude binaries and backups
- files = list(filter(
- lambda x: all(s not in x for s in exclude_list), files))
+ files = glob.glob(path + "*")
+ exclude_list = [".pyc", "~"] # exclude binaries and backups
+ files = list(filter(lambda x: all(s not in x for s in exclude_list), files))
if len(files) > 1:
- m = 'Multiple files exist with the name {0}.'.format(name)
- m += ' Please specify a suffix. Files are:\n\n'
+ m = "Multiple files exist with the name {0}.".format(name)
+ m += " Please specify a suffix. Files are:\n\n"
for f in files:
- m += ' ' + os.path.basename(f) + '\n'
+ m += " " + os.path.basename(f) + "\n"
tty.die(m)
if not files:
- tty.die("No file for '{0}' was found in {1}".format(name,
- path))
+ tty.die("No file for '{0}' was found in {1}".format(name, path))
path = files[0] # already confirmed only one entry in files
editor(path)
diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py
index 17926a900a..86953a7bcc 100644
--- a/lib/spack/spack/cmd/env.py
+++ b/lib/spack/spack/cmd/env.py
@@ -34,17 +34,17 @@ level = "short"
#: List of subcommands of `spack env`
subcommands = [
- 'activate',
- 'deactivate',
- 'create',
- ['remove', 'rm'],
- ['list', 'ls'],
- ['status', 'st'],
- 'loads',
- 'view',
- 'update',
- 'revert',
- 'depfile'
+ "activate",
+ "deactivate",
+ "create",
+ ["remove", "rm"],
+ ["list", "ls"],
+ ["status", "st"],
+ "loads",
+ "view",
+ "update",
+ "revert",
+ "depfile",
]
@@ -55,42 +55,79 @@ def env_activate_setup_parser(subparser):
"""set the current environment"""
shells = subparser.add_mutually_exclusive_group()
shells.add_argument(
- '--sh', action='store_const', dest='shell', const='sh',
- help="print sh commands to activate the environment")
+ "--sh",
+ action="store_const",
+ dest="shell",
+ const="sh",
+ help="print sh commands to activate the environment",
+ )
shells.add_argument(
- '--csh', action='store_const', dest='shell', const='csh',
- help="print csh commands to activate the environment")
+ "--csh",
+ action="store_const",
+ dest="shell",
+ const="csh",
+ help="print csh commands to activate the environment",
+ )
shells.add_argument(
- '--fish', action='store_const', dest='shell', const='fish',
- help="print fish commands to activate the environment")
+ "--fish",
+ action="store_const",
+ dest="shell",
+ const="fish",
+ help="print fish commands to activate the environment",
+ )
shells.add_argument(
- '--bat', action='store_const', dest='shell', const='bat',
- help="print bat commands to activate the environment")
+ "--bat",
+ action="store_const",
+ dest="shell",
+ const="bat",
+ help="print bat commands to activate the environment",
+ )
view_options = subparser.add_mutually_exclusive_group()
view_options.add_argument(
- '-v', '--with-view', action='store_const', dest='with_view',
- const=True, default=True,
- help="update PATH etc. with associated view")
+ "-v",
+ "--with-view",
+ action="store_const",
+ dest="with_view",
+ const=True,
+ default=True,
+ help="update PATH etc. with associated view",
+ )
view_options.add_argument(
- '-V', '--without-view', action='store_const', dest='with_view',
- const=False, default=True,
- help="do not update PATH etc. with associated view")
+ "-V",
+ "--without-view",
+ action="store_const",
+ dest="with_view",
+ const=False,
+ default=True,
+ help="do not update PATH etc. with associated view",
+ )
subparser.add_argument(
- '-p', '--prompt', action='store_true', default=False,
- help="decorate the command line prompt when activating")
+ "-p",
+ "--prompt",
+ action="store_true",
+ default=False,
+ help="decorate the command line prompt when activating",
+ )
env_options = subparser.add_mutually_exclusive_group()
env_options.add_argument(
- '--temp', action='store_true', default=False,
- help='create and activate an environment in a temporary directory')
+ "--temp",
+ action="store_true",
+ default=False,
+ help="create and activate an environment in a temporary directory",
+ )
env_options.add_argument(
- '-d', '--dir', default=None,
- help="activate the environment in this directory")
+ "-d", "--dir", default=None, help="activate the environment in this directory"
+ )
env_options.add_argument(
- metavar='env', dest='activate_env', nargs='?', default=None,
- help='name of environment to activate')
+ metavar="env",
+ dest="activate_env",
+ nargs="?",
+ default=None,
+ help="name of environment to activate",
+ )
def create_temp_env_directory():
@@ -103,7 +140,7 @@ def create_temp_env_directory():
def env_activate(args):
if not args.activate_env and not args.dir and not args.temp:
- tty.die('spack env activate requires an environment name, directory, or --temp')
+ tty.die("spack env activate requires an environment name, directory, or --temp")
if not args.shell:
spack.cmd.common.shell_init_instructions(
@@ -114,8 +151,7 @@ def env_activate(args):
# Error out when -e, -E, -D flags are given, cause they are ambiguous.
if args.env or args.no_env or args.env_dir:
- tty.die('Calling spack env activate with --env, --env-dir and --no-env '
- 'is ambiguous')
+ tty.die("Calling spack env activate with --env, --env-dir and --no-env " "is ambiguous")
env_name_or_dir = args.activate_env or args.dir
@@ -139,11 +175,11 @@ def env_activate(args):
else:
tty.die("No such environment: '%s'" % env_name_or_dir)
- env_prompt = '[%s]' % short_name
+ env_prompt = "[%s]" % short_name
# We only support one active environment at a time, so deactivate the current one.
if ev.active_environment() is None:
- cmds = ''
+ cmds = ""
env_mods = EnvironmentModifications()
else:
cmds = spack.environment.shell.deactivate_header(shell=args.shell)
@@ -152,14 +188,9 @@ def env_activate(args):
# Activate new environment
active_env = ev.Environment(env_path)
cmds += spack.environment.shell.activate_header(
- env=active_env,
- shell=args.shell,
- prompt=env_prompt if args.prompt else None
- )
- env_mods.extend(spack.environment.shell.activate(
- env=active_env,
- add_view=args.with_view
- ))
+ env=active_env, shell=args.shell, prompt=env_prompt if args.prompt else None
+ )
+ env_mods.extend(spack.environment.shell.activate(env=active_env, add_view=args.with_view))
cmds += env_mods.shell_modifications(args.shell)
sys.stdout.write(cmds)
@@ -171,17 +202,33 @@ def env_deactivate_setup_parser(subparser):
"""deactivate any active environment in the shell"""
shells = subparser.add_mutually_exclusive_group()
shells.add_argument(
- '--sh', action='store_const', dest='shell', const='sh',
- help="print sh commands to deactivate the environment")
+ "--sh",
+ action="store_const",
+ dest="shell",
+ const="sh",
+ help="print sh commands to deactivate the environment",
+ )
shells.add_argument(
- '--csh', action='store_const', dest='shell', const='csh',
- help="print csh commands to deactivate the environment")
+ "--csh",
+ action="store_const",
+ dest="shell",
+ const="csh",
+ help="print csh commands to deactivate the environment",
+ )
shells.add_argument(
- '--fish', action='store_const', dest='shell', const='fish',
- help="print fish commands to activate the environment")
+ "--fish",
+ action="store_const",
+ dest="shell",
+ const="fish",
+ help="print fish commands to activate the environment",
+ )
shells.add_argument(
- '--bat', action='store_const', dest='shell', const='bat',
- help="print bat commands to activate the environment")
+ "--bat",
+ action="store_const",
+ dest="shell",
+ const="bat",
+ help="print bat commands to activate the environment",
+ )
def env_deactivate(args):
@@ -194,11 +241,10 @@ def env_deactivate(args):
# Error out when -e, -E, -D flags are given, cause they are ambiguous.
if args.env or args.no_env or args.env_dir:
- tty.die('Calling spack env deactivate with --env, --env-dir and --no-env '
- 'is ambiguous')
+ tty.die("Calling spack env deactivate with --env, --env-dir and --no-env " "is ambiguous")
if ev.active_environment() is None:
- tty.die('No environment is currently active.')
+ tty.die("No environment is currently active.")
cmds = spack.environment.shell.deactivate_header(args.shell)
env_mods = spack.environment.shell.deactivate()
@@ -211,27 +257,32 @@ def env_deactivate(args):
#
def env_create_setup_parser(subparser):
"""create a new environment"""
+ subparser.add_argument("create_env", metavar="env", help="name of environment to create")
subparser.add_argument(
- 'create_env', metavar='env', help='name of environment to create')
- subparser.add_argument(
- '-d', '--dir', action='store_true',
- help='create an environment in a specific directory')
+ "-d", "--dir", action="store_true", help="create an environment in a specific directory"
+ )
subparser.add_argument(
- '--keep-relative', action='store_true',
- help='copy relative develop paths verbatim into the new environment'
- ' when initializing from envfile')
+ "--keep-relative",
+ action="store_true",
+ help="copy relative develop paths verbatim into the new environment"
+ " when initializing from envfile",
+ )
view_opts = subparser.add_mutually_exclusive_group()
view_opts.add_argument(
- '--without-view', action='store_true',
- help='do not maintain a view for this environment')
+ "--without-view", action="store_true", help="do not maintain a view for this environment"
+ )
view_opts.add_argument(
- '--with-view',
- help='specify that this environment should maintain a view at the'
- ' specified path (by default the view is maintained in the'
- ' environment directory)')
+ "--with-view",
+ help="specify that this environment should maintain a view at the"
+ " specified path (by default the view is maintained in the"
+ " environment directory)",
+ )
subparser.add_argument(
- 'envfile', nargs='?', default=None,
- help='optional init file; can be spack.yaml or spack.lock')
+ "envfile",
+ nargs="?",
+ default=None,
+ help="optional init file; can be spack.yaml or spack.lock",
+ )
def env_create(args):
@@ -246,15 +297,14 @@ def env_create(args):
with_view = None
if args.envfile:
with open(args.envfile) as f:
- _env_create(args.create_env, f, args.dir,
- with_view=with_view, keep_relative=args.keep_relative)
+ _env_create(
+ args.create_env, f, args.dir, with_view=with_view, keep_relative=args.keep_relative
+ )
else:
- _env_create(args.create_env, None, args.dir,
- with_view=with_view)
+ _env_create(args.create_env, None, args.dir, with_view=with_view)
-def _env_create(name_or_path, init_file=None, dir=False, with_view=None,
- keep_relative=False):
+def _env_create(name_or_path, init_file=None, dir=False, with_view=None, keep_relative=False):
"""Create a new environment, with an optional yaml description.
Arguments:
@@ -287,10 +337,8 @@ def _env_create(name_or_path, init_file=None, dir=False, with_view=None,
#
def env_remove_setup_parser(subparser):
"""remove an existing environment"""
- subparser.add_argument(
- 'rm_env', metavar='env', nargs='+',
- help='environment(s) to remove')
- arguments.add_common_arguments(subparser, ['yes_to_all'])
+ subparser.add_argument("rm_env", metavar="env", nargs="+", help="environment(s) to remove")
+ arguments.add_common_arguments(subparser, ["yes_to_all"])
def env_remove(args):
@@ -307,17 +355,19 @@ def env_remove(args):
if not args.yes_to_all:
answer = tty.get_yes_or_no(
- 'Really remove %s %s?' % (
- string.plural(len(args.rm_env), 'environment', show_n=False),
- string.comma_and(args.rm_env)),
- default=False)
+ "Really remove %s %s?"
+ % (
+ string.plural(len(args.rm_env), "environment", show_n=False),
+ string.comma_and(args.rm_env),
+ ),
+ default=False,
+ )
if not answer:
tty.die("Will not remove any environments")
for env in read_envs:
if env.active:
- tty.die("Environment %s can't be removed while activated."
- % env.name)
+ tty.die("Environment %s can't be removed while activated." % env.name)
env.destroy()
tty.msg("Successfully removed environment '%s'" % env.name)
@@ -336,23 +386,23 @@ def env_list(args):
color_names = []
for name in names:
if ev.active(name):
- name = colorize('@*g{%s}' % name)
+ name = colorize("@*g{%s}" % name)
color_names.append(name)
# say how many there are if writing to a tty
if sys.stdout.isatty():
if not names:
- tty.msg('No environments')
+ tty.msg("No environments")
else:
- tty.msg('%d environments' % len(names))
+ tty.msg("%d environments" % len(names))
colify(color_names, indent=4)
class ViewAction(object):
- regenerate = 'regenerate'
- enable = 'enable'
- disable = 'disable'
+ regenerate = "regenerate"
+ enable = "enable"
+ disable = "disable"
@staticmethod
def actions():
@@ -365,11 +415,10 @@ class ViewAction(object):
def env_view_setup_parser(subparser):
"""manage a view associated with the environment"""
subparser.add_argument(
- 'action', choices=ViewAction.actions(),
- help="action to take for the environment's view")
+ "action", choices=ViewAction.actions(), help="action to take for the environment's view"
+ )
subparser.add_argument(
- 'view_path', nargs='?',
- help="when enabling a view, optionally set the path manually"
+ "view_path", nargs="?", help="when enabling a view, optionally set the path manually"
)
@@ -404,15 +453,14 @@ def env_status(args):
env = ev.active_environment()
if env:
if env.path == os.getcwd():
- tty.msg('Using %s in current directory: %s'
- % (ev.manifest_name, env.path))
+ tty.msg("Using %s in current directory: %s" % (ev.manifest_name, env.path))
else:
- tty.msg('In environment %s' % env.name)
+ tty.msg("In environment %s" % env.name)
# Check if environment views can be safely activated
env.check_views()
else:
- tty.msg('No active environment')
+ tty.msg("No active environment")
#
@@ -421,44 +469,48 @@ def env_status(args):
def env_loads_setup_parser(subparser):
"""list modules for an installed environment '(see spack module loads)'"""
subparser.add_argument(
- '-n', '--module-set-name', default='default',
- help='module set for which to generate load operations')
+ "-n",
+ "--module-set-name",
+ default="default",
+ help="module set for which to generate load operations",
+ )
subparser.add_argument(
- '-m', '--module-type', choices=('tcl', 'lmod'),
- help='type of module system to generate loads for')
+ "-m",
+ "--module-type",
+ choices=("tcl", "lmod"),
+ help="type of module system to generate loads for",
+ )
spack.cmd.modules.add_loads_arguments(subparser)
def env_loads(args):
- env = spack.cmd.require_active_env(cmd_name='env loads')
+ env = spack.cmd.require_active_env(cmd_name="env loads")
# Set the module types that have been selected
module_type = args.module_type
if module_type is None:
# If no selection has been made select all of them
- module_type = 'tcl'
+ module_type = "tcl"
recurse_dependencies = args.recurse_dependencies
args.recurse_dependencies = False
- loads_file = fs.join_path(env.path, 'loads')
- with open(loads_file, 'w') as f:
- specs = env._get_environment_specs(
- recurse_dependencies=recurse_dependencies)
+ loads_file = fs.join_path(env.path, "loads")
+ with open(loads_file, "w") as f:
+ specs = env._get_environment_specs(recurse_dependencies=recurse_dependencies)
spack.cmd.modules.loads(module_type, specs, args, f)
- print('To load this environment, type:')
- print(' source %s' % loads_file)
+ print("To load this environment, type:")
+ print(" source %s" % loads_file)
def env_update_setup_parser(subparser):
"""update environments to the latest format"""
subparser.add_argument(
- metavar='env', dest='update_env',
- help='name or directory of the environment to activate'
+        metavar="env", dest="update_env", help="name or directory of the environment to update"
)
- spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])
+ spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])
def env_update(args):
@@ -472,16 +524,18 @@ def env_update(args):
proceed = True
if not args.yes_to_all:
- msg = ('The environment "{0}" is going to be updated to the latest '
- 'schema format.\nIf the environment is updated, versions of '
- 'Spack that are older than this version may not be able to '
- 'read it. Spack stores backups of the updated environment '
- 'which can be retrieved with "spack env revert"')
+ msg = (
+ 'The environment "{0}" is going to be updated to the latest '
+ "schema format.\nIf the environment is updated, versions of "
+ "Spack that are older than this version may not be able to "
+ "read it. Spack stores backups of the updated environment "
+ 'which can be retrieved with "spack env revert"'
+ )
tty.msg(msg.format(args.update_env))
- proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)
+ proceed = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not proceed:
- tty.die('Operation aborted.')
+ tty.die("Operation aborted.")
ev.update_yaml(manifest_file, backup_file=backup_file)
msg = 'Environment "{0}" has been updated [backup={1}]'
@@ -491,10 +545,9 @@ def env_update(args):
def env_revert_setup_parser(subparser):
"""restore environments to their state before update"""
subparser.add_argument(
- metavar='env', dest='revert_env',
- help='name or directory of the environment to activate'
+        metavar="env", dest="revert_env", help="name or directory of the environment to revert"
)
- spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])
+ spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])
def env_revert(args):
@@ -504,21 +557,23 @@ def env_revert(args):
# Check that both the spack.yaml and the backup exist, the inform user
# on what is going to happen and ask for confirmation
if not os.path.exists(manifest_file):
- msg = 'cannot fine the manifest file of the environment [file={0}]'
+            msg = "cannot find the manifest file of the environment [file={0}]"
tty.die(msg.format(manifest_file))
if not os.path.exists(backup_file):
- msg = 'cannot find the old manifest file to be restored [file={0}]'
+ msg = "cannot find the old manifest file to be restored [file={0}]"
tty.die(msg.format(backup_file))
proceed = True
if not args.yes_to_all:
- msg = ('Spack is going to overwrite the current manifest file'
- ' with a backup copy [manifest={0}, backup={1}]')
+ msg = (
+ "Spack is going to overwrite the current manifest file"
+ " with a backup copy [manifest={0}, backup={1}]"
+ )
tty.msg(msg.format(manifest_file, backup_file))
- proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)
+ proceed = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not proceed:
- tty.die('Operation aborted.')
+ tty.die("Operation aborted.")
shutil.copy(backup_file, manifest_file)
os.remove(backup_file)
@@ -529,24 +584,39 @@ def env_revert(args):
def env_depfile_setup_parser(subparser):
"""generate a depfile from the concrete environment specs"""
subparser.add_argument(
- '--make-target-prefix', default=None, metavar='TARGET',
- help='prefix Makefile targets with <TARGET>/<name>. By default the absolute '
- 'path to the directory makedeps under the environment metadata dir is '
- 'used. Can be set to an empty string --make-target-prefix \'\'.')
+ "--make-target-prefix",
+ default=None,
+ metavar="TARGET",
+ help="prefix Makefile targets with <TARGET>/<name>. By default the absolute "
+ "path to the directory makedeps under the environment metadata dir is "
+ "used. Can be set to an empty string --make-target-prefix ''.",
+ )
subparser.add_argument(
- '--make-disable-jobserver', default=True, action='store_false',
- dest='jobserver', help='disable POSIX jobserver support.')
+ "--make-disable-jobserver",
+ default=True,
+ action="store_false",
+ dest="jobserver",
+ help="disable POSIX jobserver support.",
+ )
subparser.add_argument(
- '-o', '--output', default=None, metavar='FILE',
- help='write the depfile to FILE rather than to stdout')
+ "-o",
+ "--output",
+ default=None,
+ metavar="FILE",
+ help="write the depfile to FILE rather than to stdout",
+ )
subparser.add_argument(
- '-G', '--generator', default='make', choices=('make',),
- help='specify the depfile type. Currently only make is supported.')
+ "-G",
+ "--generator",
+ default="make",
+ choices=("make",),
+ help="specify the depfile type. Currently only make is supported.",
+ )
def env_depfile(args):
# Currently only make is supported.
- spack.cmd.require_active_env(cmd_name='env depfile')
+ spack.cmd.require_active_env(cmd_name="env depfile")
env = ev.active_environment()
# Maps each hash in the environment to a string of install prereqs
@@ -554,7 +624,7 @@ def env_depfile(args):
hash_to_spec = {}
if args.make_target_prefix is None:
- target_prefix = os.path.join(env.env_subdir_path, 'makedeps')
+ target_prefix = os.path.join(env.env_subdir_path, "makedeps")
else:
target_prefix = args.make_target_prefix
@@ -563,19 +633,20 @@ def env_depfile(args):
# have /abs/path/to/env/metadir/{all,clean} targets. But it *does* make
# sense to have a prefix like `env/all`, `env/clean` when they are
# supposed to be included
- if name in ('all', 'clean') and os.path.isabs(target_prefix):
+ if name in ("all", "clean") and os.path.isabs(target_prefix):
return name
else:
return os.path.join(target_prefix, name)
def get_install_target(name):
- return os.path.join(target_prefix, '.install', name)
+ return os.path.join(target_prefix, ".install", name)
for _, spec in env.concretized_specs():
for s in spec.traverse(root=True):
hash_to_spec[s.dag_hash()] = s
hash_to_prereqs[s.dag_hash()] = [
- get_install_target(dep.dag_hash()) for dep in s.dependencies()]
+ get_install_target(dep.dag_hash()) for dep in s.dependencies()
+ ]
root_dags = [s.dag_hash() for _, s in env.concretized_specs()]
@@ -587,7 +658,8 @@ def env_depfile(args):
buf = six.StringIO()
- buf.write("""SPACK ?= spack
+ buf.write(
+ """SPACK ?= spack
.PHONY: {} {}
@@ -604,12 +676,21 @@ def env_depfile(args):
\t{}$(SPACK) -e '{}' install $(SPACK_INSTALL_FLAGS) --only-concrete --only=package \
--no-add /$(notdir $@) && touch $@
-""".format(get_target('all'), get_target('clean'),
- get_target('all'), get_target('env'),
- get_target('env'), ' '.join(root_install_targets),
- get_target('dirs'), get_target('.install'),
- get_target('.install/%'), get_target('dirs'),
- '+' if args.jobserver else '', env.path))
+""".format(
+ get_target("all"),
+ get_target("clean"),
+ get_target("all"),
+ get_target("env"),
+ get_target("env"),
+ " ".join(root_install_targets),
+ get_target("dirs"),
+ get_target(".install"),
+ get_target(".install/%"),
+ get_target("dirs"),
+ "+" if args.jobserver else "",
+ env.path,
+ )
+ )
# Targets are of the form <prefix>/<name>: [<prefix>/<depname>]...,
# The prefix can be an empty string, in that case we don't add the `/`.
@@ -617,37 +698,38 @@ def env_depfile(args):
# could be the package name in case of `concretization: together` so
# it can be more easily referred to, but for now we don't special case
# this.
- fmt = '{name}{@version}{%compiler}{variants}{arch=architecture}'
+ fmt = "{name}{@version}{%compiler}{variants}{arch=architecture}"
# Set SPEC for each hash
- buf.write('# Set the human-readable spec for each target\n')
+ buf.write("# Set the human-readable spec for each target\n")
for dag_hash in hash_to_prereqs.keys():
formatted_spec = hash_to_spec[dag_hash].format(fmt)
- buf.write("{}: SPEC = {}\n".format(get_target('%/' + dag_hash), formatted_spec))
- buf.write('\n')
+ buf.write("{}: SPEC = {}\n".format(get_target("%/" + dag_hash), formatted_spec))
+ buf.write("\n")
# Set install dependencies
- buf.write('# Install dependencies\n')
+ buf.write("# Install dependencies\n")
for parent, children in hash_to_prereqs.items():
if not children:
continue
- buf.write('{}: {}\n'.format(get_install_target(parent), ' '.join(children)))
- buf.write('\n')
+ buf.write("{}: {}\n".format(get_install_target(parent), " ".join(children)))
+ buf.write("\n")
# Clean target: remove target files but not their folders, cause
# --make-target-prefix can be any existing directory we do not control,
# including empty string (which means deleting the containing folder
# would delete the folder with the Makefile)
- buf.write("{}:\n\trm -f -- {} {}\n".format(
- get_target('clean'),
- get_target('env'),
- ' '.join(all_install_targets)))
+ buf.write(
+ "{}:\n\trm -f -- {} {}\n".format(
+ get_target("clean"), get_target("env"), " ".join(all_install_targets)
+ )
+ )
makefile = buf.getvalue()
# Finally write to stdout/file.
if args.output:
- with open(args.output, 'w') as f:
+ with open(args.output, "w") as f:
f.write(makefile)
else:
sys.stdout.write(makefile)
@@ -661,7 +743,7 @@ subcommand_functions = {}
# spack env
#
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='env_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="env_command")
for name in subcommands:
if isinstance(name, (list, tuple)):
@@ -670,17 +752,16 @@ def setup_parser(subparser):
aliases = []
# add commands to subcommands dict
- function_name = 'env_%s' % name
+ function_name = "env_%s" % name
function = globals()[function_name]
for alias in [name] + aliases:
subcommand_functions[alias] = function
# make a subparser and run the command's setup function on it
- setup_parser_cmd_name = 'env_%s_setup_parser' % name
+ setup_parser_cmd_name = "env_%s_setup_parser" % name
setup_parser_cmd = globals()[setup_parser_cmd_name]
- subsubparser = sp.add_parser(
- name, aliases=aliases, help=setup_parser_cmd.__doc__)
+ subsubparser = sp.add_parser(name, aliases=aliases, help=setup_parser_cmd.__doc__)
setup_parser_cmd(subsubparser)
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py
index 059b76e411..50f41529b2 100644
--- a/lib/spack/spack/cmd/extensions.py
+++ b/lib/spack/spack/cmd/extensions.py
@@ -22,25 +22,33 @@ level = "long"
def setup_parser(subparser):
- subparser.epilog = 'If called without argument returns ' \
- 'the list of all valid extendable packages'
- arguments.add_common_arguments(subparser, ['long', 'very_long'])
- subparser.add_argument('-d', '--deps', action='store_true',
- help='output dependencies along with found specs')
-
- subparser.add_argument('-p', '--paths', action='store_true',
- help='show paths to package install directories')
+ subparser.epilog = (
+ "If called without argument returns " "the list of all valid extendable packages"
+ )
+ arguments.add_common_arguments(subparser, ["long", "very_long"])
subparser.add_argument(
- '-s', '--show', action='store', default='all',
- choices=("packages", "installed", "activated", "all"),
- help="show only part of output")
+ "-d", "--deps", action="store_true", help="output dependencies along with found specs"
+ )
+
subparser.add_argument(
- '-v', '--view', metavar='VIEW', type=str,
- help="the view to operate on")
+ "-p", "--paths", action="store_true", help="show paths to package install directories"
+ )
+ subparser.add_argument(
+ "-s",
+ "--show",
+ action="store",
+ default="all",
+ choices=("packages", "installed", "activated", "all"),
+ help="show only part of output",
+ )
+ subparser.add_argument("-v", "--view", metavar="VIEW", type=str, help="the view to operate on")
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER,
- help='spec of package to list extensions for', metavar='extendable')
+ "spec",
+ nargs=argparse.REMAINDER,
+ help="spec of package to list extensions for",
+ metavar="extendable",
+ )
def extensions(parser, args):
@@ -48,7 +56,7 @@ def extensions(parser, args):
# If called without arguments, list all the extendable packages
isatty = sys.stdout.isatty()
if isatty:
- tty.info('Extendable packages:')
+ tty.info("Extendable packages:")
extendable_pkgs = []
for name in spack.repo.all_package_names():
@@ -92,8 +100,7 @@ def extensions(parser, args):
if args.show in ("installed", "all"):
# List specs of installed extensions.
- installed = [
- s.spec for s in spack.store.db.installed_extensions_for(spec)]
+ installed = [s.spec for s in spack.store.db.installed_extensions_for(spec)]
if args.show == "all":
print
diff --git a/lib/spack/spack/cmd/external.py b/lib/spack/spack/cmd/external.py
index 768ec2ebd2..993533d395 100644
--- a/lib/spack/spack/cmd/external.py
+++ b/lib/spack/spack/cmd/external.py
@@ -26,60 +26,68 @@ level = "short"
def setup_parser(subparser):
- sp = subparser.add_subparsers(
- metavar='SUBCOMMAND', dest='external_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="external_command")
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
- find_parser = sp.add_parser(
- 'find', help='add external packages to packages.yaml'
- )
+ find_parser = sp.add_parser("find", help="add external packages to packages.yaml")
find_parser.add_argument(
- '--not-buildable', action='store_true', default=False,
- help="packages with detected externals won't be built with Spack")
+ "--not-buildable",
+ action="store_true",
+ default=False,
+ help="packages with detected externals won't be built with Spack",
+ )
find_parser.add_argument(
- '-p', '--path', default=None, action='append',
- help="Alternative search paths for finding externals. May be repeated")
+ "-p",
+ "--path",
+ default=None,
+ action="append",
+ help="Alternative search paths for finding externals. May be repeated",
+ )
find_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
- default=spack.config.default_modify_scope('packages'),
- help="configuration scope to modify")
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
+ default=spack.config.default_modify_scope("packages"),
+ help="configuration scope to modify",
+ )
find_parser.add_argument(
- '--all', action='store_true',
- help="search for all packages that Spack knows about"
+ "--all", action="store_true", help="search for all packages that Spack knows about"
)
- spack.cmd.common.arguments.add_common_arguments(find_parser, ['tags'])
- find_parser.add_argument('packages', nargs=argparse.REMAINDER)
+ spack.cmd.common.arguments.add_common_arguments(find_parser, ["tags"])
+ find_parser.add_argument("packages", nargs=argparse.REMAINDER)
find_parser.epilog = (
'The search is by default on packages tagged with the "build-tools" or '
'"core-packages" tags. Use the --all option to search for every possible '
- 'package Spack knows how to find.'
+ "package Spack knows how to find."
)
- sp.add_parser(
- 'list', help='list detectable packages, by repository and name'
- )
+ sp.add_parser("list", help="list detectable packages, by repository and name")
read_cray_manifest = sp.add_parser(
- 'read-cray-manifest', help=(
+ "read-cray-manifest",
+ help=(
"consume a Spack-compatible description of externally-installed "
"packages, including dependency relationships"
- )
+ ),
)
read_cray_manifest.add_argument(
- '--file', default=None,
- help="specify a location other than the default")
+ "--file", default=None, help="specify a location other than the default"
+ )
read_cray_manifest.add_argument(
- '--directory', default=None,
- help="specify a directory storing a group of manifest files")
+ "--directory", default=None, help="specify a directory storing a group of manifest files"
+ )
read_cray_manifest.add_argument(
- '--dry-run', action='store_true', default=False,
- help="don't modify DB with files that are read")
+ "--dry-run",
+ action="store_true",
+ default=False,
+ help="don't modify DB with files that are read",
+ )
read_cray_manifest.add_argument(
- '--fail-on-error', action='store_true',
- help=("if a manifest file cannot be parsed, fail and report the "
- "full stack trace")
+ "--fail-on-error",
+ action="store_true",
+ help=("if a manifest file cannot be parsed, fail and report the " "full stack trace"),
)
@@ -98,21 +106,20 @@ def external_find(args):
# For most exceptions, just print a warning and continue.
# Note that KeyboardInterrupt does not subclass Exception
# (so CTRL-C will terminate the program as expected).
- skip_msg = ("Skipping manifest and continuing with other external "
- "checks")
- if ((isinstance(e, IOError) or isinstance(e, OSError)) and
- e.errno in [errno.EPERM, errno.EACCES]):
+ skip_msg = "Skipping manifest and continuing with other external " "checks"
+ if (isinstance(e, IOError) or isinstance(e, OSError)) and e.errno in [
+ errno.EPERM,
+ errno.EACCES,
+ ]:
# The manifest file does not have sufficient permissions enabled:
# print a warning and keep going
- tty.warn("Unable to read manifest due to insufficient "
- "permissions.", skip_msg)
+ tty.warn("Unable to read manifest due to insufficient " "permissions.", skip_msg)
else:
- tty.warn("Unable to read manifest, unexpected error: {0}"
- .format(str(e)), skip_msg)
+ tty.warn("Unable to read manifest, unexpected error: {0}".format(str(e)), skip_msg)
# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:
- args.tags = ['core-packages', 'build-tools']
+ args.tags = ["core-packages", "build-tools"]
# If the user specified both --all and --tag, then --all has precedence
if args.all and args.tags:
@@ -123,9 +130,7 @@ def external_find(args):
# Add the packages that have been required explicitly
if args.packages:
- pkg_cls_to_check = [
- spack.repo.path.get_pkg_class(pkg) for pkg in args.packages
- ]
+ pkg_cls_to_check = [spack.repo.path.get_pkg_class(pkg) for pkg in args.packages]
if args.tags:
allowed = set(spack.repo.path.packages_with_tags(*args.tags))
pkg_cls_to_check = [x for x in pkg_cls_to_check if x.name in allowed]
@@ -146,22 +151,19 @@ def external_find(args):
if not args.tags and not pkg_cls_to_check:
pkg_cls_to_check = list(spack.repo.path.all_package_classes())
- detected_packages = spack.detection.by_executable(
- pkg_cls_to_check, path_hints=args.path)
- detected_packages.update(spack.detection.by_library(
- pkg_cls_to_check, path_hints=args.path))
+ detected_packages = spack.detection.by_executable(pkg_cls_to_check, path_hints=args.path)
+ detected_packages.update(spack.detection.by_library(pkg_cls_to_check, path_hints=args.path))
new_entries = spack.detection.update_configuration(
detected_packages, scope=args.scope, buildable=not args.not_buildable
)
if new_entries:
- path = spack.config.config.get_config_filename(args.scope, 'packages')
- msg = ('The following specs have been detected on this system '
- 'and added to {0}')
+ path = spack.config.config.get_config_filename(args.scope, "packages")
+ msg = "The following specs have been detected on this system " "and added to {0}"
tty.msg(msg.format(path))
spack.cmd.display_specs(new_entries)
else:
- tty.msg('No new external packages detected')
+ tty.msg("No new external packages detected")
def external_read_cray_manifest(args):
@@ -169,13 +171,13 @@ def external_read_cray_manifest(args):
manifest_file=args.file,
manifest_directory=args.directory,
dry_run=args.dry_run,
- fail_on_error=args.fail_on_error
+ fail_on_error=args.fail_on_error,
)
def _collect_and_consume_cray_manifest_files(
- manifest_file=None, manifest_directory=None, dry_run=False,
- fail_on_error=False):
+ manifest_file=None, manifest_directory=None, dry_run=False, fail_on_error=False
+):
manifest_files = []
if manifest_file:
@@ -187,24 +189,31 @@ def _collect_and_consume_cray_manifest_files(
if os.path.isdir(cray_manifest.default_path):
tty.debug(
- "Cray manifest path {0} exists: collecting all files to read."
- .format(cray_manifest.default_path))
+ "Cray manifest path {0} exists: collecting all files to read.".format(
+ cray_manifest.default_path
+ )
+ )
manifest_dirs.append(cray_manifest.default_path)
else:
- tty.debug("Default Cray manifest directory {0} does not exist."
- .format(cray_manifest.default_path))
+ tty.debug(
+ "Default Cray manifest directory {0} does not exist.".format(
+ cray_manifest.default_path
+ )
+ )
for directory in manifest_dirs:
for fname in os.listdir(directory):
- if fname.endswith('.json'):
+ if fname.endswith(".json"):
fpath = os.path.join(directory, fname)
tty.debug("Adding manifest file: {0}".format(fpath))
manifest_files.append(os.path.join(directory, fpath))
if not manifest_files:
raise NoManifestFileError(
- "--file/--directory not specified, and no manifest found at {0}"
- .format(cray_manifest.default_path))
+ "--file/--directory not specified, and no manifest found at {0}".format(
+ cray_manifest.default_path
+ )
+ )
for path in manifest_files:
tty.debug("Reading manifest file: " + path)
@@ -214,8 +223,7 @@ def _collect_and_consume_cray_manifest_files(
if fail_on_error:
raise
else:
- tty.warn("Failure reading manifest file: {0}"
- "\n\t{1}".format(path, str(e)))
+ tty.warn("Failure reading manifest file: {0}" "\n\t{1}".format(path, str(e)))
def external_list(args):
@@ -229,8 +237,11 @@ def external_list(args):
def external(parser, args):
- action = {'find': external_find, 'list': external_list,
- 'read-cray-manifest': external_read_cray_manifest}
+ action = {
+ "find": external_find,
+ "list": external_list,
+ "read-cray-manifest": external_read_cray_manifest,
+ }
action[args.external_command](args)
diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py
index 8eb9fffa07..cbf71f9789 100644
--- a/lib/spack/spack/cmd/fetch.py
+++ b/lib/spack/spack/cmd/fetch.py
@@ -17,7 +17,7 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
+ arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
subparser.add_argument(
"-m",
"--missing",
@@ -55,8 +55,7 @@ def fetch(parser, args):
specs = env.all_specs()
if specs == []:
tty.die(
- "No uninstalled specs in environment. Did you "
- "run `spack concretize` yet?"
+ "No uninstalled specs in environment. Did you " "run `spack concretize` yet?"
)
else:
tty.die("fetch requires at least one spec argument")
@@ -65,7 +64,7 @@ def fetch(parser, args):
spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
- spack.config.set('config:deprecated', True, scope='command_line')
+ spack.config.set("config:deprecated", True, scope="command_line")
for spec in specs:
if args.missing or args.dependencies:
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 87bd164932..aea5829975 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -27,92 +27,127 @@ level = "short"
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
- "--format", action="store", default=None,
- help="output specs with the specified format string")
+ "--format",
+ action="store",
+ default=None,
+ help="output specs with the specified format string",
+ )
format_group.add_argument(
- "--json", action="store_true", default=False,
- help="output specs as machine-readable json records")
+ "--json",
+ action="store_true",
+ default=False,
+ help="output specs as machine-readable json records",
+ )
+
+ subparser.add_argument(
+ "-d", "--deps", action="store_true", help="output dependencies along with found specs"
+ )
+
+ subparser.add_argument(
+ "-p", "--paths", action="store_true", help="show paths to package install directories"
+ )
+ subparser.add_argument(
+ "--groups",
+ action="store_true",
+ default=None,
+ dest="groups",
+ help="display specs in arch/compiler groups (default on)",
+ )
+ subparser.add_argument(
+ "--no-groups",
+ action="store_false",
+ default=None,
+ dest="groups",
+ help="do not group specs by arch/compiler",
+ )
- subparser.add_argument('-d', '--deps', action='store_true',
- help='output dependencies along with found specs')
+ arguments.add_common_arguments(subparser, ["long", "very_long", "tags"])
- subparser.add_argument('-p', '--paths', action='store_true',
- help='show paths to package install directories')
subparser.add_argument(
- '--groups', action='store_true', default=None, dest='groups',
- help='display specs in arch/compiler groups (default on)')
+ "-c",
+ "--show-concretized",
+ action="store_true",
+ help="show concretized specs in an environment",
+ )
+ subparser.add_argument(
+ "-f",
+ "--show-flags",
+ action="store_true",
+ dest="show_flags",
+ help="show spec compiler flags",
+ )
subparser.add_argument(
- '--no-groups', action='store_false', default=None, dest='groups',
- help='do not group specs by arch/compiler')
-
- arguments.add_common_arguments(
- subparser, ['long', 'very_long', 'tags'])
-
- subparser.add_argument('-c', '--show-concretized',
- action='store_true',
- help='show concretized specs in an environment')
- subparser.add_argument('-f', '--show-flags',
- action='store_true',
- dest='show_flags',
- help='show spec compiler flags')
- subparser.add_argument('--show-full-compiler',
- action='store_true',
- dest='show_full_compiler',
- help='show full compiler specs')
+ "--show-full-compiler",
+ action="store_true",
+ dest="show_full_compiler",
+ help="show full compiler specs",
+ )
implicit_explicit = subparser.add_mutually_exclusive_group()
implicit_explicit.add_argument(
- '-x', '--explicit',
- action='store_true',
- help='show only specs that were installed explicitly')
+ "-x",
+ "--explicit",
+ action="store_true",
+ help="show only specs that were installed explicitly",
+ )
implicit_explicit.add_argument(
- '-X', '--implicit',
- action='store_true',
- help='show only specs that were installed as dependencies')
+ "-X",
+ "--implicit",
+ action="store_true",
+ help="show only specs that were installed as dependencies",
+ )
subparser.add_argument(
- '-u', '--unknown',
- action='store_true',
- dest='unknown',
- help='show only specs Spack does not have a package for')
+ "-u",
+ "--unknown",
+ action="store_true",
+ dest="unknown",
+ help="show only specs Spack does not have a package for",
+ )
subparser.add_argument(
- '-m', '--missing',
- action='store_true',
- dest='missing',
- help='show missing dependencies as well as installed specs')
+ "-m",
+ "--missing",
+ action="store_true",
+ dest="missing",
+ help="show missing dependencies as well as installed specs",
+ )
subparser.add_argument(
- '-v', '--variants',
- action='store_true',
- dest='variants',
- help='show variants in output (can be long)')
+ "-v",
+ "--variants",
+ action="store_true",
+ dest="variants",
+ help="show variants in output (can be long)",
+ )
subparser.add_argument(
- '--loaded', action='store_true',
- help='show only packages loaded in the user environment')
- subparser.add_argument('-M', '--only-missing',
- action='store_true',
- dest='only_missing',
- help='show only missing dependencies')
+ "--loaded", action="store_true", help="show only packages loaded in the user environment"
+ )
subparser.add_argument(
- '--deprecated', action='store_true',
- help='show deprecated packages as well as installed specs')
+ "-M",
+ "--only-missing",
+ action="store_true",
+ dest="only_missing",
+ help="show only missing dependencies",
+ )
subparser.add_argument(
- '--only-deprecated', action='store_true',
- help='show only deprecated packages')
- subparser.add_argument('-N', '--namespace',
- action='store_true',
- help='show fully qualified package names')
-
+ "--deprecated",
+ action="store_true",
+ help="show deprecated packages as well as installed specs",
+ )
subparser.add_argument(
- '--start-date',
- help='earliest date of installation [YYYY-MM-DD]'
+ "--only-deprecated", action="store_true", help="show only deprecated packages"
)
subparser.add_argument(
- '--end-date', help='latest date of installation [YYYY-MM-DD]'
+ "-N", "--namespace", action="store_true", help="show fully qualified package names"
)
+
+ subparser.add_argument("--start-date", help="earliest date of installation [YYYY-MM-DD]")
+ subparser.add_argument("--end-date", help="latest date of installation [YYYY-MM-DD]")
subparser.add_argument(
- '-b', '--bootstrap', action='store_true',
- help='show software in the internal bootstrap store'
+ "-b",
+ "--bootstrap",
+ action="store_true",
+ help="show software in the internal bootstrap store",
)
- arguments.add_common_arguments(subparser, ['constraint'])
+ arguments.add_common_arguments(subparser, ["constraint"])
def query_arguments(args):
@@ -135,10 +170,10 @@ def query_arguments(args):
if args.implicit:
explicit = False
- q_args = {'installed': installed, 'known': known, "explicit": explicit}
+ q_args = {"installed": installed, "known": known, "explicit": explicit}
# Time window of installation
- for attribute in ('start_date', 'end_date'):
+ for attribute in ("start_date", "end_date"):
date = getattr(args, attribute)
if date:
q_args[attribute] = llnl.util.lang.pretty_string_to_date(date)
@@ -150,7 +185,7 @@ def setup_env(env):
"""Create a function for decorating specs when in an environment."""
def strip_build(seq):
- return set(s.copy(deps=('link', 'run')) for s in seq)
+ return set(s.copy(deps=("link", "run")) for s in seq)
added = set(strip_build(env.added_specs()))
roots = set(strip_build(env.roots()))
@@ -159,22 +194,22 @@ def setup_env(env):
def decorator(spec, fmt):
# add +/-/* to show added/removed/root specs
if any(spec.dag_hash() == r.dag_hash() for r in roots):
- return color.colorize('@*{%s}' % fmt)
+ return color.colorize("@*{%s}" % fmt)
elif spec in removed:
- return color.colorize('@K{%s}' % fmt)
+ return color.colorize("@K{%s}" % fmt)
else:
- return '%s' % fmt
+ return "%s" % fmt
return decorator, added, roots, removed
def display_env(env, args, decorator):
- tty.msg('In environment %s' % env.name)
+ tty.msg("In environment %s" % env.name)
if not env.user_specs:
- tty.msg('No root specs')
+ tty.msg("No root specs")
else:
- tty.msg('Root specs')
+ tty.msg("Root specs")
# Root specs cannot be displayed with prefixes, since those are not
# set for abstract specs. Same for hashes
@@ -186,18 +221,17 @@ def display_env(env, args, decorator):
cmd.display_specs(
env.user_specs,
root_args,
- decorator=lambda s, f: color.colorize('@*{%s}' % f),
+ decorator=lambda s, f: color.colorize("@*{%s}" % f),
namespace=True,
show_flags=True,
show_full_compiler=True,
- variants=True
+ variants=True,
)
print()
if args.show_concretized:
- tty.msg('Concretized roots')
- cmd.display_specs(
- env.specs_by_hash.values(), args, decorator=decorator)
+ tty.msg("Concretized roots")
+ cmd.display_specs(env.specs_by_hash.values(), args, decorator=decorator)
print()
@@ -205,7 +239,7 @@ def find(parser, args):
if args.bootstrap:
tty.warn(
"`spack find --bootstrap` is deprecated and will be removed in v0.19.",
- "Use `spack --bootstrap find` instead."
+ "Use `spack --bootstrap find` instead.",
)
if args.bootstrap:
@@ -234,7 +268,7 @@ def _find(parser, args):
# Exit early with an error code if no package matches the constraint
if not results and args.constraint:
msg = "No package matches the query: {0}"
- msg = msg.format(' '.join(args.constraint))
+ msg = msg.format(" ".join(args.constraint))
tty.msg(msg)
raise SystemExit(1)
@@ -258,5 +292,4 @@ def _find(parser, args):
pkg_type = "loaded" if args.loaded else "installed"
spack.cmd.print_how_many_pkgs(results, pkg_type)
- cmd.display_specs(
- results, args, decorator=decorator, all_headers=True)
+ cmd.display_specs(results, args, decorator=decorator, all_headers=True)
diff --git a/lib/spack/spack/cmd/gc.py b/lib/spack/spack/cmd/gc.py
index 7ccbbb9120..dd7ede2411 100644
--- a/lib/spack/spack/cmd/gc.py
+++ b/lib/spack/spack/cmd/gc.py
@@ -16,7 +16,7 @@ level = "short"
def setup_parser(subparser):
- spack.cmd.common.arguments.add_common_arguments(subparser, ['yes_to_all'])
+ spack.cmd.common.arguments.add_common_arguments(subparser, ["yes_to_all"])
def gc(parser, args):
@@ -31,8 +31,7 @@ def gc(parser, args):
env.concretize()
roots = [s for s in env.roots()]
all_hashes = set([s.dag_hash() for r in roots for s in r.traverse()])
- lr_hashes = set([s.dag_hash() for r in roots
- for s in r.traverse(deptype=('link', 'run'))])
+ lr_hashes = set([s.dag_hash() for r in roots for s in r.traverse(deptype=("link", "run"))])
maybe_to_be_removed = all_hashes - lr_hashes
specs = [s for s in specs if s.dag_hash() in maybe_to_be_removed]
diff --git a/lib/spack/spack/cmd/gpg.py b/lib/spack/spack/cmd/gpg.py
index 4c8b1ca6ee..35f10a680f 100644
--- a/lib/spack/spack/cmd/gpg.py
+++ b/lib/spack/spack/cmd/gpg.py
@@ -19,100 +19,109 @@ level = "long"
def setup_parser(subparser):
setup_parser.parser = subparser
- subparsers = subparser.add_subparsers(help='GPG sub-commands')
+ subparsers = subparser.add_subparsers(help="GPG sub-commands")
- verify = subparsers.add_parser('verify', help=gpg_verify.__doc__)
- arguments.add_common_arguments(verify, ['installed_spec'])
- verify.add_argument('signature', type=str, nargs='?',
- help='the signature file')
+ verify = subparsers.add_parser("verify", help=gpg_verify.__doc__)
+ arguments.add_common_arguments(verify, ["installed_spec"])
+ verify.add_argument("signature", type=str, nargs="?", help="the signature file")
verify.set_defaults(func=gpg_verify)
- trust = subparsers.add_parser('trust', help=gpg_trust.__doc__)
- trust.add_argument('keyfile', type=str,
- help='add a key to the trust store')
+ trust = subparsers.add_parser("trust", help=gpg_trust.__doc__)
+ trust.add_argument("keyfile", type=str, help="add a key to the trust store")
trust.set_defaults(func=gpg_trust)
- untrust = subparsers.add_parser('untrust', help=gpg_untrust.__doc__)
- untrust.add_argument('--signing', action='store_true',
- help='allow untrusting signing keys')
- untrust.add_argument('keys', nargs='+', type=str,
- help='remove keys from the trust store')
+ untrust = subparsers.add_parser("untrust", help=gpg_untrust.__doc__)
+ untrust.add_argument("--signing", action="store_true", help="allow untrusting signing keys")
+ untrust.add_argument("keys", nargs="+", type=str, help="remove keys from the trust store")
untrust.set_defaults(func=gpg_untrust)
- sign = subparsers.add_parser('sign', help=gpg_sign.__doc__)
- sign.add_argument('--output', metavar='DEST', type=str,
- help='the directory to place signatures')
- sign.add_argument('--key', metavar='KEY', type=str,
- help='the key to use for signing')
- sign.add_argument('--clearsign', action='store_true',
- help='if specified, create a clearsign signature')
- arguments.add_common_arguments(sign, ['installed_spec'])
+ sign = subparsers.add_parser("sign", help=gpg_sign.__doc__)
+ sign.add_argument(
+ "--output", metavar="DEST", type=str, help="the directory to place signatures"
+ )
+ sign.add_argument("--key", metavar="KEY", type=str, help="the key to use for signing")
+ sign.add_argument(
+ "--clearsign", action="store_true", help="if specified, create a clearsign signature"
+ )
+ arguments.add_common_arguments(sign, ["installed_spec"])
sign.set_defaults(func=gpg_sign)
- create = subparsers.add_parser('create', help=gpg_create.__doc__)
- create.add_argument('name', type=str,
- help='the name to use for the new key')
- create.add_argument('email', type=str,
- help='the email address to use for the new key')
- create.add_argument('--comment', metavar='COMMENT', type=str,
- default='GPG created for Spack',
- help='a description for the intended use of the key')
- create.add_argument('--expires', metavar='EXPIRATION', type=str,
- default='0', help='when the key should expire')
- create.add_argument('--export', metavar='DEST', type=str,
- help='export the public key to a file')
- create.add_argument('--export-secret', metavar="DEST", type=str,
- dest="secret",
- help='export the private key to a file.')
+ create = subparsers.add_parser("create", help=gpg_create.__doc__)
+ create.add_argument("name", type=str, help="the name to use for the new key")
+ create.add_argument("email", type=str, help="the email address to use for the new key")
+ create.add_argument(
+ "--comment",
+ metavar="COMMENT",
+ type=str,
+ default="GPG created for Spack",
+ help="a description for the intended use of the key",
+ )
+ create.add_argument(
+ "--expires", metavar="EXPIRATION", type=str, default="0", help="when the key should expire"
+ )
+ create.add_argument(
+ "--export", metavar="DEST", type=str, help="export the public key to a file"
+ )
+ create.add_argument(
+ "--export-secret",
+ metavar="DEST",
+ type=str,
+ dest="secret",
+ help="export the private key to a file.",
+ )
create.set_defaults(func=gpg_create)
- list = subparsers.add_parser('list', help=gpg_list.__doc__)
- list.add_argument('--trusted', action='store_true',
- default=True, help='list trusted keys')
- list.add_argument('--signing', action='store_true',
- help='list keys which may be used for signing')
+ list = subparsers.add_parser("list", help=gpg_list.__doc__)
+ list.add_argument("--trusted", action="store_true", default=True, help="list trusted keys")
+ list.add_argument(
+ "--signing", action="store_true", help="list keys which may be used for signing"
+ )
list.set_defaults(func=gpg_list)
- init = subparsers.add_parser('init', help=gpg_init.__doc__)
- init.add_argument('--from', metavar='DIR', type=str,
- dest='import_dir', help=argparse.SUPPRESS)
+ init = subparsers.add_parser("init", help=gpg_init.__doc__)
+ init.add_argument("--from", metavar="DIR", type=str, dest="import_dir", help=argparse.SUPPRESS)
init.set_defaults(func=gpg_init)
- export = subparsers.add_parser('export', help=gpg_export.__doc__)
- export.add_argument('location', type=str,
- help='where to export keys')
- export.add_argument('keys', nargs='*',
- help='the keys to export; '
- 'all public keys if unspecified')
- export.add_argument('--secret', action='store_true',
- help='export secret keys')
+ export = subparsers.add_parser("export", help=gpg_export.__doc__)
+ export.add_argument("location", type=str, help="where to export keys")
+ export.add_argument(
+ "keys", nargs="*", help="the keys to export; " "all public keys if unspecified"
+ )
+ export.add_argument("--secret", action="store_true", help="export secret keys")
export.set_defaults(func=gpg_export)
- publish = subparsers.add_parser('publish', help=gpg_publish.__doc__)
+ publish = subparsers.add_parser("publish", help=gpg_publish.__doc__)
output = publish.add_mutually_exclusive_group(required=True)
- output.add_argument('-d', '--directory',
- metavar='directory',
- type=str,
- help="local directory where " +
- "keys will be published.")
- output.add_argument('-m', '--mirror-name',
- metavar='mirror-name',
- type=str,
- help="name of the mirror where " +
- "keys will be published.")
- output.add_argument('--mirror-url',
- metavar='mirror-url',
- type=str,
- help="URL of the mirror where " +
- "keys will be published.")
- publish.add_argument('--rebuild-index', action='store_true',
- default=False, help=(
- "Regenerate buildcache key index "
- "after publishing key(s)"))
- publish.add_argument('keys', nargs='*',
- help='the keys to publish; '
- 'all public keys if unspecified')
+ output.add_argument(
+ "-d",
+ "--directory",
+ metavar="directory",
+ type=str,
+ help="local directory where " + "keys will be published.",
+ )
+ output.add_argument(
+ "-m",
+ "--mirror-name",
+ metavar="mirror-name",
+ type=str,
+ help="name of the mirror where " + "keys will be published.",
+ )
+ output.add_argument(
+ "--mirror-url",
+ metavar="mirror-url",
+ type=str,
+ help="URL of the mirror where " + "keys will be published.",
+ )
+ publish.add_argument(
+ "--rebuild-index",
+ action="store_true",
+ default=False,
+ help=("Regenerate buildcache key index " "after publishing key(s)"),
+ )
+ publish.add_argument(
+ "keys", nargs="*", help="the keys to publish; " "all public keys if unspecified"
+ )
publish.set_defaults(func=gpg_publish)
@@ -122,8 +131,9 @@ def gpg_create(args):
old_sec_keys = spack.util.gpg.signing_keys()
# Create the new key
- spack.util.gpg.create(name=args.name, email=args.email,
- comment=args.comment, expires=args.expires)
+ spack.util.gpg.create(
+ name=args.name, email=args.email, comment=args.comment, expires=args.expires
+ )
if args.export or args.secret:
new_sec_keys = set(spack.util.gpg.signing_keys())
new_keys = new_sec_keys.difference(old_sec_keys)
@@ -155,15 +165,14 @@ def gpg_sign(args):
if len(keys) == 1:
key = keys[0]
elif not keys:
- raise RuntimeError('no signing keys are available')
+ raise RuntimeError("no signing keys are available")
else:
- raise RuntimeError('multiple signing keys are available; '
- 'please choose one')
+ raise RuntimeError("multiple signing keys are available; " "please choose one")
output = args.output
if not output:
- output = args.spec[0] + '.asc'
+ output = args.spec[0] + ".asc"
# TODO: Support the package format Spack creates.
- spack.util.gpg.sign(key, ' '.join(args.spec), output, args.clearsign)
+ spack.util.gpg.sign(key, " ".join(args.spec), output, args.clearsign)
def gpg_trust(args):
@@ -179,7 +188,7 @@ def gpg_init(args):
for root, _, filenames in os.walk(import_dir):
for filename in filenames:
- if not filename.endswith('.key'):
+ if not filename.endswith(".key"):
continue
spack.util.gpg.trust(os.path.join(root, filename))
@@ -194,8 +203,8 @@ def gpg_verify(args):
# TODO: Support the package format Spack creates.
signature = args.signature
if signature is None:
- signature = args.spec[0] + '.asc'
- spack.util.gpg.verify(signature, ' '.join(args.spec))
+ signature = args.spec[0] + ".asc"
+ spack.util.gpg.verify(signature, " ".join(args.spec))
def gpg_publish(args):
@@ -210,7 +219,8 @@ def gpg_publish(args):
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
spack.binary_distribution.push_keys(
- mirror, keys=args.keys, regenerate_index=args.rebuild_index)
+ mirror, keys=args.keys, regenerate_index=args.rebuild_index
+ )
def gpg(parser, args):
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index 8a2bb0a104..a743f7258e 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -24,21 +24,27 @@ def setup_parser(subparser):
method = subparser.add_mutually_exclusive_group()
method.add_argument(
- '-a', '--ascii', action='store_true',
- help="draw graph as ascii to stdout (default)")
+ "-a", "--ascii", action="store_true", help="draw graph as ascii to stdout (default)"
+ )
method.add_argument(
- '-d', '--dot', action='store_true',
- help="generate graph in dot format and print to stdout")
+ "-d", "--dot", action="store_true", help="generate graph in dot format and print to stdout"
+ )
subparser.add_argument(
- '-s', '--static', action='store_true',
- help="graph static (possible) deps, don't concretize (implies --dot)")
+ "-s",
+ "--static",
+ action="store_true",
+ help="graph static (possible) deps, don't concretize (implies --dot)",
+ )
subparser.add_argument(
- '-i', '--installed', action='store_true',
- help="graph installed specs, or specs in the active env (implies --dot)")
+ "-i",
+ "--installed",
+ action="store_true",
+ help="graph installed specs, or specs in the active env (implies --dot)",
+ )
- arguments.add_common_arguments(subparser, ['deptype', 'specs'])
+ arguments.add_common_arguments(subparser, ["deptype", "specs"])
def graph(parser, args):
@@ -67,7 +73,7 @@ def graph(parser, args):
graph_dot(specs, static=args.static, deptype=args.deptype)
elif specs: # ascii is default: user doesn't need to provide it explicitly
- debug = spack.config.get('config:debug')
+ debug = spack.config.get("config:debug")
graph_ascii(specs[0], debug=debug, deptype=args.deptype)
for spec in specs[1:]:
print() # extra line bt/w independent graphs
diff --git a/lib/spack/spack/cmd/help.py b/lib/spack/spack/cmd/help.py
index 62d49d3605..b4258aa191 100644
--- a/lib/spack/spack/cmd/help.py
+++ b/lib/spack/spack/cmd/help.py
@@ -79,24 +79,35 @@ spec expression syntax:
guides = {
- 'spec': spec_guide,
+ "spec": spec_guide,
}
def setup_parser(subparser):
help_cmd_group = subparser.add_mutually_exclusive_group()
- help_cmd_group.add_argument('help_command', nargs='?', default=None,
- help='command to get help on')
+ help_cmd_group.add_argument(
+ "help_command", nargs="?", default=None, help="command to get help on"
+ )
help_all_group = subparser.add_mutually_exclusive_group()
help_all_group.add_argument(
- '-a', '--all', action='store_const', const='long', default='short',
- help='list all available commands and options')
+ "-a",
+ "--all",
+ action="store_const",
+ const="long",
+ default="short",
+ help="list all available commands and options",
+ )
help_spec_group = subparser.add_mutually_exclusive_group()
help_spec_group.add_argument(
- '--spec', action='store_const', dest='guide', const='spec',
- default=None, help='help on the package specification syntax')
+ "--spec",
+ action="store_const",
+ dest="guide",
+ const="spec",
+ default=None,
+ help="help on the package specification syntax",
+ )
def help(parser, args):
@@ -106,6 +117,6 @@ def help(parser, args):
if args.help_command:
parser.add_command(args.help_command)
- parser.parse_args([args.help_command, '-h'])
+ parser.parse_args([args.help_command, "-h"])
else:
sys.stdout.write(parser.format_help(level=args.all))
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 0f0b26c334..97417eed3a 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -20,12 +20,12 @@ import spack.repo
import spack.spec
from spack.package_base import has_test_method, preferred_version
-description = 'get detailed information on a particular package'
-section = 'basic'
-level = 'short'
+description = "get detailed information on a particular package"
+section = "basic"
+level = "short"
-header_color = '@*b'
-plain_format = '@.'
+header_color = "@*b"
+plain_format = "@."
def padder(str_list, extra=0):
@@ -35,31 +35,31 @@ def padder(str_list, extra=0):
def pad(string):
string = str(string)
padding = max(0, length - len(string))
- return string + (padding * ' ')
+ return string + (padding * " ")
+
return pad
def setup_parser(subparser):
subparser.add_argument(
- '-a', '--all', action='store_true', default=False,
- help="output all package information"
+ "-a", "--all", action="store_true", default=False, help="output all package information"
)
options = [
- ('--detectable', print_detectable.__doc__),
- ('--maintainers', print_maintainers.__doc__),
- ('--no-dependencies', 'do not ' + print_dependencies.__doc__),
- ('--no-variants', 'do not ' + print_variants.__doc__),
- ('--no-versions', 'do not ' + print_versions.__doc__),
- ('--phases', print_phases.__doc__),
- ('--tags', print_tags.__doc__),
- ('--tests', print_tests.__doc__),
- ('--virtuals', print_virtuals.__doc__),
+ ("--detectable", print_detectable.__doc__),
+ ("--maintainers", print_maintainers.__doc__),
+ ("--no-dependencies", "do not " + print_dependencies.__doc__),
+ ("--no-variants", "do not " + print_variants.__doc__),
+ ("--no-versions", "do not " + print_versions.__doc__),
+ ("--phases", print_phases.__doc__),
+ ("--tags", print_tags.__doc__),
+ ("--tests", print_tests.__doc__),
+ ("--virtuals", print_virtuals.__doc__),
]
for opt, help_comment in options:
- subparser.add_argument(opt, action='store_true', help=help_comment)
+ subparser.add_argument(opt, action="store_true", help=help_comment)
- arguments.add_common_arguments(subparser, ['package'])
+ arguments.add_common_arguments(subparser, ["package"])
def section_title(s):
@@ -77,10 +77,10 @@ def variant(s):
class VariantFormatter(object):
def __init__(self, variants):
self.variants = variants
- self.headers = ('Name [Default]', 'When', 'Allowed values', 'Description')
+ self.headers = ("Name [Default]", "When", "Allowed values", "Description")
# Formats
- fmt_name = '{0} [{1}]'
+ fmt_name = "{0} [{1}]"
# Initialize column widths with the length of the
# corresponding headers, as they cannot be shorter
@@ -94,14 +94,14 @@ class VariantFormatter(object):
len(fmt_name.format(k, self.default(v))), # Name [Default]
len(str(w)),
len(v.allowed_values), # Allowed values
- len(v.description) # Description
+ len(v.description), # Description
)
self.column_widths = (
max(self.column_widths[0], candidate_max_widths[0]),
max(self.column_widths[1], candidate_max_widths[1]),
max(self.column_widths[2], candidate_max_widths[2]),
- max(self.column_widths[3], candidate_max_widths[3])
+ max(self.column_widths[3], candidate_max_widths[3]),
)
# Don't let name or possible values be less than max widths
@@ -122,11 +122,11 @@ class VariantFormatter(object):
self.fmt = "%%-%ss%%-%ss%%-%ss%%s" % (
self.column_widths[0] + 4,
self.column_widths[1] + 4,
- self.column_widths[2] + 4
+ self.column_widths[2] + 4,
)
def default(self, v):
- s = 'on' if v.default is True else 'off'
+ s = "on" if v.default is True else "off"
if not isinstance(v.default, bool):
s = v.default
return s
@@ -134,91 +134,88 @@ class VariantFormatter(object):
@property
def lines(self):
if not self.variants:
- yield ' None'
+ yield " None"
else:
- yield ' ' + self.fmt % self.headers
+ yield " " + self.fmt % self.headers
underline = tuple([w * "=" for w in self.column_widths])
- yield ' ' + self.fmt % underline
- yield ''
+ yield " " + self.fmt % underline
+ yield ""
for k, e in sorted(self.variants.items()):
v, w = e
name = textwrap.wrap(
- '{0} [{1}]'.format(k, self.default(v)),
- width=self.column_widths[0]
+ "{0} [{1}]".format(k, self.default(v)), width=self.column_widths[0]
)
if len(w) == 1:
w = w[0]
if w == spack.spec.Spec():
- w = '--'
+ w = "--"
when = textwrap.wrap(str(w), width=self.column_widths[1])
- allowed = v.allowed_values.replace('True, False', 'on, off')
+ allowed = v.allowed_values.replace("True, False", "on, off")
allowed = textwrap.wrap(allowed, width=self.column_widths[2])
description = []
- for d_line in v.description.split('\n'):
- description += textwrap.wrap(
- d_line,
- width=self.column_widths[3]
- )
- for t in zip_longest(
- name, when, allowed, description, fillvalue=''
- ):
+ for d_line in v.description.split("\n"):
+ description += textwrap.wrap(d_line, width=self.column_widths[3])
+ for t in zip_longest(name, when, allowed, description, fillvalue=""):
yield " " + self.fmt % t
def print_dependencies(pkg):
"""output build, link, and run package dependencies"""
- for deptype in ('build', 'link', 'run'):
- color.cprint('')
- color.cprint(section_title('%s Dependencies:' % deptype.capitalize()))
+ for deptype in ("build", "link", "run"):
+ color.cprint("")
+ color.cprint(section_title("%s Dependencies:" % deptype.capitalize()))
deps = sorted(pkg.dependencies_of_type(deptype))
if deps:
colify(deps, indent=4)
else:
- color.cprint(' None')
+ color.cprint(" None")
def print_detectable(pkg):
"""output information on external detection"""
- color.cprint('')
- color.cprint(section_title('Externally Detectable: '))
+ color.cprint("")
+ color.cprint(section_title("Externally Detectable: "))
# If the package has an 'executables' of 'libraries' field, it
# can detect an installation
- if hasattr(pkg, 'executables') or hasattr(pkg, 'libraries'):
+ if hasattr(pkg, "executables") or hasattr(pkg, "libraries"):
find_attributes = []
- if hasattr(pkg, 'determine_version'):
- find_attributes.append('version')
+ if hasattr(pkg, "determine_version"):
+ find_attributes.append("version")
- if hasattr(pkg, 'determine_variants'):
- find_attributes.append('variants')
+ if hasattr(pkg, "determine_variants"):
+ find_attributes.append("variants")
# If the package does not define 'determine_version' nor
# 'determine_variants', then it must use some custom detection
# mechanism. In this case, just inform the user it's detectable somehow.
- color.cprint(' True{0}'.format(
- ' (' + ', '.join(find_attributes) + ')' if find_attributes else ''))
+ color.cprint(
+ " True{0}".format(
+ " (" + ", ".join(find_attributes) + ")" if find_attributes else ""
+ )
+ )
else:
- color.cprint(' False')
+ color.cprint(" False")
def print_maintainers(pkg):
"""output package maintainers"""
if len(pkg.maintainers) > 0:
- mnt = " ".join(['@@' + m for m in pkg.maintainers])
- color.cprint('')
- color.cprint(section_title('Maintainers: ') + mnt)
+ mnt = " ".join(["@@" + m for m in pkg.maintainers])
+ color.cprint("")
+ color.cprint(section_title("Maintainers: ") + mnt)
def print_phases(pkg):
"""output installation phases"""
- if hasattr(pkg, 'phases') and pkg.phases:
- color.cprint('')
- color.cprint(section_title('Installation Phases:'))
- phase_str = ''
+ if hasattr(pkg, "phases") and pkg.phases:
+ color.cprint("")
+ color.cprint(section_title("Installation Phases:"))
+ phase_str = ""
for phase in pkg.phases:
phase_str += " {0}".format(phase)
color.cprint(phase_str)
@@ -227,9 +224,9 @@ def print_phases(pkg):
def print_tags(pkg):
"""output package tags"""
- color.cprint('')
+ color.cprint("")
color.cprint(section_title("Tags: "))
- if hasattr(pkg, 'tags'):
+ if hasattr(pkg, "tags"):
tags = sorted(pkg.tags)
colify(tags, indent=4)
else:
@@ -246,11 +243,12 @@ def print_tests(pkg):
#
# So the presence of a callback in Spack does not necessarily correspond
# to the actual presence of built-time tests for a package.
- for callbacks, phase in [(pkg.build_time_test_callbacks, 'Build'),
- (pkg.install_time_test_callbacks, 'Install')]:
- color.cprint('')
- color.cprint(section_title('Available {0} Phase Test Methods:'
- .format(phase)))
+ for callbacks, phase in [
+ (pkg.build_time_test_callbacks, "Build"),
+ (pkg.install_time_test_callbacks, "Install"),
+ ]:
+ color.cprint("")
+ color.cprint(section_title("Available {0} Phase Test Methods:".format(phase)))
names = []
if callbacks:
for name in callbacks:
@@ -260,18 +258,21 @@ def print_tests(pkg):
if names:
colify(sorted(names), indent=4)
else:
- color.cprint(' None')
+ color.cprint(" None")
# PackageBase defines an empty install/smoke test but we want to know
# if it has been overridden and, therefore, assumed to be implemented.
- color.cprint('')
- color.cprint(section_title('Stand-Alone/Smoke Test Methods:'))
+ color.cprint("")
+ color.cprint(section_title("Stand-Alone/Smoke Test Methods:"))
names = []
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
if has_test_method(pkg_cls):
pkg_base = spack.package_base.PackageBase
- test_pkgs = [str(cls.test) for cls in inspect.getmro(pkg_cls) if
- issubclass(cls, pkg_base) and cls.test != pkg_base.test]
+ test_pkgs = [
+ str(cls.test)
+ for cls in inspect.getmro(pkg_cls)
+ if issubclass(cls, pkg_base) and cls.test != pkg_base.test
+ ]
test_pkgs = list(set(test_pkgs))
names.extend([(test.split()[1]).lower() for test in test_pkgs])
@@ -282,11 +283,11 @@ def print_tests(pkg):
# hack for compilers that are not dependencies (yet)
# TODO: this all eventually goes away
- c_names = ('gcc', 'intel', 'intel-parallel-studio', 'pgi')
+ c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
if pkg.name in c_names:
- v_names.extend(['c', 'cxx', 'fortran'])
- if pkg.spec.satisfies('llvm+clang'):
- v_names.extend(['c', 'cxx'])
+ v_names.extend(["c", "cxx", "fortran"])
+ if pkg.spec.satisfies("llvm+clang"):
+ v_names.extend(["c", "cxx"])
# TODO Refactor END
v_specs = [spack.spec.Spec(v_name) for v_name in v_names]
@@ -294,21 +295,21 @@ def print_tests(pkg):
try:
pkg_cls = spack.repo.path.get_pkg_class(v_spec.name)
if has_test_method(pkg_cls):
- names.append('{0}.test'.format(pkg_cls.name.lower()))
+ names.append("{0}.test".format(pkg_cls.name.lower()))
except spack.repo.UnknownPackageError:
pass
if names:
colify(sorted(names), indent=4)
else:
- color.cprint(' None')
+ color.cprint(" None")
def print_variants(pkg):
"""output variants"""
- color.cprint('')
- color.cprint(section_title('Variants:'))
+ color.cprint("")
+ color.cprint(section_title("Variants:"))
formatter = VariantFormatter(pkg.variants)
for line in formatter.lines:
@@ -318,26 +319,26 @@ def print_variants(pkg):
def print_versions(pkg):
"""output versions"""
- color.cprint('')
- color.cprint(section_title('Preferred version: '))
+ color.cprint("")
+ color.cprint(section_title("Preferred version: "))
if not pkg.versions:
- color.cprint(version(' None'))
- color.cprint('')
- color.cprint(section_title('Safe versions: '))
- color.cprint(version(' None'))
- color.cprint('')
- color.cprint(section_title('Deprecated versions: '))
- color.cprint(version(' None'))
+ color.cprint(version(" None"))
+ color.cprint("")
+ color.cprint(section_title("Safe versions: "))
+ color.cprint(version(" None"))
+ color.cprint("")
+ color.cprint(section_title("Deprecated versions: "))
+ color.cprint(version(" None"))
else:
pad = padder(pkg.versions, 4)
preferred = preferred_version(pkg)
- url = ''
+ url = ""
if pkg.has_code:
url = fs.for_package_version(pkg, preferred)
- line = version(' {0}'.format(pad(preferred))) + color.cescape(url)
+ line = version(" {0}".format(pad(preferred))) + color.cescape(url)
color.cprint(line)
safe = []
@@ -345,28 +346,28 @@ def print_versions(pkg):
for v in reversed(sorted(pkg.versions)):
if pkg.has_code:
url = fs.for_package_version(pkg, v)
- if pkg.versions[v].get('deprecated', False):
+ if pkg.versions[v].get("deprecated", False):
deprecated.append((v, url))
else:
safe.append((v, url))
- for title, vers in [('Safe', safe), ('Deprecated', deprecated)]:
- color.cprint('')
- color.cprint(section_title('{0} versions: '.format(title)))
+ for title, vers in [("Safe", safe), ("Deprecated", deprecated)]:
+ color.cprint("")
+ color.cprint(section_title("{0} versions: ".format(title)))
if not vers:
- color.cprint(version(' None'))
+ color.cprint(version(" None"))
continue
for v, url in vers:
- line = version(' {0}'.format(pad(v))) + color.cescape(url)
+ line = version(" {0}".format(pad(v))) + color.cescape(url)
color.cprint(line)
def print_virtuals(pkg):
"""output virtual packages"""
- color.cprint('')
- color.cprint(section_title('Virtual Packages: '))
+ color.cprint("")
+ color.cprint(section_title("Virtual Packages: "))
if pkg.provided:
inverse_map = {}
for spec, whens in pkg.provided.items():
@@ -376,7 +377,8 @@ def print_virtuals(pkg):
inverse_map[when].add(spec)
for when, specs in reversed(sorted(inverse_map.items())):
line = " %s provides %s" % (
- when.colorized(), ', '.join(s.colorized() for s in specs)
+ when.colorized(),
+ ", ".join(s.colorized() for s in specs),
)
print(line)
@@ -390,19 +392,17 @@ def info(parser, args):
pkg = pkg_cls(spec)
# Output core package information
- header = section_title(
- '{0}: '
- ).format(pkg.build_system_class) + pkg.name
+ header = section_title("{0}: ").format(pkg.build_system_class) + pkg.name
color.cprint(header)
- color.cprint('')
- color.cprint(section_title('Description:'))
+ color.cprint("")
+ color.cprint(section_title("Description:"))
if pkg.__doc__:
color.cprint(color.cescape(pkg.format_doc(indent=4)))
else:
color.cprint(" None")
- color.cprint(section_title('Homepage: ') + pkg.homepage)
+ color.cprint(section_title("Homepage: ") + pkg.homepage)
# Now output optional information in expected order
sections = [
@@ -420,4 +420,4 @@ def info(parser, args):
if print_it:
func(pkg)
- color.cprint('')
+ color.cprint("")
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index bda6850db0..c08eb2f3fd 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -31,144 +31,199 @@ def update_kwargs_from_args(args, kwargs):
"""Parse cli arguments and construct a dictionary
that will be passed to the package installer."""
- kwargs.update({
- 'fail_fast': args.fail_fast,
- 'keep_prefix': args.keep_prefix,
- 'keep_stage': args.keep_stage,
- 'restage': not args.dont_restage,
- 'install_source': args.install_source,
- 'verbose': args.verbose or args.install_verbose,
- 'fake': args.fake,
- 'dirty': args.dirty,
- 'use_cache': args.use_cache,
- 'cache_only': args.cache_only,
- 'include_build_deps': args.include_build_deps,
- 'explicit': True, # Always true for install command
- 'stop_at': args.until,
- 'unsigned': args.unsigned,
- })
-
- kwargs.update({
- 'install_deps': ('dependencies' in args.things_to_install),
- 'install_package': ('package' in args.things_to_install)
- })
-
- if hasattr(args, 'setup'):
+ kwargs.update(
+ {
+ "fail_fast": args.fail_fast,
+ "keep_prefix": args.keep_prefix,
+ "keep_stage": args.keep_stage,
+ "restage": not args.dont_restage,
+ "install_source": args.install_source,
+ "verbose": args.verbose or args.install_verbose,
+ "fake": args.fake,
+ "dirty": args.dirty,
+ "use_cache": args.use_cache,
+ "cache_only": args.cache_only,
+ "include_build_deps": args.include_build_deps,
+ "explicit": True, # Always true for install command
+ "stop_at": args.until,
+ "unsigned": args.unsigned,
+ }
+ )
+
+ kwargs.update(
+ {
+ "install_deps": ("dependencies" in args.things_to_install),
+ "install_package": ("package" in args.things_to_install),
+ }
+ )
+
+ if hasattr(args, "setup"):
setups = set()
for arglist_s in args.setup:
- for arg in [x.strip() for x in arglist_s.split(',')]:
+ for arg in [x.strip() for x in arglist_s.split(",")]:
setups.add(arg)
- kwargs['setup'] = setups
- tty.msg('Setup={0}'.format(kwargs['setup']))
+ kwargs["setup"] = setups
+ tty.msg("Setup={0}".format(kwargs["setup"]))
def setup_parser(subparser):
subparser.add_argument(
- '--only',
- default='package,dependencies',
- dest='things_to_install',
- choices=['package', 'dependencies'],
+ "--only",
+ default="package,dependencies",
+ dest="things_to_install",
+ choices=["package", "dependencies"],
help="""select the mode of installation.
the default is to install the package along with all its dependencies.
alternatively one can decide to install only the package or only
-the dependencies"""
+the dependencies""",
)
subparser.add_argument(
- '-u', '--until', type=str, dest='until', default=None,
- help="phase to stop after when installing (default None)")
- arguments.add_common_arguments(subparser, ['jobs'])
+ "-u",
+ "--until",
+ type=str,
+ dest="until",
+ default=None,
+ help="phase to stop after when installing (default None)",
+ )
+ arguments.add_common_arguments(subparser, ["jobs"])
subparser.add_argument(
- '--overwrite', action='store_true',
- help="reinstall an existing spec, even if it has dependents")
+ "--overwrite",
+ action="store_true",
+ help="reinstall an existing spec, even if it has dependents",
+ )
subparser.add_argument(
- '--fail-fast', action='store_true',
- help="stop all builds if any build fails (default is best effort)")
+ "--fail-fast",
+ action="store_true",
+ help="stop all builds if any build fails (default is best effort)",
+ )
subparser.add_argument(
- '--keep-prefix', action='store_true',
- help="don't remove the install prefix if installation fails")
+ "--keep-prefix",
+ action="store_true",
+ help="don't remove the install prefix if installation fails",
+ )
subparser.add_argument(
- '--keep-stage', action='store_true',
- help="don't remove the build stage if installation succeeds")
+ "--keep-stage",
+ action="store_true",
+ help="don't remove the build stage if installation succeeds",
+ )
subparser.add_argument(
- '--dont-restage', action='store_true',
- help="if a partial install is detected, don't delete prior state")
+ "--dont-restage",
+ action="store_true",
+ help="if a partial install is detected, don't delete prior state",
+ )
cache_group = subparser.add_mutually_exclusive_group()
cache_group.add_argument(
- '--use-cache', action='store_true', dest='use_cache', default=True,
- help="check for pre-built Spack packages in mirrors (default)")
+ "--use-cache",
+ action="store_true",
+ dest="use_cache",
+ default=True,
+ help="check for pre-built Spack packages in mirrors (default)",
+ )
cache_group.add_argument(
- '--no-cache', action='store_false', dest='use_cache', default=True,
- help="do not check for pre-built Spack packages in mirrors")
+ "--no-cache",
+ action="store_false",
+ dest="use_cache",
+ default=True,
+ help="do not check for pre-built Spack packages in mirrors",
+ )
cache_group.add_argument(
- '--cache-only', action='store_true', dest='cache_only', default=False,
- help="only install package from binary mirrors")
+ "--cache-only",
+ action="store_true",
+ dest="cache_only",
+ default=False,
+ help="only install package from binary mirrors",
+ )
subparser.add_argument(
- '--include-build-deps', action='store_true', dest='include_build_deps',
- default=False, help="""include build deps when installing from cache,
-which is useful for CI pipeline troubleshooting""")
+ "--include-build-deps",
+ action="store_true",
+ dest="include_build_deps",
+ default=False,
+ help="""include build deps when installing from cache,
+which is useful for CI pipeline troubleshooting""",
+ )
subparser.add_argument(
- '--no-check-signature', action='store_true',
- dest='unsigned', default=False,
- help="do not check signatures of binary packages")
- subparser.add_argument(
- '--show-log-on-error', action='store_true',
- help="print full build log to stderr if build fails")
+ "--no-check-signature",
+ action="store_true",
+ dest="unsigned",
+ default=False,
+ help="do not check signatures of binary packages",
+ )
subparser.add_argument(
- '--source', action='store_true', dest='install_source',
- help="install source files in prefix")
- arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
+ "--show-log-on-error",
+ action="store_true",
+ help="print full build log to stderr if build fails",
+ )
subparser.add_argument(
- '-v', '--verbose', action='store_true', dest='install_verbose',
- help="display verbose build output while installing")
+ "--source",
+ action="store_true",
+ dest="install_source",
+ help="install source files in prefix",
+ )
+ arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
subparser.add_argument(
- '--fake', action='store_true',
- help="fake install for debug purposes.")
+ "-v",
+ "--verbose",
+ action="store_true",
+ dest="install_verbose",
+ help="display verbose build output while installing",
+ )
+ subparser.add_argument("--fake", action="store_true", help="fake install for debug purposes.")
subparser.add_argument(
- '--only-concrete', action='store_true', default=False,
- help='(with environment) only install already concretized specs')
+ "--only-concrete",
+ action="store_true",
+ default=False,
+ help="(with environment) only install already concretized specs",
+ )
subparser.add_argument(
- '--no-add', action='store_true', default=False,
+ "--no-add",
+ action="store_true",
+ default=False,
help="""(with environment) partially install an environment, limiting
to concrete specs in the environment matching the arguments.
-Non-roots remain installed implicitly.""")
+Non-roots remain installed implicitly.""",
+ )
subparser.add_argument(
- '-f', '--file', action='append', default=[],
- dest='specfiles', metavar='SPEC_YAML_FILE',
- help="install from file. Read specs to install from .yaml files")
+ "-f",
+ "--file",
+ action="append",
+ default=[],
+ dest="specfiles",
+ metavar="SPEC_YAML_FILE",
+ help="install from file. Read specs to install from .yaml files",
+ )
cd_group = subparser.add_mutually_exclusive_group()
- arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
+ arguments.add_common_arguments(cd_group, ["clean", "dirty"])
testing = subparser.add_mutually_exclusive_group()
testing.add_argument(
- '--test', default=None,
- choices=['root', 'all'],
+ "--test",
+ default=None,
+ choices=["root", "all"],
help="""If 'root' is chosen, run package tests during
installation for top-level packages (but skip tests for dependencies).
if 'all' is chosen, run package tests during installation for all
-packages. If neither are chosen, don't run tests for any packages."""
+packages. If neither are chosen, don't run tests for any packages.""",
)
subparser.add_argument(
- '--log-format',
+ "--log-format",
default=None,
choices=spack.report.valid_formats,
- help="format to be used for log files"
+ help="format to be used for log files",
)
subparser.add_argument(
- '--log-file',
+ "--log-file",
default=None,
- help="filename for the log file. if not passed a default will be used"
+ help="filename for the log file. if not passed a default will be used",
)
subparser.add_argument(
- '--help-cdash',
- action='store_true',
- help="Show usage instructions for CDash reporting"
+ "--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
)
arguments.add_cdash_args(subparser, False)
- arguments.add_common_arguments(subparser, ['yes_to_all', 'spec'])
+ arguments.add_common_arguments(subparser, ["yes_to_all", "spec"])
spack.cmd.common.arguments.add_concretizer_args(subparser)
@@ -177,9 +232,9 @@ def default_log_file(spec):
"""Computes the default filename for the log file and creates
the corresponding directory if not present
"""
- fmt = 'test-{x.name}-{x.version}-{hash}.xml'
+ fmt = "test-{x.name}-{x.version}-{hash}.xml"
basename = fmt.format(x=spec, hash=spec.dag_hash())
- dirname = fs.os.path.join(spack.paths.reports_path, 'junit')
+ dirname = fs.os.path.join(spack.paths.reports_path, "junit")
fs.mkdirp(dirname)
return fs.os.path.join(dirname, basename)
@@ -214,49 +269,52 @@ def install_specs(cli_args, kwargs, specs):
# no matches or exactly one match.
if not m_spec:
- tty.debug('{0} matched nothing in the env'.format(
- abstract.name))
+ tty.debug("{0} matched nothing in the env".format(abstract.name))
# no matches in the env
if cli_args.no_add:
- msg = ('You asked to install {0} without adding it ' +
- '(--no-add), but no such spec exists in ' +
- 'environment').format(abstract.name)
+ msg = (
+ "You asked to install {0} without adding it "
+ + "(--no-add), but no such spec exists in "
+ + "environment"
+ ).format(abstract.name)
tty.die(msg)
else:
- tty.debug('adding {0} as a root'.format(abstract.name))
+ tty.debug("adding {0} as a root".format(abstract.name))
specs_to_add.append((abstract, concrete))
continue
- tty.debug('exactly one match for {0} in env -> {1}'.format(
- m_spec.name, m_spec.dag_hash()))
+ tty.debug(
+ "exactly one match for {0} in env -> {1}".format(
+ m_spec.name, m_spec.dag_hash()
+ )
+ )
if m_spec in env.roots() or cli_args.no_add:
# either the single match is a root spec (and --no-add is
# the default for roots) or --no-add was stated explicitly
- tty.debug('just install {0}'.format(m_spec.name))
+ tty.debug("just install {0}".format(m_spec.name))
specs_to_install.append(m_spec)
else:
# the single match is not a root (i.e. it's a dependency),
# and --no-add was not specified, so we'll add it as a
# root before installing
- tty.debug('add {0} then install it'.format(m_spec.name))
+ tty.debug("add {0} then install it".format(m_spec.name))
specs_to_add.append((abstract, concrete))
if specs_to_add:
- tty.debug('Adding the following specs as roots:')
+ tty.debug("Adding the following specs as roots:")
for abstract, concrete in specs_to_add:
- tty.debug(' {0}'.format(abstract.name))
+ tty.debug(" {0}".format(abstract.name))
with env.write_transaction():
- specs_to_install.append(
- env.concretize_and_add(abstract, concrete))
+ specs_to_install.append(env.concretize_and_add(abstract, concrete))
env.write(regenerate=False)
# Install the validated list of cli specs
if specs_to_install:
- tty.debug('Installing the following cli specs:')
+ tty.debug("Installing the following cli specs:")
for s in specs_to_install:
- tty.debug(' {0}'.format(s.name))
+ tty.debug(" {0}".format(s.name))
env.install_specs(specs_to_install, args=cli_args, **kwargs)
else:
installs = [(concrete.package, kwargs) for _, concrete in specs]
@@ -268,7 +326,7 @@ def install_specs(cli_args, kwargs, specs):
if not os.path.exists(e.pkg.build_log_path):
tty.error("'spack install' created no log.")
else:
- sys.stderr.write('Full build log:\n')
+ sys.stderr.write("Full build log:\n")
with open(e.pkg.build_log_path) as log:
shutil.copyfileobj(log, sys.stderr)
raise
@@ -281,24 +339,28 @@ def install(parser, args, **kwargs):
if args.help_cdash:
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
- epilog=textwrap.dedent('''\
+ epilog=textwrap.dedent(
+ """\
environment variables:
SPACK_CDASH_AUTH_TOKEN
authentication token to present to CDash
- '''))
+ """
+ ),
+ )
arguments.add_cdash_args(parser, True)
parser.print_help()
return
reporter = spack.report.collect_info(
- spack.package_base.PackageInstaller, '_install_task', args.log_format, args)
+ spack.package_base.PackageInstaller, "_install_task", args.log_format, args
+ )
if args.log_file:
reporter.filename = args.log_file
def get_tests(specs):
- if args.test == 'all':
+ if args.test == "all":
return True
- elif args.test == 'root':
+ elif args.test == "root":
return [spec.name for spec in specs]
else:
return False
@@ -313,7 +375,7 @@ environment variables:
env = ev.active_environment()
if env:
tests = get_tests(env.user_specs)
- kwargs['tests'] = tests
+ kwargs["tests"] = tests
if not args.only_concrete:
with env.write_transaction():
@@ -331,15 +393,14 @@ environment variables:
reporter.specs = specs
tty.msg("Installing environment {0}".format(env.name))
- with reporter('build'):
+ with reporter("build"):
env.install_all(**kwargs)
else:
- msg = '{0} environment has no specs to install'.format(env.name)
+ msg = "{0} environment has no specs to install".format(env.name)
tty.msg(msg)
- tty.debug("Regenerating environment views for {0}"
- .format(env.name))
+ tty.debug("Regenerating environment views for {0}".format(env.name))
with env.write_transaction():
# write env to trigger view generation and modulefile
# generation
@@ -347,7 +408,7 @@ environment variables:
return
else:
msg = "install requires a package argument or active environment"
- if 'spack.yaml' in os.listdir(os.getcwd()):
+ if "spack.yaml" in os.listdir(os.getcwd()):
# There's a spack.yaml file in the working dir, the user may
# have intended to use that
msg += "\n\n"
@@ -360,15 +421,15 @@ environment variables:
tty.die(msg)
if args.no_checksum:
- spack.config.set('config:checksum', False, scope='command_line')
+ spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
- spack.config.set('config:deprecated', True, scope='command_line')
+ spack.config.set("config:deprecated", True, scope="command_line")
# 1. Abstract specs from cli
abstract_specs = spack.cmd.parse_specs(args.spec)
tests = get_tests(abstract_specs)
- kwargs['tests'] = tests
+ kwargs["tests"] = tests
try:
specs = spack.cmd.parse_specs(args.spec, concretize=True, tests=tests)
@@ -379,8 +440,8 @@ environment variables:
# 2. Concrete specs from yaml files
for file in args.specfiles:
- with open(file, 'r') as f:
- if file.endswith('yaml') or file.endswith('yml'):
+ with open(file, "r") as f:
+ if file.endswith("yaml") or file.endswith("yml"):
s = spack.spec.Spec.from_yaml(f)
else:
s = spack.spec.Spec.from_json(f)
@@ -388,7 +449,7 @@ environment variables:
concretized = s.concretized()
if concretized.dag_hash() != s.dag_hash():
msg = 'skipped invalid file "{0}". '
- msg += 'The file does not contain a concrete spec.'
+ msg += "The file does not contain a concrete spec."
tty.warn(msg.format(file))
continue
@@ -396,43 +457,36 @@ environment variables:
specs.append(concretized)
if len(specs) == 0:
- tty.die('The `spack install` command requires a spec to install.')
+ tty.die("The `spack install` command requires a spec to install.")
if not args.log_file and not reporter.filename:
reporter.filename = default_log_file(specs[0])
reporter.specs = specs
- with reporter('build'):
+ with reporter("build"):
if args.overwrite:
- installed = list(filter(lambda x: x,
- map(spack.store.db.query_one, specs)))
+ installed = list(filter(lambda x: x, map(spack.store.db.query_one, specs)))
if not args.yes_to_all:
- display_args = {
- 'long': True,
- 'show_flags': True,
- 'variants': True
- }
+ display_args = {"long": True, "show_flags": True, "variants": True}
if installed:
- tty.msg('The following package specs will be '
- 'reinstalled:\n')
+ tty.msg("The following package specs will be " "reinstalled:\n")
spack.cmd.display_specs(installed, **display_args)
- not_installed = list(filter(lambda x: x not in installed,
- specs))
+ not_installed = list(filter(lambda x: x not in installed, specs))
if not_installed:
- tty.msg('The following package specs are not installed and'
- ' the --overwrite flag was given. The package spec'
- ' will be newly installed:\n')
+ tty.msg(
+ "The following package specs are not installed and"
+ " the --overwrite flag was given. The package spec"
+ " will be newly installed:\n"
+ )
spack.cmd.display_specs(not_installed, **display_args)
# We have some specs, so one of the above must have been true
- answer = tty.get_yes_or_no(
- 'Do you want to proceed?', default=False
- )
+ answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not answer:
- tty.die('Reinstallation aborted.')
+ tty.die("Reinstallation aborted.")
# overwrite all concrete explicit specs from this build
- kwargs['overwrite'] = [spec.dag_hash() for spec in specs]
+ kwargs["overwrite"] = [spec.dag_hash() for spec in specs]
install_specs(args, kwargs, zip(abstract_specs, specs))
diff --git a/lib/spack/spack/cmd/license.py b/lib/spack/spack/cmd/license.py
index fce12645d9..6595eb8e2e 100644
--- a/lib/spack/spack/cmd/license.py
+++ b/lib/spack/spack/cmd/license.py
@@ -15,12 +15,12 @@ import llnl.util.tty as tty
import spack.paths
from spack.util.executable import which
-description = 'list and check license headers on files in spack'
+description = "list and check license headers on files in spack"
section = "developer"
level = "long"
#: need the git command to check new files
-git = which('git')
+git = which("git")
#: SPDX license id must appear in the first <license_lines> lines of a file
license_lines = 7
@@ -31,48 +31,41 @@ apache2_mit_spdx = "(Apache-2.0 OR MIT)"
#: regular expressions for licensed files.
licensed_files = [
# spack scripts
- r'^bin/spack$',
- r'^bin/spack-python$',
-
+ r"^bin/spack$",
+ r"^bin/spack-python$",
# all of spack core except unparse
- r'^lib/spack/spack/(?!(test/)?util/unparse).*\.py$',
- r'^lib/spack/spack/.*\.sh$',
- r'^lib/spack/spack/.*\.lp$',
- r'^lib/spack/llnl/.*\.py$',
- r'^lib/spack/env/cc$',
-
+ r"^lib/spack/spack/(?!(test/)?util/unparse).*\.py$",
+ r"^lib/spack/spack/.*\.sh$",
+ r"^lib/spack/spack/.*\.lp$",
+ r"^lib/spack/llnl/.*\.py$",
+ r"^lib/spack/env/cc$",
# special case this test data file, which has a license header
- r'^lib/spack/spack/test/data/style/broken.dummy',
-
+ r"^lib/spack/spack/test/data/style/broken.dummy",
# rst files in documentation
- r'^lib/spack/docs/(?!command_index|spack|llnl).*\.rst$',
- r'^lib/spack/docs/.*\.py$',
- r'^lib/spack/docs/spack.yaml$',
-
+ r"^lib/spack/docs/(?!command_index|spack|llnl).*\.rst$",
+ r"^lib/spack/docs/.*\.py$",
+ r"^lib/spack/docs/spack.yaml$",
# 1 file in external
- r'^lib/spack/external/__init__.py$',
-
+ r"^lib/spack/external/__init__.py$",
# shell scripts in share
- r'^share/spack/.*\.sh$',
- r'^share/spack/.*\.bash$',
- r'^share/spack/.*\.csh$',
- r'^share/spack/.*\.fish$',
- r'^share/spack/qa/run-[^/]*$',
- r'^share/spack/bash/spack-completion.in$',
- r'^share/spack/templates/misc/coconcretization.pyt$',
-
+ r"^share/spack/.*\.sh$",
+ r"^share/spack/.*\.bash$",
+ r"^share/spack/.*\.csh$",
+ r"^share/spack/.*\.fish$",
+ r"^share/spack/qa/run-[^/]*$",
+ r"^share/spack/bash/spack-completion.in$",
+ r"^share/spack/templates/misc/coconcretization.pyt$",
# action workflows
- r'^.github/actions/.*\.py$',
-
+ r"^.github/actions/.*\.py$",
# all packages
- r'^var/spack/repos/.*/package.py$',
+ r"^var/spack/repos/.*/package.py$",
]
#: licensed files that can have LGPL language in them
#: so far, just this command -- so it can find LGPL things elsewhere
lgpl_exceptions = [
- r'lib/spack/spack/cmd/license.py',
- r'lib/spack/spack/test/cmd/license.py',
+ r"lib/spack/spack/cmd/license.py",
+ r"lib/spack/spack/test/cmd/license.py",
]
@@ -106,15 +99,14 @@ OLD_LICENSE, SPDX_MISMATCH, GENERAL_MISMATCH = range(1, 4)
#: Latest year that copyright applies. UPDATE THIS when bumping copyright.
latest_year = 2022
-strict_date = r'Copyright 2013-%s' % latest_year
+strict_date = r"Copyright 2013-%s" % latest_year
#: regexes for valid license lines at tops of files
license_line_regexes = [
- r'Copyright 2013-(%d|%d) Lawrence Livermore National Security, LLC and other' % (
- latest_year - 1, latest_year # allow a little leeway: current or last year
- ),
- r'Spack Project Developers\. See the top-level COPYRIGHT file for details.',
- r'SPDX-License-Identifier: \(Apache-2\.0 OR MIT\)'
+ r"Copyright 2013-(%d|%d) Lawrence Livermore National Security, LLC and other"
+ % (latest_year - 1, latest_year), # allow a little leeway: current or last year
+ r"Spack Project Developers\. See the top-level COPYRIGHT file for details.",
+ r"SPDX-License-Identifier: \(Apache-2\.0 OR MIT\)",
]
@@ -134,10 +126,11 @@ class LicenseError(object):
spdx_mismatch = self.error_counts[SPDX_MISMATCH]
old_license = self.error_counts[OLD_LICENSE]
return (
- '%d improperly licensed files' % (total),
- 'files with wrong SPDX-License-Identifier: %d' % spdx_mismatch,
- 'files with old license header: %d' % old_license,
- 'files not containing expected license: %d' % missing)
+ "%d improperly licensed files" % (total),
+ "files with wrong SPDX-License-Identifier: %d" % spdx_mismatch,
+ "files with old license header: %d" % old_license,
+ "files not containing expected license: %d" % missing,
+ )
def _check_license(lines, path):
@@ -145,7 +138,7 @@ def _check_license(lines, path):
found = []
for line in lines:
- line = re.sub(r'^[\s#\%\.]*', '', line)
+ line = re.sub(r"^[\s#\%\.]*", "", line)
line = line.rstrip()
for i, line_regex in enumerate(license_line_regexes):
if re.match(line_regex, line):
@@ -154,25 +147,26 @@ def _check_license(lines, path):
# out of date.
if i == 0:
if not re.search(strict_date, line):
- tty.debug('{0}: copyright date mismatch'.format(path))
+ tty.debug("{0}: copyright date mismatch".format(path))
found.append(i)
if len(found) == len(license_line_regexes) and found == list(sorted(found)):
return
def old_license(line, path):
- if re.search('This program is free software', line):
- print('{0}: has old LGPL license header'.format(path))
+ if re.search("This program is free software", line):
+ print("{0}: has old LGPL license header".format(path))
return OLD_LICENSE
# If the SPDX identifier is present, then there is a mismatch (since it
# did not match the above regex)
def wrong_spdx_identifier(line, path):
- m = re.search(r'SPDX-License-Identifier: ([^\n]*)', line)
+ m = re.search(r"SPDX-License-Identifier: ([^\n]*)", line)
if m and m.group(1) != apache2_mit_spdx:
- print('{0}: SPDX license identifier mismatch'
- '(expecting {1}, found {2})'
- .format(path, apache2_mit_spdx, m.group(1)))
+ print(
+ "{0}: SPDX license identifier mismatch"
+ "(expecting {1}, found {2})".format(path, apache2_mit_spdx, m.group(1))
+ )
return SPDX_MISMATCH
checks = [old_license, wrong_spdx_identifier]
@@ -183,8 +177,12 @@ def _check_license(lines, path):
if error:
return error
- print('{0}: the license header at the top of the file does not match the \
- expected format'.format(path))
+ print(
+ "{0}: the license header at the top of the file does not match the \
+ expected format".format(
+ path
+ )
+ )
return GENERAL_MISMATCH
@@ -205,18 +203,18 @@ def verify(args):
if license_errors.has_errors():
tty.die(*license_errors.error_messages())
else:
- tty.msg('No license issues found.')
+ tty.msg("No license issues found.")
def update_copyright_year(args):
"""update copyright for the current year in all licensed files"""
- llns_and_other = ' Lawrence Livermore National Security, LLC and other'
+ llns_and_other = " Lawrence Livermore National Security, LLC and other"
for filename in _licensed_files(args):
fs.filter_file(
- r'Copyright \d{4}-\d{4}' + llns_and_other,
+ r"Copyright \d{4}-\d{4}" + llns_and_other,
strict_date + llns_and_other,
- os.path.join(args.root, filename)
+ os.path.join(args.root, filename),
)
# also update MIT license file at root. Don't use llns_and_other; it uses
@@ -228,24 +226,27 @@ def update_copyright_year(args):
def setup_parser(subparser):
subparser.add_argument(
- '--root', action='store', default=spack.paths.prefix,
- help='scan a different prefix for license issues')
+ "--root",
+ action="store",
+ default=spack.paths.prefix,
+ help="scan a different prefix for license issues",
+ )
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='license_command')
- sp.add_parser('list-files', help=list_files.__doc__)
- sp.add_parser('verify', help=verify.__doc__)
- sp.add_parser('update-copyright-year', help=update_copyright_year.__doc__)
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="license_command")
+ sp.add_parser("list-files", help=list_files.__doc__)
+ sp.add_parser("verify", help=verify.__doc__)
+ sp.add_parser("update-copyright-year", help=update_copyright_year.__doc__)
def license(parser, args):
if not git:
- tty.die('spack license requires git in your environment')
+ tty.die("spack license requires git in your environment")
licensed_files[:] = [re.compile(regex) for regex in licensed_files]
commands = {
- 'list-files': list_files,
- 'verify': verify,
- 'update-copyright-year': update_copyright_year,
+ "list-files": list_files,
+ "verify": verify,
+ "update-copyright-year": update_copyright_year,
}
return commands[args.license_command](args)
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index 8e91831bd7..c51c0959c0 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -41,20 +41,37 @@ def formatter(func):
def setup_parser(subparser):
subparser.add_argument(
- 'filter', nargs=argparse.REMAINDER,
- help='optional case-insensitive glob patterns to filter results')
+ "filter",
+ nargs=argparse.REMAINDER,
+ help="optional case-insensitive glob patterns to filter results",
+ )
subparser.add_argument(
- '-d', '--search-description', action='store_true', default=False,
- help='filtering will also search the description for a match')
+ "-d",
+ "--search-description",
+ action="store_true",
+ default=False,
+ help="filtering will also search the description for a match",
+ )
subparser.add_argument(
- '--format', default='name_only', choices=formatters,
- help='format to be used to print the output [default: name_only]')
+ "--format",
+ default="name_only",
+ choices=formatters,
+ help="format to be used to print the output [default: name_only]",
+ )
subparser.add_argument(
- '--update', metavar='FILE', default=None, action='store',
- help='write output to the specified file, if any package is newer')
+ "--update",
+ metavar="FILE",
+ default=None,
+ action="store",
+ help="write output to the specified file, if any package is newer",
+ )
subparser.add_argument(
- '-v', '--virtuals', action='store_true', default=False,
- help='include virtual packages in list')
+ "-v",
+ "--virtuals",
+ action="store_true",
+ default=False,
+ help="include virtual packages in list",
+ )
def filter_by_name(pkgs, args):
@@ -71,8 +88,8 @@ def filter_by_name(pkgs, args):
if args.filter:
res = []
for f in args.filter:
- if '*' not in f and '?' not in f:
- r = fnmatch.translate('*' + f + '*')
+ if "*" not in f and "?" not in f:
+ r = fnmatch.translate("*" + f + "*")
else:
r = fnmatch.translate(f)
@@ -80,6 +97,7 @@ def filter_by_name(pkgs, args):
res.append(rc)
if args.search_description:
+
def match(p, f):
if f.match(p):
return True
@@ -88,9 +106,12 @@ def filter_by_name(pkgs, args):
if pkg_cls.__doc__:
return f.match(pkg_cls.__doc__)
return False
+
else:
+
def match(p, f):
return f.match(p)
+
pkgs = [p for p in pkgs if any(match(p, f) for f in res)]
return sorted(pkgs, key=lambda s: s.lower())
@@ -106,7 +127,7 @@ def name_only(pkgs, out):
def github_url(pkg):
"""Link to a package file on github."""
- url = 'https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py'
+ url = "https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py"
return url.format(pkg.name)
@@ -135,30 +156,33 @@ def version_json(pkg_names, out):
"""Print all packages with their latest versions."""
pkg_classes = [spack.repo.path.get_pkg_class(name) for name in pkg_names]
- out.write('[\n')
+ out.write("[\n")
# output name and latest version for each package
- pkg_latest = ",\n".join([
- ' {{"name": "{0}",\n'
- ' "latest_version": "{1}",\n'
- ' "versions": {2},\n'
- ' "homepage": "{3}",\n'
- ' "file": "{4}",\n'
- ' "maintainers": {5},\n'
- ' "dependencies": {6}'
- '}}'.format(
- pkg_cls.name,
- VersionList(pkg_cls.versions).preferred(),
- json.dumps([str(v) for v in reversed(sorted(pkg_cls.versions))]),
- pkg_cls.homepage,
- github_url(pkg_cls),
- json.dumps(pkg_cls.maintainers),
- json.dumps(get_dependencies(pkg_cls))
- ) for pkg_cls in pkg_classes
- ])
+ pkg_latest = ",\n".join(
+ [
+ ' {{"name": "{0}",\n'
+ ' "latest_version": "{1}",\n'
+ ' "versions": {2},\n'
+ ' "homepage": "{3}",\n'
+ ' "file": "{4}",\n'
+ ' "maintainers": {5},\n'
+ ' "dependencies": {6}'
+ "}}".format(
+ pkg_cls.name,
+ VersionList(pkg_cls.versions).preferred(),
+ json.dumps([str(v) for v in reversed(sorted(pkg_cls.versions))]),
+ pkg_cls.homepage,
+ github_url(pkg_cls),
+ json.dumps(pkg_cls.maintainers),
+ json.dumps(get_dependencies(pkg_cls)),
+ )
+ for pkg_cls in pkg_classes
+ ]
+ )
out.write(pkg_latest)
# important: no trailing comma in JSON arrays
- out.write('\n]\n')
+ out.write("\n]\n")
@formatter
@@ -181,31 +205,34 @@ def html(pkg_names, out):
def head(n, span_id, title, anchor=None):
if anchor is None:
anchor = title
- out.write(('<span id="id%d"></span>'
- '<h1>%s<a class="headerlink" href="#%s" '
- 'title="Permalink to this headline">&para;</a>'
- '</h1>\n') % (span_id, title, anchor))
+ out.write(
+ (
+ '<span id="id%d"></span>'
+ '<h1>%s<a class="headerlink" href="#%s" '
+ 'title="Permalink to this headline">&para;</a>'
+ "</h1>\n"
+ )
+ % (span_id, title, anchor)
+ )
# Start with the number of packages, skipping the title and intro
# blurb, which we maintain in the RST file.
- out.write('<p>\n')
- out.write('Spack currently has %d mainline packages:\n' % len(pkg_classes))
- out.write('</p>\n')
+ out.write("<p>\n")
+ out.write("Spack currently has %d mainline packages:\n" % len(pkg_classes))
+ out.write("</p>\n")
# Table of links to all packages
out.write('<table border="1" class="docutils">\n')
out.write('<tbody valign="top">\n')
for i, row in enumerate(rows_for_ncols(pkg_names, 3)):
- out.write('<tr class="row-odd">\n' if i % 2 == 0 else
- '<tr class="row-even">\n')
+ out.write('<tr class="row-odd">\n' if i % 2 == 0 else '<tr class="row-even">\n')
for name in row:
- out.write('<td>\n')
- out.write('<a class="reference internal" href="#%s">%s</a></td>\n'
- % (name, name))
- out.write('</td>\n')
- out.write('</tr>\n')
- out.write('</tbody>\n')
- out.write('</table>\n')
+ out.write("<td>\n")
+ out.write('<a class="reference internal" href="#%s">%s</a></td>\n' % (name, name))
+ out.write("</td>\n")
+ out.write("</tr>\n")
+ out.write("</tbody>\n")
+ out.write("</table>\n")
out.write('<hr class="docutils"/>\n')
# Output some text for each package.
@@ -216,53 +243,58 @@ def html(pkg_names, out):
out.write('<dl class="docutils">\n')
- out.write('<dt>Homepage:</dt>\n')
+ out.write("<dt>Homepage:</dt>\n")
out.write('<dd><ul class="first last simple">\n')
if pkg_cls.homepage:
- out.write(('<li>'
- '<a class="reference external" href="%s">%s</a>'
- '</li>\n') % (pkg_cls.homepage, escape(pkg_cls.homepage, True)))
+ out.write(
+ ("<li>" '<a class="reference external" href="%s">%s</a>' "</li>\n")
+ % (pkg_cls.homepage, escape(pkg_cls.homepage, True))
+ )
else:
- out.write('No homepage\n')
- out.write('</ul></dd>\n')
+ out.write("No homepage\n")
+ out.write("</ul></dd>\n")
- out.write('<dt>Spack package:</dt>\n')
+ out.write("<dt>Spack package:</dt>\n")
out.write('<dd><ul class="first last simple">\n')
- out.write(('<li>'
- '<a class="reference external" href="%s">%s/package.py</a>'
- '</li>\n') % (github_url(pkg_cls), pkg_cls.name))
- out.write('</ul></dd>\n')
+ out.write(
+ ("<li>" '<a class="reference external" href="%s">%s/package.py</a>' "</li>\n")
+ % (github_url(pkg_cls), pkg_cls.name)
+ )
+ out.write("</ul></dd>\n")
if pkg_cls.versions:
- out.write('<dt>Versions:</dt>\n')
- out.write('<dd>\n')
- out.write(', '.join(
- str(v) for v in reversed(sorted(pkg_cls.versions))))
- out.write('\n')
- out.write('</dd>\n')
+ out.write("<dt>Versions:</dt>\n")
+ out.write("<dd>\n")
+ out.write(", ".join(str(v) for v in reversed(sorted(pkg_cls.versions))))
+ out.write("\n")
+ out.write("</dd>\n")
for deptype in spack.dependency.all_deptypes:
deps = pkg_cls.dependencies_of_type(deptype)
if deps:
- out.write('<dt>%s Dependencies:</dt>\n' % deptype.capitalize())
- out.write('<dd>\n')
- out.write(', '.join(
- d if d not in pkg_names else
- '<a class="reference internal" href="#%s">%s</a>' % (d, d)
- for d in deps))
- out.write('\n')
- out.write('</dd>\n')
-
- out.write('<dt>Description:</dt>\n')
- out.write('<dd>\n')
+ out.write("<dt>%s Dependencies:</dt>\n" % deptype.capitalize())
+ out.write("<dd>\n")
+ out.write(
+ ", ".join(
+ d
+ if d not in pkg_names
+ else '<a class="reference internal" href="#%s">%s</a>' % (d, d)
+ for d in deps
+ )
+ )
+ out.write("\n")
+ out.write("</dd>\n")
+
+ out.write("<dt>Description:</dt>\n")
+ out.write("<dd>\n")
out.write(escape(pkg_cls.format_doc(indent=2), True))
- out.write('\n')
- out.write('</dd>\n')
- out.write('</dl>\n')
+ out.write("\n")
+ out.write("</dd>\n")
+ out.write("</dl>\n")
out.write('<hr class="docutils"/>\n')
- out.write('</div>\n')
+ out.write("</div>\n")
def list(parser, args):
@@ -278,11 +310,11 @@ def list(parser, args):
# change output stream if user asked for update
if os.path.exists(args.update):
if os.path.getmtime(args.update) > spack.repo.path.last_mtime():
- tty.msg('File is up to date: %s' % args.update)
+ tty.msg("File is up to date: %s" % args.update)
return
- tty.msg('Updating file: %s' % args.update)
- with open(args.update, 'w') as f:
+ tty.msg("Updating file: %s" % args.update)
+ with open(args.update, "w") as f:
formatter(sorted_packages, f)
else:
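
The list.py hunk above filters package names with shell-style globs: patterns without "*" or "?" are wrapped as *pattern*, then converted to regular expressions with fnmatch.translate. A small runnable sketch of that technique (the case-insensitive compile is an assumption consistent with the command's help text; the compile call itself is not shown in this excerpt):

    import fnmatch
    import re

    def filter_names(names, patterns):
        """Return names matching any glob pattern, case-insensitively."""
        compiled = []
        for pat in patterns:
            if "*" not in pat and "?" not in pat:
                pat = "*" + pat + "*"  # bare substrings match anywhere in the name
            # fnmatch.translate turns a shell glob into a regex source string
            compiled.append(re.compile(fnmatch.translate(pat), re.IGNORECASE))
        return sorted((n for n in names if any(r.match(n) for r in compiled)), key=str.lower)

    print(filter_names(["zlib", "py-numpy", "openmpi"], ["PY"]))  # ['py-numpy']
    print(filter_names(["zlib", "py-numpy", "openmpi"], ["z*"]))  # ['zlib']
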
diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py
index 64ae08930c..11ec2c5e25 100644
--- a/lib/spack/spack/cmd/load.py
+++ b/lib/spack/spack/cmd/load.py
@@ -20,47 +20,63 @@ level = "short"
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
- message with -h. """
- arguments.add_common_arguments(subparser, ['constraint'])
+ message with -h."""
+ arguments.add_common_arguments(subparser, ["constraint"])
shells = subparser.add_mutually_exclusive_group()
shells.add_argument(
- '--sh', action='store_const', dest='shell', const='sh',
- help="print sh commands to load the package")
+ "--sh",
+ action="store_const",
+ dest="shell",
+ const="sh",
+ help="print sh commands to load the package",
+ )
shells.add_argument(
- '--csh', action='store_const', dest='shell', const='csh',
- help="print csh commands to load the package")
+ "--csh",
+ action="store_const",
+ dest="shell",
+ const="csh",
+ help="print csh commands to load the package",
+ )
shells.add_argument(
- '--fish', action='store_const', dest='shell', const='fish',
- help="print fish commands to load the package")
+ "--fish",
+ action="store_const",
+ dest="shell",
+ const="fish",
+ help="print fish commands to load the package",
+ )
shells.add_argument(
- '--bat', action='store_const', dest='shell', const='bat',
- help="print bat commands to load the package")
+ "--bat",
+ action="store_const",
+ dest="shell",
+ const="bat",
+ help="print bat commands to load the package",
+ )
subparser.add_argument(
- '--first',
- action='store_true',
+ "--first",
+ action="store_true",
default=False,
- dest='load_first',
- help="load the first match if multiple packages match the spec"
+ dest="load_first",
+ help="load the first match if multiple packages match the spec",
)
subparser.add_argument(
- '--only',
- default='package,dependencies',
- dest='things_to_load',
- choices=['package', 'dependencies'],
+ "--only",
+ default="package,dependencies",
+ dest="things_to_load",
+ choices=["package", "dependencies"],
help="""select whether to load the package and its dependencies
the default is to load the package and all dependencies
alternatively one can decide to load only the package or only
-the dependencies"""
+the dependencies""",
)
subparser.add_argument(
- '--list',
- action='store_true',
+ "--list",
+ action="store_true",
default=False,
- help="show loaded packages: same as `spack find --loaded`"
+ help="show loaded packages: same as `spack find --loaded`",
)
@@ -74,11 +90,13 @@ def load(parser, args):
spack.cmd.display_specs(results)
return
- specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
- for spec in spack.cmd.parse_specs(args.constraint)]
+ specs = [
+ spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
+ for spec in spack.cmd.parse_specs(args.constraint)
+ ]
if not args.shell:
- specs_str = ' '.join(args.constraint) or "SPECS"
+ specs_str = " ".join(args.constraint) or "SPECS"
spack.cmd.common.shell_init_instructions(
"spack load",
" eval `spack load {sh_arg} %s`" % specs_str,
@@ -86,11 +104,11 @@ def load(parser, args):
return 1
with spack.store.db.read_transaction():
- if 'dependencies' in args.things_to_load:
- include_roots = 'package' in args.things_to_load
- specs = [dep for spec in specs
- for dep in
- spec.traverse(root=include_roots, order='post')]
+ if "dependencies" in args.things_to_load:
+ include_roots = "package" in args.things_to_load
+ specs = [
+ dep for spec in specs for dep in spec.traverse(root=include_roots, order="post")
+ ]
env_mod = spack.util.environment.EnvironmentModifications()
for spec in specs:
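
The load.py hunk above wires several shell flags into one destination: --sh, --csh, --fish, and --bat each use action="store_const" with dest="shell" inside a mutually exclusive group, so at most one shell can be selected and the handler only checks args.shell. A minimal sketch of that argparse idiom (the flag set here is illustrative):

    import argparse

    parser = argparse.ArgumentParser(prog="load-demo")
    shells = parser.add_mutually_exclusive_group()
    # Every flag writes a different constant into the same attribute.
    shells.add_argument("--sh", action="store_const", dest="shell", const="sh")
    shells.add_argument("--csh", action="store_const", dest="shell", const="csh")
    shells.add_argument("--fish", action="store_const", dest="shell", const="fish")

    print(parser.parse_args(["--fish"]).shell)  # fish
    print(parser.parse_args([]).shell)          # None -> fall back to printing setup instructions
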
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index e15bcbdcab..b5eeb92b06 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -26,40 +26,56 @@ def setup_parser(subparser):
directories = subparser.add_mutually_exclusive_group()
directories.add_argument(
- '-m', '--module-dir', action='store_true',
- help="spack python module directory")
+ "-m", "--module-dir", action="store_true", help="spack python module directory"
+ )
directories.add_argument(
- '-r', '--spack-root', action='store_true',
- help="spack installation root")
+ "-r", "--spack-root", action="store_true", help="spack installation root"
+ )
directories.add_argument(
- '-i', '--install-dir', action='store_true',
- help="install prefix for spec (spec need not be installed)")
+ "-i",
+ "--install-dir",
+ action="store_true",
+ help="install prefix for spec (spec need not be installed)",
+ )
directories.add_argument(
- '-p', '--package-dir', action='store_true',
- help="directory enclosing a spec's package.py file")
+ "-p",
+ "--package-dir",
+ action="store_true",
+ help="directory enclosing a spec's package.py file",
+ )
directories.add_argument(
- '-P', '--packages', action='store_true',
- help="top-level packages directory for Spack")
+ "-P", "--packages", action="store_true", help="top-level packages directory for Spack"
+ )
directories.add_argument(
- '-s', '--stage-dir', action='store_true',
- help="stage directory for a spec")
+ "-s", "--stage-dir", action="store_true", help="stage directory for a spec"
+ )
directories.add_argument(
- '-S', '--stages', action='store_true',
- help="top level stage directory")
+ "-S", "--stages", action="store_true", help="top level stage directory"
+ )
directories.add_argument(
- '--source-dir', action='store_true',
- help="source directory for a spec "
- "(requires it to be staged first)")
+ "--source-dir",
+ action="store_true",
+ help="source directory for a spec " "(requires it to be staged first)",
+ )
directories.add_argument(
- '-b', '--build-dir', action='store_true',
- help="build directory for a spec "
- "(requires it to be staged first)")
+ "-b",
+ "--build-dir",
+ action="store_true",
+ help="build directory for a spec " "(requires it to be staged first)",
+ )
directories.add_argument(
- '-e', '--env', action='store', dest='location_env', nargs='?', metavar="name",
- default=False, help="location of the named or current environment")
+ "-e",
+ "--env",
+ action="store",
+ dest="location_env",
+ nargs="?",
+ metavar="name",
+ default=False,
+ help="location of the named or current environment",
+ )
- arguments.add_common_arguments(subparser, ['spec'])
+ arguments.add_common_arguments(subparser, ["spec"])
def location(parser, args):
@@ -75,7 +91,7 @@ def location(parser, args):
if args.location_env is not False:
if args.location_env is None:
# Get current environment path
- spack.cmd.require_active_env('location -e')
+ spack.cmd.require_active_env("location -e")
path = ev.active_environment().path
else:
# Get named environment path
@@ -125,13 +141,10 @@ def location(parser, args):
if args.build_dir:
# Out of source builds have build_directory defined
- if hasattr(pkg, 'build_directory'):
+ if hasattr(pkg, "build_directory"):
# build_directory can be either absolute or relative to the stage path
# in either case os.path.join makes it absolute
- print(os.path.normpath(os.path.join(
- pkg.stage.path,
- pkg.build_directory
- )))
+ print(os.path.normpath(os.path.join(pkg.stage.path, pkg.build_directory)))
return
# Otherwise assume in-source builds
@@ -140,9 +153,10 @@ def location(parser, args):
# source dir remains, which requires the spec to be staged
if not pkg.stage.expanded:
- tty.die("Source directory does not exist yet. "
- "Run this to create it:",
- "spack stage " + " ".join(args.spec))
+ tty.die(
+ "Source directory does not exist yet. " "Run this to create it:",
+ "spack stage " + " ".join(args.spec),
+ )
# Default to source dir.
print(pkg.stage.source_path)
diff --git a/lib/spack/spack/cmd/log_parse.py b/lib/spack/spack/cmd/log_parse.py
index 27aacdb938..eca864051a 100644
--- a/lib/spack/spack/cmd/log_parse.py
+++ b/lib/spack/spack/cmd/log_parse.py
@@ -13,52 +13,71 @@ description = "filter errors and warnings from build logs"
section = "build"
level = "long"
-event_types = ('errors', 'warnings')
+event_types = ("errors", "warnings")
def setup_parser(subparser):
subparser.add_argument(
- '--show', action='store', default='errors',
- help="comma-separated list of what to show; options: errors, warnings")
+ "--show",
+ action="store",
+ default="errors",
+ help="comma-separated list of what to show; options: errors, warnings",
+ )
subparser.add_argument(
- '-c', '--context', action='store', type=int, default=3,
- help="lines of context to show around lines of interest")
+ "-c",
+ "--context",
+ action="store",
+ type=int,
+ default=3,
+ help="lines of context to show around lines of interest",
+ )
subparser.add_argument(
- '-p', '--profile', action='store_true',
- help="print out a profile of time spent in regexes during parse")
+ "-p",
+ "--profile",
+ action="store_true",
+ help="print out a profile of time spent in regexes during parse",
+ )
subparser.add_argument(
- '-w', '--width', action='store', type=int, default=None,
- help="wrap width: auto-size to terminal by default; 0 for no wrap")
+ "-w",
+ "--width",
+ action="store",
+ type=int,
+ default=None,
+ help="wrap width: auto-size to terminal by default; 0 for no wrap",
+ )
subparser.add_argument(
- '-j', '--jobs', action='store', type=int, default=None,
+ "-j",
+ "--jobs",
+ action="store",
+ type=int,
+ default=None,
help="number of jobs to parse log file (default: 1 for short logs, "
- "ncpus for long logs)")
+ "ncpus for long logs)",
+ )
- subparser.add_argument(
- 'file', help="a log file containing build output, or - for stdin")
+ subparser.add_argument("file", help="a log file containing build output, or - for stdin")
def log_parse(parser, args):
input = args.file
- if args.file == '-':
+ if args.file == "-":
input = sys.stdin
- errors, warnings = parse_log_events(
- input, args.context, args.jobs, args.profile)
+ errors, warnings = parse_log_events(input, args.context, args.jobs, args.profile)
if args.profile:
return
- types = [s.strip() for s in args.show.split(',')]
+ types = [s.strip() for s in args.show.split(",")]
for e in types:
if e not in event_types:
- tty.die('Invalid event type: %s' % e)
+ tty.die("Invalid event type: %s" % e)
events = []
- if 'errors' in types:
+ if "errors" in types:
events.extend(errors)
- print('%d errors' % len(errors))
- if 'warnings' in types:
+ print("%d errors" % len(errors))
+ if "warnings" in types:
events.extend(warnings)
- print('%d warnings' % len(warnings))
+ print("%d warnings" % len(warnings))
print(make_log_context(events, args.width))
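
The log_parse.py hunk above follows the common Unix convention that a file argument of "-" means standard input. A short sketch of that convention, under the assumption that the caller owns closing any real file it opens (the helper name is hypothetical):

    import sys

    def open_input(path):
        """Return a readable stream; '-' selects standard input."""
        return sys.stdin if path == "-" else open(path)

    if __name__ == "__main__":
        stream = open_input(sys.argv[1] if len(sys.argv) > 1 else "-")
        print(sum(1 for _ in stream), "lines read")
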
diff --git a/lib/spack/spack/cmd/maintainers.py b/lib/spack/spack/cmd/maintainers.py
index 93ed48cb72..5604369ed3 100644
--- a/lib/spack/spack/cmd/maintainers.py
+++ b/lib/spack/spack/cmd/maintainers.py
@@ -22,25 +22,36 @@ level = "long"
def setup_parser(subparser):
maintained_group = subparser.add_mutually_exclusive_group()
maintained_group.add_argument(
- '--maintained', action='store_true', default=False,
- help='show names of maintained packages')
+ "--maintained",
+ action="store_true",
+ default=False,
+ help="show names of maintained packages",
+ )
maintained_group.add_argument(
- '--unmaintained', action='store_true', default=False,
- help='show names of unmaintained packages')
+ "--unmaintained",
+ action="store_true",
+ default=False,
+ help="show names of unmaintained packages",
+ )
subparser.add_argument(
- '-a', '--all', action='store_true', default=False,
- help='show maintainers for all packages')
+ "-a", "--all", action="store_true", default=False, help="show maintainers for all packages"
+ )
subparser.add_argument(
- '--by-user', action='store_true', default=False,
- help='show packages for users instead of users for packages')
+ "--by-user",
+ action="store_true",
+ default=False,
+ help="show packages for users instead of users for packages",
+ )
# options for commands that take package arguments
subparser.add_argument(
- 'package_or_user', nargs=argparse.REMAINDER,
- help='names of packages or users to get info for')
+ "package_or_user",
+ nargs=argparse.REMAINDER,
+ help="names of packages or users to get info for",
+ )
def packages_to_maintainers(package_names=None):
@@ -105,20 +116,18 @@ def maintainers(parser, args):
if args.by_user:
maintainers = maintainers_to_packages(args.package_or_user)
for user, packages in sorted(maintainers.items()):
- color.cprint('@c{%s}: %s'
- % (user, ', '.join(sorted(packages))))
+ color.cprint("@c{%s}: %s" % (user, ", ".join(sorted(packages))))
return 0 if maintainers else 1
else:
packages = packages_to_maintainers(args.package_or_user)
for pkg, maintainers in sorted(packages.items()):
- color.cprint('@c{%s}: %s'
- % (pkg, ', '.join(sorted(maintainers))))
+ color.cprint("@c{%s}: %s" % (pkg, ", ".join(sorted(maintainers))))
return 0 if packages else 1
if args.by_user:
if not args.package_or_user:
- tty.die('spack maintainers --by-user requires a user or --all')
+ tty.die("spack maintainers --by-user requires a user or --all")
packages = union_values(maintainers_to_packages(args.package_or_user))
colify(packages)
@@ -126,7 +135,7 @@ def maintainers(parser, args):
else:
if not args.package_or_user:
- tty.die('spack maintainers requires a package or --all')
+ tty.die("spack maintainers requires a package or --all")
users = union_values(packages_to_maintainers(args.package_or_user))
colify(users)
diff --git a/lib/spack/spack/cmd/make_installer.py b/lib/spack/spack/cmd/make_installer.py
index 761401ea37..500f6402da 100644
--- a/lib/spack/spack/cmd/make_installer.py
+++ b/lib/spack/spack/cmd/make_installer.py
@@ -28,6 +28,7 @@ def txt_to_rtf(file_path):
def line_to_rtf(str):
return str.replace("\n", "\\par")
+
contents = ""
with open(file_path, "r+") as f:
for line in f.readlines():
@@ -38,31 +39,34 @@ def txt_to_rtf(file_path):
def setup_parser(subparser):
spack_source_group = subparser.add_mutually_exclusive_group(required=True)
spack_source_group.add_argument(
- '-v', '--spack-version', default="",
- help='download given spack version e.g. 0.16.0')
+ "-v", "--spack-version", default="", help="download given spack version e.g. 0.16.0"
+ )
spack_source_group.add_argument(
- '-s', '--spack-source', default="",
- help='full path to spack source')
+ "-s", "--spack-source", default="", help="full path to spack source"
+ )
subparser.add_argument(
- '-g', '--git-installer-verbosity', default="",
- choices=set(['SILENT', 'VERYSILENT']),
+ "-g",
+ "--git-installer-verbosity",
+ default="",
+ choices=set(["SILENT", "VERYSILENT"]),
help="Level of verbosity provided by bundled Git Installer.\
Default is fully verbose",
- required=False, action='store', dest="git_verbosity"
+ required=False,
+ action="store",
+ dest="git_verbosity",
)
- subparser.add_argument(
- 'output_dir', help="output directory")
+ subparser.add_argument("output_dir", help="output directory")
def make_installer(parser, args):
"""
- Use CMake to generate WIX installer in newly created build directory
+ Use CMake to generate WIX installer in newly created build directory
"""
- if sys.platform == 'win32':
+ if sys.platform == "win32":
output_dir = args.output_dir
- cmake_spec = Spec('cmake')
+ cmake_spec = Spec("cmake")
cmake_spec.concretize()
cmake_path = os.path.join(cmake_spec.prefix, "bin", "cmake.exe")
cpack_path = os.path.join(cmake_spec.prefix, "bin", "cpack.exe")
@@ -89,21 +93,23 @@ def make_installer(parser, args):
rtf_spack_license = txt_to_rtf(spack_license)
spack_license = posixpath.join(source_dir, "LICENSE.rtf")
- with open(spack_license, 'w') as rtf_license:
+ with open(spack_license, "w") as rtf_license:
written = rtf_license.write(rtf_spack_license)
if written == 0:
raise RuntimeError("Failed to generate properly formatted license file")
- spack_logo = posixpath.join(posix_root,
- "share/spack/logo/favicon.ico")
+ spack_logo = posixpath.join(posix_root, "share/spack/logo/favicon.ico")
try:
spack.util.executable.Executable(cmake_path)(
- '-S', source_dir, '-B', output_dir,
- '-DSPACK_VERSION=%s' % spack_version,
- '-DSPACK_SOURCE=%s' % spack_source,
- '-DSPACK_LICENSE=%s' % spack_license,
- '-DSPACK_LOGO=%s' % spack_logo,
- '-DSPACK_GIT_VERBOSITY=%s' % git_verbosity
+ "-S",
+ source_dir,
+ "-B",
+ output_dir,
+ "-DSPACK_VERSION=%s" % spack_version,
+ "-DSPACK_SOURCE=%s" % spack_source,
+ "-DSPACK_LICENSE=%s" % spack_license,
+ "-DSPACK_LOGO=%s" % spack_logo,
+ "-DSPACK_GIT_VERBOSITY=%s" % git_verbosity,
)
except spack.util.executable.ProcessError:
print("Failed to generate installer")
@@ -111,36 +117,34 @@ def make_installer(parser, args):
try:
spack.util.executable.Executable(cpack_path)(
- "--config",
- "%s/CPackConfig.cmake" % output_dir,
- "-B",
- "%s/" % output_dir)
+ "--config", "%s/CPackConfig.cmake" % output_dir, "-B", "%s/" % output_dir
+ )
except spack.util.executable.ProcessError:
print("Failed to generate installer")
return spack.util.executable.ProcessError.returncode
try:
- spack.util.executable.Executable(os.environ.get('WIX') + '/bin/candle.exe')(
- '-ext',
- 'WixBalExtension',
- '%s/bundle.wxs' % output_dir,
- '-out',
- '%s/bundle.wixobj' % output_dir
+ spack.util.executable.Executable(os.environ.get("WIX") + "/bin/candle.exe")(
+ "-ext",
+ "WixBalExtension",
+ "%s/bundle.wxs" % output_dir,
+ "-out",
+ "%s/bundle.wixobj" % output_dir,
)
except spack.util.executable.ProcessError:
print("Failed to generate installer chain")
return spack.util.executable.ProcessError.returncode
try:
- spack.util.executable.Executable(os.environ.get('WIX') + "/bin/light.exe")(
+ spack.util.executable.Executable(os.environ.get("WIX") + "/bin/light.exe")(
"-sw1134",
"-ext",
"WixBalExtension",
"%s/bundle.wixobj" % output_dir,
- '-out',
- '%s/Spack.exe' % output_dir
+ "-out",
+ "%s/Spack.exe" % output_dir,
)
except spack.util.executable.ProcessError:
print("Failed to generate installer chain")
return spack.util.executable.ProcessError.returncode
print("Successfully generated Spack.exe in %s" % (output_dir))
else:
- print('The make-installer command is currently only supported on Windows.')
+ print("The make-installer command is currently only supported on Windows.")
diff --git a/lib/spack/spack/cmd/mark.py b/lib/spack/spack/cmd/mark.py
index da31917c39..1fba272ac9 100644
--- a/lib/spack/spack/cmd/mark.py
+++ b/lib/spack/spack/cmd/mark.py
@@ -28,29 +28,40 @@ error_message = """You can either:
# Arguments for display_specs when we find ambiguity
display_args = {
- 'long': True,
- 'show_flags': False,
- 'variants': False,
- 'indent': 4,
+ "long": True,
+ "show_flags": False,
+ "variants": False,
+ "indent": 4,
}
def setup_parser(subparser):
- arguments.add_common_arguments(
- subparser, ['installed_specs'])
+ arguments.add_common_arguments(subparser, ["installed_specs"])
subparser.add_argument(
- '-a', '--all', action='store_true', dest='all',
+ "-a",
+ "--all",
+ action="store_true",
+ dest="all",
help="Mark ALL installed packages that match each "
"supplied spec. If you `mark --all libelf`,"
" ALL versions of `libelf` are marked. If no spec is "
- "supplied, all installed packages will be marked.")
+ "supplied, all installed packages will be marked.",
+ )
exim = subparser.add_mutually_exclusive_group(required=True)
exim.add_argument(
- '-e', '--explicit', action='store_true', dest='explicit',
- help="Mark packages as explicitly installed.")
+ "-e",
+ "--explicit",
+ action="store_true",
+ dest="explicit",
+ help="Mark packages as explicitly installed.",
+ )
exim.add_argument(
- '-i', '--implicit', action='store_true', dest='implicit',
- help="Mark packages as implicitly installed.")
+ "-i",
+ "--implicit",
+ action="store_true",
+ dest="implicit",
+ help="Mark packages as implicitly installed.",
+ )
def find_matching_specs(specs, allow_multiple_matches=False):
@@ -74,17 +85,16 @@ def find_matching_specs(specs, allow_multiple_matches=False):
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
- tty.error('{0} matches multiple packages:'.format(spec))
- sys.stderr.write('\n')
- spack.cmd.display_specs(matching, output=sys.stderr,
- **display_args)
- sys.stderr.write('\n')
+ tty.error("{0} matches multiple packages:".format(spec))
+ sys.stderr.write("\n")
+ spack.cmd.display_specs(matching, output=sys.stderr, **display_args)
+ sys.stderr.write("\n")
sys.stderr.flush()
has_errors = True
# No installed package matches the query
if len(matching) == 0 and spec is not any:
- tty.die('{0} does not match any installed packages.'.format(spec))
+ tty.die("{0} does not match any installed packages.".format(spec))
specs_from_cli.extend(matching)
@@ -114,8 +124,10 @@ def mark_specs(args, specs):
def mark(parser, args):
if not args.specs and not args.all:
- tty.die('mark requires at least one package argument.',
- ' Use `spack mark --all` to mark ALL packages.')
+ tty.die(
+ "mark requires at least one package argument.",
+ " Use `spack mark --all` to mark ALL packages.",
+ )
# [any] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
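
The mark.py hunk above keeps one dictionary of display options (display_args) and reuses it at each spack.cmd.display_specs(...) call via ** unpacking, so every ambiguity message is formatted the same way. A tiny sketch of that keyword-unpacking pattern, with a stand-in function whose signature is hypothetical:

    def display_specs(specs, output=None, long=False, show_flags=False, variants=False, indent=0):
        # Stand-in: just show how the shared options arrive at the call site.
        pad = " " * indent
        for s in specs:
            print("%s%s (long=%s, flags=%s, variants=%s)" % (pad, s, long, show_flags, variants))

    display_args = {"long": True, "show_flags": False, "variants": False, "indent": 4}
    display_specs(["libelf@0.8.13", "zlib@1.2.12"], **display_args)
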
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index e043702bf3..63ec1a6bdd 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -26,104 +26,118 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
+ arguments.add_common_arguments(subparser, ["no_checksum", "deprecated"])
- sp = subparser.add_subparsers(
- metavar='SUBCOMMAND', dest='mirror_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="mirror_command")
# Create
- create_parser = sp.add_parser('create', help=mirror_create.__doc__)
- create_parser.add_argument('-d', '--directory', default=None,
- help="directory in which to create mirror")
+ create_parser = sp.add_parser("create", help=mirror_create.__doc__)
+ create_parser.add_argument(
+ "-d", "--directory", default=None, help="directory in which to create mirror"
+ )
create_parser.add_argument(
- '-a', '--all', action='store_true',
+ "-a",
+ "--all",
+ action="store_true",
help="mirror all versions of all packages in Spack, or all packages"
- " in the current environment if there is an active environment"
- " (this requires significant time and space)")
- create_parser.add_argument(
- '-f', '--file', help="file with specs of packages to put in mirror")
+ " in the current environment if there is an active environment"
+ " (this requires significant time and space)",
+ )
+ create_parser.add_argument("-f", "--file", help="file with specs of packages to put in mirror")
create_parser.add_argument(
- '--exclude-file',
+ "--exclude-file",
help="specs which Spack should not try to add to a mirror"
- " (listed in a file, one per line)")
+ " (listed in a file, one per line)",
+ )
create_parser.add_argument(
- '--exclude-specs',
- help="specs which Spack should not try to add to a mirror"
- " (specified on command line)")
+ "--exclude-specs",
+ help="specs which Spack should not try to add to a mirror" " (specified on command line)",
+ )
create_parser.add_argument(
- '--skip-unstable-versions', action='store_true',
- help="don't cache versions unless they identify a stable (unchanging)"
- " source code")
+ "--skip-unstable-versions",
+ action="store_true",
+ help="don't cache versions unless they identify a stable (unchanging)" " source code",
+ )
create_parser.add_argument(
- '-D', '--dependencies', action='store_true',
- help="also fetch all dependencies")
+ "-D", "--dependencies", action="store_true", help="also fetch all dependencies"
+ )
create_parser.add_argument(
- '-n', '--versions-per-spec',
+ "-n",
+ "--versions-per-spec",
help="the number of versions to fetch for each spec, choose 'all' to"
- " retrieve all versions of each package")
- arguments.add_common_arguments(create_parser, ['specs'])
+ " retrieve all versions of each package",
+ )
+ arguments.add_common_arguments(create_parser, ["specs"])
# Destroy
- destroy_parser = sp.add_parser('destroy', help=mirror_destroy.__doc__)
+ destroy_parser = sp.add_parser("destroy", help=mirror_destroy.__doc__)
destroy_target = destroy_parser.add_mutually_exclusive_group(required=True)
- destroy_target.add_argument('-m', '--mirror-name',
- metavar='mirror_name',
- type=str,
- help="find mirror to destroy by name")
- destroy_target.add_argument('--mirror-url',
- metavar='mirror_url',
- type=str,
- help="find mirror to destroy by url")
+ destroy_target.add_argument(
+ "-m",
+ "--mirror-name",
+ metavar="mirror_name",
+ type=str,
+ help="find mirror to destroy by name",
+ )
+ destroy_target.add_argument(
+ "--mirror-url", metavar="mirror_url", type=str, help="find mirror to destroy by url"
+ )
# used to construct scope arguments below
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Add
- add_parser = sp.add_parser('add', help=mirror_add.__doc__)
- add_parser.add_argument(
- 'name', help="mnemonic name for mirror", metavar="mirror")
+ add_parser = sp.add_parser("add", help=mirror_add.__doc__)
+ add_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
+ add_parser.add_argument("url", help="url of mirror directory from 'spack mirror create'")
add_parser.add_argument(
- 'url', help="url of mirror directory from 'spack mirror create'")
- add_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
- help="configuration scope to modify")
+ help="configuration scope to modify",
+ )
arguments.add_s3_connection_args(add_parser, False)
# Remove
- remove_parser = sp.add_parser('remove', aliases=['rm'],
- help=mirror_remove.__doc__)
- remove_parser.add_argument(
- 'name', help="mnemonic name for mirror", metavar="mirror")
+ remove_parser = sp.add_parser("remove", aliases=["rm"], help=mirror_remove.__doc__)
+ remove_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
remove_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
- help="configuration scope to modify")
+ help="configuration scope to modify",
+ )
# Set-Url
- set_url_parser = sp.add_parser('set-url', help=mirror_set_url.__doc__)
- set_url_parser.add_argument(
- 'name', help="mnemonic name for mirror", metavar="mirror")
+ set_url_parser = sp.add_parser("set-url", help=mirror_set_url.__doc__)
+ set_url_parser.add_argument("name", help="mnemonic name for mirror", metavar="mirror")
+ set_url_parser.add_argument("url", help="url of mirror directory from 'spack mirror create'")
set_url_parser.add_argument(
- 'url', help="url of mirror directory from 'spack mirror create'")
+ "--push", action="store_true", help="set only the URL used for uploading new packages"
+ )
set_url_parser.add_argument(
- '--push', action='store_true',
- help="set only the URL used for uploading new packages")
- set_url_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
- help="configuration scope to modify")
+ help="configuration scope to modify",
+ )
arguments.add_s3_connection_args(set_url_parser, False)
# List
- list_parser = sp.add_parser('list', help=mirror_list.__doc__)
+ list_parser = sp.add_parser("list", help=mirror_list.__doc__)
list_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_list_scope(),
- help="configuration scope to read from")
+ help="configuration scope to read from",
+ )
def mirror_add(args):
@@ -140,7 +154,7 @@ def mirror_remove(args):
def mirror_set_url(args):
"""Change the URL of a mirror."""
url = url_util.format(args.url)
- mirrors = spack.config.get('mirrors', scope=args.scope)
+ mirrors = spack.config.get("mirrors", scope=args.scope)
if not mirrors:
mirrors = syaml_dict()
@@ -151,15 +165,16 @@ def mirror_set_url(args):
key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
if any(value for value in key_values if value in args):
- incoming_data = {"url": url,
- "access_pair": (args.s3_access_key_id,
- args.s3_access_key_secret),
- "access_token": args.s3_access_token,
- "profile": args.s3_profile,
- "endpoint_url": args.s3_endpoint_url}
+ incoming_data = {
+ "url": url,
+ "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
+ "access_token": args.s3_access_token,
+ "profile": args.s3_profile,
+ "endpoint_url": args.s3_endpoint_url,
+ }
try:
- fetch_url = entry['fetch']
- push_url = entry['push']
+ fetch_url = entry["fetch"]
+ push_url = entry["push"]
except TypeError:
fetch_url, push_url = entry, entry
@@ -174,8 +189,7 @@ def mirror_set_url(args):
push_url = url
else:
if isinstance(push_url, dict):
- changes_made = (changes_made or push_url != incoming_data
- or push_url != incoming_data)
+ changes_made = changes_made or push_url != incoming_data or push_url != incoming_data
fetch_url, push_url = incoming_data, incoming_data
else:
changes_made = changes_made or push_url != url
@@ -184,22 +198,24 @@ def mirror_set_url(args):
items = [
(
(n, u)
- if n != args.name else (
+ if n != args.name
+ else (
(n, {"fetch": fetch_url, "push": push_url})
- if fetch_url != push_url else (n, {"fetch": fetch_url,
- "push": fetch_url})
+ if fetch_url != push_url
+ else (n, {"fetch": fetch_url, "push": fetch_url})
)
)
for n, u in mirrors.items()
]
mirrors = syaml_dict(items)
- spack.config.set('mirrors', mirrors, scope=args.scope)
+ spack.config.set("mirrors", mirrors, scope=args.scope)
if changes_made:
tty.msg(
- "Changed%s url or connection information for mirror %s." %
- ((" (push)" if args.push else ""), args.name))
+ "Changed%s url or connection information for mirror %s."
+ % ((" (push)" if args.push else ""), args.name)
+ )
else:
tty.msg("No changes made to mirror %s." % args.name)
@@ -225,30 +241,33 @@ def _read_specs_from_file(filename):
specs.append(s)
except SpackError as e:
tty.debug(e)
- tty.die("Parse error in %s, line %d:" % (filename, i + 1),
- ">>> " + string, str(e))
+ tty.die("Parse error in %s, line %d:" % (filename, i + 1), ">>> " + string, str(e))
return specs
def _determine_specs_to_mirror(args):
if args.specs and args.all:
- raise SpackError("Cannot specify specs on command line if you"
- " chose to mirror all specs with '--all'")
+ raise SpackError(
+ "Cannot specify specs on command line if you" " chose to mirror all specs with '--all'"
+ )
elif args.file and args.all:
- raise SpackError("Cannot specify specs with a file ('-f') if you"
- " chose to mirror all specs with '--all'")
+ raise SpackError(
+ "Cannot specify specs with a file ('-f') if you"
+ " chose to mirror all specs with '--all'"
+ )
if not args.versions_per_spec:
num_versions = 1
- elif args.versions_per_spec == 'all':
- num_versions = 'all'
+ elif args.versions_per_spec == "all":
+ num_versions = "all"
else:
try:
num_versions = int(args.versions_per_spec)
except ValueError:
raise SpackError(
"'--versions-per-spec' must be a number or 'all',"
- " got '{0}'".format(args.versions_per_spec))
+ " got '{0}'".format(args.versions_per_spec)
+ )
# try to parse specs from the command line first.
with spack.concretize.disable_compiler_existence_check():
@@ -264,9 +283,11 @@ def _determine_specs_to_mirror(args):
if not specs:
# If nothing is passed, use environment or all if no active env
if not args.all:
- tty.die("No packages were specified.",
- "To mirror all packages, use the '--all' option"
- " (this will require significant time and space).")
+ tty.die(
+ "No packages were specified.",
+ "To mirror all packages, use the '--all' option"
+ " (this will require significant time and space).",
+ )
env = ev.active_environment()
if env:
@@ -288,44 +309,39 @@ def _determine_specs_to_mirror(args):
specs = [s for s in specs if not s.external]
for spec in external_specs:
- msg = 'Skipping {0} as it is an external spec.'
+ msg = "Skipping {0} as it is an external spec."
tty.msg(msg.format(spec.cshort_spec))
if env_specs:
if args.versions_per_spec:
- tty.warn("Ignoring '--versions-per-spec' for mirroring specs"
- " in environment.")
+ tty.warn("Ignoring '--versions-per-spec' for mirroring specs" " in environment.")
mirror_specs = env_specs
else:
- if num_versions == 'all':
+ if num_versions == "all":
mirror_specs = spack.mirror.get_all_versions(specs)
else:
- mirror_specs = spack.mirror.get_matching_versions(
- specs, num_versions=num_versions)
- mirror_specs.sort(
- key=lambda s: (s.name, s.version))
+ mirror_specs = spack.mirror.get_matching_versions(specs, num_versions=num_versions)
+ mirror_specs.sort(key=lambda s: (s.name, s.version))
exclude_specs = []
if args.exclude_file:
exclude_specs.extend(_read_specs_from_file(args.exclude_file))
if args.exclude_specs:
- exclude_specs.extend(
- spack.cmd.parse_specs(str(args.exclude_specs).split()))
+ exclude_specs.extend(spack.cmd.parse_specs(str(args.exclude_specs).split()))
if exclude_specs:
mirror_specs = list(
- x for x in mirror_specs
- if not any(x.satisfies(y, strict=True) for y in exclude_specs))
+ x for x in mirror_specs if not any(x.satisfies(y, strict=True) for y in exclude_specs)
+ )
return mirror_specs
def mirror_create(args):
"""Create a directory to be used as a spack mirror, and fill it with
- package archives."""
+ package archives."""
mirror_specs = _determine_specs_to_mirror(args)
- mirror = spack.mirror.Mirror(
- args.directory or spack.config.get('config:source_cache'))
+ mirror = spack.mirror.Mirror(args.directory or spack.config.get("config:source_cache"))
directory = url_util.format(mirror.push_url)
@@ -333,16 +349,18 @@ def mirror_create(args):
# Actually do the work to create the mirror
present, mirrored, error = spack.mirror.create(
- directory, mirror_specs, args.skip_unstable_versions)
+ directory, mirror_specs, args.skip_unstable_versions
+ )
p, m, e = len(present), len(mirrored), len(error)
verb = "updated" if existed else "created"
tty.msg(
"Successfully %s mirror in %s" % (verb, directory),
"Archive stats:",
- " %-4d already present" % p,
- " %-4d added" % m,
- " %-4d failed to fetch." % e)
+ " %-4d already present" % p,
+ " %-4d added" % m,
+ " %-4d failed to fetch." % e,
+ )
if error:
tty.error("Failed downloads:")
tty.colify(s.cformat("{name}{@version}") for s in error)
@@ -363,18 +381,20 @@ def mirror_destroy(args):
def mirror(parser, args):
- action = {'create': mirror_create,
- 'destroy': mirror_destroy,
- 'add': mirror_add,
- 'remove': mirror_remove,
- 'rm': mirror_remove,
- 'set-url': mirror_set_url,
- 'list': mirror_list}
+ action = {
+ "create": mirror_create,
+ "destroy": mirror_destroy,
+ "add": mirror_add,
+ "remove": mirror_remove,
+ "rm": mirror_remove,
+ "set-url": mirror_set_url,
+ "list": mirror_list,
+ }
if args.no_checksum:
- spack.config.set('config:checksum', False, scope='command_line')
+ spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
- spack.config.set('config:deprecated', True, scope='command_line')
+ spack.config.set("config:deprecated", True, scope="command_line")
action[args.mirror_command](args)
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index a9b9baf3ab..0212c7949e 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -17,7 +17,7 @@ _subcommands = {} # type: Dict[str, Callable]
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="module_command")
spack.cmd.modules.lmod.add_command(sp, _subcommands)
spack.cmd.modules.tcl.add_command(sp, _subcommands)
diff --git a/lib/spack/spack/cmd/modules/__init__.py b/lib/spack/spack/cmd/modules/__init__.py
index 2ccbf4b2cf..203c807dc1 100644
--- a/lib/spack/spack/cmd/modules/__init__.py
+++ b/lib/spack/spack/cmd/modules/__init__.py
@@ -26,68 +26,67 @@ level = "short"
def setup_parser(subparser):
subparser.add_argument(
- '-n', '--name',
- action='store', dest='module_set_name', default='default',
- help="Named module set to use from modules configuration."
+ "-n",
+ "--name",
+ action="store",
+ dest="module_set_name",
+ default="default",
+ help="Named module set to use from modules configuration.",
)
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subparser_name")
- refresh_parser = sp.add_parser('refresh', help='regenerate module files')
+ refresh_parser = sp.add_parser("refresh", help="regenerate module files")
refresh_parser.add_argument(
- '--delete-tree',
- help='delete the module file tree before refresh',
- action='store_true'
+ "--delete-tree", help="delete the module file tree before refresh", action="store_true"
)
refresh_parser.add_argument(
- '--upstream-modules',
- help='generate modules for packages installed upstream',
- action='store_true'
- )
- arguments.add_common_arguments(
- refresh_parser, ['constraint', 'yes_to_all']
+ "--upstream-modules",
+ help="generate modules for packages installed upstream",
+ action="store_true",
)
+ arguments.add_common_arguments(refresh_parser, ["constraint", "yes_to_all"])
- find_parser = sp.add_parser('find', help='find module files for packages')
+ find_parser = sp.add_parser("find", help="find module files for packages")
find_parser.add_argument(
- '--full-path',
- help='display full path to module file',
- action='store_true'
- )
- arguments.add_common_arguments(
- find_parser, ['constraint', 'recurse_dependencies']
+ "--full-path", help="display full path to module file", action="store_true"
)
+ arguments.add_common_arguments(find_parser, ["constraint", "recurse_dependencies"])
- rm_parser = sp.add_parser('rm', help='remove module files')
- arguments.add_common_arguments(
- rm_parser, ['constraint', 'yes_to_all']
- )
+ rm_parser = sp.add_parser("rm", help="remove module files")
+ arguments.add_common_arguments(rm_parser, ["constraint", "yes_to_all"])
loads_parser = sp.add_parser(
- 'loads',
- help='prompt the list of modules associated with a constraint'
+ "loads", help="prompt the list of modules associated with a constraint"
)
add_loads_arguments(loads_parser)
- arguments.add_common_arguments(loads_parser, ['constraint'])
+ arguments.add_common_arguments(loads_parser, ["constraint"])
return sp
def add_loads_arguments(subparser):
subparser.add_argument(
- '--input-only', action='store_false', dest='shell',
- help='generate input for module command (instead of a shell script)'
+ "--input-only",
+ action="store_false",
+ dest="shell",
+ help="generate input for module command (instead of a shell script)",
)
subparser.add_argument(
- '-p', '--prefix', dest='prefix', default='',
- help='prepend to module names when issuing module load commands'
+ "-p",
+ "--prefix",
+ dest="prefix",
+ default="",
+ help="prepend to module names when issuing module load commands",
)
subparser.add_argument(
- '-x', '--exclude', dest='exclude', action='append', default=[],
- help="exclude package from output; may be specified multiple times"
- )
- arguments.add_common_arguments(
- subparser, ['recurse_dependencies']
+ "-x",
+ "--exclude",
+ dest="exclude",
+ action="append",
+ default=[],
+ help="exclude package from output; may be specified multiple times",
)
+ arguments.add_common_arguments(subparser, ["recurse_dependencies"])
class MultipleSpecsMatch(Exception):
@@ -117,11 +116,16 @@ def one_spec_or_raise(specs):
def check_module_set_name(name):
- modules_config = spack.config.get('modules')
- valid_names = set([key for key, value in modules_config.items()
- if isinstance(value, dict) and value.get('enable', [])])
- if 'enable' in modules_config and modules_config['enable']:
- valid_names.add('default')
+ modules_config = spack.config.get("modules")
+ valid_names = set(
+ [
+ key
+ for key, value in modules_config.items()
+ if isinstance(value, dict) and value.get("enable", [])
+ ]
+ )
+ if "enable" in modules_config and modules_config["enable"]:
+ valid_names.add("default")
if name not in valid_names:
msg = "Cannot use invalid module set %s." % name
@@ -134,7 +138,8 @@ _missing_modules_warning = (
" because they were excluded or because the spec is"
" associated with a package that is installed upstream and"
" that installation has not generated a module file. Rerun"
- " this command with debug output enabled for more details.")
+ " this command with debug output enabled for more details."
+)
def loads(module_type, specs, args, out=None):
@@ -154,42 +159,48 @@ def loads(module_type, specs, args, out=None):
seen_add = seen.add
for spec in specs_from_user_constraint:
specs.extend(
- [item for item in spec.traverse(order='post', cover='nodes')
- if not (item in seen or seen_add(item))]
+ [
+ item
+ for item in spec.traverse(order="post", cover="nodes")
+ if not (item in seen or seen_add(item))
+ ]
)
modules = list(
- (spec,
- spack.modules.common.get_module(
- module_type, spec, get_full_path=False,
- module_set_name=args.module_set_name, required=False))
- for spec in specs)
+ (
+ spec,
+ spack.modules.common.get_module(
+ module_type,
+ spec,
+ get_full_path=False,
+ module_set_name=args.module_set_name,
+ required=False,
+ ),
+ )
+ for spec in specs
+ )
module_commands = {
- 'tcl': 'module load ',
- 'lmod': 'module load ',
+ "tcl": "module load ",
+ "lmod": "module load ",
}
- d = {
- 'command': '' if not args.shell else module_commands[module_type],
- 'prefix': args.prefix
- }
+ d = {"command": "" if not args.shell else module_commands[module_type], "prefix": args.prefix}
exclude_set = set(args.exclude)
- load_template = '{comment}{exclude}{command}{prefix}{name}'
+ load_template = "{comment}{exclude}{command}{prefix}{name}"
for spec, mod in modules:
if not mod:
- module_output_for_spec = (
- '## excluded or missing from upstream: {0}'.format(
- spec.format()))
+ module_output_for_spec = "## excluded or missing from upstream: {0}".format(
+ spec.format()
+ )
else:
- d['exclude'] = '## ' if spec.name in exclude_set else ''
- d['comment'] = '' if not args.shell else '# {0}\n'.format(
- spec.format())
- d['name'] = mod
+ d["exclude"] = "## " if spec.name in exclude_set else ""
+ d["comment"] = "" if not args.shell else "# {0}\n".format(spec.format())
+ d["name"] = mod
module_output_for_spec = load_template.format(**d)
out.write(module_output_for_spec)
- out.write('\n')
+ out.write("\n")
if not all(mod for _, mod in modules):
tty.warn(_missing_modules_warning)
@@ -203,29 +214,39 @@ def find(module_type, specs, args):
if args.recurse_dependencies:
dependency_specs_to_retrieve = list(
- single_spec.traverse(root=False, order='post', cover='nodes',
- deptype=('link', 'run')))
+ single_spec.traverse(root=False, order="post", cover="nodes", deptype=("link", "run"))
+ )
else:
dependency_specs_to_retrieve = []
try:
modules = [
spack.modules.common.get_module(
- module_type, spec, args.full_path,
- module_set_name=args.module_set_name, required=False)
- for spec in dependency_specs_to_retrieve]
+ module_type,
+ spec,
+ args.full_path,
+ module_set_name=args.module_set_name,
+ required=False,
+ )
+ for spec in dependency_specs_to_retrieve
+ ]
modules.append(
spack.modules.common.get_module(
- module_type, single_spec, args.full_path,
- module_set_name=args.module_set_name, required=True))
+ module_type,
+ single_spec,
+ args.full_path,
+ module_set_name=args.module_set_name,
+ required=True,
+ )
+ )
except spack.modules.common.ModuleNotFoundError as e:
tty.die(e.message)
if not all(modules):
tty.warn(_missing_modules_warning)
modules = list(x for x in modules if x)
- print(' '.join(modules))
+ print(" ".join(modules))
def rm(module_type, specs, args):
@@ -235,26 +256,24 @@ def rm(module_type, specs, args):
check_module_set_name(args.module_set_name)
module_cls = spack.modules.module_types[module_type]
- module_exist = lambda x: os.path.exists(
- module_cls(x, args.module_set_name).layout.filename)
+ module_exist = lambda x: os.path.exists(module_cls(x, args.module_set_name).layout.filename)
specs_with_modules = [spec for spec in specs if module_exist(spec)]
- modules = [module_cls(spec, args.module_set_name)
- for spec in specs_with_modules]
+ modules = [module_cls(spec, args.module_set_name) for spec in specs_with_modules]
if not modules:
- tty.die('No module file matches your query')
+ tty.die("No module file matches your query")
# Ask for confirmation
if not args.yes_to_all:
- msg = 'You are about to remove {0} module files for:\n'
+ msg = "You are about to remove {0} module files for:\n"
tty.msg(msg.format(module_type))
spack.cmd.display_specs(specs_with_modules, long=True)
- print('')
- answer = tty.get_yes_or_no('Do you want to proceed?')
+ print("")
+ answer = tty.get_yes_or_no("Do you want to proceed?")
if not answer:
- tty.die('Will not remove any module files')
+ tty.die("Will not remove any module files")
# Remove the module files
for s in modules:
@@ -269,20 +288,20 @@ def refresh(module_type, specs, args):
# Prompt a message to the user about what is going to change
if not specs:
- tty.msg('No package matches your query')
+ tty.msg("No package matches your query")
return
if not args.upstream_modules:
specs = list(s for s in specs if not s.installed_upstream)
if not args.yes_to_all:
- msg = 'You are about to regenerate {types} module files for:\n'
+ msg = "You are about to regenerate {types} module files for:\n"
tty.msg(msg.format(types=module_type))
spack.cmd.display_specs(specs, long=True)
- print('')
- answer = tty.get_yes_or_no('Do you want to proceed?')
+ print("")
+ answer = tty.get_yes_or_no("Do you want to proceed?")
if not answer:
- tty.die('Module file regeneration aborted.')
+ tty.die("Module file regeneration aborted.")
# Cycle over the module types and regenerate module files
@@ -290,8 +309,8 @@ def refresh(module_type, specs, args):
# Skip unknown packages.
writers = [
- cls(spec, args.module_set_name) for spec in specs
- if spack.repo.path.exists(spec.name)]
+ cls(spec, args.module_set_name) for spec in specs if spack.repo.path.exists(spec.name)
+ ]
# Filter excluded packages early
writers = [x for x in writers if not x.conf.excluded]
@@ -302,40 +321,41 @@ def refresh(module_type, specs, args):
file2writer[item.layout.filename].append(item)
if len(file2writer) != len(writers):
- message = 'Name clashes detected in module files:\n'
+ message = "Name clashes detected in module files:\n"
for filename, writer_list in file2writer.items():
if len(writer_list) > 1:
- message += '\nfile: {0}\n'.format(filename)
+ message += "\nfile: {0}\n".format(filename)
for x in writer_list:
- message += 'spec: {0}\n'.format(x.spec.format())
+ message += "spec: {0}\n".format(x.spec.format())
tty.error(message)
- tty.error('Operation aborted')
+ tty.error("Operation aborted")
raise SystemExit(1)
if len(writers) == 0:
- msg = 'Nothing to be done for {0} module files.'
+ msg = "Nothing to be done for {0} module files."
tty.msg(msg.format(module_type))
return
# If we arrived here we have at least one writer
module_type_root = writers[0].layout.dirname()
# Proceed regenerating module files
- tty.msg('Regenerating {name} module files'.format(name=module_type))
+ tty.msg("Regenerating {name} module files".format(name=module_type))
if os.path.isdir(module_type_root) and args.delete_tree:
shutil.rmtree(module_type_root, ignore_errors=False)
filesystem.mkdirp(module_type_root)
# Dump module index after potentially removing module tree
spack.modules.common.generate_module_index(
- module_type_root, writers, overwrite=args.delete_tree)
+ module_type_root, writers, overwrite=args.delete_tree
+ )
for x in writers:
try:
x.write(overwrite=True)
except Exception as e:
tty.debug(e)
- msg = 'Could not write module file [{0}]'
+ msg = "Could not write module file [{0}]"
tty.warn(msg.format(x.layout.filename))
- tty.warn('\t--> {0} <--'.format(str(e)))
+ tty.warn("\t--> {0} <--".format(str(e)))
#: Dictionary populated with the list of sub-commands.
@@ -344,22 +364,14 @@ def refresh(module_type, specs, args):
#: - module_type: the type of module it refers to
#: - specs : the list of specs to be processed
#: - args : namespace containing the parsed command line arguments
-callbacks = {
- 'refresh': refresh,
- 'rm': rm,
- 'find': find,
- 'loads': loads
-}
+callbacks = {"refresh": refresh, "rm": rm, "find": find, "loads": loads}
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
constraint_qualifiers = {
- 'refresh': {
- 'installed': True,
- 'known': True
- },
+ "refresh": {"installed": True, "known": True},
}
query_args = constraint_qualifiers.get(args.subparser_name, {})
@@ -373,9 +385,9 @@ def modules_cmd(parser, args, module_type, callbacks=callbacks):
except MultipleSpecsMatch:
msg = "the constraint '{query}' matches multiple packages:\n"
for s in specs:
- spec_fmt = '{hash:7} {name}{@version}{%compiler}'
- spec_fmt += '{compiler_flags}{variants}{arch=architecture}'
- msg += '\t' + s.cformat(spec_fmt) + '\n'
+ spec_fmt = "{hash:7} {name}{@version}{%compiler}"
+ spec_fmt += "{compiler_flags}{variants}{arch=architecture}"
+ msg += "\t" + s.cformat(spec_fmt) + "\n"
tty.error(msg.format(query=args.constraint))
tty.die(
"In this context exactly **one** match is needed: "
diff --git a/lib/spack/spack/cmd/modules/lmod.py b/lib/spack/spack/cmd/modules/lmod.py
index 8555822826..4e44cee5be 100644
--- a/lib/spack/spack/cmd/modules/lmod.py
+++ b/lib/spack/spack/cmd/modules/lmod.py
@@ -12,24 +12,20 @@ import spack.modules.lmod
def add_command(parser, command_dict):
- lmod_parser = parser.add_parser(
- 'lmod', help='manipulate hierarchical module files'
- )
+ lmod_parser = parser.add_parser("lmod", help="manipulate hierarchical module files")
sp = spack.cmd.modules.setup_parser(lmod_parser)
# Set default module file for a package
setdefault_parser = sp.add_parser(
- 'setdefault', help='set the default module file for a package'
- )
- spack.cmd.common.arguments.add_common_arguments(
- setdefault_parser, ['constraint']
+ "setdefault", help="set the default module file for a package"
)
+ spack.cmd.common.arguments.add_common_arguments(setdefault_parser, ["constraint"])
callbacks = dict(spack.cmd.modules.callbacks.items())
- callbacks['setdefault'] = setdefault
+ callbacks["setdefault"] = setdefault
- command_dict['lmod'] = functools.partial(
- spack.cmd.modules.modules_cmd, module_type='lmod', callbacks=callbacks
+ command_dict["lmod"] = functools.partial(
+ spack.cmd.modules.modules_cmd, module_type="lmod", callbacks=callbacks
)
@@ -41,18 +37,10 @@ def setdefault(module_type, specs, args):
#
spack.cmd.modules.one_spec_or_raise(specs)
spec = specs[0]
- data = {
- 'modules': {
- args.module_set_name: {
- 'lmod': {
- 'defaults': [str(spec)]
- }
- }
- }
- }
+ data = {"modules": {args.module_set_name: {"lmod": {"defaults": [str(spec)]}}}}
# Need to clear the cache if a SpackCommand is called during scripting
spack.modules.lmod.configuration_registry = {}
- scope = spack.config.InternalConfigScope('lmod-setdefault', data)
+ scope = spack.config.InternalConfigScope("lmod-setdefault", data)
with spack.config.override(scope):
- writer = spack.modules.module_types['lmod'](spec, args.module_set_name)
+ writer = spack.modules.module_types["lmod"](spec, args.module_set_name)
writer.update_module_defaults()
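
The lmod command (and the tcl command below) registers itself by pre-binding arguments with functools.partial, so one generic modules_cmd serves every module system. A minimal sketch of that binding pattern follows; the simplified modules_cmd signature and the dict-shaped args are illustrative stand-ins, not Spack's real interfaces.

import functools

def refresh(module_type, args):
    """regenerate module files (placeholder handler)"""
    print("refreshing %s module files" % module_type)

def modules_cmd(args, module_type, callbacks):
    # Look up the requested subcommand and pass along the pre-bound module type.
    callbacks[args["subparser_name"]](module_type, args)

callbacks = {"refresh": refresh}

command_dict = {
    # functools.partial pre-binds module_type and callbacks, so each stored
    # callable later needs only the parsed arguments.
    "lmod": functools.partial(modules_cmd, module_type="lmod", callbacks=callbacks),
    "tcl": functools.partial(modules_cmd, module_type="tcl", callbacks=callbacks),
}

command_dict["lmod"]({"subparser_name": "refresh"})  # prints: refreshing lmod module files
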
diff --git a/lib/spack/spack/cmd/modules/tcl.py b/lib/spack/spack/cmd/modules/tcl.py
index 6239a67c24..864e043c42 100644
--- a/lib/spack/spack/cmd/modules/tcl.py
+++ b/lib/spack/spack/cmd/modules/tcl.py
@@ -11,24 +11,20 @@ import spack.modules.tcl
def add_command(parser, command_dict):
- tcl_parser = parser.add_parser(
- 'tcl', help='manipulate non-hierarchical module files'
- )
+ tcl_parser = parser.add_parser("tcl", help="manipulate non-hierarchical module files")
sp = spack.cmd.modules.setup_parser(tcl_parser)
# Set default module file for a package
setdefault_parser = sp.add_parser(
- 'setdefault', help='set the default module file for a package'
- )
- spack.cmd.common.arguments.add_common_arguments(
- setdefault_parser, ['constraint']
+ "setdefault", help="set the default module file for a package"
)
+ spack.cmd.common.arguments.add_common_arguments(setdefault_parser, ["constraint"])
callbacks = dict(spack.cmd.modules.callbacks.items())
- callbacks['setdefault'] = setdefault
+ callbacks["setdefault"] = setdefault
- command_dict['tcl'] = functools.partial(
- spack.cmd.modules.modules_cmd, module_type='tcl', callbacks=callbacks
+ command_dict["tcl"] = functools.partial(
+ spack.cmd.modules.modules_cmd, module_type="tcl", callbacks=callbacks
)
@@ -37,17 +33,9 @@ def setdefault(module_type, specs, args):
# Currently, accepts only a single matching spec
spack.cmd.modules.one_spec_or_raise(specs)
spec = specs[0]
- data = {
- 'modules': {
- args.module_set_name: {
- 'tcl': {
- 'defaults': [str(spec)]
- }
- }
- }
- }
+ data = {"modules": {args.module_set_name: {"tcl": {"defaults": [str(spec)]}}}}
spack.modules.tcl.configuration_registry = {}
- scope = spack.config.InternalConfigScope('tcl-setdefault', data)
+ scope = spack.config.InternalConfigScope("tcl-setdefault", data)
with spack.config.override(scope):
- writer = spack.modules.module_types['tcl'](spec, args.module_set_name)
+ writer = spack.modules.module_types["tcl"](spec, args.module_set_name)
writer.update_module_defaults()
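
Both setdefault implementations collapse the old nested dict literal into a single line; the configuration payload is unchanged. A small standalone check (plain Python, with a hypothetical spec and module-set name) showing the two spellings are equivalent:

spec = "gcc@12.2.0"          # hypothetical spec string
module_set_name = "default"  # hypothetical module set name

old_style = {
    "modules": {
        module_set_name: {
            "tcl": {
                "defaults": [str(spec)]
            }
        }
    }
}
new_style = {"modules": {module_set_name: {"tcl": {"defaults": [str(spec)]}}}}

assert old_style == new_style  # same payload, only the formatting differs
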
diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py
index c9031a62c6..7a7dde1358 100644
--- a/lib/spack/spack/cmd/patch.py
+++ b/lib/spack/spack/cmd/patch.py
@@ -15,8 +15,7 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(
- subparser, ['no_checksum', 'deprecated', 'specs'])
+ arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
def patch(parser, args):
@@ -24,10 +23,10 @@ def patch(parser, args):
tty.die("patch requires at least one spec argument")
if args.no_checksum:
- spack.config.set('config:checksum', False, scope='command_line')
+ spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
- spack.config.set('config:deprecated', True, scope='command_line')
+ spack.config.set("config:deprecated", True, scope="command_line")
specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index dec604ddf3..6de7a4bcc1 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -22,60 +22,61 @@ level = "long"
def setup_parser(subparser):
- sp = subparser.add_subparsers(
- metavar='SUBCOMMAND', dest='pkg_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="pkg_command")
- add_parser = sp.add_parser('add', help=pkg_add.__doc__)
- arguments.add_common_arguments(add_parser, ['packages'])
+ add_parser = sp.add_parser("add", help=pkg_add.__doc__)
+ arguments.add_common_arguments(add_parser, ["packages"])
- list_parser = sp.add_parser('list', help=pkg_list.__doc__)
- list_parser.add_argument('rev', default='HEAD', nargs='?',
- help="revision to list packages for")
+ list_parser = sp.add_parser("list", help=pkg_list.__doc__)
+ list_parser.add_argument(
+ "rev", default="HEAD", nargs="?", help="revision to list packages for"
+ )
- diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
+ diff_parser = sp.add_parser("diff", help=pkg_diff.__doc__)
diff_parser.add_argument(
- 'rev1', nargs='?', default='HEAD^',
- help="revision to compare against")
+ "rev1", nargs="?", default="HEAD^", help="revision to compare against"
+ )
diff_parser.add_argument(
- 'rev2', nargs='?', default='HEAD',
- help="revision to compare to rev1 (default is HEAD)")
+ "rev2", nargs="?", default="HEAD", help="revision to compare to rev1 (default is HEAD)"
+ )
- add_parser = sp.add_parser('added', help=pkg_added.__doc__)
+ add_parser = sp.add_parser("added", help=pkg_added.__doc__)
+ add_parser.add_argument("rev1", nargs="?", default="HEAD^", help="revision to compare against")
add_parser.add_argument(
- 'rev1', nargs='?', default='HEAD^',
- help="revision to compare against")
- add_parser.add_argument(
- 'rev2', nargs='?', default='HEAD',
- help="revision to compare to rev1 (default is HEAD)")
+ "rev2", nargs="?", default="HEAD", help="revision to compare to rev1 (default is HEAD)"
+ )
- add_parser = sp.add_parser('changed', help=pkg_changed.__doc__)
- add_parser.add_argument(
- 'rev1', nargs='?', default='HEAD^',
- help="revision to compare against")
+ add_parser = sp.add_parser("changed", help=pkg_changed.__doc__)
+ add_parser.add_argument("rev1", nargs="?", default="HEAD^", help="revision to compare against")
add_parser.add_argument(
- 'rev2', nargs='?', default='HEAD',
- help="revision to compare to rev1 (default is HEAD)")
+ "rev2", nargs="?", default="HEAD", help="revision to compare to rev1 (default is HEAD)"
+ )
add_parser.add_argument(
- '-t', '--type', action='store', default='C',
- help="Types of changes to show (A: added, R: removed, "
- "C: changed); default is 'C'")
-
- rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
- rm_parser.add_argument(
- 'rev1', nargs='?', default='HEAD^',
- help="revision to compare against")
+ "-t",
+ "--type",
+ action="store",
+ default="C",
+ help="Types of changes to show (A: added, R: removed, " "C: changed); default is 'C'",
+ )
+
+ rm_parser = sp.add_parser("removed", help=pkg_removed.__doc__)
+ rm_parser.add_argument("rev1", nargs="?", default="HEAD^", help="revision to compare against")
rm_parser.add_argument(
- 'rev2', nargs='?', default='HEAD',
- help="revision to compare to rev1 (default is HEAD)")
+ "rev2", nargs="?", default="HEAD", help="revision to compare to rev1 (default is HEAD)"
+ )
- source_parser = sp.add_parser('source', help=pkg_source.__doc__)
+ source_parser = sp.add_parser("source", help=pkg_source.__doc__)
source_parser.add_argument(
- '-c', '--canonical', action='store_true', default=False,
- help="dump canonical source as used by package hash.")
- arguments.add_common_arguments(source_parser, ['spec'])
+ "-c",
+ "--canonical",
+ action="store_true",
+ default=False,
+ help="dump canonical source as used by package hash.",
+ )
+ arguments.add_common_arguments(source_parser, ["spec"])
- hash_parser = sp.add_parser('hash', help=pkg_hash.__doc__)
- arguments.add_common_arguments(hash_parser, ['spec'])
+ hash_parser = sp.add_parser("hash", help=pkg_hash.__doc__)
+ arguments.add_common_arguments(hash_parser, ["spec"])
def pkg_add(args):
@@ -161,13 +162,13 @@ def pkg(parser, args):
tty.die("This spack is not a git clone. Can't use 'spack pkg'")
action = {
- 'add': pkg_add,
- 'diff': pkg_diff,
- 'list': pkg_list,
- 'removed': pkg_removed,
- 'added': pkg_added,
- 'changed': pkg_changed,
- 'source': pkg_source,
- 'hash': pkg_hash,
+ "add": pkg_add,
+ "diff": pkg_diff,
+ "list": pkg_list,
+ "removed": pkg_removed,
+ "added": pkg_added,
+ "changed": pkg_changed,
+ "source": pkg_source,
+ "hash": pkg_hash,
}
action[args.pkg_command](args)
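
Like several other commands in this diff, spack pkg resolves its subcommand by indexing a dict of handler functions instead of branching with if/elif. A stripped-down sketch of that dispatch pattern with argparse; the handlers and arguments below are placeholders, not the real spack pkg implementations.

import argparse

def pkg_add(args):
    """add packages to the repository"""
    print("adding", args.packages)

def pkg_list(args):
    """list packages at a revision"""
    print("listing packages at", args.rev)

parser = argparse.ArgumentParser(prog="pkg")
sp = parser.add_subparsers(metavar="SUBCOMMAND", dest="pkg_command")

add_parser = sp.add_parser("add", help=pkg_add.__doc__)
add_parser.add_argument("packages", nargs="+")

list_parser = sp.add_parser("list", help=pkg_list.__doc__)
list_parser.add_argument("rev", default="HEAD", nargs="?")

# Subcommand dispatch: index a dict of handlers rather than chaining if/elif.
action = {"add": pkg_add, "list": pkg_list}

args = parser.parse_args(["list"])
action[args.pkg_command](args)  # prints: listing packages at HEAD
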
diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py
index 97111a4a8a..2c1b146f6a 100644
--- a/lib/spack/spack/cmd/providers.py
+++ b/lib/spack/spack/cmd/providers.py
@@ -18,12 +18,11 @@ level = "long"
def setup_parser(subparser):
- subparser.epilog = 'If called without argument returns ' \
- 'the list of all valid virtual packages'
+ subparser.epilog = (
+ "If called without argument returns " "the list of all valid virtual packages"
+ )
subparser.add_argument(
- 'virtual_package',
- nargs='*',
- help='find packages that provide this virtual package'
+ "virtual_package", nargs="*", help="find packages that provide this virtual package"
)
@@ -33,7 +32,7 @@ def providers(parser, args):
buffer = six.StringIO()
isatty = sys.stdout.isatty()
if isatty:
- buffer.write('Virtual packages:\n')
+ buffer.write("Virtual packages:\n")
colify.colify(valid_virtuals, output=buffer, tty=isatty, indent=4)
valid_virtuals_str = buffer.getvalue()
@@ -46,12 +45,10 @@ def providers(parser, args):
specs = spack.cmd.parse_specs(args.virtual_package)
# Check prerequisites
- non_virtual = [
- str(s) for s in specs if not s.virtual or s.name not in valid_virtuals
- ]
+ non_virtual = [str(s) for s in specs if not s.virtual or s.name not in valid_virtuals]
if non_virtual:
- msg = 'non-virtual specs cannot be part of the query '
- msg += '[{0}]\n'.format(', '.join(non_virtual))
+ msg = "non-virtual specs cannot be part of the query "
+ msg += "[{0}]\n".format(", ".join(non_virtual))
msg += valid_virtuals_str
raise ValueError(msg)
@@ -60,4 +57,4 @@ def providers(parser, args):
if sys.stdout.isatty():
print("{0}:".format(spec))
spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
- print('')
+ print("")
diff --git a/lib/spack/spack/cmd/pydoc.py b/lib/spack/spack/cmd/pydoc.py
index 020e1c9919..c6922a23db 100644
--- a/lib/spack/spack/cmd/pydoc.py
+++ b/lib/spack/spack/cmd/pydoc.py
@@ -10,7 +10,7 @@ level = "long"
def setup_parser(subparser):
- subparser.add_argument('entity', help="run pydoc help on entity")
+ subparser.add_argument("entity", help="run pydoc help on entity")
def pydoc(parser, args):
diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py
index e43996fb47..028184b817 100644
--- a/lib/spack/spack/cmd/python.py
+++ b/lib/spack/spack/cmd/python.py
@@ -23,27 +23,37 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-V', '--version', action='store_true', dest='python_version',
- help='print the Python version number and exit')
+ "-V",
+ "--version",
+ action="store_true",
+ dest="python_version",
+ help="print the Python version number and exit",
+ )
+ subparser.add_argument("-c", dest="python_command", help="command to execute")
subparser.add_argument(
- '-c', dest='python_command', help='command to execute')
+ "-i",
+ dest="python_interpreter",
+ help="python interpreter",
+ choices=["python", "ipython"],
+ default="python",
+ )
subparser.add_argument(
- '-i', dest='python_interpreter', help='python interpreter',
- choices=['python', 'ipython'], default='python')
+ "-m", dest="module", action="store", help="run library module as a script"
+ )
subparser.add_argument(
- '-m', dest='module', action='store',
- help='run library module as a script')
+ "--path",
+ action="store_true",
+ dest="show_path",
+ help="show path to python interpreter that spack uses",
+ )
subparser.add_argument(
- '--path', action='store_true', dest='show_path',
- help='show path to python interpreter that spack uses')
- subparser.add_argument(
- 'python_args', nargs=argparse.REMAINDER,
- help="file to run plus arguments")
+ "python_args", nargs=argparse.REMAINDER, help="file to run plus arguments"
+ )
def python(parser, args, unknown_args):
if args.python_version:
- print('Python', platform.python_version())
+ print("Python", platform.python_version())
return
if args.show_path:
@@ -51,7 +61,7 @@ def python(parser, args, unknown_args):
return
if args.module:
- sys.argv = ['spack-python'] + unknown_args + args.python_args
+ sys.argv = ["spack-python"] + unknown_args + args.python_args
runpy.run_module(args.module, run_name="__main__", alter_sys=True)
return
@@ -87,42 +97,49 @@ def ipython_interpreter(args):
if args.python_args:
IPython.start_ipython(argv=args.python_args)
elif args.python_command:
- IPython.start_ipython(argv=['-c', args.python_command])
+ IPython.start_ipython(argv=["-c", args.python_command])
else:
- header = ("Spack version %s\nPython %s, %s %s"
- % (spack.spack_version, platform.python_version(),
- platform.system(), platform.machine()))
+ header = "Spack version %s\nPython %s, %s %s" % (
+ spack.spack_version,
+ platform.python_version(),
+ platform.system(),
+ platform.machine(),
+ )
__name__ = "__main__" # noqa: F841
IPython.embed(module="__main__", header=header)
def python_interpreter(args):
- """A python interpreter is the default interpreter
- """
+ """A python interpreter is the default interpreter"""
# Fake a main python shell by setting __name__ to __main__.
- console = code.InteractiveConsole({'__name__': '__main__',
- 'spack': spack})
+ console = code.InteractiveConsole({"__name__": "__main__", "spack": spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
if os.path.isfile(startup_file):
with open(startup_file) as startup:
- console.runsource(startup.read(), startup_file, 'exec')
+ console.runsource(startup.read(), startup_file, "exec")
if args.python_command:
console.runsource(args.python_command)
elif args.python_args:
sys.argv = args.python_args
with open(args.python_args[0]) as file:
- console.runsource(file.read(), args.python_args[0], 'exec')
+ console.runsource(file.read(), args.python_args[0], "exec")
else:
# Provides readline support, allowing user to use arrow keys
- console.push('import readline')
+ console.push("import readline")
# Provide tabcompletion
- console.push('from rlcompleter import Completer')
- console.push('readline.set_completer(Completer(locals()).complete)')
+ console.push("from rlcompleter import Completer")
+ console.push("readline.set_completer(Completer(locals()).complete)")
console.push('readline.parse_and_bind("tab: complete")')
- console.interact("Spack version %s\nPython %s, %s %s"
- % (spack.spack_version, platform.python_version(),
- platform.system(), platform.machine()))
+ console.interact(
+ "Spack version %s\nPython %s, %s %s"
+ % (
+ spack.spack_version,
+ platform.python_version(),
+ platform.system(),
+ platform.machine(),
+ )
+ )
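
python_interpreter assembles its fake main shell entirely from the standard library: code.InteractiveConsole provides the REPL, and readline/rlcompleter are pushed in as source lines for history and tab completion. A minimal standalone version of the same wiring (no Spack objects in the namespace; the banner text is made up):

import code

console = code.InteractiveConsole({"__name__": "__main__"})

# Pushed as console source lines, mirroring the hunk above: readline enables
# arrow-key history/editing, rlcompleter enables tab completion of locals.
console.push("import readline")
console.push("from rlcompleter import Completer")
console.push("readline.set_completer(Completer(locals()).complete)")
console.push('readline.parse_and_bind("tab: complete")')

console.interact("demo interpreter -- Ctrl-D to exit")
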
diff --git a/lib/spack/spack/cmd/remove.py b/lib/spack/spack/cmd/remove.py
index 9da89fc35c..ae934b3b37 100644
--- a/lib/spack/spack/cmd/remove.py
+++ b/lib/spack/spack/cmd/remove.py
@@ -8,32 +8,36 @@ import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
-description = 'remove specs from an environment'
+description = "remove specs from an environment"
section = "environments"
level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-a', '--all', action='store_true',
- help="remove all specs from (clear) the environment")
- subparser.add_argument('-l', '--list-name',
- dest='list_name', default='specs',
- help="name of the list to remove specs from")
+ "-a", "--all", action="store_true", help="remove all specs from (clear) the environment"
+ )
subparser.add_argument(
- '-f', '--force', action='store_true',
- help="remove concretized spec (if any) immediately")
- arguments.add_common_arguments(subparser, ['specs'])
+ "-l",
+ "--list-name",
+ dest="list_name",
+ default="specs",
+ help="name of the list to remove specs from",
+ )
+ subparser.add_argument(
+ "-f", "--force", action="store_true", help="remove concretized spec (if any) immediately"
+ )
+ arguments.add_common_arguments(subparser, ["specs"])
def remove(parser, args):
- env = spack.cmd.require_active_env(cmd_name='remove')
+ env = spack.cmd.require_active_env(cmd_name="remove")
with env.write_transaction():
if args.all:
env.clear()
else:
for spec in spack.cmd.parse_specs(args.specs):
- tty.msg('Removing %s from environment %s' % (spec, env.name))
+ tty.msg("Removing %s from environment %s" % (spec, env.name))
env.remove(spec, args.list_name, force=args.force)
env.write()
diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py
index 5fd6ba463e..904b596a5c 100644
--- a/lib/spack/spack/cmd/repo.py
+++ b/lib/spack/spack/cmd/repo.py
@@ -20,54 +20,59 @@ level = "long"
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="repo_command")
scopes = spack.config.scopes()
scopes_metavar = spack.config.scopes_metavar
# Create
- create_parser = sp.add_parser('create', help=repo_create.__doc__)
+ create_parser = sp.add_parser("create", help=repo_create.__doc__)
+ create_parser.add_argument("directory", help="directory to create the repo in")
create_parser.add_argument(
- 'directory', help="directory to create the repo in")
- create_parser.add_argument(
- 'namespace', help="namespace to identify packages in the repository. "
- "defaults to the directory name", nargs='?')
+ "namespace",
+ help="namespace to identify packages in the repository. " "defaults to the directory name",
+ nargs="?",
+ )
# List
- list_parser = sp.add_parser('list', help=repo_list.__doc__)
+ list_parser = sp.add_parser("list", help=repo_list.__doc__)
list_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_list_scope(),
- help="configuration scope to read from")
+ help="configuration scope to read from",
+ )
# Add
- add_parser = sp.add_parser('add', help=repo_add.__doc__)
- add_parser.add_argument(
- 'path', help="path to a Spack package repository directory")
+ add_parser = sp.add_parser("add", help=repo_add.__doc__)
+ add_parser.add_argument("path", help="path to a Spack package repository directory")
add_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
- help="configuration scope to modify")
+ help="configuration scope to modify",
+ )
# Remove
- remove_parser = sp.add_parser(
- 'remove', help=repo_remove.__doc__, aliases=['rm'])
+ remove_parser = sp.add_parser("remove", help=repo_remove.__doc__, aliases=["rm"])
remove_parser.add_argument(
- 'namespace_or_path',
- help="namespace or path of a Spack package repository")
+ "namespace_or_path", help="namespace or path of a Spack package repository"
+ )
remove_parser.add_argument(
- '--scope', choices=scopes, metavar=scopes_metavar,
+ "--scope",
+ choices=scopes,
+ metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
- help="configuration scope to modify")
+ help="configuration scope to modify",
+ )
def repo_create(args):
"""Create a new package repository."""
- full_path, namespace = spack.repo.create_repo(
- args.directory, args.namespace
- )
+ full_path, namespace = spack.repo.create_repo(args.directory, args.namespace)
tty.msg("Created repo with namespace '%s'." % namespace)
- tty.msg("To register it with spack, run this command:",
- 'spack repo add %s' % full_path)
+ tty.msg("To register it with spack, run this command:", "spack repo add %s" % full_path)
def repo_add(args):
@@ -89,7 +94,7 @@ def repo_add(args):
repo = spack.repo.Repo(canon_path)
# If that succeeds, finally add it to the configuration.
- repos = spack.config.get('repos', scope=args.scope)
+ repos = spack.config.get("repos", scope=args.scope)
if not repos:
repos = []
@@ -97,13 +102,13 @@ def repo_add(args):
tty.die("Repository is already registered with Spack: %s" % path)
repos.insert(0, canon_path)
- spack.config.set('repos', repos, args.scope)
+ spack.config.set("repos", repos, args.scope)
tty.msg("Added repo with namespace '%s'." % repo.namespace)
def repo_remove(args):
"""Remove a repository from Spack's configuration."""
- repos = spack.config.get('repos', scope=args.scope)
+ repos = spack.config.get("repos", scope=args.scope)
namespace_or_path = args.namespace_or_path
# If the argument is a path, remove that repository from config.
@@ -112,7 +117,7 @@ def repo_remove(args):
repo_canon_path = spack.util.path.canonicalize_path(repo_path)
if canon_path == repo_canon_path:
repos.remove(repo_path)
- spack.config.set('repos', repos, args.scope)
+ spack.config.set("repos", repos, args.scope)
tty.msg("Removed repository %s" % repo_path)
return
@@ -122,20 +127,18 @@ def repo_remove(args):
repo = spack.repo.Repo(path)
if repo.namespace == namespace_or_path:
repos.remove(path)
- spack.config.set('repos', repos, args.scope)
- tty.msg("Removed repository %s with namespace '%s'."
- % (repo.root, repo.namespace))
+ spack.config.set("repos", repos, args.scope)
+ tty.msg("Removed repository %s with namespace '%s'." % (repo.root, repo.namespace))
return
except spack.repo.RepoError:
continue
- tty.die("No repository with path or namespace: %s"
- % namespace_or_path)
+ tty.die("No repository with path or namespace: %s" % namespace_or_path)
def repo_list(args):
"""Show registered repositories and their namespaces."""
- roots = spack.config.get('repos', scope=args.scope)
+ roots = spack.config.get("repos", scope=args.scope)
repos = []
for r in roots:
try:
@@ -158,9 +161,11 @@ def repo_list(args):
def repo(parser, args):
- action = {'create': repo_create,
- 'list': repo_list,
- 'add': repo_add,
- 'remove': repo_remove,
- 'rm': repo_remove}
+ action = {
+ "create": repo_create,
+ "list": repo_list,
+ "add": repo_add,
+ "remove": repo_remove,
+ "rm": repo_remove,
+ }
action[args.repo_command](args)
diff --git a/lib/spack/spack/cmd/resource.py b/lib/spack/spack/cmd/resource.py
index 82a2d2c687..8d6a3c7485 100644
--- a/lib/spack/spack/cmd/resource.py
+++ b/lib/spack/spack/cmd/resource.py
@@ -18,15 +18,15 @@ level = "long"
def setup_parser(subparser):
- sp = subparser.add_subparsers(
- metavar='SUBCOMMAND', dest='resource_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="resource_command")
- list_parser = sp.add_parser('list', help=resource_list.__doc__)
- list_parser.add_argument('--only-hashes', action='store_true',
- help='only print sha256 hashes of resources')
+ list_parser = sp.add_parser("list", help=resource_list.__doc__)
+ list_parser.add_argument(
+ "--only-hashes", action="store_true", help="only print sha256 hashes of resources"
+ )
- show_parser = sp.add_parser('show', help=resource_show.__doc__)
- show_parser.add_argument('hash', action='store')
+ show_parser = sp.add_parser("show", help=resource_show.__doc__)
+ show_parser.add_argument("hash", action="store")
def _show_patch(sha256):
@@ -37,24 +37,23 @@ def _show_patch(sha256):
if not data:
candidates = [k for k in patches if k.startswith(sha256)]
if not candidates:
- tty.die('no such resource: %s' % sha256)
+ tty.die("no such resource: %s" % sha256)
elif len(candidates) > 1:
- tty.die('%s: ambiguous hash prefix. Options are:',
- *candidates)
+ tty.die("%s: ambiguous hash prefix. Options are:", *candidates)
sha256 = candidates[0]
data = patches.get(sha256)
- color.cprint('@c{%s}' % sha256)
+ color.cprint("@c{%s}" % sha256)
for package, rec in data.items():
- owner = rec['owner']
+ owner = rec["owner"]
- if 'relative_path' in rec:
+ if "relative_path" in rec:
pkg_dir = spack.repo.path.get_pkg_class(owner).package_dir
- path = os.path.join(pkg_dir, rec['relative_path'])
+ path = os.path.join(pkg_dir, rec["relative_path"])
print(" path: %s" % path)
else:
- print(" url: %s" % rec['url'])
+ print(" url: %s" % rec["url"])
print(" applies to: %s" % package)
if owner != package:
@@ -77,8 +76,5 @@ def resource_show(args):
def resource(parser, args):
- action = {
- 'list': resource_list,
- 'show': resource_show
- }
+ action = {"list": resource_list, "show": resource_show}
action[args.resource_command](args)
diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py
index 278d6ca217..eeac460fe7 100644
--- a/lib/spack/spack/cmd/restage.py
+++ b/lib/spack/spack/cmd/restage.py
@@ -15,7 +15,7 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(subparser, ['specs'])
+ arguments.add_common_arguments(subparser, ["specs"])
def restage(parser, args):
diff --git a/lib/spack/spack/cmd/solve.py b/lib/spack/spack/cmd/solve.py
index 29f96ba485..48f0226022 100644
--- a/lib/spack/spack/cmd/solve.py
+++ b/lib/spack/spack/cmd/solve.py
@@ -22,56 +22,82 @@ import spack.package_base
import spack.solver.asp as asp
description = "concretize a specs using an ASP solver"
-section = 'developer'
-level = 'long'
+section = "developer"
+level = "long"
#: output options
-show_options = ('asp', 'opt', 'output', 'solutions')
+show_options = ("asp", "opt", "output", "solutions")
def setup_parser(subparser):
# Solver arguments
subparser.add_argument(
- '--show', action='store', default='opt,solutions',
+ "--show",
+ action="store",
+ default="opt,solutions",
help="select outputs: comma-separated list of: \n"
" asp asp program text\n"
" opt optimization criteria for best model\n"
" output raw clingo output\n"
" solutions models found by asp program\n"
- " all all of the above"
+ " all all of the above",
)
subparser.add_argument(
- '--models', action='store', type=int, default=0,
- help="number of solutions to search (default 0 for all)")
+ "--models",
+ action="store",
+ type=int,
+ default=0,
+ help="number of solutions to search (default 0 for all)",
+ )
# Below are arguments w.r.t. spec display (like spack spec)
- arguments.add_common_arguments(
- subparser, ['long', 'very_long', 'install_status']
- )
+ arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
subparser.add_argument(
- '-y', '--yaml', action='store_const', dest='format', default=None,
- const='yaml', help='print concrete spec as yaml')
- subparser.add_argument(
- '-j', '--json', action='store_const', dest='format', default=None,
- const='json', help='print concrete spec as json')
+ "-y",
+ "--yaml",
+ action="store_const",
+ dest="format",
+ default=None,
+ const="yaml",
+ help="print concrete spec as yaml",
+ )
subparser.add_argument(
- '-c', '--cover', action='store',
- default='nodes', choices=['nodes', 'edges', 'paths'],
- help='how extensively to traverse the DAG (default: nodes)')
+ "-j",
+ "--json",
+ action="store_const",
+ dest="format",
+ default=None,
+ const="json",
+ help="print concrete spec as json",
+ )
subparser.add_argument(
- '-N', '--namespaces', action='store_true', default=False,
- help='show fully qualified package names')
+ "-c",
+ "--cover",
+ action="store",
+ default="nodes",
+ choices=["nodes", "edges", "paths"],
+ help="how extensively to traverse the DAG (default: nodes)",
+ )
subparser.add_argument(
- '-t', '--types', action='store_true', default=False,
- help='show dependency types')
+ "-N",
+ "--namespaces",
+ action="store_true",
+ default=False,
+ help="show fully qualified package names",
+ )
subparser.add_argument(
- '--timers', action='store_true', default=False,
- help='print out timers for different solve phases')
+ "-t", "--types", action="store_true", default=False, help="show dependency types"
+ )
subparser.add_argument(
- '--stats', action='store_true', default=False,
- help='print out statistics from clingo')
+ "--timers",
+ action="store_true",
+ default=False,
+ help="print out timers for different solve phases",
+ )
subparser.add_argument(
- 'specs', nargs=argparse.REMAINDER, help="specs of packages")
+ "--stats", action="store_true", default=False, help="print out statistics from clingo"
+ )
+ subparser.add_argument("specs", nargs=argparse.REMAINDER, help="specs of packages")
spack.cmd.common.arguments.add_concretizer_args(subparser)
@@ -84,14 +110,13 @@ def _process_result(result, show, required_format, kwargs):
tty.msg("Optimization Criteria:")
maxlen = max(len(s[2]) for s in result.criteria)
- color.cprint(
- "@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " ")
- )
+ color.cprint("@*{ Priority Criterion %sInstalled ToBuild}" % ((maxlen - 10) * " "))
fmt = " @K{%%-8d} %%-%ds%%9s %%7s" % maxlen
for i, (installed_cost, build_cost, name) in enumerate(result.criteria, 1):
color.cprint(
- fmt % (
+ fmt
+ % (
i,
name,
"-" if build_cost is None else installed_cost,
@@ -101,17 +126,16 @@ def _process_result(result, show, required_format, kwargs):
print()
# dump the solutions as concretized specs
- if 'solutions' in show:
+ if "solutions" in show:
for spec in result.specs:
# With -y, just print YAML to output.
- if required_format == 'yaml':
+ if required_format == "yaml":
# use write because to_yaml already has a newline.
sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
- elif required_format == 'json':
+ elif required_format == "json":
sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
- sys.stdout.write(
- spec.tree(color=sys.stdout.isatty(), **kwargs))
+ sys.stdout.write(spec.tree(color=sys.stdout.isatty(), **kwargs))
print()
if result.unsolved_specs and "solutions" in show:
@@ -123,27 +147,28 @@ def _process_result(result, show, required_format, kwargs):
def solve(parser, args):
# these are the same options as `spack spec`
- name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'
- fmt = '{@version}{%compiler}{compiler_flags}{variants}{arch=architecture}'
+ name_fmt = "{namespace}.{name}" if args.namespaces else "{name}"
+ fmt = "{@version}{%compiler}{compiler_flags}{variants}{arch=architecture}"
install_status_fn = spack.spec.Spec.install_status
kwargs = {
- 'cover': args.cover,
- 'format': name_fmt + fmt,
- 'hashlen': None if args.very_long else 7,
- 'show_types': args.types,
- 'status_fn': install_status_fn if args.install_status else None,
- 'hashes': args.long or args.very_long
+ "cover": args.cover,
+ "format": name_fmt + fmt,
+ "hashlen": None if args.very_long else 7,
+ "show_types": args.types,
+ "status_fn": install_status_fn if args.install_status else None,
+ "hashes": args.long or args.very_long,
}
# process output options
- show = re.split(r'\s*,\s*', args.show)
- if 'all' in show:
+ show = re.split(r"\s*,\s*", args.show)
+ if "all" in show:
show = show_options
for d in show:
if d not in show_options:
raise ValueError(
"Invalid option for '--show': '%s'\nchoose from: (%s)"
- % (d, ', '.join(show_options + ('all',))))
+ % (d, ", ".join(show_options + ("all",)))
+ )
models = args.models
if models < 0:
@@ -162,9 +187,9 @@ def solve(parser, args):
solver = asp.Solver()
output = sys.stdout if "asp" in show else None
- setup_only = set(show) == {'asp'}
- unify = spack.config.get('concretizer:unify')
- if unify != 'when_possible':
+ setup_only = set(show) == {"asp"}
+ unify = spack.config.get("concretizer:unify")
+ if unify != "when_possible":
# set up solver parameters
# Note: reuse and other concretizer prefs are passed as configuration
result = solver.solve(
@@ -173,14 +198,16 @@ def solve(parser, args):
models=models,
timers=args.timers,
stats=args.stats,
- setup_only=setup_only
+ setup_only=setup_only,
)
if not setup_only:
_process_result(result, show, required_format, kwargs)
else:
- for idx, result in enumerate(solver.solve_in_rounds(
+ for idx, result in enumerate(
+ solver.solve_in_rounds(
specs, out=output, models=models, timers=args.timers, stats=args.stats
- )):
+ )
+ ):
if "solutions" in show:
tty.msg("ROUND {0}".format(idx))
tty.msg("")
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index 1e5b213fe4..b701fbc83c 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -31,44 +31,65 @@ specs are used instead
for further documentation regarding the spec syntax, see:
spack help --spec
"""
- arguments.add_common_arguments(
- subparser, ['long', 'very_long', 'install_status']
- )
+ arguments.add_common_arguments(subparser, ["long", "very_long", "install_status"])
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
- '-y', '--yaml', action='store_const', dest='format', default=None,
- const='yaml', help='print concrete spec as YAML')
+ "-y",
+ "--yaml",
+ action="store_const",
+ dest="format",
+ default=None,
+ const="yaml",
+ help="print concrete spec as YAML",
+ )
format_group.add_argument(
- '-j', '--json', action='store_const', dest='format', default=None,
- const='json', help='print concrete spec as JSON')
+ "-j",
+ "--json",
+ action="store_const",
+ dest="format",
+ default=None,
+ const="json",
+ help="print concrete spec as JSON",
+ )
format_group.add_argument(
- '--format', action='store', default=None,
- help='print concrete spec with the specified format string')
+ "--format",
+ action="store",
+ default=None,
+ help="print concrete spec with the specified format string",
+ )
subparser.add_argument(
- '-c', '--cover', action='store',
- default='nodes', choices=['nodes', 'edges', 'paths'],
- help='how extensively to traverse the DAG (default: nodes)')
+ "-c",
+ "--cover",
+ action="store",
+ default="nodes",
+ choices=["nodes", "edges", "paths"],
+ help="how extensively to traverse the DAG (default: nodes)",
+ )
subparser.add_argument(
- '-N', '--namespaces', action='store_true', default=False,
- help='show fully qualified package names')
+ "-N",
+ "--namespaces",
+ action="store_true",
+ default=False,
+ help="show fully qualified package names",
+ )
subparser.add_argument(
- '-t', '--types', action='store_true', default=False,
- help='show dependency types')
- arguments.add_common_arguments(subparser, ['specs'])
+ "-t", "--types", action="store_true", default=False, help="show dependency types"
+ )
+ arguments.add_common_arguments(subparser, ["specs"])
spack.cmd.common.arguments.add_concretizer_args(subparser)
def spec(parser, args):
- name_fmt = '{namespace}.{name}' if args.namespaces else '{name}'
- fmt = '{@version}{%compiler}{compiler_flags}{variants}{arch=architecture}'
+ name_fmt = "{namespace}.{name}" if args.namespaces else "{name}"
+ fmt = "{@version}{%compiler}{compiler_flags}{variants}{arch=architecture}"
install_status_fn = spack.spec.Spec.install_status
tree_kwargs = {
- 'cover': args.cover,
- 'format': name_fmt + fmt,
- 'hashlen': None if args.very_long else 7,
- 'show_types': args.types,
- 'status_fn': install_status_fn if args.install_status else None
+ "cover": args.cover,
+ "format": name_fmt + fmt,
+ "hashlen": None if args.very_long else 7,
+ "show_types": args.types,
+ "status_fn": install_status_fn if args.install_status else None,
}
# use a read transaction if we are getting install status for every
@@ -93,10 +114,10 @@ def spec(parser, args):
for (input, output) in specs:
# With -y, just print YAML to output.
if args.format:
- if args.format == 'yaml':
+ if args.format == "yaml":
# use write because to_yaml already has a newline.
sys.stdout.write(output.to_yaml(hash=ht.dag_hash))
- elif args.format == 'json':
+ elif args.format == "json":
print(output.to_json(hash=ht.dag_hash))
else:
print(output.format(args.format))
@@ -107,12 +128,12 @@ def spec(parser, args):
# repeated output. This happens because parse_specs outputs concrete
# specs for `/hash` inputs.
if not input.concrete:
- tree_kwargs['hashes'] = False # Always False for input spec
+ tree_kwargs["hashes"] = False # Always False for input spec
print("Input spec")
print("--------------------------------")
print(input.tree(**tree_kwargs))
print("Concretized")
print("--------------------------------")
- tree_kwargs['hashes'] = args.long or args.very_long
+ tree_kwargs["hashes"] = args.long or args.very_long
print(output.tree(**tree_kwargs))
diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py
index 474efe375c..ef04afcb80 100644
--- a/lib/spack/spack/cmd/stage.py
+++ b/lib/spack/spack/cmd/stage.py
@@ -19,11 +19,10 @@ level = "long"
def setup_parser(subparser):
- arguments.add_common_arguments(
- subparser, ['no_checksum', 'deprecated', 'specs'])
+ arguments.add_common_arguments(subparser, ["no_checksum", "deprecated", "specs"])
subparser.add_argument(
- '-p', '--path', dest='path',
- help="path to stage package, does not add to spack tree")
+ "-p", "--path", dest="path", help="path to stage package, does not add to spack tree"
+ )
arguments.add_concretizer_args(subparser)
@@ -35,17 +34,16 @@ def stage(parser, args):
for spec in env.specs_by_hash.values():
for dep in spec.traverse():
dep.package.do_stage()
- tty.msg("Staged {0} in {1}".format(dep.package.name,
- dep.package.stage.path))
+ tty.msg("Staged {0} in {1}".format(dep.package.name, dep.package.stage.path))
return
else:
tty.die("`spack stage` requires a spec or an active environment")
if args.no_checksum:
- spack.config.set('config:checksum', False, scope='command_line')
+ spack.config.set("config:checksum", False, scope="command_line")
if args.deprecated:
- spack.config.set('config:deprecated', True, scope='command_line')
+ spack.config.set("config:deprecated", True, scope="command_line")
specs = spack.cmd.parse_specs(args.specs, concretize=False)
diff --git a/lib/spack/spack/cmd/tags.py b/lib/spack/spack/cmd/tags.py
index f7189f5b92..6ba9717784 100644
--- a/lib/spack/spack/cmd/tags.py
+++ b/lib/spack/spack/cmd/tags.py
@@ -25,7 +25,7 @@ def report_tags(category, tags):
if isatty:
num = len(tags)
- fmt = '{0} package tag'.format(category)
+ fmt = "{0} package tag".format(category)
buffer.write("{0}:\n".format(spack.util.string.plural(num, fmt)))
if tags:
@@ -43,18 +43,20 @@ def setup_parser(subparser):
"'--all' at the same time."
)
subparser.add_argument(
- '-i', '--installed', action='store_true', default=False,
- help="show information for installed packages only"
+ "-i",
+ "--installed",
+ action="store_true",
+ default=False,
+ help="show information for installed packages only",
)
subparser.add_argument(
- '-a', '--all', action='store_true', default=False,
- help="show packages for all available tags"
- )
- subparser.add_argument(
- 'tag',
- nargs='*',
- help="show packages with the specified tag"
+ "-a",
+ "--all",
+ action="store_true",
+ default=False,
+ help="show packages for all available tags",
)
+ subparser.add_argument("tag", nargs="*", help="show packages with the specified tag")
def tags(parser, args):
@@ -91,7 +93,7 @@ def tags(parser, args):
tags = args.tag if args.tag else available_tags
tag_pkgs = spack.tag.packages_with_tags(tags, args.installed, False)
- missing = 'No installed packages' if args.installed else 'None'
+ missing = "No installed packages" if args.installed else "None"
for tag in sorted(tag_pkgs):
    # TODO: Remove the sorting once we're sure no one has an old
# TODO: tag cache since it can accumulate duplicates.
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index 51d23da429..ec062f259a 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -31,113 +31,123 @@ level = "long"
def first_line(docstring):
"""Return the first line of the docstring."""
- return docstring.split('\n')[0]
+ return docstring.split("\n")[0]
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='test_command')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="test_command")
# Run
- run_parser = sp.add_parser('run', description=test_run.__doc__,
- help=first_line(test_run.__doc__))
+ run_parser = sp.add_parser(
+ "run", description=test_run.__doc__, help=first_line(test_run.__doc__)
+ )
alias_help_msg = "Provide an alias for this test-suite"
alias_help_msg += " for subsequent access."
- run_parser.add_argument('--alias', help=alias_help_msg)
+ run_parser.add_argument("--alias", help=alias_help_msg)
run_parser.add_argument(
- '--fail-fast', action='store_true',
- help="Stop tests for each package after the first failure."
+ "--fail-fast",
+ action="store_true",
+ help="Stop tests for each package after the first failure.",
)
run_parser.add_argument(
- '--fail-first', action='store_true',
- help="Stop after the first failed package."
+ "--fail-first", action="store_true", help="Stop after the first failed package."
)
run_parser.add_argument(
- '--externals', action='store_true',
- help="Test packages that are externally installed."
+ "--externals", action="store_true", help="Test packages that are externally installed."
)
run_parser.add_argument(
- '--keep-stage',
- action='store_true',
- help='Keep testing directory for debugging'
+ "--keep-stage", action="store_true", help="Keep testing directory for debugging"
)
run_parser.add_argument(
- '--log-format',
+ "--log-format",
default=None,
choices=spack.report.valid_formats,
- help="format to be used for log files"
+ help="format to be used for log files",
)
run_parser.add_argument(
- '--log-file',
+ "--log-file",
default=None,
- help="filename for the log file. if not passed a default will be used"
+ help="filename for the log file. if not passed a default will be used",
)
arguments.add_cdash_args(run_parser, False)
run_parser.add_argument(
- '--help-cdash',
- action='store_true',
- help="Show usage instructions for CDash reporting"
+ "--help-cdash", action="store_true", help="Show usage instructions for CDash reporting"
)
cd_group = run_parser.add_mutually_exclusive_group()
- arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
+ arguments.add_common_arguments(cd_group, ["clean", "dirty"])
- arguments.add_common_arguments(run_parser, ['installed_specs'])
+ arguments.add_common_arguments(run_parser, ["installed_specs"])
# List
- list_parser = sp.add_parser('list', description=test_list.__doc__,
- help=first_line(test_list.__doc__))
- list_parser.add_argument(
- "-a", "--all", action="store_true", dest="list_all",
- help="list all packages with tests (not just installed)")
-
+ list_parser = sp.add_parser(
+ "list", description=test_list.__doc__, help=first_line(test_list.__doc__)
+ )
list_parser.add_argument(
- 'tag',
- nargs='*',
- help="limit packages to those with all listed tags"
+ "-a",
+ "--all",
+ action="store_true",
+ dest="list_all",
+ help="list all packages with tests (not just installed)",
)
+ list_parser.add_argument("tag", nargs="*", help="limit packages to those with all listed tags")
+
# Find
- find_parser = sp.add_parser('find', description=test_find.__doc__,
- help=first_line(test_find.__doc__))
+ find_parser = sp.add_parser(
+ "find", description=test_find.__doc__, help=first_line(test_find.__doc__)
+ )
find_parser.add_argument(
- 'filter', nargs=argparse.REMAINDER,
- help='optional case-insensitive glob patterns to filter results.')
+ "filter",
+ nargs=argparse.REMAINDER,
+ help="optional case-insensitive glob patterns to filter results.",
+ )
# Status
- status_parser = sp.add_parser('status', description=test_status.__doc__,
- help=first_line(test_status.__doc__))
+ status_parser = sp.add_parser(
+ "status", description=test_status.__doc__, help=first_line(test_status.__doc__)
+ )
status_parser.add_argument(
- 'names', nargs=argparse.REMAINDER,
- help="Test suites for which to print status")
+ "names", nargs=argparse.REMAINDER, help="Test suites for which to print status"
+ )
# Results
- results_parser = sp.add_parser('results', description=test_results.__doc__,
- help=first_line(test_results.__doc__))
+ results_parser = sp.add_parser(
+ "results", description=test_results.__doc__, help=first_line(test_results.__doc__)
+ )
results_parser.add_argument(
- '-l', '--logs', action='store_true',
- help="print the test log for each matching package")
+ "-l", "--logs", action="store_true", help="print the test log for each matching package"
+ )
results_parser.add_argument(
- '-f', '--failed', action='store_true',
- help="only show results for failed tests of matching packages")
+ "-f",
+ "--failed",
+ action="store_true",
+ help="only show results for failed tests of matching packages",
+ )
results_parser.add_argument(
- 'names', nargs=argparse.REMAINDER,
- metavar='[name(s)] [-- installed_specs]...',
- help="suite names and installed package constraints")
- results_parser.epilog = 'Test results will be filtered by space-' \
- 'separated suite name(s) and installed\nspecs when provided. '\
- 'If names are provided, then only results for those test\nsuites '\
- 'will be shown. If installed specs are provided, then ony results'\
- '\nmatching those specs will be shown.'
+ "names",
+ nargs=argparse.REMAINDER,
+ metavar="[name(s)] [-- installed_specs]...",
+ help="suite names and installed package constraints",
+ )
+ results_parser.epilog = (
+ "Test results will be filtered by space-"
+ "separated suite name(s) and installed\nspecs when provided. "
+ "If names are provided, then only results for those test\nsuites "
+        "will be shown. If installed specs are provided, then only results"
+ "\nmatching those specs will be shown."
+ )
# Remove
- remove_parser = sp.add_parser('remove', description=test_remove.__doc__,
- help=first_line(test_remove.__doc__))
- arguments.add_common_arguments(remove_parser, ['yes_to_all'])
+ remove_parser = sp.add_parser(
+ "remove", description=test_remove.__doc__, help=first_line(test_remove.__doc__)
+ )
+ arguments.add_common_arguments(remove_parser, ["yes_to_all"])
remove_parser.add_argument(
- 'names', nargs=argparse.REMAINDER,
- help="Test suites to remove from test stage")
+ "names", nargs=argparse.REMAINDER, help="Test suites to remove from test stage"
+ )
def test_run(args):
@@ -149,25 +159,27 @@ def test_run(args):
if args.alias:
suites = spack.install_test.get_named_test_suites(args.alias)
if suites:
- tty.die('Test suite "{0}" already exists. Try another alias.'
- .format(args.alias))
+ tty.die('Test suite "{0}" already exists. Try another alias.'.format(args.alias))
# cdash help option
if args.help_cdash:
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
- epilog=textwrap.dedent('''\
+ epilog=textwrap.dedent(
+ """\
environment variables:
SPACK_CDASH_AUTH_TOKEN
authentication token to present to CDash
- '''))
+ """
+ ),
+ )
arguments.add_cdash_args(parser, True)
parser.print_help()
return
# set config option for fail-fast
if args.fail_fast:
- spack.config.set('config:fail_fast', True, scope='command_line')
+ spack.config.set("config:fail_fast", True, scope="command_line")
# Get specs to test
env = ev.active_environment()
@@ -187,9 +199,10 @@ environment variables:
tty.msg("Spack test %s" % test_suite.name)
# Set up reporter
- setattr(args, 'package', [s.format() for s in test_suite.specs])
+ setattr(args, "package", [s.format() for s in test_suite.specs])
reporter = spack.report.collect_info(
- spack.package_base.PackageBase, 'do_test', args.log_format, args)
+ spack.package_base.PackageBase, "do_test", args.log_format, args
+ )
if not reporter.filename:
if args.log_file:
if os.path.isabs(args.log_file):
@@ -198,27 +211,27 @@ environment variables:
log_dir = os.getcwd()
log_file = os.path.join(log_dir, args.log_file)
else:
- log_file = os.path.join(
- os.getcwd(),
- 'test-%s' % test_suite.name)
+ log_file = os.path.join(os.getcwd(), "test-%s" % test_suite.name)
reporter.filename = log_file
reporter.specs = specs_to_test
- with reporter('test', test_suite.stage):
- test_suite(remove_directory=not args.keep_stage,
- dirty=args.dirty,
- fail_first=args.fail_first,
- externals=args.externals)
+ with reporter("test", test_suite.stage):
+ test_suite(
+ remove_directory=not args.keep_stage,
+ dirty=args.dirty,
+ fail_first=args.fail_first,
+ externals=args.externals,
+ )
def test_list(args):
"""List installed packages with available tests."""
- tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag \
- else set()
+ tagged = set(spack.repo.path.packages_with_tags(*args.tag)) if args.tag else set()
def has_test_and_tags(pkg_class):
- return spack.package_base.has_test_method(pkg_class) and \
- (not args.tag or pkg_class.name in tagged)
+ return spack.package_base.has_test_method(pkg_class) and (
+ not args.tag or pkg_class.name in tagged
+ )
if args.list_all:
report_packages = [
@@ -228,9 +241,8 @@ def test_list(args):
]
if sys.stdout.isatty():
- filtered = ' tagged' if args.tag else ''
- tty.msg("{0}{1} packages with tests.".
- format(len(report_packages), filtered))
+ filtered = " tagged" if args.tag else ""
+ tty.msg("{0}{1} packages with tests.".format(len(report_packages), filtered))
colify.colify(report_packages)
return
@@ -254,24 +266,28 @@ def test_find(args): # TODO: merge with status (noargs)
# Filter tests by filter argument
if args.filter:
+
def create_filter(f):
- raw = fnmatch.translate('f' if '*' in f or '?' in f
- else '*' + f + '*')
+            raw = fnmatch.translate(f if "*" in f or "?" in f else "*" + f + "*")
return re.compile(raw, flags=re.IGNORECASE)
+
filters = [create_filter(f) for f in args.filter]
def match(t, f):
return f.match(t)
- test_suites = [t for t in test_suites
- if any(match(t.alias, f) for f in filters) and
- os.path.isdir(t.stage)]
+
+ test_suites = [
+ t
+ for t in test_suites
+ if any(match(t.alias, f) for f in filters) and os.path.isdir(t.stage)
+ ]
names = [t.name for t in test_suites]
if names:
# TODO: Make these specify results vs active
msg = "Spack test results available for the following tests:\n"
- msg += " %s\n" % ' '.join(names)
+ msg += " %s\n" % " ".join(names)
msg += " Run `spack test remove` to remove all tests"
tty.msg(msg)
else:
@@ -315,42 +331,38 @@ def _report_suite_results(test_suite, args, constraints):
for s in spack.store.db.query(spec, installed=True):
specs[s.dag_hash()] = s
specs = sorted(specs.values())
- test_specs = dict((test_suite.test_pkg_id(s), s) for s in
- test_suite.specs if s in specs)
+ test_specs = dict((test_suite.test_pkg_id(s), s) for s in test_suite.specs if s in specs)
else:
- test_specs = dict((test_suite.test_pkg_id(s), s) for s in
- test_suite.specs)
+ test_specs = dict((test_suite.test_pkg_id(s), s) for s in test_suite.specs)
if not test_specs:
return
if os.path.exists(test_suite.results_file):
- results_desc = 'Failing results' if args.failed else 'Results'
- matching = ", spec matching '{0}'".format(' '.join(constraints)) \
- if constraints else ''
- tty.msg("{0} for test suite '{1}'{2}:"
- .format(results_desc, test_suite.name, matching))
+ results_desc = "Failing results" if args.failed else "Results"
+ matching = ", spec matching '{0}'".format(" ".join(constraints)) if constraints else ""
+ tty.msg("{0} for test suite '{1}'{2}:".format(results_desc, test_suite.name, matching))
results = {}
- with open(test_suite.results_file, 'r') as f:
+ with open(test_suite.results_file, "r") as f:
for line in f:
pkg_id, status = line.split()
results[pkg_id] = status
- tty.msg('test specs:')
+ tty.msg("test specs:")
failed, skipped, untested = 0, 0, 0
for pkg_id in test_specs:
if pkg_id in results:
status = results[pkg_id]
- if status == 'FAILED':
+ if status == "FAILED":
failed += 1
- elif status == 'NO-TESTS':
+ elif status == "NO-TESTS":
untested += 1
- elif status == 'SKIPPED':
+ elif status == "SKIPPED":
skipped += 1
- if args.failed and status != 'FAILED':
+ if args.failed and status != "FAILED":
continue
msg = " {0} {1}".format(pkg_id, status)
@@ -358,12 +370,11 @@ def _report_suite_results(test_suite, args, constraints):
spec = test_specs[pkg_id]
log_file = test_suite.log_file_for_spec(spec)
if os.path.isfile(log_file):
- with open(log_file, 'r') as f:
- msg += '\n{0}'.format(''.join(f.readlines()))
+ with open(log_file, "r") as f:
+ msg += "\n{0}".format("".join(f.readlines()))
tty.msg(msg)
- spack.install_test.write_test_summary(
- failed, skipped, untested, len(test_specs))
+ spack.install_test.write_test_summary(failed, skipped, untested, len(test_specs))
else:
msg = "Test %s has no results.\n" % test_suite.name
msg += " Check if it is running with "
@@ -375,9 +386,9 @@ def test_results(args):
"""Get the results from Spack test suite(s) (default all)."""
if args.names:
try:
- sep_index = args.names.index('--')
+ sep_index = args.names.index("--")
names = args.names[:sep_index]
- constraints = args.names[sep_index + 1:]
+ constraints = args.names[sep_index + 1 :]
except ValueError:
names = args.names
constraints = None
@@ -385,12 +396,10 @@ def test_results(args):
names, constraints = None, None
if names:
- test_suites = [spack.install_test.get_test_suite(name) for name
- in names]
+ test_suites = [spack.install_test.get_test_suite(name) for name in names]
test_suites = list(filter(lambda ts: ts is not None, test_suites))
if not test_suites:
- tty.msg('No test suite(s) found in test stage: {0}'
- .format(', '.join(names)))
+ tty.msg("No test suite(s) found in test stage: {0}".format(", ".join(names)))
else:
test_suites = spack.install_test.get_all_test_suites()
if not test_suites:
@@ -423,12 +432,12 @@ def test_remove(args):
return
if not args.yes_to_all:
- msg = 'The following test suites will be removed:\n\n'
- msg += ' ' + ' '.join(test.name for test in test_suites) + '\n'
+ msg = "The following test suites will be removed:\n\n"
+ msg += " " + " ".join(test.name for test in test_suites) + "\n"
tty.msg(msg)
- answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
+ answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not answer:
- tty.msg('Aborting removal of test suites')
+ tty.msg("Aborting removal of test suites")
return
for test_suite in test_suites:
@@ -436,4 +445,4 @@ def test_remove(args):
def test(parser, args):
- globals()['test_%s' % args.test_command](args)
+ globals()["test_%s" % args.test_command](args)
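
test_find builds one case-insensitive regular expression per --filter argument with fnmatch.translate, wrapping bare words in '*...*' so they behave as substring matches. A standalone sketch of that filtering step (the suite names are invented for the example):

import fnmatch
import re

def create_filter(pattern):
    # Bare words become substring matches; patterns with * or ? are used as-is.
    glob = pattern if "*" in pattern or "?" in pattern else "*" + pattern + "*"
    return re.compile(fnmatch.translate(glob), flags=re.IGNORECASE)

suites = ["hdf5-mpich", "OpenMPI-smoke", "zlib"]
filters = [create_filter(f) for f in ["mpi"]]

matches = [s for s in suites if any(f.match(s) for f in filters)]
print(matches)  # ['hdf5-mpich', 'OpenMPI-smoke']
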
diff --git a/lib/spack/spack/cmd/test_env.py b/lib/spack/spack/cmd/test_env.py
index 0f14366b8e..e5bd02ba83 100644
--- a/lib/spack/spack/cmd/test_env.py
+++ b/lib/spack/spack/cmd/test_env.py
@@ -4,8 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.common.env_utility as env_utility
-description = "run a command in a spec's test environment, " \
- "or dump its environment to screen or file"
+description = (
+ "run a command in a spec's test environment, " "or dump its environment to screen or file"
+)
section = "admin"
level = "long"
@@ -13,4 +14,4 @@ setup_parser = env_utility.setup_parser
def test_env(parser, args):
- env_utility.emulate_env_utility('test-env', 'test', args)
+ env_utility.emulate_env_utility("test-env", "test", args)
diff --git a/lib/spack/spack/cmd/tutorial.py b/lib/spack/spack/cmd/tutorial.py
index c1ad9742e5..33098ac5ca 100644
--- a/lib/spack/spack/cmd/tutorial.py
+++ b/lib/spack/spack/cmd/tutorial.py
@@ -26,7 +26,7 @@ level = "long"
# tutorial configuration parameters
tutorial_branch = "releases/v0.18"
tutorial_mirror = "file:///mirror"
-tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
+tutorial_key = os.path.join(spack.paths.share_path, "keys", "tutorial.pub")
# configs to remove
rm_configs = [
@@ -39,7 +39,7 @@ rm_configs = [
def setup_parser(subparser):
- arguments.add_common_arguments(subparser, ['yes_to_all'])
+ arguments.add_common_arguments(subparser, ["yes_to_all"])
def tutorial(parser, args):
@@ -47,14 +47,18 @@ def tutorial(parser, args):
tty.die("This command requires a git installation of Spack!")
if not args.yes_to_all:
- tty.msg("This command will set up Spack for the tutorial at "
- "https://spack-tutorial.readthedocs.io.",
- "")
- tty.warn("This will modify your Spack configuration by:",
- " - deleting some configuration in ~/.spack",
- " - adding a mirror and trusting its public key",
- " - checking out a particular branch of Spack",
- "")
+ tty.msg(
+ "This command will set up Spack for the tutorial at "
+ "https://spack-tutorial.readthedocs.io.",
+ "",
+ )
+ tty.warn(
+ "This will modify your Spack configuration by:",
+ " - deleting some configuration in ~/.spack",
+ " - adding a mirror and trusting its public key",
+ " - checking out a particular branch of Spack",
+ "",
+ )
if not tty.get_yes_or_no("Are you sure you want to proceed?"):
tty.die("Aborted")
@@ -64,22 +68,21 @@ def tutorial(parser, args):
if os.path.exists(path):
shutil.rmtree(path, ignore_errors=True)
- tty.msg("Ensuring that the tutorial binary mirror is configured:",
- "spack mirror add tutorial %s" % tutorial_mirror)
+ tty.msg(
+ "Ensuring that the tutorial binary mirror is configured:",
+ "spack mirror add tutorial %s" % tutorial_mirror,
+ )
mirror_config = syaml_dict()
mirror_config["tutorial"] = tutorial_mirror
- spack.config.set('mirrors', mirror_config, scope="user")
+ spack.config.set("mirrors", mirror_config, scope="user")
- tty.msg("Ensuring that we trust tutorial binaries",
- "spack gpg trust %s" % tutorial_key)
+ tty.msg("Ensuring that we trust tutorial binaries", "spack gpg trust %s" % tutorial_key)
spack.util.gpg.trust(tutorial_key)
# Note that checkout MUST be last. It changes Spack under our feet.
# If you don't put this last, you'll get import errors for the code
# that follows (exacerbated by the various lazy singletons we use)
- tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(
- *spack.spack_version_info[:2]
- ))
+ tty.msg("Ensuring we're on the releases/v{0}.{1} branch".format(*spack.spack_version_info[:2]))
git = which("git", required=True)
with working_dir(spack.paths.prefix):
git("checkout", tutorial_branch)
diff --git a/lib/spack/spack/cmd/undevelop.py b/lib/spack/spack/cmd/undevelop.py
index 429e9b1685..666af3ee50 100644
--- a/lib/spack/spack/cmd/undevelop.py
+++ b/lib/spack/spack/cmd/undevelop.py
@@ -8,20 +8,20 @@ import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
-description = 'remove specs from an environment'
+description = "remove specs from an environment"
section = "environments"
level = "long"
def setup_parser(subparser):
subparser.add_argument(
- '-a', '--all', action='store_true',
- help="remove all specs from (clear) the environment")
- arguments.add_common_arguments(subparser, ['specs'])
+ "-a", "--all", action="store_true", help="remove all specs from (clear) the environment"
+ )
+ arguments.add_common_arguments(subparser, ["specs"])
def undevelop(parser, args):
- env = spack.cmd.require_active_env(cmd_name='undevelop')
+ env = spack.cmd.require_active_env(cmd_name="undevelop")
if args.all:
specs = env.dev_specs.keys()
@@ -31,8 +31,7 @@ def undevelop(parser, args):
with env.write_transaction():
changed = False
for spec in specs:
- tty.msg('Removing %s from environment %s development specs'
- % (spec, env.name))
+ tty.msg("Removing %s from environment %s development specs" % (spec, env.name))
changed |= env.undevelop(spec)
if changed:
env.write()
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 643be0044b..be4b74c54a 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -32,44 +32,51 @@ error_message = """You can either:
# Arguments for display_specs when we find ambiguity
display_args = {
- 'long': True,
- 'show_flags': False,
- 'variants': False,
- 'indent': 4,
+ "long": True,
+ "show_flags": False,
+ "variants": False,
+ "indent": 4,
}
def setup_parser(subparser):
- epilog_msg = ("Specs to be uninstalled are specified using the spec syntax"
- " (`spack help --spec`) and can be identified by their "
- "hashes. To remove packages that are needed only at build "
- "time and were not explicitly installed see `spack gc -h`."
- "\n\nWhen using the --all option ALL packages matching the "
- "supplied specs will be uninstalled. For instance, "
- "`spack uninstall --all libelf` uninstalls all the versions "
- "of `libelf` currently present in Spack's store. If no spec "
- "is supplied, all installed packages will be uninstalled. "
- "If used in an environment, all packages in the environment "
- "will be uninstalled.")
+ epilog_msg = (
+ "Specs to be uninstalled are specified using the spec syntax"
+ " (`spack help --spec`) and can be identified by their "
+ "hashes. To remove packages that are needed only at build "
+ "time and were not explicitly installed see `spack gc -h`."
+ "\n\nWhen using the --all option ALL packages matching the "
+ "supplied specs will be uninstalled. For instance, "
+ "`spack uninstall --all libelf` uninstalls all the versions "
+ "of `libelf` currently present in Spack's store. If no spec "
+ "is supplied, all installed packages will be uninstalled. "
+ "If used in an environment, all packages in the environment "
+ "will be uninstalled."
+ )
subparser.epilog = epilog_msg
subparser.add_argument(
- '-f', '--force', action='store_true', dest='force',
- help="remove regardless of whether other packages or environments "
- "depend on this one")
+ "-f",
+ "--force",
+ action="store_true",
+ dest="force",
+ help="remove regardless of whether other packages or environments " "depend on this one",
+ )
arguments.add_common_arguments(
- subparser, ['recurse_dependents', 'yes_to_all', 'installed_specs'])
+ subparser, ["recurse_dependents", "yes_to_all", "installed_specs"]
+ )
subparser.add_argument(
- '-a', '--all', action='store_true', dest='all',
- help="remove ALL installed packages that match each supplied spec"
+ "-a",
+ "--all",
+ action="store_true",
+ dest="all",
+ help="remove ALL installed packages that match each supplied spec",
)
subparser.add_argument(
- '--origin', dest='origin',
- help="only remove DB records with the specified origin"
+ "--origin", dest="origin", help="only remove DB records with the specified origin"
)
-def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
- origin=None):
+def find_matching_specs(env, specs, allow_multiple_matches=False, force=False, origin=None):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
@@ -91,15 +98,15 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
for spec in specs:
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
matching = spack.store.db.query_local(
- spec, hashes=hashes, installed=install_query, origin=origin)
+ spec, hashes=hashes, installed=install_query, origin=origin
+ )
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
- tty.error('{0} matches multiple packages:'.format(spec))
- sys.stderr.write('\n')
- spack.cmd.display_specs(matching, output=sys.stderr,
- **display_args)
- sys.stderr.write('\n')
+ tty.error("{0} matches multiple packages:".format(spec))
+ sys.stderr.write("\n")
+ spack.cmd.display_specs(matching, output=sys.stderr, **display_args)
+ sys.stderr.write("\n")
sys.stderr.flush()
has_errors = True
@@ -108,8 +115,8 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
if env:
pkg_type = "packages in environment '%s'" % env.name
else:
- pkg_type = 'installed packages'
- tty.die('{0} does not match any {1}.'.format(spec, pkg_type))
+ pkg_type = "installed packages"
+ tty.die("{0} does not match any {1}.".format(spec, pkg_type))
specs_from_cli.extend(matching)
@@ -242,9 +249,11 @@ def do_uninstall(env, specs, force):
while packages:
ready = [x for x in packages if is_ready(x.spec.dag_hash())]
if not ready:
- msg = 'unexpected error [cannot proceed uninstalling specs with' \
- ' remaining link or run dependents {0}]'
- msg = msg.format(', '.join(x.name for x in packages))
+ msg = (
+ "unexpected error [cannot proceed uninstalling specs with"
+ " remaining link or run dependents {0}]"
+ )
+ msg = msg.format(", ".join(x.name for x in packages))
raise spack.error.SpackError(msg)
packages = [x for x in packages if x not in ready]
@@ -255,8 +264,7 @@ def do_uninstall(env, specs, force):
def get_uninstall_list(args, specs, env):
# Gets the list of installed specs that match the ones give via cli
# args.all takes care of the case where '-a' is given in the cli
- uninstall_list = find_matching_specs(env, specs, args.all, args.force,
- args.origin)
+ uninstall_list = find_matching_specs(env, specs, args.all, args.force, args.origin)
# Takes care of '-R'
active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
@@ -285,29 +293,27 @@ def get_uninstall_list(args, specs, env):
if i > 0:
print()
- spec_format = '{name}{@version}{%compiler}{/hash:7}'
- tty.info("Will not uninstall %s" % spec.cformat(spec_format),
- format='*r')
+ spec_format = "{name}{@version}{%compiler}{/hash:7}"
+ tty.info("Will not uninstall %s" % spec.cformat(spec_format), format="*r")
dependents = active_dpts.get(spec)
if dependents:
- print('The following packages depend on it:')
+ print("The following packages depend on it:")
spack.cmd.display_specs(dependents, **display_args)
if not env:
envs = spec_envs.get(spec)
if envs:
- print('It is used by the following environments:')
+ print("It is used by the following environments:")
colify([e.name for e in envs], indent=4)
msgs = []
if active_dpts:
- msgs.append(
- 'use `spack uninstall --dependents` to remove dependents too')
+ msgs.append("use `spack uninstall --dependents` to remove dependents too")
if spec_envs:
- msgs.append('use `spack env remove` to remove from environments')
+ msgs.append("use `spack env remove` to remove from environments")
print()
- tty.die('There are still dependents.', *msgs)
+ tty.die("There are still dependents.", *msgs)
elif args.dependents:
for spec, lst in active_dpts.items():
@@ -333,7 +339,7 @@ def uninstall_specs(args, specs):
anything_to_do = set(uninstall_list).union(set(remove_list))
if not anything_to_do:
- tty.warn('There are no packages to uninstall.')
+ tty.warn("There are no packages to uninstall.")
return
if not args.yes_to_all:
@@ -357,19 +363,21 @@ def confirm_removal(specs):
Args:
specs (list): specs to be removed
"""
- tty.msg('The following packages will be uninstalled:\n')
+ tty.msg("The following packages will be uninstalled:\n")
spack.cmd.display_specs(specs, **display_args)
- print('')
- answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
+ print("")
+ answer = tty.get_yes_or_no("Do you want to proceed?", default=False)
if not answer:
- tty.msg('Aborting uninstallation')
+ tty.msg("Aborting uninstallation")
sys.exit(0)
def uninstall(parser, args):
if not args.specs and not args.all:
- tty.die('uninstall requires at least one package argument.',
- ' Use `spack uninstall --all` to uninstall ALL packages.')
+ tty.die(
+ "uninstall requires at least one package argument.",
+ " Use `spack uninstall --all` to uninstall ALL packages.",
+ )
# [any] here handles the --all case by forcing all specs to be returned
specs = spack.cmd.parse_specs(args.specs) if args.specs else [any]
diff --git a/lib/spack/spack/cmd/unit_test.py b/lib/spack/spack/cmd/unit_test.py
index eb78ad893b..d1d0c790f8 100644
--- a/lib/spack/spack/cmd/unit_test.py
+++ b/lib/spack/spack/cmd/unit_test.py
@@ -14,7 +14,7 @@ import sys
try:
import pytest
except ImportError:
- pytest = None # type: ignore
+ pytest = None # type: ignore
from six import StringIO
@@ -28,79 +28,110 @@ import spack.paths
description = "run spack's unit tests (wrapper around pytest)"
section = "developer"
level = "long"
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
def setup_parser(subparser):
subparser.add_argument(
- '-H', '--pytest-help', action='store_true', default=False,
- help="show full pytest help, with advanced options")
+ "-H",
+ "--pytest-help",
+ action="store_true",
+ default=False,
+ help="show full pytest help, with advanced options",
+ )
# extra spack arguments to list tests
list_group = subparser.add_argument_group("listing tests")
list_mutex = list_group.add_mutually_exclusive_group()
list_mutex.add_argument(
- '-l', '--list', action='store_const', default=None,
- dest='list', const='list', help="list test filenames")
+ "-l",
+ "--list",
+ action="store_const",
+ default=None,
+ dest="list",
+ const="list",
+ help="list test filenames",
+ )
list_mutex.add_argument(
- '-L', '--list-long', action='store_const', default=None,
- dest='list', const='long', help="list all test functions")
+ "-L",
+ "--list-long",
+ action="store_const",
+ default=None,
+ dest="list",
+ const="long",
+ help="list all test functions",
+ )
list_mutex.add_argument(
- '-N', '--list-names', action='store_const', default=None,
- dest='list', const='names', help="list full names of all tests")
+ "-N",
+ "--list-names",
+ action="store_const",
+ default=None,
+ dest="list",
+ const="names",
+ help="list full names of all tests",
+ )
# use tests for extension
subparser.add_argument(
- '--extension', default=None,
- help="run test for a given spack extension")
+ "--extension", default=None, help="run test for a given spack extension"
+ )
# spell out some common pytest arguments, so they'll show up in help
pytest_group = subparser.add_argument_group(
- "common pytest arguments (spack unit-test --pytest-help for more)")
+ "common pytest arguments (spack unit-test --pytest-help for more)"
+ )
pytest_group.add_argument(
- "-s", action='append_const', dest='parsed_args', const='-s',
- help="print output while tests run (disable capture)")
+ "-s",
+ action="append_const",
+ dest="parsed_args",
+ const="-s",
+ help="print output while tests run (disable capture)",
+ )
pytest_group.add_argument(
- "-k", action='store', metavar="EXPRESSION", dest='expression',
- help="filter tests by keyword (can also use w/list options)")
+ "-k",
+ action="store",
+ metavar="EXPRESSION",
+ dest="expression",
+ help="filter tests by keyword (can also use w/list options)",
+ )
pytest_group.add_argument(
- "--showlocals", action='append_const', dest='parsed_args',
- const='--showlocals', help="show local variable values in tracebacks")
+ "--showlocals",
+ action="append_const",
+ dest="parsed_args",
+ const="--showlocals",
+ help="show local variable values in tracebacks",
+ )
# remainder is just passed to pytest
- subparser.add_argument(
- 'pytest_args', nargs=argparse.REMAINDER, help="arguments for pytest")
+ subparser.add_argument("pytest_args", nargs=argparse.REMAINDER, help="arguments for pytest")
def do_list(args, extra_args):
"""Print a lists of tests than what pytest offers."""
+
def colorize(c, prefix):
if isinstance(prefix, tuple):
- return "::".join(
- color.colorize("@%s{%s}" % (c, p))
- for p in prefix if p != "()"
- )
+ return "::".join(color.colorize("@%s{%s}" % (c, p)) for p in prefix if p != "()")
return color.colorize("@%s{%s}" % (c, prefix))
# To list the files we just need to inspect the filesystem,
# which doesn't need to wait for pytest collection and doesn't
# require parsing pytest output
- files = llnl.util.filesystem.find(
- root=spack.paths.test_path, files='*.py', recursive=True
- )
+ files = llnl.util.filesystem.find(root=spack.paths.test_path, files="*.py", recursive=True)
files = [
os.path.relpath(f, start=spack.paths.spack_root)
- for f in files if not f.endswith(('conftest.py', '__init__.py'))
+ for f in files
+ if not f.endswith(("conftest.py", "__init__.py"))
]
old_output = sys.stdout
try:
sys.stdout = output = StringIO()
- pytest.main(['--collect-only'] + extra_args)
+ pytest.main(["--collect-only"] + extra_args)
finally:
sys.stdout = old_output
- lines = output.getvalue().split('\n')
+ lines = output.getvalue().split("\n")
tests = collections.defaultdict(set)
# collect tests into sections
@@ -114,7 +145,7 @@ def do_list(args, extra_args):
# strip parametrized tests
if "[" in name:
- name = name[:name.index("[")]
+ name = name[: name.index("[")]
len_indent = len(indent)
if os.path.isabs(name):
@@ -129,14 +160,14 @@ def do_list(args, extra_args):
# From version 3.X to version 6.X the output format
# changed a lot in pytest, and probably will change
# in the future - so this manipulation might be fragile
- if nodetype.lower() == 'function':
+ if nodetype.lower() == "function":
name_parts.append(item)
- key_end = os.path.join(*key_parts[-1][1].split('/'))
+ key_end = os.path.join(*key_parts[-1][1].split("/"))
key = next(f for f in files if f.endswith(key_end))
tests[key].add(tuple(x[1] for x in name_parts))
- elif nodetype.lower() == 'class':
+ elif nodetype.lower() == "class":
name_parts.append(item)
- elif nodetype.lower() in ('package', 'module'):
+ elif nodetype.lower() in ("package", "module"):
key_parts.append(item)
if args.list == "list":
@@ -187,16 +218,14 @@ def unit_test(parser, args, unknown_args):
spack.bootstrap.ensure_clingo_importable_or_raise()
if pytest is None:
- vendored_pytest_dir = os.path.join(
- spack.paths.external_path, 'pytest-fallback'
- )
+ vendored_pytest_dir = os.path.join(spack.paths.external_path, "pytest-fallback")
sys.path.append(vendored_pytest_dir)
import pytest
if args.pytest_help:
# make the pytest.main help output more accurate
- sys.argv[0] = 'spack unit-test'
- return pytest.main(['-h'])
+ sys.argv[0] = "spack unit-test"
+ return pytest.main(["-h"])
# add back any parsed pytest args we need to pass to pytest
pytest_args = add_back_pytest_args(args, unknown_args)
diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py
index e1b926a6ea..8cf7af8b9d 100644
--- a/lib/spack/spack/cmd/unload.py
+++ b/lib/spack/spack/cmd/unload.py
@@ -19,42 +19,62 @@ level = "short"
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
- message with -h. """
- arguments.add_common_arguments(subparser, ['installed_specs'])
+ message with -h."""
+ arguments.add_common_arguments(subparser, ["installed_specs"])
shells = subparser.add_mutually_exclusive_group()
shells.add_argument(
- '--sh', action='store_const', dest='shell', const='sh',
- help="print sh commands to activate the environment")
+ "--sh",
+ action="store_const",
+ dest="shell",
+ const="sh",
+ help="print sh commands to activate the environment",
+ )
shells.add_argument(
- '--csh', action='store_const', dest='shell', const='csh',
- help="print csh commands to activate the environment")
+ "--csh",
+ action="store_const",
+ dest="shell",
+ const="csh",
+ help="print csh commands to activate the environment",
+ )
shells.add_argument(
- '--fish', action='store_const', dest='shell', const='fish',
- help="print fish commands to load the package")
+ "--fish",
+ action="store_const",
+ dest="shell",
+ const="fish",
+ help="print fish commands to load the package",
+ )
shells.add_argument(
- '--bat', action='store_const', dest='shell', const='bat',
- help="print bat commands to load the package")
+ "--bat",
+ action="store_const",
+ dest="shell",
+ const="bat",
+ help="print bat commands to load the package",
+ )
- subparser.add_argument('-a', '--all', action='store_true',
- help='unload all loaded Spack packages.')
+ subparser.add_argument(
+ "-a", "--all", action="store_true", help="unload all loaded Spack packages."
+ )
def unload(parser, args):
"""Unload spack packages from the user environment."""
if args.specs and args.all:
- raise spack.error.SpackError("Cannot specify specs on command line"
- " when unloading all specs with '--all'")
+ raise spack.error.SpackError(
+ "Cannot specify specs on command line" " when unloading all specs with '--all'"
+ )
- hashes = os.environ.get(uenv.spack_loaded_hashes_var, '').split(':')
+ hashes = os.environ.get(uenv.spack_loaded_hashes_var, "").split(":")
if args.specs:
- specs = [spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
- for spec in spack.cmd.parse_specs(args.specs)]
+ specs = [
+ spack.cmd.disambiguate_spec_from_hashes(spec, hashes)
+ for spec in spack.cmd.parse_specs(args.specs)
+ ]
else:
specs = spack.store.db.query(hashes=hashes)
if not args.shell:
- specs_str = ' '.join(args.specs) or "SPECS"
+ specs_str = " ".join(args.specs) or "SPECS"
spack.cmd.common.shell_init_instructions(
"spack unload",
@@ -64,8 +84,7 @@ def unload(parser, args):
env_mod = spack.util.environment.EnvironmentModifications()
for spec in specs:
- env_mod.extend(
- uenv.environment_modifications_for_spec(spec).reversed())
+ env_mod.extend(uenv.environment_modifications_for_spec(spec).reversed())
env_mod.remove_path(uenv.spack_loaded_hashes_var, spec.dag_hash())
cmds = env_mod.shell_modifications(args.shell)
diff --git a/lib/spack/spack/cmd/url.py b/lib/spack/spack/cmd/url.py
index be24d86ffc..452bd059da 100644
--- a/lib/spack/spack/cmd/url.py
+++ b/lib/spack/spack/cmd/url.py
@@ -37,66 +37,81 @@ level = "long"
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subcommand')
+ sp = subparser.add_subparsers(metavar="SUBCOMMAND", dest="subcommand")
# Parse
- parse_parser = sp.add_parser('parse', help='attempt to parse a url')
+ parse_parser = sp.add_parser("parse", help="attempt to parse a url")
+ parse_parser.add_argument("url", help="url to parse")
parse_parser.add_argument(
- 'url',
- help='url to parse')
- parse_parser.add_argument(
- '-s', '--spider', action='store_true',
- help='spider the source page for versions')
+ "-s", "--spider", action="store_true", help="spider the source page for versions"
+ )
# List
- list_parser = sp.add_parser('list', help='list urls in all packages')
+ list_parser = sp.add_parser("list", help="list urls in all packages")
list_parser.add_argument(
- '-c', '--color', action='store_true',
- help='color the parsed version and name in the urls shown '
- '(versions will be cyan, name red)')
+ "-c",
+ "--color",
+ action="store_true",
+ help="color the parsed version and name in the urls shown "
+ "(versions will be cyan, name red)",
+ )
list_parser.add_argument(
- '-e', '--extrapolation', action='store_true',
- help='color the versions used for extrapolation as well '
- '(additional versions will be green, names magenta)')
+ "-e",
+ "--extrapolation",
+ action="store_true",
+ help="color the versions used for extrapolation as well "
+ "(additional versions will be green, names magenta)",
+ )
excl_args = list_parser.add_mutually_exclusive_group()
excl_args.add_argument(
- '-n', '--incorrect-name', action='store_true',
- help='only list urls for which the name was incorrectly parsed')
+ "-n",
+ "--incorrect-name",
+ action="store_true",
+ help="only list urls for which the name was incorrectly parsed",
+ )
excl_args.add_argument(
- '-N', '--correct-name', action='store_true',
- help='only list urls for which the name was correctly parsed')
+ "-N",
+ "--correct-name",
+ action="store_true",
+ help="only list urls for which the name was correctly parsed",
+ )
excl_args.add_argument(
- '-v', '--incorrect-version', action='store_true',
- help='only list urls for which the version was incorrectly parsed')
+ "-v",
+ "--incorrect-version",
+ action="store_true",
+ help="only list urls for which the version was incorrectly parsed",
+ )
excl_args.add_argument(
- '-V', '--correct-version', action='store_true',
- help='only list urls for which the version was correctly parsed')
+ "-V",
+ "--correct-version",
+ action="store_true",
+ help="only list urls for which the version was correctly parsed",
+ )
# Summary
- sp.add_parser(
- 'summary',
- help='print a summary of how well we are parsing package urls')
+ sp.add_parser("summary", help="print a summary of how well we are parsing package urls")
# Stats
stats_parser = sp.add_parser(
- 'stats',
- help='print statistics on versions and checksums for all packages')
+ "stats", help="print statistics on versions and checksums for all packages"
+ )
stats_parser.add_argument(
- "--show-issues", action="store_true",
- help="show packages with issues (md5 hashes, http urls)"
+ "--show-issues",
+ action="store_true",
+ help="show packages with issues (md5 hashes, http urls)",
)
def url(parser, args):
action = {
- 'parse': url_parse,
- 'list': url_list,
- 'summary': url_summary,
- 'stats': url_stats,
+ "parse": url_parse,
+ "list": url_list,
+ "summary": url_summary,
+ "stats": url_stats,
}
action[args.subcommand](args)
@@ -105,43 +120,43 @@ def url(parser, args):
def url_parse(args):
url = args.url
- tty.msg('Parsing URL: {0}'.format(url))
+ tty.msg("Parsing URL: {0}".format(url))
print()
- ver, vs, vl, vi, vregex = parse_version_offset(url)
- tty.msg('Matched version regex {0:>2}: r{1!r}'.format(vi, vregex))
+ ver, vs, vl, vi, vregex = parse_version_offset(url)
+ tty.msg("Matched version regex {0:>2}: r{1!r}".format(vi, vregex))
name, ns, nl, ni, nregex = parse_name_offset(url, ver)
- tty.msg('Matched name regex {0:>2}: r{1!r}'.format(ni, nregex))
+ tty.msg("Matched name regex {0:>2}: r{1!r}".format(ni, nregex))
print()
- tty.msg('Detected:')
+ tty.msg("Detected:")
try:
print_name_and_version(url)
except UrlParseError as e:
tty.error(str(e))
- print(' name: {0}'.format(name))
- print(' version: {0}'.format(ver))
+ print(" name: {0}".format(name))
+ print(" version: {0}".format(ver))
print()
- tty.msg('Substituting version 9.9.9b:')
- newurl = substitute_version(url, '9.9.9b')
+ tty.msg("Substituting version 9.9.9b:")
+ newurl = substitute_version(url, "9.9.9b")
print_name_and_version(newurl)
if args.spider:
print()
- tty.msg('Spidering for versions:')
+ tty.msg("Spidering for versions:")
versions = find_versions_of_archive(url)
if not versions:
- print(' Found no versions for {0}'.format(name))
+ print(" Found no versions for {0}".format(name))
return
max_len = max(len(str(v)) for v in versions)
for v in sorted(versions):
- print('{0:{1}} {2}'.format(v, max_len, versions[v]))
+ print("{0:{1}} {2}".format(v, max_len, versions[v]))
def url_list(args):
@@ -149,11 +164,11 @@ def url_list(args):
# Gather set of URLs from all packages
for pkg_cls in spack.repo.path.all_package_classes():
- url = getattr(pkg_cls, 'url', None)
+ url = getattr(pkg_cls, "url", None)
urls = url_list_parsing(args, urls, url, pkg_cls)
for params in pkg_cls.versions.values():
- url = params.get('url', None)
+ url = params.get("url", None)
urls = url_list_parsing(args, urls, url, pkg_cls)
# Print URLs
@@ -169,32 +184,32 @@ def url_list(args):
def url_summary(args):
# Collect statistics on how many URLs were correctly parsed
- total_urls = 0
- correct_names = 0
+ total_urls = 0
+ correct_names = 0
correct_versions = 0
# Collect statistics on which regexes were matched and how often
- name_regex_dict = dict()
- right_name_count = defaultdict(int)
- wrong_name_count = defaultdict(int)
+ name_regex_dict = dict()
+ right_name_count = defaultdict(int)
+ wrong_name_count = defaultdict(int)
- version_regex_dict = dict()
+ version_regex_dict = dict()
right_version_count = defaultdict(int)
wrong_version_count = defaultdict(int)
- tty.msg('Generating a summary of URL parsing in Spack...')
+ tty.msg("Generating a summary of URL parsing in Spack...")
# Loop through all packages
for pkg_cls in spack.repo.path.all_package_classes():
urls = set()
pkg = pkg_cls(spack.spec.Spec(pkg_cls.name))
- url = getattr(pkg, 'url', None)
+ url = getattr(pkg, "url", None)
if url:
urls.add(url)
for params in pkg.versions.values():
- url = params.get('url', None)
+ url = params.get("url", None)
if url:
urls.add(url)
@@ -228,44 +243,53 @@ def url_summary(args):
pass
print()
- print(' Total URLs found: {0}'.format(total_urls))
- print(' Names correctly parsed: {0:>4}/{1:>4} ({2:>6.2%})'.format(
- correct_names, total_urls, correct_names / total_urls))
- print(' Versions correctly parsed: {0:>4}/{1:>4} ({2:>6.2%})'.format(
- correct_versions, total_urls, correct_versions / total_urls))
+ print(" Total URLs found: {0}".format(total_urls))
+ print(
+ " Names correctly parsed: {0:>4}/{1:>4} ({2:>6.2%})".format(
+ correct_names, total_urls, correct_names / total_urls
+ )
+ )
+ print(
+ " Versions correctly parsed: {0:>4}/{1:>4} ({2:>6.2%})".format(
+ correct_versions, total_urls, correct_versions / total_urls
+ )
+ )
print()
- tty.msg('Statistics on name regular expressions:')
+ tty.msg("Statistics on name regular expressions:")
print()
- print(' Index Right Wrong Total Regular Expression')
+ print(" Index Right Wrong Total Regular Expression")
for ni in sorted(name_regex_dict.keys()):
- print(' {0:>5} {1:>5} {2:>5} {3:>5} r{4!r}'.format(
- ni,
- right_name_count[ni],
- wrong_name_count[ni],
- right_name_count[ni] + wrong_name_count[ni],
- name_regex_dict[ni])
+ print(
+ " {0:>5} {1:>5} {2:>5} {3:>5} r{4!r}".format(
+ ni,
+ right_name_count[ni],
+ wrong_name_count[ni],
+ right_name_count[ni] + wrong_name_count[ni],
+ name_regex_dict[ni],
+ )
)
print()
- tty.msg('Statistics on version regular expressions:')
+ tty.msg("Statistics on version regular expressions:")
print()
- print(' Index Right Wrong Total Regular Expression')
+ print(" Index Right Wrong Total Regular Expression")
for vi in sorted(version_regex_dict.keys()):
- print(' {0:>5} {1:>5} {2:>5} {3:>5} r{4!r}'.format(
- vi,
- right_version_count[vi],
- wrong_version_count[vi],
- right_version_count[vi] + wrong_version_count[vi],
- version_regex_dict[vi])
+ print(
+ " {0:>5} {1:>5} {2:>5} {3:>5} r{4!r}".format(
+ vi,
+ right_version_count[vi],
+ wrong_version_count[vi],
+ right_version_count[vi] + wrong_version_count[vi],
+ version_regex_dict[vi],
+ )
)
print()
# Return statistics, only for testing purposes
- return (total_urls, correct_names, correct_versions,
- right_name_count, right_version_count)
+ return (total_urls, correct_names, correct_versions, right_name_count, right_version_count)
def url_stats(args):
@@ -284,14 +308,14 @@ def url_stats(args):
self.total += 1
url_type = fetcher.url_attr
- self.url_type[url_type or 'no code'] += 1
+ self.url_type[url_type or "no code"] += 1
- if url_type == 'url':
- digest = getattr(fetcher, 'digest', None)
+ if url_type == "url":
+ digest = getattr(fetcher, "digest", None)
if digest:
algo = crypto.hash_algo_for_digest(digest)
else:
- algo = 'no checksum'
+ algo = "no checksum"
self.checksums[algo] += 1
if algo == "md5":
@@ -306,15 +330,15 @@ def url_stats(args):
http_urls = issues["http urls"]
http_urls[pkg_name].append(fetcher.url)
- elif url_type == 'git':
- if getattr(fetcher, 'commit', None):
- self.git_type['commit'] += 1
- elif getattr(fetcher, 'branch', None):
- self.git_type['branch'] += 1
- elif getattr(fetcher, 'tag', None):
- self.git_type['tag'] += 1
+ elif url_type == "git":
+ if getattr(fetcher, "commit", None):
+ self.git_type["commit"] += 1
+ elif getattr(fetcher, "branch", None):
+ self.git_type["branch"] += 1
+ elif getattr(fetcher, "tag", None):
+ self.git_type["tag"] += 1
else:
- self.git_type['no ref'] += 1
+ self.git_type["no ref"] += 1
npkgs = 0
version_stats = UrlStats()
@@ -351,9 +375,9 @@ def url_stats(args):
fmt += "%12d%8.1f%%%12d%8.1f%%"
v = getattr(version_stats, stat_name).get(name, 0)
r = getattr(resource_stats, stat_name).get(name, 0)
- print(fmt % (name,
- v, v / version_stats.total * 100,
- r, r / resource_stats.total * 100))
+ print(
+ fmt % (name, v, v / version_stats.total * 100, r, r / resource_stats.total * 100)
+ )
print_line()
print("%-20s%12s%9s%12s%9s" % ("stat", "versions", "%", "resources", "%"))
@@ -385,9 +409,7 @@ def url_stats(args):
if args.show_issues:
total_issues = sum(
- len(issues)
- for _, pkg_issues in issues.items()
- for _, issues in pkg_issues.items()
+ len(issues) for _, pkg_issues in issues.items() for _, issues in pkg_issues.items()
)
print()
tty.msg("Found %d issues." % total_issues)
@@ -407,14 +429,14 @@ def print_name_and_version(url):
url (str): The url to parse
"""
name, ns, nl, ntup, ver, vs, vl, vtup = substitution_offsets(url)
- underlines = [' '] * max(ns + nl, vs + vl)
+ underlines = [" "] * max(ns + nl, vs + vl)
for i in range(ns, ns + nl):
- underlines[i] = '-'
+ underlines[i] = "-"
for i in range(vs, vs + vl):
- underlines[i] = '~'
+ underlines[i] = "~"
- print(' {0}'.format(url))
- print(' {0}'.format(''.join(underlines)))
+ print(" {0}".format(url))
+ print(" {0}".format("".join(underlines)))
def url_list_parsing(args, urls, url, pkg):
@@ -435,12 +457,10 @@ def url_list_parsing(args, urls, url, pkg):
# Attempt to parse the name
try:
name = parse_name(url)
- if (args.correct_name and
- name_parsed_correctly(pkg, name)):
+ if args.correct_name and name_parsed_correctly(pkg, name):
# Add correctly parsed URLs
urls.add(url)
- elif (args.incorrect_name and
- not name_parsed_correctly(pkg, name)):
+ elif args.incorrect_name and not name_parsed_correctly(pkg, name):
# Add incorrectly parsed URLs
urls.add(url)
except UndetectableNameError:
@@ -451,12 +471,10 @@ def url_list_parsing(args, urls, url, pkg):
# Attempt to parse the version
try:
version = parse_version(url)
- if (args.correct_version and
- version_parsed_correctly(pkg, version)):
+ if args.correct_version and version_parsed_correctly(pkg, version):
# Add correctly parsed URLs
urls.add(url)
- elif (args.incorrect_version and
- not version_parsed_correctly(pkg, version)):
+ elif args.incorrect_version and not version_parsed_correctly(pkg, version):
# Add incorrectly parsed URLs
urls.add(url)
except UndetectableVersionError:
@@ -522,13 +540,22 @@ def remove_prefix(pkg_name):
str: the name of the package with any build system prefix removed
"""
prefixes = [
- 'r-', 'py-', 'tcl-', 'lua-', 'perl-', 'ruby-', 'llvm-',
- 'intel-', 'votca-', 'octave-', 'gtkorvo-'
+ "r-",
+ "py-",
+ "tcl-",
+ "lua-",
+ "perl-",
+ "ruby-",
+ "llvm-",
+ "intel-",
+ "votca-",
+ "octave-",
+ "gtkorvo-",
]
- prefix = next((p for p in prefixes if pkg_name.startswith(p)), '')
+ prefix = next((p for p in prefixes if pkg_name.startswith(p)), "")
- return pkg_name[len(prefix):]
+ return pkg_name[len(prefix) :]
def remove_separators(version):
@@ -546,8 +573,8 @@ def remove_separators(version):
"""
version = str(version)
- version = version.replace('.', '')
- version = version.replace('_', '')
- version = version.replace('-', '')
+ version = version.replace(".", "")
+ version = version.replace("_", "")
+ version = version.replace("-", "")
return version
diff --git a/lib/spack/spack/cmd/verify.py b/lib/spack/spack/cmd/verify.py
index da34846069..5d155049e9 100644
--- a/lib/spack/spack/cmd/verify.py
+++ b/lib/spack/spack/cmd/verify.py
@@ -20,30 +20,42 @@ level = "long"
def setup_parser(subparser):
setup_parser.parser = subparser
- subparser.add_argument('-l', '--local', action='store_true',
- help="Verify only locally installed packages")
- subparser.add_argument('-j', '--json', action='store_true',
- help="Ouptut json-formatted errors")
- subparser.add_argument('-a', '--all', action='store_true',
- help="Verify all packages")
- subparser.add_argument('specs_or_files', nargs=argparse.REMAINDER,
- help="Specs or files to verify")
+ subparser.add_argument(
+ "-l", "--local", action="store_true", help="Verify only locally installed packages"
+ )
+ subparser.add_argument(
+ "-j", "--json", action="store_true", help="Ouptut json-formatted errors"
+ )
+ subparser.add_argument("-a", "--all", action="store_true", help="Verify all packages")
+ subparser.add_argument(
+ "specs_or_files", nargs=argparse.REMAINDER, help="Specs or files to verify"
+ )
type = subparser.add_mutually_exclusive_group()
type.add_argument(
- '-s', '--specs',
- action='store_const', const='specs', dest='type', default='specs',
- help='Treat entries as specs (default)')
+ "-s",
+ "--specs",
+ action="store_const",
+ const="specs",
+ dest="type",
+ default="specs",
+ help="Treat entries as specs (default)",
+ )
type.add_argument(
- '-f', '--files',
- action='store_const', const='files', dest='type', default='specs',
- help="Treat entries as absolute filenames. Cannot be used with '-a'")
+ "-f",
+ "--files",
+ action="store_const",
+ const="files",
+ dest="type",
+ default="specs",
+ help="Treat entries as absolute filenames. Cannot be used with '-a'",
+ )
def verify(parser, args):
local = args.local
- if args.type == 'files':
+ if args.type == "files":
if args.all:
setup_parser.parser.print_help()
return 1
@@ -75,9 +87,7 @@ def verify(parser, args):
elif args.specs_or_files:
# construct disambiguated spec list
env = ev.active_environment()
- specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env,
- local=local),
- spec_args))
+ specs = list(map(lambda x: spack.cmd.disambiguate_spec(x, env, local=local), spec_args))
else:
setup_parser.parser.print_help()
return 1
@@ -89,7 +99,7 @@ def verify(parser, args):
if args.json:
print(results.json_string())
else:
- tty.msg("In package %s" % spec.format('{name}/{hash:7}'))
+ tty.msg("In package %s" % spec.format("{name}/{hash:7}"))
print(results)
return 1
else:
diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py
index 10df443baa..c59f0195c2 100644
--- a/lib/spack/spack/cmd/versions.py
+++ b/lib/spack/spack/cmd/versions.py
@@ -22,21 +22,27 @@ level = "long"
def setup_parser(subparser):
output = subparser.add_mutually_exclusive_group()
- output.add_argument('-s', '--safe', action='store_true',
- help='only list safe versions of the package')
- output.add_argument('--safe-only', action='store_true',
- help='[deprecated] only list safe versions '
- 'of the package')
- output.add_argument('-r', '--remote', action='store_true',
- help='only list remote versions of the package')
- output.add_argument('-n', '--new', action='store_true',
- help='only list remote versions newer than '
- 'the latest checksummed version')
+ output.add_argument(
+ "-s", "--safe", action="store_true", help="only list safe versions of the package"
+ )
+ output.add_argument(
+ "--safe-only",
+ action="store_true",
+ help="[deprecated] only list safe versions " "of the package",
+ )
+ output.add_argument(
+ "-r", "--remote", action="store_true", help="only list remote versions of the package"
+ )
+ output.add_argument(
+ "-n",
+ "--new",
+ action="store_true",
+ help="only list remote versions newer than " "the latest checksummed version",
+ )
subparser.add_argument(
- '-c', '--concurrency', default=32, type=int,
- help='number of concurrent requests'
+ "-c", "--concurrency", default=32, type=int, help="number of concurrent requests"
)
- arguments.add_common_arguments(subparser, ['package'])
+ arguments.add_common_arguments(subparser, ["package"])
def versions(parser, args):
@@ -52,12 +58,12 @@ def versions(parser, args):
if not (args.remote or args.new):
if sys.stdout.isatty():
- tty.msg('Safe versions (already checksummed):')
+ tty.msg("Safe versions (already checksummed):")
if not safe_versions:
if sys.stdout.isatty():
- tty.warn('Found no versions for {0}'.format(pkg.name))
- tty.debug('Manually add versions to the package.')
+ tty.warn("Found no versions for {0}".format(pkg.name))
+ tty.debug("Manually add versions to the package.")
else:
colify(sorted(safe_versions, reverse=True), indent=2)
@@ -68,26 +74,26 @@ def versions(parser, args):
if args.new:
if sys.stdout.isatty():
- tty.msg('New remote versions (not yet checksummed):')
- numeric_safe_versions = list(filter(
- lambda v: str(v) not in infinity_versions,
- safe_versions))
+ tty.msg("New remote versions (not yet checksummed):")
+ numeric_safe_versions = list(
+ filter(lambda v: str(v) not in infinity_versions, safe_versions)
+ )
highest_safe_version = max(numeric_safe_versions)
- remote_versions = set([ver(v) for v in set(fetched_versions)
- if v > highest_safe_version])
+ remote_versions = set([ver(v) for v in set(fetched_versions) if v > highest_safe_version])
else:
if sys.stdout.isatty():
- tty.msg('Remote versions (not yet checksummed):')
+ tty.msg("Remote versions (not yet checksummed):")
remote_versions = set(fetched_versions).difference(safe_versions)
if not remote_versions:
if sys.stdout.isatty():
if not fetched_versions:
- tty.warn('Found no versions for {0}'.format(pkg.name))
- tty.debug('Check the list_url and list_depth attributes of '
- 'the package to help Spack find versions.')
+ tty.warn("Found no versions for {0}".format(pkg.name))
+ tty.debug(
+ "Check the list_url and list_depth attributes of "
+ "the package to help Spack find versions."
+ )
else:
- tty.warn('Found no unchecksummed versions for {0}'.format(
- pkg.name))
+ tty.warn("Found no unchecksummed versions for {0}".format(pkg.name))
else:
colify(sorted(remote_versions, reverse=True), indent=2)
diff --git a/lib/spack/spack/cmd/view.py b/lib/spack/spack/cmd/view.py
index 40e1b74e2e..cb693c569d 100644
--- a/lib/spack/spack/cmd/view.py
+++ b/lib/spack/spack/cmd/view.py
@@ -3,7 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-'''Produce a "view" of a Spack DAG.
+"""Produce a "view" of a Spack DAG.
A "view" is file hierarchy representing the union of a number of
Spack-installed package file hierarchies. The union is formed from:
@@ -32,7 +32,7 @@ brett.viren@gmail.com ca 2016.
All operations on views are performed via proxy objects such as
YamlFilesystemView.
-'''
+"""
import llnl.util.tty as tty
from llnl.util.link_tree import MergeConflictError
from llnl.util.tty.color import colorize
@@ -49,16 +49,15 @@ description = "project packages to a compact naming scheme on the filesystem."
section = "environments"
level = "short"
-actions_link = ["symlink", "add", "soft", "hardlink", "hard", "copy",
- "relocate"]
+actions_link = ["symlink", "add", "soft", "hardlink", "hard", "copy", "relocate"]
actions_remove = ["remove", "rm"]
actions_status = ["statlink", "status", "check"]
def disambiguate_in_view(specs, view):
"""
- When dealing with querying actions (remove/status) we only need to
- disambiguate among specs in the view
+ When dealing with querying actions (remove/status) we only need to
+ disambiguate among specs in the view
"""
view_specs = set(view.get_all_specs())
@@ -69,11 +68,12 @@ def disambiguate_in_view(specs, view):
matching_in_view = [ms for ms in matching_specs if ms in view_specs]
if len(matching_in_view) > 1:
- spec_format = '{name}{@version}{%compiler}{arch=architecture}'
- args = ["Spec matches multiple packages.",
- "Matching packages:"]
- args += [colorize(" @K{%s} " % s.dag_hash(7)) +
- s.cformat(spec_format) for s in matching_in_view]
+ spec_format = "{name}{@version}{%compiler}{arch=architecture}"
+ args = ["Spec matches multiple packages.", "Matching packages:"]
+ args += [
+ colorize(" @K{%s} " % s.dag_hash(7)) + s.cformat(spec_format)
+ for s in matching_in_view
+ ]
args += ["Use a more specific spec."]
tty.die(*args)
@@ -87,97 +87,119 @@ def setup_parser(sp):
setup_parser.parser = sp
sp.add_argument(
- '-v', '--verbose', action='store_true', default=False,
- help="If not verbose only warnings/errors will be printed.")
+ "-v",
+ "--verbose",
+ action="store_true",
+ default=False,
+ help="If not verbose only warnings/errors will be printed.",
+ )
sp.add_argument(
- '-e', '--exclude', action='append', default=[],
- help="exclude packages with names matching the given regex pattern")
+ "-e",
+ "--exclude",
+ action="append",
+ default=[],
+ help="exclude packages with names matching the given regex pattern",
+ )
sp.add_argument(
- '-d', '--dependencies', choices=['true', 'false', 'yes', 'no'],
- default='true',
- help="Link/remove/list dependencies.")
+ "-d",
+ "--dependencies",
+ choices=["true", "false", "yes", "no"],
+ default="true",
+ help="Link/remove/list dependencies.",
+ )
- ssp = sp.add_subparsers(metavar='ACTION', dest='action')
+ ssp = sp.add_subparsers(metavar="ACTION", dest="action")
- specs_opts = dict(metavar='spec', action='store',
- help="seed specs of the packages to view")
+ specs_opts = dict(metavar="spec", action="store", help="seed specs of the packages to view")
# The action parameterizes the command but in keeping with Spack
# patterns we make it a subcommand.
file_system_view_actions = {
"symlink": ssp.add_parser(
- 'symlink', aliases=['add', 'soft'],
- help='add package files to a filesystem view via symbolic links'),
+ "symlink",
+ aliases=["add", "soft"],
+ help="add package files to a filesystem view via symbolic links",
+ ),
"hardlink": ssp.add_parser(
- 'hardlink', aliases=['hard'],
- help='add packages files to a filesystem view via hard links'),
+ "hardlink",
+ aliases=["hard"],
+ help="add packages files to a filesystem view via hard links",
+ ),
"copy": ssp.add_parser(
- 'copy', aliases=['relocate'],
- help='add package files to a filesystem view via copy/relocate'),
+ "copy",
+ aliases=["relocate"],
+ help="add package files to a filesystem view via copy/relocate",
+ ),
"remove": ssp.add_parser(
- 'remove', aliases=['rm'],
- help='remove packages from a filesystem view'),
+ "remove", aliases=["rm"], help="remove packages from a filesystem view"
+ ),
"statlink": ssp.add_parser(
- 'statlink', aliases=['status', 'check'],
- help='check status of packages in a filesystem view')
+ "statlink",
+ aliases=["status", "check"],
+ help="check status of packages in a filesystem view",
+ ),
}
# All these options and arguments are common to every action.
for cmd, act in file_system_view_actions.items():
- act.add_argument('path', nargs=1,
- help="path to file system view directory")
+ act.add_argument("path", nargs=1, help="path to file system view directory")
if cmd in ("symlink", "hardlink", "copy"):
# invalid for remove/statlink, for those commands the view needs to
# already know its own projections.
help_msg = "Initialize view using projections from file."
- act.add_argument('--projection-file', dest='projection_file',
- type=spack.cmd.extant_file, help=help_msg)
+ act.add_argument(
+ "--projection-file",
+ dest="projection_file",
+ type=spack.cmd.extant_file,
+ help=help_msg,
+ )
if cmd == "remove":
grp = act.add_mutually_exclusive_group(required=True)
act.add_argument(
- '--no-remove-dependents', action="store_true",
- help="Do not remove dependents of specified specs.")
+ "--no-remove-dependents",
+ action="store_true",
+ help="Do not remove dependents of specified specs.",
+ )
# with all option, spec is an optional argument
so = specs_opts.copy()
so["nargs"] = "*"
so["default"] = []
- grp.add_argument('specs', **so)
- grp.add_argument("-a", "--all", action='store_true',
- help="act on all specs in view")
+ grp.add_argument("specs", **so)
+ grp.add_argument("-a", "--all", action="store_true", help="act on all specs in view")
elif cmd == "statlink":
so = specs_opts.copy()
so["nargs"] = "*"
- act.add_argument('specs', **so)
+ act.add_argument("specs", **so)
else:
# without all option, spec is required
so = specs_opts.copy()
so["nargs"] = "+"
- act.add_argument('specs', **so)
+ act.add_argument("specs", **so)
for cmd in ["symlink", "hardlink", "copy"]:
act = file_system_view_actions[cmd]
- act.add_argument("-i", "--ignore-conflicts", action='store_true')
+ act.add_argument("-i", "--ignore-conflicts", action="store_true")
return
def view(parser, args):
- 'Produce a view of a set of packages.'
+ "Produce a view of a set of packages."
specs = spack.cmd.parse_specs(args.specs)
path = args.path[0]
if args.action in actions_link and args.projection_file:
# argparse confirms file exists
- with open(args.projection_file, 'r') as f:
+ with open(args.projection_file, "r") as f:
projections_data = s_yaml.load(f)
validate(projections_data, spack.schema.projections.schema)
- ordered_projections = projections_data['projections']
+ ordered_projections = projections_data["projections"]
else:
ordered_projections = {}
@@ -185,14 +207,16 @@ def view(parser, args):
if args.action in actions_link:
link_fn = view_func_parser(args.action)
else:
- link_fn = view_func_parser('symlink')
+ link_fn = view_func_parser("symlink")
view = YamlFilesystemView(
- path, spack.store.layout,
+ path,
+ spack.store.layout,
projections=ordered_projections,
ignore_conflicts=getattr(args, "ignore_conflicts", False),
link=link_fn,
- verbose=args.verbose)
+ verbose=args.verbose,
+ )
# Process common args and specs
if getattr(args, "all", False):
@@ -216,25 +240,27 @@ def view(parser, args):
# status and remove can map a partial spec to packages in view
specs = disambiguate_in_view(specs, view)
- with_dependencies = args.dependencies.lower() in ['true', 'yes']
+ with_dependencies = args.dependencies.lower() in ["true", "yes"]
# Map action to corresponding functionality
if args.action in actions_link:
try:
- view.add_specs(*specs,
- with_dependencies=with_dependencies,
- exclude=args.exclude)
+ view.add_specs(*specs, with_dependencies=with_dependencies, exclude=args.exclude)
except MergeConflictError:
- tty.info("Some file blocked the merge, adding the '-i' flag will "
- "ignore this conflict. For more information see e.g. "
- "https://github.com/spack/spack/issues/9029")
+ tty.info(
+ "Some file blocked the merge, adding the '-i' flag will "
+ "ignore this conflict. For more information see e.g. "
+ "https://github.com/spack/spack/issues/9029"
+ )
raise
elif args.action in actions_remove:
- view.remove_specs(*specs,
- with_dependencies=with_dependencies,
- exclude=args.exclude,
- with_dependents=not args.no_remove_dependents)
+ view.remove_specs(
+ *specs,
+ with_dependencies=with_dependencies,
+ exclude=args.exclude,
+ with_dependents=not args.no_remove_dependents
+ )
elif args.action in actions_status:
view.print_status(*specs, with_dependencies=with_dependencies)
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 0a502efffa..1e50749730 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -25,7 +25,7 @@ import spack.version
from spack.util.environment import filter_system_paths
from spack.util.path import system_path_filter
-__all__ = ['Compiler']
+__all__ = ["Compiler"]
@llnl.util.lang.memoized
@@ -39,11 +39,9 @@ def _get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
"""
compiler = spack.util.executable.Executable(compiler_path)
if version_arg:
- output = compiler(
- version_arg, output=str, error=str, ignore_errors=ignore_errors)
+ output = compiler(version_arg, output=str, error=str, ignore_errors=ignore_errors)
else:
- output = compiler(
- output=str, error=str, ignore_errors=ignore_errors)
+ output = compiler(output=str, error=str, ignore_errors=ignore_errors)
return output
@@ -53,26 +51,25 @@ def get_compiler_version_output(compiler_path, *args, **kwargs):
# not just executable name. If we don't do this, and the path changes
# (e.g., during testing), we can get incorrect results.
if not os.path.isabs(compiler_path):
- compiler_path = spack.util.executable.which_string(
- compiler_path, required=True)
+ compiler_path = spack.util.executable.which_string(compiler_path, required=True)
return _get_compiler_version_output(compiler_path, *args, **kwargs)
def tokenize_flags(flags_str):
"""Given a compiler flag specification as a string, this returns a list
- where the entries are the flags. For compiler options which set values
- using the syntax "-flag value", this function groups flags and their
- values together. Any token not preceded by a "-" is considered the
- value of a prior flag."""
+ where the entries are the flags. For compiler options which set values
+ using the syntax "-flag value", this function groups flags and their
+ values together. Any token not preceded by a "-" is considered the
+ value of a prior flag."""
tokens = flags_str.split()
if not tokens:
return []
flag = tokens[0]
flags = []
for token in tokens[1:]:
- if not token.startswith('-'):
- flag += ' ' + token
+ if not token.startswith("-"):
+ flag += " " + token
else:
flags.append(flag)
flag = token
@@ -81,19 +78,16 @@ def tokenize_flags(flags_str):
#: regex for parsing linker lines
-_LINKER_LINE = re.compile(
- r'^( *|.*[/\\])'
- r'(link|ld|([^/\\]+-)?ld|collect2)'
- r'[^/\\]*( |$)')
+_LINKER_LINE = re.compile(r"^( *|.*[/\\])" r"(link|ld|([^/\\]+-)?ld|collect2)" r"[^/\\]*( |$)")
#: components of linker lines to ignore
-_LINKER_LINE_IGNORE = re.compile(r'(collect2 version|^[A-Za-z0-9_]+=|/ldfe )')
+_LINKER_LINE_IGNORE = re.compile(r"(collect2 version|^[A-Za-z0-9_]+=|/ldfe )")
#: regex to match linker search paths
-_LINK_DIR_ARG = re.compile(r'^-L(.:)?(?P<dir>[/\\].*)')
+_LINK_DIR_ARG = re.compile(r"^-L(.:)?(?P<dir>[/\\].*)")
#: regex to match linker library path arguments
-_LIBPATH_ARG = re.compile(r'^[-/](LIBPATH|libpath):(?P<dir>.*)')
+_LIBPATH_ARG = re.compile(r"^[-/](LIBPATH|libpath):(?P<dir>.*)")
def _parse_link_paths(string):
@@ -105,26 +99,26 @@ def _parse_link_paths(string):
"""
lib_search_paths = False
raw_link_dirs = []
- tty.debug('parsing implicit link info')
+ tty.debug("parsing implicit link info")
for line in string.splitlines():
if lib_search_paths:
- if line.startswith('\t'):
+ if line.startswith("\t"):
raw_link_dirs.append(line[1:])
continue
else:
lib_search_paths = False
- elif line.startswith('Library search paths:'):
+ elif line.startswith("Library search paths:"):
lib_search_paths = True
if not _LINKER_LINE.match(line):
continue
if _LINKER_LINE_IGNORE.match(line):
continue
- tty.debug('linker line: %s' % line)
+ tty.debug("linker line: %s" % line)
next_arg = False
for arg in line.split():
- if arg in ('-L', '-Y'):
+ if arg in ("-L", "-Y"):
next_arg = True
continue
@@ -135,16 +129,16 @@ def _parse_link_paths(string):
link_dir_arg = _LINK_DIR_ARG.match(arg)
if link_dir_arg:
- link_dir = link_dir_arg.group('dir')
- tty.debug('linkdir: %s' % link_dir)
+ link_dir = link_dir_arg.group("dir")
+ tty.debug("linkdir: %s" % link_dir)
raw_link_dirs.append(link_dir)
link_dir_arg = _LIBPATH_ARG.match(arg)
if link_dir_arg:
- link_dir = link_dir_arg.group('dir')
- tty.debug('libpath: %s', link_dir)
+ link_dir = link_dir_arg.group("dir")
+ tty.debug("libpath: %s", link_dir)
raw_link_dirs.append(link_dir)
- tty.debug('found raw link dirs: %s' % ', '.join(raw_link_dirs))
+ tty.debug("found raw link dirs: %s" % ", ".join(raw_link_dirs))
implicit_link_dirs = list()
visited = set()
@@ -154,7 +148,7 @@ def _parse_link_paths(string):
implicit_link_dirs.append(normalized_path)
visited.add(normalized_path)
- tty.debug('found link dirs: %s' % ', '.join(implicit_link_dirs))
+ tty.debug("found link dirs: %s" % ", ".join(implicit_link_dirs))
return implicit_link_dirs
@@ -183,16 +177,22 @@ def _parse_non_system_link_dirs(string):
def in_system_subdirectory(path):
- system_dirs = ['/lib/', '/lib64/', '/usr/lib/', '/usr/lib64/',
- '/usr/local/lib/', '/usr/local/lib64/']
+ system_dirs = [
+ "/lib/",
+ "/lib64/",
+ "/usr/lib/",
+ "/usr/lib64/",
+ "/usr/local/lib/",
+ "/usr/local/lib64/",
+ ]
return any(path_contains_subdirectory(path, x) for x in system_dirs)
class Compiler(object):
"""This class encapsulates a Spack "compiler", which includes C,
- C++, and Fortran compilers. Subclasses should implement
- support for specific compilers, their possible names, arguments,
- and how to identify the particular type of compiler."""
+ C++, and Fortran compilers. Subclasses should implement
+ support for specific compilers, their possible names, arguments,
+ and how to identify the particular type of compiler."""
# Subclasses use possible names of C compiler
cc_names = [] # type: List[str]
@@ -213,72 +213,81 @@ class Compiler(object):
# Optional suffix regexes for searching for this type of compiler.
# Suffixes are used by some frameworks, e.g. macports uses an '-mp-X.Y'
# version suffix for gcc.
- suffixes = [r'-.*']
+ suffixes = [r"-.*"]
#: Compiler argument that produces version information
- version_argument = '-dumpversion'
+ version_argument = "-dumpversion"
#: Return values to ignore when invoking the compiler to get its version
ignore_version_errors = () # type: Sequence[int]
#: Regex used to extract version from compiler's output
- version_regex = '(.*)'
+ version_regex = "(.*)"
# These libraries are anticipated to be required by all executables built
# by any compiler
- _all_compiler_rpath_libraries = ['libc', 'libc++', 'libstdc++']
+ _all_compiler_rpath_libraries = ["libc", "libc++", "libstdc++"]
# Default flags used by a compiler to set an rpath
@property
def cc_rpath_arg(self):
- return '-Wl,-rpath,'
+ return "-Wl,-rpath,"
@property
def cxx_rpath_arg(self):
- return '-Wl,-rpath,'
+ return "-Wl,-rpath,"
@property
def f77_rpath_arg(self):
- return '-Wl,-rpath,'
+ return "-Wl,-rpath,"
@property
def fc_rpath_arg(self):
- return '-Wl,-rpath,'
+ return "-Wl,-rpath,"
@property
def linker_arg(self):
"""Flag that need to be used to pass an argument to the linker."""
- return '-Wl,'
+ return "-Wl,"
@property
def disable_new_dtags(self):
- if platform.system() == 'Darwin':
- return ''
- return '--disable-new-dtags'
+ if platform.system() == "Darwin":
+ return ""
+ return "--disable-new-dtags"
@property
def enable_new_dtags(self):
- if platform.system() == 'Darwin':
- return ''
- return '--enable-new-dtags'
+ if platform.system() == "Darwin":
+ return ""
+ return "--enable-new-dtags"
@property
def debug_flags(self):
- return ['-g']
+ return ["-g"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3']
+ return ["-O", "-O0", "-O1", "-O2", "-O3"]
# Cray PrgEnv name that can be used to load this compiler
PrgEnv = None # type: str
# Name of module used to switch versions of this compiler
PrgEnv_compiler = None # type: str
- def __init__(self, cspec, operating_system, target,
- paths, modules=None, alias=None, environment=None,
- extra_rpaths=None, enable_implicit_rpaths=None,
- **kwargs):
+ def __init__(
+ self,
+ cspec,
+ operating_system,
+ target,
+ paths,
+ modules=None,
+ alias=None,
+ environment=None,
+ extra_rpaths=None,
+ enable_implicit_rpaths=None,
+ **kwargs
+ ):
self.spec = cspec
self.operating_system = str(operating_system)
self.target = target
@@ -288,7 +297,7 @@ class Compiler(object):
self.extra_rpaths = extra_rpaths or []
self.enable_implicit_rpaths = enable_implicit_rpaths
- self.cc  = paths[0]
+ self.cc = paths[0]
self.cxx = paths[1]
self.f77 = None
self.fc = None
@@ -297,7 +306,7 @@ class Compiler(object):
if len(paths) == 3:
self.fc = self.f77
else:
- self.fc  = paths[3]
+ self.fc = paths[3]
# Unfortunately have to make sure these params are accepted
# in the same order they are returned by sorted(flags)
@@ -321,6 +330,7 @@ class Compiler(object):
Raises a CompilerAccessError if any of the non-null paths for the
compiler are not accessible.
"""
+
def accessible_exe(exe):
# compilers may contain executable names (on Cray or user edited)
if not os.path.isabs(exe):
@@ -332,8 +342,11 @@ class Compiler(object):
# setup environment before verifying in case we have executable names
# instead of absolute paths
with self.compiler_environment():
- missing = [cmp for cmp in (self.cc, self.cxx, self.f77, self.fc)
- if cmp and not accessible_exe(cmp)]
+ missing = [
+ cmp
+ for cmp in (self.cc, self.cxx, self.f77, self.fc)
+ if cmp and not accessible_exe(cmp)
+ ]
if missing:
raise CompilerAccessError(self, missing)
@@ -349,9 +362,8 @@ class Compiler(object):
"""
if not self._real_version:
try:
- real_version = spack.version.Version(
- self.get_real_version())
- if real_version == spack.version.Version('unknown'):
+ real_version = spack.version.Version(self.get_real_version())
+ if real_version == spack.version.Version("unknown"):
return self.version
self._real_version = real_version
except spack.util.executable.ProcessError:
@@ -364,12 +376,10 @@ class Compiler(object):
# Put CXX first since it has the most linking issues
# And because it has flags that affect linking
- exe_paths = [
- x for x in [self.cxx, self.cc, self.fc, self.f77] if x]
+ exe_paths = [x for x in [self.cxx, self.cc, self.fc, self.f77] if x]
link_dirs = self._get_compiler_link_paths(exe_paths)
- all_required_libs = (
- list(self.required_libs) + Compiler._all_compiler_rpath_libraries)
+ all_required_libs = list(self.required_libs) + Compiler._all_compiler_rpath_libraries
return list(paths_containing_libs(link_dirs, all_required_libs))
@property
@@ -390,37 +400,36 @@ class Compiler(object):
return []
# What flag types apply to first_compiler, in what order
- flags = ['cppflags', 'ldflags']
+ flags = ["cppflags", "ldflags"]
if first_compiler == self.cc:
- flags = ['cflags'] + flags
+ flags = ["cflags"] + flags
elif first_compiler == self.cxx:
- flags = ['cxxflags'] + flags
+ flags = ["cxxflags"] + flags
else:
- flags.append('fflags')
+ flags.append("fflags")
try:
- tmpdir = tempfile.mkdtemp(prefix='spack-implicit-link-info')
- fout = os.path.join(tmpdir, 'output')
- fin = os.path.join(tmpdir, 'main.c')
+ tmpdir = tempfile.mkdtemp(prefix="spack-implicit-link-info")
+ fout = os.path.join(tmpdir, "output")
+ fin = os.path.join(tmpdir, "main.c")
- with open(fin, 'w+') as csource:
+ with open(fin, "w+") as csource:
csource.write(
- 'int main(int argc, char* argv[]) { '
- '(void)argc; (void)argv; return 0; }\n')
+ "int main(int argc, char* argv[]) { " "(void)argc; (void)argv; return 0; }\n"
+ )
compiler_exe = spack.util.executable.Executable(first_compiler)
for flag_type in flags:
for flag in self.flags.get(flag_type, []):
compiler_exe.add_default_arg(flag)
- output = ''
+ output = ""
with self.compiler_environment():
- output = str(compiler_exe(
- self.verbose_flag, fin, '-o', fout,
- output=str, error=str)) # str for py2
+ output = str(
+ compiler_exe(self.verbose_flag, fin, "-o", fout, output=str, error=str)
+ ) # str for py2
return _parse_non_system_link_dirs(output)
except spack.util.executable.ProcessError as pe:
- tty.debug('ProcessError: Command exited with non-zero status: ' +
- pe.long_message)
+ tty.debug("ProcessError: Command exited with non-zero status: " + pe.long_message)
return []
finally:
shutil.rmtree(tmpdir, ignore_errors=True)
@@ -452,69 +461,59 @@ class Compiler(object):
@property
def cxx11_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- raise UnsupportedCompilerFlag(self,
- "the C++11 standard",
- "cxx11_flag")
+ raise UnsupportedCompilerFlag(self, "the C++11 standard", "cxx11_flag")
# This property should be overridden in the compiler subclass if
# C++14 is supported by that compiler
@property
def cxx14_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- raise UnsupportedCompilerFlag(self,
- "the C++14 standard",
- "cxx14_flag")
+ raise UnsupportedCompilerFlag(self, "the C++14 standard", "cxx14_flag")
# This property should be overridden in the compiler subclass if
# C++17 is supported by that compiler
@property
def cxx17_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- raise UnsupportedCompilerFlag(self,
- "the C++17 standard",
- "cxx17_flag")
+ raise UnsupportedCompilerFlag(self, "the C++17 standard", "cxx17_flag")
# This property should be overridden in the compiler subclass if
# C99 is supported by that compiler
@property
def c99_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- raise UnsupportedCompilerFlag(self,
- "the C99 standard",
- "c99_flag")
+ raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag")
# This property should be overridden in the compiler subclass if
# C11 is supported by that compiler
@property
def c11_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- raise UnsupportedCompilerFlag(self,
- "the C11 standard",
- "c11_flag")
+ raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag")
@property
def cc_pic_flag(self):
"""Returns the flag used by the C compiler to produce
Position Independent Code (PIC)."""
- return '-fPIC'
+ return "-fPIC"
@property
def cxx_pic_flag(self):
"""Returns the flag used by the C++ compiler to produce
Position Independent Code (PIC)."""
- return '-fPIC'
+ return "-fPIC"
@property
def f77_pic_flag(self):
"""Returns the flag used by the F77 compiler to produce
Position Independent Code (PIC)."""
- return '-fPIC'
+ return "-fPIC"
@property
def fc_pic_flag(self):
"""Returns the flag used by the FC compiler to produce
Position Independent Code (PIC)."""
- return '-fPIC'
+ return "-fPIC"
# Note: This is not a class method. The class methods are used to detect
# compilers on PATH based systems, and do not set up the run environment of
@@ -530,9 +529,12 @@ class Compiler(object):
"""
cc = spack.util.executable.Executable(self.cc)
with self.compiler_environment():
- output = cc(self.version_argument,
- output=str, error=str,
- ignore_errors=tuple(self.ignore_version_errors))
+ output = cc(
+ self.version_argument,
+ output=str,
+ error=str,
+ ignore_errors=tuple(self.ignore_version_errors),
+ )
return self.extract_version_from_output(output)
#
@@ -546,7 +548,8 @@ class Compiler(object):
def default_version(cls, cc):
"""Override just this to override all compiler version functions."""
output = get_compiler_version_output(
- cc, cls.version_argument, tuple(cls.ignore_version_errors))
+ cc, cls.version_argument, tuple(cls.ignore_version_errors)
+ )
return cls.extract_version_from_output(output)
@classmethod
@@ -554,7 +557,7 @@ class Compiler(object):
def extract_version_from_output(cls, output):
"""Extracts the version from compiler's output."""
match = re.search(cls.version_regex, output)
- return match.group(1) if match else 'unknown'
+ return match.group(1) if match else "unknown"
@classmethod
def cc_version(cls, cc):
@@ -577,14 +580,13 @@ class Compiler(object):
# Compile all the regular expressions used for files beforehand.
# This searches for any combination of <prefix><name><suffix>
# defined for the compiler
- compiler_names = getattr(cls, '{0}_names'.format(language))
- prefixes = [''] + cls.prefixes
- suffixes = [''] + cls.suffixes
- regexp_fmt = r'^({0}){1}({2})$'
+ compiler_names = getattr(cls, "{0}_names".format(language))
+ prefixes = [""] + cls.prefixes
+ suffixes = [""] + cls.suffixes
+ regexp_fmt = r"^({0}){1}({2})$"
return [
re.compile(regexp_fmt.format(prefix, re.escape(name), suffix))
- for prefix, name, suffix in
- itertools.product(prefixes, compiler_names, suffixes)
+ for prefix, name, suffix in itertools.product(prefixes, compiler_names, suffixes)
]
def setup_custom_environment(self, pkg, env):
@@ -598,9 +600,21 @@ class Compiler(object):
def __str__(self):
"""Return a string representation of the compiler toolchain."""
return "%s(%s)" % (
- self.name, '\n '.join((str(s) for s in (
- self.cc, self.cxx, self.f77, self.fc, self.modules,
- str(self.operating_system)))))
+ self.name,
+ "\n ".join(
+ (
+ str(s)
+ for s in (
+ self.cc,
+ self.cxx,
+ self.f77,
+ self.fc,
+ self.modules,
+ str(self.operating_system),
+ )
+ )
+ ),
+ )
@contextlib.contextmanager
def compiler_environment(self):
@@ -617,8 +631,8 @@ class Compiler(object):
for module in self.modules:
# On cray, mic-knl module cannot be loaded without cce module
# See: https://github.com/spack/spack/issues/3153
- if os.environ.get("CRAY_CPU_TARGET") == 'mic-knl':
- spack.util.module_cmd.load_module('cce')
+ if os.environ.get("CRAY_CPU_TARGET") == "mic-knl":
+ spack.util.module_cmd.load_module("cce")
spack.util.module_cmd.load_module(module)
# apply other compiler environment changes
@@ -643,23 +657,18 @@ class CompilerAccessError(spack.error.SpackError):
class InvalidCompilerError(spack.error.SpackError):
-
def __init__(self):
- super(InvalidCompilerError, self).__init__(
- "Compiler has no executables.")
+ super(InvalidCompilerError, self).__init__("Compiler has no executables.")
class UnsupportedCompilerFlag(spack.error.SpackError):
-
def __init__(self, compiler, feature, flag_name, ver_string=None):
super(UnsupportedCompilerFlag, self).__init__(
- "{0} ({1}) does not support {2} (as compiler.{3})."
- .format(compiler.name,
- ver_string if ver_string else compiler.version,
- feature,
- flag_name),
- "If you think it should, please edit the compiler.{0} subclass to"
- .format(compiler.name) +
- " implement the {0} property and submit a pull request or issue."
- .format(flag_name)
+ "{0} ({1}) does not support {2} (as compiler.{3}).".format(
+ compiler.name, ver_string if ver_string else compiler.version, feature, flag_name
+ ),
+ "If you think it should, please edit the compiler.{0} subclass to".format(
+ compiler.name
+ )
+ + " implement the {0} property and submit a pull request or issue.".format(flag_name),
)
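The compiler.py hunks above include a reformat of Compiler.search_regexps, which combines optional prefixes, the compiler names, and optional suffixes into anchored regular expressions used to spot compiler executables on PATH. A minimal standalone sketch of that matching behaviour, using local illustrative values rather than Spack's own classes:

import itertools
import re

prefixes = [""]                 # assumed: no vendor prefixes configured
names = ["gcc"]                 # assumed compiler name, for illustration only
suffixes = ["", r"-.*"]         # empty suffix plus the default r"-.*" pattern
regexp_fmt = r"^({0}){1}({2})$"

regexps = [
    re.compile(regexp_fmt.format(prefix, re.escape(name), suffix))
    for prefix, name, suffix in itertools.product(prefixes, names, suffixes)
]

for candidate in ["gcc", "gcc-11", "g++"]:
    # gcc and gcc-11 match; g++ does not, since '+' is escaped out of the name
    print(candidate, any(r.match(candidate) for r in regexps))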
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index a57ca6ac55..1d61f37df7 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -29,10 +29,15 @@ import spack.spec
from spack.util.environment import get_path
from spack.util.naming import mod_to_class
-_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
-_flags_instance_vars = ['cflags', 'cppflags', 'cxxflags', 'fflags']
-_other_instance_vars = ['modules', 'operating_system', 'environment',
- 'implicit_rpaths', 'extra_rpaths']
+_path_instance_vars = ["cc", "cxx", "f77", "fc"]
+_flags_instance_vars = ["cflags", "cppflags", "cxxflags", "fflags"]
+_other_instance_vars = [
+ "modules",
+ "operating_system",
+ "environment",
+ "implicit_rpaths",
+ "extra_rpaths",
+]
_cache_config_file = []
# TODO: Caches at module level make it difficult to mock configurations in
@@ -41,16 +46,15 @@ _cache_config_file = []
_compiler_cache = {} # type: Dict[str, spack.compiler.Compiler]
_compiler_to_pkg = {
- 'clang': 'llvm+clang',
- 'oneapi': 'intel-oneapi-compilers',
- 'rocmcc': 'llvm-amdgpu'
+ "clang": "llvm+clang",
+ "oneapi": "intel-oneapi-compilers",
+ "rocmcc": "llvm-amdgpu",
}
def pkg_spec_for_compiler(cspec):
"""Return the spec of the package that provides the compiler."""
- spec_str = '%s@%s' % (_compiler_to_pkg.get(cspec.name, cspec.name),
- cspec.versions)
+ spec_str = "%s@%s" % (_compiler_to_pkg.get(cspec.name, cspec.name), cspec.versions)
return spack.spec.Spec(spec_str)
@@ -59,45 +63,49 @@ def _auto_compiler_spec(function):
if not isinstance(cspec_like, spack.spec.CompilerSpec):
cspec_like = spack.spec.CompilerSpec(cspec_like)
return function(cspec_like, *args, **kwargs)
+
return converter
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
d = {}
- d['spec'] = str(compiler.spec)
- d['paths'] = dict((attr, getattr(compiler, attr, None))
- for attr in _path_instance_vars)
- d['flags'] = dict((fname, fvals) for fname, fvals in compiler.flags)
- d['flags'].update(dict((attr, getattr(compiler, attr, None))
- for attr in _flags_instance_vars
- if hasattr(compiler, attr)))
- d['operating_system'] = str(compiler.operating_system)
- d['target'] = str(compiler.target)
- d['modules'] = compiler.modules or []
- d['environment'] = compiler.environment or {}
- d['extra_rpaths'] = compiler.extra_rpaths or []
+ d["spec"] = str(compiler.spec)
+ d["paths"] = dict((attr, getattr(compiler, attr, None)) for attr in _path_instance_vars)
+ d["flags"] = dict((fname, fvals) for fname, fvals in compiler.flags)
+ d["flags"].update(
+ dict(
+ (attr, getattr(compiler, attr, None))
+ for attr in _flags_instance_vars
+ if hasattr(compiler, attr)
+ )
+ )
+ d["operating_system"] = str(compiler.operating_system)
+ d["target"] = str(compiler.target)
+ d["modules"] = compiler.modules or []
+ d["environment"] = compiler.environment or {}
+ d["extra_rpaths"] = compiler.extra_rpaths or []
if compiler.enable_implicit_rpaths is not None:
- d['implicit_rpaths'] = compiler.enable_implicit_rpaths
+ d["implicit_rpaths"] = compiler.enable_implicit_rpaths
if compiler.alias:
- d['alias'] = compiler.alias
+ d["alias"] = compiler.alias
- return {'compiler': d}
+ return {"compiler": d}
def get_compiler_config(scope=None, init_config=True):
- """Return the compiler configuration for the specified architecture.
- """
+ """Return the compiler configuration for the specified architecture."""
+
def init_compiler_config():
"""Compiler search used when Spack has no compilers."""
compilers = find_compilers()
compilers_dict = []
for compiler in compilers:
compilers_dict.append(_to_dict(compiler))
- spack.config.set('compilers', compilers_dict, scope=scope)
+ spack.config.set("compilers", compilers_dict, scope=scope)
- config = spack.config.get('compilers', scope=scope)
+ config = spack.config.get("compilers", scope=scope)
# Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones.
@@ -105,15 +113,15 @@ def get_compiler_config(scope=None, init_config=True):
if scope is None:
# We know no compilers were configured in any scope.
init_compiler_config()
- config = spack.config.get('compilers', scope=scope)
- elif scope == 'user':
+ config = spack.config.get("compilers", scope=scope)
+ elif scope == "user":
# Check the site config and update the user config if
# nothing is configured at the site level.
- site_config = spack.config.get('compilers', scope='site')
- sys_config = spack.config.get('compilers', scope='system')
+ site_config = spack.config.get("compilers", scope="site")
+ sys_config = spack.config.get("compilers", scope="system")
if not site_config and not sys_config:
init_compiler_config()
- config = spack.config.get('compilers', scope=scope)
+ config = spack.config.get("compilers", scope=scope)
return config
elif config:
return config
@@ -126,9 +134,9 @@ def compiler_config_files():
config = spack.config.config
for scope in config.file_scopes:
name = scope.name
- compiler_config = config.get('compilers', scope=name)
+ compiler_config = config.get("compilers", scope=name)
if compiler_config:
- config_files.append(config.get_config_filename(name, 'compilers'))
+ config_files.append(config.get_config_filename(name, "compilers"))
return config_files
@@ -144,7 +152,7 @@ def add_compilers_to_config(compilers, scope=None, init_config=True):
compiler_config.append(_to_dict(compiler))
global _cache_config_file
_cache_config_file = compiler_config
- spack.config.set('compilers', compiler_config, scope=scope)
+ spack.config.set("compilers", compiler_config, scope=scope)
@_auto_compiler_spec
@@ -162,19 +170,21 @@ def remove_compiler_from_config(compiler_spec, scope=None):
config_length = len(compiler_config)
filtered_compiler_config = [
- comp for comp in compiler_config
- if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
+ comp
+ for comp in compiler_config
+ if spack.spec.CompilerSpec(comp["compiler"]["spec"]) != compiler_spec
+ ]
# Update the cache for changes
_cache_config_file = filtered_compiler_config
if len(filtered_compiler_config) == config_length: # No items removed
CompilerSpecInsufficientlySpecificError(compiler_spec)
- spack.config.set('compilers', filtered_compiler_config, scope=scope)
+ spack.config.set("compilers", filtered_compiler_config, scope=scope)
def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
- available to build with. These are instances of CompilerSpec.
+ available to build with. These are instances of CompilerSpec.
"""
# Get compilers for this architecture.
# Create a cache of the config file so we don't load all the time.
@@ -188,8 +198,10 @@ def all_compilers_config(scope=None, init_config=True):
def all_compiler_specs(scope=None, init_config=True):
# Return compiler specs from the merged config.
- return [spack.spec.CompilerSpec(s['compiler']['spec'])
- for s in all_compilers_config(scope, init_config)]
+ return [
+ spack.spec.CompilerSpec(s["compiler"]["spec"])
+ for s in all_compilers_config(scope, init_config)
+ ]
def find_compilers(path_hints=None):
@@ -201,7 +213,7 @@ def find_compilers(path_hints=None):
will be used if the value is None
"""
if path_hints is None:
- path_hints = get_path('PATH')
+ path_hints = get_path("PATH")
default_paths = fs.search_paths_for_executables(*path_hints)
# To detect the version of the compilers, we dispatch a certain number
@@ -209,7 +221,7 @@ def find_compilers(path_hints=None):
# of arguments for each call.
arguments = []
for o in all_os_classes():
- search_paths = getattr(o, 'compiler_search_paths', default_paths)
+ search_paths = getattr(o, "compiler_search_paths", default_paths)
arguments.extend(arguments_to_detect_version_fn(o, search_paths))
# Here we map the function arguments to the corresponding calls
@@ -235,9 +247,7 @@ def find_compilers(path_hints=None):
value, _ = item
return value
- return make_compiler_list(
- map(remove_errors, filter(valid_version, detected_versions))
- )
+ return make_compiler_list(map(remove_errors, filter(valid_version, detected_versions)))
def find_new_compilers(path_hints=None, scope=None):
@@ -262,9 +272,7 @@ def select_new_compilers(compilers, scope=None):
compilers_not_in_config = []
for c in compilers:
arch_spec = spack.spec.ArchSpec((None, c.operating_system, c.target))
- same_specs = compilers_for_spec(
- c.spec, arch_spec, scope=scope, init_config=False
- )
+ same_specs = compilers_for_spec(c.spec, arch_spec, scope=scope, init_config=False)
if not same_specs:
compilers_not_in_config.append(c)
@@ -274,13 +282,15 @@ def select_new_compilers(compilers, scope=None):
def supported_compilers():
"""Return a set of names of compilers supported by Spack.
- See available_compilers() to get a list of all the available
- versions of supported compilers.
+ See available_compilers() to get a list of all the available
+ versions of supported compilers.
"""
# Hack to be able to call the compiler `apple-clang` while still
# using a valid python name for the module
- return sorted(name if name != 'apple_clang' else 'apple-clang' for name in
- llnl.util.lang.list_modules(spack.paths.compilers_path))
+ return sorted(
+ name if name != "apple_clang" else "apple-clang"
+ for name in llnl.util.lang.list_modules(spack.paths.compilers_path)
+ )
@_auto_compiler_spec
@@ -292,37 +302,32 @@ def supported(compiler_spec):
@_auto_compiler_spec
def find(compiler_spec, scope=None, init_config=True):
"""Return specs of available compilers that match the supplied
- compiler spec. Return an empty list if nothing found."""
- return [c for c in all_compiler_specs(scope, init_config)
- if c.satisfies(compiler_spec)]
+ compiler spec. Return an empty list if nothing found."""
+ return [c for c in all_compiler_specs(scope, init_config) if c.satisfies(compiler_spec)]
@_auto_compiler_spec
def find_specs_by_arch(compiler_spec, arch_spec, scope=None, init_config=True):
"""Return specs of available compilers that match the supplied
- compiler spec. Return an empty list if nothing found."""
- return [c.spec for c in compilers_for_spec(compiler_spec,
- arch_spec,
- scope,
- True,
- init_config)]
+ compiler spec. Return an empty list if nothing found."""
+ return [c.spec for c in compilers_for_spec(compiler_spec, arch_spec, scope, True, init_config)]
def all_compilers(scope=None, init_config=True):
config = get_compiler_config(scope, init_config=init_config)
compilers = list()
for items in config:
- items = items['compiler']
+ items = items["compiler"]
compilers.append(_compiler_from_config_entry(items))
return compilers
@_auto_compiler_spec
def compilers_for_spec(
- compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
+ compiler_spec, arch_spec=None, scope=None, use_cache=True, init_config=True
):
"""This gets all compilers that satisfy the supplied CompilerSpec.
- Returns an empty list if none are found.
+ Returns an empty list if none are found.
"""
if use_cache:
config = all_compilers_config(scope, init_config)
@@ -343,11 +348,12 @@ def compilers_for_arch(arch_spec, scope=None):
class CacheReference(object):
"""This acts as a hashable reference to any object (regardless of whether
- the object itself is hashable) and also prevents the object from being
- garbage-collected (so if two CacheReference objects are equal, they
- will refer to the same object, since it will not have been gc'ed since
- the creation of the first CacheReference).
+ the object itself is hashable) and also prevents the object from being
+ garbage-collected (so if two CacheReference objects are equal, they
+ will refer to the same object, since it will not have been gc'ed since
+ the creation of the first CacheReference).
"""
+
def __init__(self, val):
self.val = val
self.id = id(val)
@@ -360,33 +366,32 @@ class CacheReference(object):
def compiler_from_dict(items):
- cspec = spack.spec.CompilerSpec(items['spec'])
- os = items.get('operating_system', None)
- target = items.get('target', None)
+ cspec = spack.spec.CompilerSpec(items["spec"])
+ os = items.get("operating_system", None)
+ target = items.get("target", None)
- if not ('paths' in items and
- all(n in items['paths'] for n in _path_instance_vars)):
+ if not ("paths" in items and all(n in items["paths"] for n in _path_instance_vars)):
raise InvalidCompilerConfigurationError(cspec)
- cls = class_for_compiler_name(cspec.name)
+ cls = class_for_compiler_name(cspec.name)
compiler_paths = []
for c in _path_instance_vars:
- compiler_path = items['paths'][c]
- if compiler_path != 'None':
+ compiler_path = items["paths"][c]
+ if compiler_path != "None":
compiler_paths.append(compiler_path)
else:
compiler_paths.append(None)
- mods = items.get('modules')
- if mods == 'None':
+ mods = items.get("modules")
+ if mods == "None":
mods = []
- alias = items.get('alias', None)
- compiler_flags = items.get('flags', {})
- environment = items.get('environment', {})
- extra_rpaths = items.get('extra_rpaths', [])
- implicit_rpaths = items.get('implicit_rpaths', None)
+ alias = items.get("alias", None)
+ compiler_flags = items.get("flags", {})
+ environment = items.get("environment", {})
+ extra_rpaths = items.get("extra_rpaths", [])
+ implicit_rpaths = items.get("implicit_rpaths", None)
# Starting with c22a145, 'implicit_rpaths' was a list. Now it is a
# boolean which can be set by the user to disable all automatic
@@ -394,18 +399,26 @@ def compiler_from_dict(items):
if implicit_rpaths is not None and not isinstance(implicit_rpaths, bool):
implicit_rpaths = None
- return cls(cspec, os, target, compiler_paths, mods, alias,
- environment, extra_rpaths,
- enable_implicit_rpaths=implicit_rpaths,
- **compiler_flags)
+ return cls(
+ cspec,
+ os,
+ target,
+ compiler_paths,
+ mods,
+ alias,
+ environment,
+ extra_rpaths,
+ enable_implicit_rpaths=implicit_rpaths,
+ **compiler_flags
+ )
def _compiler_from_config_entry(items):
"""Note this is intended for internal use only. To avoid re-parsing
- the same config dictionary this keeps track of its location in
- memory. If you provide the same dictionary twice it will return
- the same Compiler object (regardless of whether the dictionary
- entries have changed).
+ the same config dictionary this keeps track of its location in
+ memory. If you provide the same dictionary twice it will return
+ the same Compiler object (regardless of whether the dictionary
+ entries have changed).
"""
config_id = CacheReference(items)
compiler = _compiler_cache.get(config_id, None)
@@ -421,13 +434,13 @@ def get_compilers(config, cspec=None, arch_spec=None):
compilers = []
for items in config:
- items = items['compiler']
- if cspec and items['spec'] != str(cspec):
+ items = items["compiler"]
+ if cspec and items["spec"] != str(cspec):
continue
# If an arch spec is given, confirm that this compiler
# is for the given operating system
- os = items.get('operating_system', None)
+ os = items.get("operating_system", None)
if arch_spec and os != arch_spec.os:
continue
@@ -435,7 +448,7 @@ def get_compilers(config, cspec=None, arch_spec=None):
# is for the given target. If the target is 'any', match
# any given arch spec. If the compiler has no assigned
# target this is an old compiler config file, skip this logic.
- target = items.get('target', None)
+ target = items.get("target", None)
try:
current_target = archspec.cpu.TARGETS[str(arch_spec.target)]
@@ -447,14 +460,16 @@ def get_compilers(config, cspec=None, arch_spec=None):
except AttributeError:
assert arch_spec is None
- if arch_spec and target and (target != family and target != 'any'):
+ if arch_spec and target and (target != family and target != "any"):
# If the family of the target is the family we are seeking,
# there's an error in the underlying configuration
if archspec.cpu.TARGETS[target].family == family:
- msg = ('the "target" field in compilers.yaml accepts only '
- 'target families [replace "{0}" with "{1}"'
- ' in "{2}" specification]')
- msg = msg.format(str(target), family, items.get('spec', '??'))
+ msg = (
+ 'the "target" field in compilers.yaml accepts only '
+ 'target families [replace "{0}" with "{1}"'
+ ' in "{2}" specification]'
+ )
+ msg = msg.format(str(target), family, items.get("spec", "??"))
raise ValueError(msg)
continue
@@ -466,16 +481,16 @@ def get_compilers(config, cspec=None, arch_spec=None):
@_auto_compiler_spec
def compiler_for_spec(compiler_spec, arch_spec):
"""Get the compiler that satisfies compiler_spec. compiler_spec must
- be concrete."""
- assert(compiler_spec.concrete)
- assert(arch_spec.concrete)
+ be concrete."""
+ assert compiler_spec.concrete
+ assert arch_spec.concrete
compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec)
if len(compilers) < 1:
raise NoCompilerForSpecError(compiler_spec, arch_spec.os)
if len(compilers) > 1:
- msg = 'Multiple definitions of compiler %s' % compiler_spec
- msg += 'for architecture %s:\n %s' % (arch_spec, compilers)
+ msg = "Multiple definitions of compiler %s" % compiler_spec
+ msg += "for architecture %s:\n %s" % (arch_spec, compilers)
tty.debug(msg)
return compilers[0]
@@ -486,14 +501,15 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
scope_to_compilers = {}
for scope in config.scopes:
- compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec,
- scope=scope, use_cache=False)
+ compilers = compilers_for_spec(
+ compiler_spec, arch_spec=arch_spec, scope=scope, use_cache=False
+ )
if compilers:
scope_to_compilers[scope] = compilers
cfg_file_to_duplicates = {}
for scope, compilers in scope_to_compilers.items():
- config_file = config.get_config_filename(scope, 'compilers')
+ config_file = config.get_config_filename(scope, "compilers")
cfg_file_to_duplicates[config_file] = compilers
return cfg_file_to_duplicates
@@ -508,10 +524,10 @@ def class_for_compiler_name(compiler_name):
# Hack to be able to call the compiler `apple-clang` while still
# using a valid python name for the module
submodule_name = compiler_name
- if compiler_name == 'apple-clang':
- submodule_name = compiler_name.replace('-', '_')
+ if compiler_name == "apple-clang":
+ submodule_name = compiler_name.replace("-", "_")
- module_name = '.'.join(['spack', 'compilers', submodule_name])
+ module_name = ".".join(["spack", "compilers", submodule_name])
module_obj = __import__(module_name, fromlist=[None])
cls = getattr(module_obj, mod_to_class(compiler_name))
@@ -546,12 +562,10 @@ def all_compiler_types():
#: - compiler_name: the name of the compiler (e.g. 'gcc', 'clang', etc.)
#: - version: the version of the compiler
#:
-CompilerID = collections.namedtuple(
- 'CompilerID', ['os', 'compiler_name', 'version']
-)
+CompilerID = collections.namedtuple("CompilerID", ["os", "compiler_name", "version"])
#: Variations on a matched compiler name
-NameVariation = collections.namedtuple('NameVariation', ['prefix', 'suffix'])
+NameVariation = collections.namedtuple("NameVariation", ["prefix", "suffix"])
#: Groups together the arguments needed by `detect_version`. The four entries
#: in the tuple are:
@@ -563,7 +577,7 @@ NameVariation = collections.namedtuple('NameVariation', ['prefix', 'suffix'])
#: - path: full path to the executable being tested
#:
DetectVersionArgs = collections.namedtuple(
- 'DetectVersionArgs', ['id', 'variation', 'language', 'path']
+ "DetectVersionArgs", ["id", "variation", "language", "path"]
)
@@ -584,6 +598,7 @@ def arguments_to_detect_version_fn(operating_system, paths):
mapped to the corresponding function call to detect the version of the
compilers in this OS.
"""
+
def _default(search_paths):
command_arguments = []
files_to_be_tested = fs.files_in(*search_paths)
@@ -591,30 +606,26 @@ def arguments_to_detect_version_fn(operating_system, paths):
compiler_cls = class_for_compiler_name(compiler_name)
- for language in ('cc', 'cxx', 'f77', 'fc'):
+ for language in ("cc", "cxx", "f77", "fc"):
# Select only the files matching a regexp
for (file, full_path), regexp in itertools.product(
- files_to_be_tested,
- compiler_cls.search_regexps(language)
+ files_to_be_tested, compiler_cls.search_regexps(language)
):
match = regexp.match(file)
if match:
- compiler_id = CompilerID(
- operating_system, compiler_name, None
- )
+ compiler_id = CompilerID(operating_system, compiler_name, None)
detect_version_args = DetectVersionArgs(
id=compiler_id,
variation=NameVariation(*match.groups()),
- language=language, path=full_path
+ language=language,
+ path=full_path,
)
command_arguments.append(detect_version_args)
return command_arguments
- fn = getattr(
- operating_system, 'arguments_to_detect_version_fn', _default
- )
+ fn = getattr(operating_system, "arguments_to_detect_version_fn", _default)
return fn(paths)
@@ -636,6 +647,7 @@ def detect_version(detect_version_args):
of the tuple will contain it. Otherwise ``error`` is a string
containing an explanation on why the version couldn't be computed.
"""
+
def _default(fn_args):
compiler_id = fn_args.id
language = fn_args.language
@@ -643,31 +655,27 @@ def detect_version(detect_version_args):
path = fn_args.path
# Get compiler names and the callback to detect their versions
- callback = getattr(compiler_cls, '{0}_version'.format(language))
+ callback = getattr(compiler_cls, "{0}_version".format(language))
try:
version = callback(path)
- if version and six.text_type(version).strip() \
- and version != 'unknown':
- value = fn_args._replace(
- id=compiler_id._replace(version=version)
- )
+ if version and six.text_type(version).strip() and version != "unknown":
+ value = fn_args._replace(id=compiler_id._replace(version=version))
return value, None
error = "Couldn't get version for compiler {0}".format(path)
except spack.util.executable.ProcessError as e:
- error = "Couldn't get version for compiler {0}\n".format(path) + \
- six.text_type(e)
+ error = "Couldn't get version for compiler {0}\n".format(path) + six.text_type(e)
except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
- error = "Error while executing candidate compiler {0}" \
- "\n{1}: {2}".format(path, e.__class__.__name__,
- six.text_type(e))
+ error = "Error while executing candidate compiler {0}" "\n{1}: {2}".format(
+ path, e.__class__.__name__, six.text_type(e)
+ )
return None, error
operating_system = detect_version_args.id.os
- fn = getattr(operating_system, 'detect_version', _default)
+ fn = getattr(operating_system, "detect_version", _default)
return fn(detect_version_args)
@@ -697,11 +705,9 @@ def make_compiler_list(detected_versions):
operating_system, compiler_name, version = cmp_id
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
- paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
+ paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
target = archspec.cpu.host()
- compiler = compiler_cls(
- spec, operating_system, str(target.family), paths
- )
+ compiler = compiler_cls(spec, operating_system, str(target.family), paths)
return [compiler]
# For compilers with the same compiler id:
@@ -711,10 +717,10 @@ def make_compiler_list(detected_versions):
# - Prefer no variations to variations (e.g., clang to clang-gpu)
#
sort_fn = lambda variation: (
- 'cc' not in by_compiler_id[variation], # None last
- 'cxx' not in by_compiler_id[variation], # None last
- getattr(variation, 'prefix', None),
- getattr(variation, 'suffix', None),
+ "cc" not in by_compiler_id[variation], # None last
+ "cxx" not in by_compiler_id[variation], # None last
+ getattr(variation, "prefix", None),
+ getattr(variation, "suffix", None),
)
compilers = []
@@ -724,17 +730,16 @@ def make_compiler_list(detected_versions):
selected = by_compiler_id[selected_variation]
# fill any missing parts from subsequent entries
- for lang in ['cxx', 'f77', 'fc']:
+ for lang in ["cxx", "f77", "fc"]:
if lang not in selected:
- next_lang = next((
- by_compiler_id[v][lang] for v in ordered
- if lang in by_compiler_id[v]), None)
+ next_lang = next(
+ (by_compiler_id[v][lang] for v in ordered if lang in by_compiler_id[v]), None
+ )
if next_lang:
selected[lang] = next_lang
operating_system, _, _ = compiler_id
- make_compilers = getattr(
- operating_system, 'make_compilers', _default_make_compilers)
+ make_compilers = getattr(operating_system, "make_compilers", _default_make_compilers)
compilers.extend(make_compilers(compiler_id, selected))
@@ -748,10 +753,10 @@ def is_mixed_toolchain(compiler):
Args:
compiler (spack.compiler.Compiler): a valid compiler object
"""
- cc = os.path.basename(compiler.cc or '')
- cxx = os.path.basename(compiler.cxx or '')
- f77 = os.path.basename(compiler.f77 or '')
- fc = os.path.basename(compiler.fc or '')
+ cc = os.path.basename(compiler.cc or "")
+ cxx = os.path.basename(compiler.cxx or "")
+ f77 = os.path.basename(compiler.f77 or "")
+ fc = os.path.basename(compiler.fc or "")
toolchains = set()
for compiler_cls in all_compiler_types():
@@ -760,21 +765,24 @@ def is_mixed_toolchain(compiler):
def name_matches(name, name_list):
# This is such that 'gcc' matches variations
# like 'gcc-9' etc. that are found in distros
- name, _, _ = name.partition('-')
+ name, _, _ = name.partition("-")
return len(name_list) == 1 and name and name in name_list
- if any([
- name_matches(cc, compiler_cls.cc_names),
- name_matches(cxx, compiler_cls.cxx_names),
- name_matches(f77, compiler_cls.f77_names),
- name_matches(fc, compiler_cls.fc_names)
- ]):
+ if any(
+ [
+ name_matches(cc, compiler_cls.cc_names),
+ name_matches(cxx, compiler_cls.cxx_names),
+ name_matches(f77, compiler_cls.f77_names),
+ name_matches(fc, compiler_cls.fc_names),
+ ]
+ ):
tty.debug("[TOOLCHAIN] MATCH {0}".format(compiler_cls.__name__))
toolchains.add(compiler_cls.__name__)
if len(toolchains) > 1:
- if toolchains == set(['Clang', 'AppleClang', 'Aocc']) or \
- toolchains == set(['Dpcpp', 'Oneapi']):
+ if toolchains == set(["Clang", "AppleClang", "Aocc"]) or toolchains == set(
+ ["Dpcpp", "Oneapi"]
+ ):
return False
tty.debug("[TOOLCHAINS] {0}".format(toolchains))
return True
@@ -783,53 +791,52 @@ def is_mixed_toolchain(compiler):
class InvalidCompilerConfigurationError(spack.error.SpackError):
-
def __init__(self, compiler_spec):
super(InvalidCompilerConfigurationError, self).__init__(
- "Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
+ 'Invalid configuration for [compiler "%s"]: ' % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
- % _path_instance_vars)
+ % _path_instance_vars,
+ )
class NoCompilersError(spack.error.SpackError):
def __init__(self):
- super(NoCompilersError, self).__init__(
- "Spack could not find any compilers!")
+ super(NoCompilersError, self).__init__("Spack could not find any compilers!")
class UnknownCompilerError(spack.error.SpackError):
def __init__(self, compiler_name):
super(UnknownCompilerError, self).__init__(
- "Spack doesn't support the requested compiler: {0}"
- .format(compiler_name))
+ "Spack doesn't support the requested compiler: {0}".format(compiler_name)
+ )
class NoCompilerForSpecError(spack.error.SpackError):
def __init__(self, compiler_spec, target):
super(NoCompilerForSpecError, self).__init__(
- "No compilers for operating system %s satisfy spec %s"
- % (target, compiler_spec))
+ "No compilers for operating system %s satisfy spec %s" % (target, compiler_spec)
+ )
class CompilerDuplicateError(spack.error.SpackError):
def __init__(self, compiler_spec, arch_spec):
- config_file_to_duplicates = get_compiler_duplicates(
- compiler_spec, arch_spec)
- duplicate_table = list(
- (x, len(y)) for x, y in config_file_to_duplicates.items())
- descriptor = lambda num: 'time' if num == 1 else 'times'
- duplicate_msg = (
- lambda cfgfile, count: "{0}: {1} {2}".format(
- cfgfile, str(count), descriptor(count)))
+ config_file_to_duplicates = get_compiler_duplicates(compiler_spec, arch_spec)
+ duplicate_table = list((x, len(y)) for x, y in config_file_to_duplicates.items())
+ descriptor = lambda num: "time" if num == 1 else "times"
+ duplicate_msg = lambda cfgfile, count: "{0}: {1} {2}".format(
+ cfgfile, str(count), descriptor(count)
+ )
msg = (
- "Compiler configuration contains entries with duplicate" +
- " specification ({0}, {1})".format(compiler_spec, arch_spec) +
- " in the following files:\n\t" +
- '\n\t'.join(duplicate_msg(x, y) for x, y in duplicate_table))
+ "Compiler configuration contains entries with duplicate"
+ + " specification ({0}, {1})".format(compiler_spec, arch_spec)
+ + " in the following files:\n\t"
+ + "\n\t".join(duplicate_msg(x, y) for x, y in duplicate_table)
+ )
super(CompilerDuplicateError, self).__init__(msg)
class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
def __init__(self, compiler_spec):
super(CompilerSpecInsufficientlySpecificError, self).__init__(
- "Multiple compilers satisfy spec %s" % compiler_spec)
+ "Multiple compilers satisfy spec %s" % compiler_spec
+ )
diff --git a/lib/spack/spack/compilers/aocc.py b/lib/spack/spack/compilers/aocc.py
index 345161bdeb..af72599457 100644
--- a/lib/spack/spack/compilers/aocc.py
+++ b/lib/spack/spack/compilers/aocc.py
@@ -15,38 +15,48 @@ from spack.version import ver
class Aocc(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['clang']
+ cc_names = ["clang"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['clang++']
+ cxx_names = ["clang++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['flang']
+ f77_names = ["flang"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['flang']
+ fc_names = ["flang"]
- PrgEnv = 'PrgEnv-aocc'
- PrgEnv_compiler = 'aocc'
+ PrgEnv = "PrgEnv-aocc"
+ PrgEnv_compiler = "aocc"
- version_argument = '--version'
+ version_argument = "--version"
@property
def debug_flags(self):
- return ['-gcodeview', '-gdwarf-2', '-gdwarf-3', '-gdwarf-4',
- '-gdwarf-5', '-gline-tables-only', '-gmodules', '-gz', '-g']
+ return [
+ "-gcodeview",
+ "-gdwarf-2",
+ "-gdwarf-3",
+ "-gdwarf-4",
+ "-gdwarf-5",
+ "-gline-tables-only",
+ "-gmodules",
+ "-gz",
+ "-g",
+ ]
@property
def opt_flags(self):
- return ['-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os', '-Oz', '-Og',
- '-O', '-O4']
+ return ["-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz", "-Og", "-O", "-O4"]
@property
def link_paths(self):
- link_paths = {'cc': os.path.join('aocc', 'clang'),
- 'cxx': os.path.join('aocc', 'clang++'),
- 'f77': os.path.join('aocc', 'flang'),
- 'fc': os.path.join('aocc', 'flang')}
+ link_paths = {
+ "cc": os.path.join("aocc", "clang"),
+ "cxx": os.path.join("aocc", "clang++"),
+ "f77": os.path.join("aocc", "flang"),
+ "fc": os.path.join("aocc", "flang"),
+ }
return link_paths
@@ -72,7 +82,7 @@ class Aocc(Compiler):
@property
def c99_flag(self):
- return '-std=c99'
+ return "-std=c99"
@property
def c11_flag(self):
@@ -94,22 +104,19 @@ class Aocc(Compiler):
def fc_pic_flag(self):
return "-fPIC"
- required_libs = ['libclang']
+ required_libs = ["libclang"]
@classmethod
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
- match = re.search(
- r'AOCC_(\d+)[._](\d+)[._](\d+)',
- output
- )
+ match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", output)
if match:
- return '.'.join(match.groups())
+ return ".".join(match.groups())
@classmethod
def fc_version(cls, fortran_compiler):
- if sys.platform == 'darwin':
- return cls.default_version('clang')
+ if sys.platform == "darwin":
+ return cls.default_version("clang")
return cls.default_version(fortran_compiler)
@@ -119,7 +126,7 @@ class Aocc(Compiler):
@property
def stdcxx_libs(self):
- return ('-lstdc++', )
+ return ("-lstdc++",)
@property
def cflags(self):
@@ -136,6 +143,5 @@ class Aocc(Compiler):
def _handle_default_flag_addtions(self):
# This is a known issue for AOCC 3.0 see:
# https://developer.amd.com/wp-content/resources/AOCC-3.0-Install-Guide.pdf
- if self.real_version == ver('3.0.0'):
- return ("-Wno-unused-command-line-argument "
- "-mllvm -eliminate-similar-expr=false")
+ if self.real_version == ver("3.0.0"):
+ return "-Wno-unused-command-line-argument " "-mllvm -eliminate-similar-expr=false"
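The aocc.py hunk above reformats the AOCC version regex and the group join. A quick illustrative check, run against an assumed sample banner rather than output captured from a real aocc binary:

import re

# assumed banner text for illustration
sample = "AMD clang version 12.0.0 (CLANG: AOCC_3.1.0-Build#126 2021_06_07)"
match = re.search(r"AOCC_(\d+)[._](\d+)[._](\d+)", sample)
print(".".join(match.groups()) if match else None)   # prints 3.1.0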
diff --git a/lib/spack/spack/compilers/apple_clang.py b/lib/spack/spack/compilers/apple_clang.py
index e9efd89c49..0db3f11f6d 100644
--- a/lib/spack/spack/compilers/apple_clang.py
+++ b/lib/spack/spack/compilers/apple_clang.py
@@ -22,10 +22,10 @@ class AppleClang(spack.compilers.clang.Clang):
@classmethod
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
- ver = 'unknown'
+ ver = "unknown"
match = re.search(
# Apple's LLVM compiler has its own versions, so suffix them.
- r'^Apple (?:LLVM|clang) version ([^ )]+)',
+ r"^Apple (?:LLVM|clang) version ([^ )]+)",
output,
# Multi-line, since 'Apple clang' may not be on the first line
# in particular, when run as gcc, it seems to output
@@ -40,7 +40,7 @@ class AppleClang(spack.compilers.clang.Clang):
def cxx11_flag(self):
# Adapted from CMake's AppleClang-CXX rules
# Spack's AppleClang detection only valid from Xcode >= 4.6
- if self.real_version < spack.version.ver('4.0.0'):
+ if self.real_version < spack.version.ver("4.0.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++11 standard", "cxx11_flag", "Xcode < 4.0.0"
)
@@ -49,11 +49,11 @@ class AppleClang(spack.compilers.clang.Clang):
@property
def cxx14_flag(self):
# Adapted from CMake's rules for AppleClang
- if self.real_version < spack.version.ver('5.1.0'):
+ if self.real_version < spack.version.ver("5.1.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++14 standard", "cxx14_flag", "Xcode < 5.1.0"
)
- elif self.real_version < spack.version.ver('6.1.0'):
+ elif self.real_version < spack.version.ver("6.1.0"):
return "-std=c++1y"
return "-std=c++14"
@@ -61,7 +61,7 @@ class AppleClang(spack.compilers.clang.Clang):
@property
def cxx17_flag(self):
# Adapted from CMake's rules for AppleClang
- if self.real_version < spack.version.ver('6.1.0'):
+ if self.real_version < spack.version.ver("6.1.0"):
raise spack.compiler.UnsupportedCompilerFlag(
self, "the C++17 standard", "cxx17_flag", "Xcode < 6.1.0"
)
@@ -90,50 +90,50 @@ class AppleClang(spack.compilers.clang.Clang):
# Use special XCode versions of compiler wrappers when using XCode
# Overwrites build_environment's setting of SPACK_CC and SPACK_CXX
- xcrun = spack.util.executable.Executable('xcrun')
- xcode_clang = xcrun('-f', 'clang', output=str).strip()
- xcode_clangpp = xcrun('-f', 'clang++', output=str).strip()
- env.set('SPACK_CC', xcode_clang, force=True)
- env.set('SPACK_CXX', xcode_clangpp, force=True)
+ xcrun = spack.util.executable.Executable("xcrun")
+ xcode_clang = xcrun("-f", "clang", output=str).strip()
+ xcode_clangpp = xcrun("-f", "clang++", output=str).strip()
+ env.set("SPACK_CC", xcode_clang, force=True)
+ env.set("SPACK_CXX", xcode_clangpp, force=True)
- xcode_select = spack.util.executable.Executable('xcode-select')
+ xcode_select = spack.util.executable.Executable("xcode-select")
# Get the path of the active developer directory
- real_root = xcode_select('--print-path', output=str).strip()
+ real_root = xcode_select("--print-path", output=str).strip()
# The path name can be used to determine whether the full Xcode suite
# or just the command-line tools are installed
- if real_root.endswith('Developer'):
+ if real_root.endswith("Developer"):
# The full Xcode suite is installed
pass
else:
- if real_root.endswith('CommandLineTools'):
+ if real_root.endswith("CommandLineTools"):
# Only the command-line tools are installed
- msg = 'It appears that you have the Xcode command-line tools '
- msg += 'but not the full Xcode suite installed.\n'
+ msg = "It appears that you have the Xcode command-line tools "
+ msg += "but not the full Xcode suite installed.\n"
else:
# Xcode is not installed
- msg = 'It appears that you do not have Xcode installed.\n'
+ msg = "It appears that you do not have Xcode installed.\n"
- msg += 'In order to use Spack to build the requested application, '
- msg += 'you need the full Xcode suite. It can be installed '
- msg += 'through the App Store. Make sure you launch the '
- msg += 'application and accept the license agreement.\n'
+ msg += "In order to use Spack to build the requested application, "
+ msg += "you need the full Xcode suite. It can be installed "
+ msg += "through the App Store. Make sure you launch the "
+ msg += "application and accept the license agreement.\n"
raise OSError(msg)
real_root = os.path.dirname(os.path.dirname(real_root))
- developer_root = os.path.join(spack.stage.get_stage_root(),
- 'xcode-select',
- self.name,
- str(self.version))
- xcode_link = os.path.join(developer_root, 'Xcode.app')
+ developer_root = os.path.join(
+ spack.stage.get_stage_root(), "xcode-select", self.name, str(self.version)
+ )
+ xcode_link = os.path.join(developer_root, "Xcode.app")
if not os.path.exists(developer_root):
- tty.warn('Copying Xcode from %s to %s in order to add spack '
- 'wrappers to it. Please do not interrupt.'
- % (real_root, developer_root))
+ tty.warn(
+ "Copying Xcode from %s to %s in order to add spack "
+ "wrappers to it. Please do not interrupt." % (real_root, developer_root)
+ )
# We need to make a new Xcode.app instance, but with symlinks to
# the spack wrappers for the compilers it ships. This is necessary
@@ -142,31 +142,35 @@ class AppleClang(spack.compilers.clang.Clang):
# as they do realpath and end up ignoring the symlinks in a
# "softer" tree of nothing but symlinks in the right places.
shutil.copytree(
- real_root, developer_root, symlinks=True,
+ real_root,
+ developer_root,
+ symlinks=True,
ignore=shutil.ignore_patterns(
- 'AppleTV*.platform', 'Watch*.platform', 'iPhone*.platform',
- 'Documentation', 'swift*'
- ))
+ "AppleTV*.platform",
+ "Watch*.platform",
+ "iPhone*.platform",
+ "Documentation",
+ "swift*",
+ ),
+ )
real_dirs = [
- 'Toolchains/XcodeDefault.xctoolchain/usr/bin',
- 'usr/bin',
+ "Toolchains/XcodeDefault.xctoolchain/usr/bin",
+ "usr/bin",
]
- bins = ['c++', 'c89', 'c99', 'cc', 'clang', 'clang++', 'cpp']
+ bins = ["c++", "c89", "c99", "cc", "clang", "clang++", "cpp"]
for real_dir in real_dirs:
- dev_dir = os.path.join(developer_root,
- 'Contents',
- 'Developer',
- real_dir)
+ dev_dir = os.path.join(developer_root, "Contents", "Developer", real_dir)
for fname in os.listdir(dev_dir):
if fname in bins:
os.unlink(os.path.join(dev_dir, fname))
symlink(
- os.path.join(spack.paths.build_env_path, 'cc'),
- os.path.join(dev_dir, fname))
+ os.path.join(spack.paths.build_env_path, "cc"),
+ os.path.join(dev_dir, fname),
+ )
symlink(developer_root, xcode_link)
- env.set('DEVELOPER_DIR', xcode_link)
+ env.set("DEVELOPER_DIR", xcode_link)
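The apple_clang.py hunk above reformats a multi-line version regex. The sketch below feeds it an assumed clang --version style banner to show why the multi-line flag matters when the "Apple clang" line is not the first line of output:

import re

banner = (
    "Configured with: --prefix=/Library/Developer/CommandLineTools/usr\n"  # assumed output
    "Apple clang version 13.1.6 (clang-1316.0.21.2.5)\n"
)
match = re.search(r"^Apple (?:LLVM|clang) version ([^ )]+)", banner, re.M)
print(match.group(1) if match else "unknown")   # prints 13.1.6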
diff --git a/lib/spack/spack/compilers/arm.py b/lib/spack/spack/compilers/arm.py
index 0d833318b4..ac20336154 100644
--- a/lib/spack/spack/compilers/arm.py
+++ b/lib/spack/spack/compilers/arm.py
@@ -11,22 +11,24 @@ import spack.compiler
class Arm(spack.compiler.Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['armclang']
+ cc_names = ["armclang"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['armclang++']
+ cxx_names = ["armclang++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['armflang']
+ f77_names = ["armflang"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['armflang']
+ fc_names = ["armflang"]
# Named wrapper links within lib/spack/env
- link_paths = {'cc': os.path.join('arm', 'armclang'),
- 'cxx': os.path.join('arm', 'armclang++'),
- 'f77': os.path.join('arm', 'armflang'),
- 'fc': os.path.join('arm', 'armflang')}
+ link_paths = {
+ "cc": os.path.join("arm", "armclang"),
+ "cxx": os.path.join("arm", "armclang++"),
+ "f77": os.path.join("arm", "armflang"),
+ "fc": os.path.join("arm", "armflang"),
+ }
# The ``--version`` option seems to be the most consistent one for
# arm compilers. Output looks like this:
@@ -37,17 +39,16 @@ class Arm(spack.compiler.Compiler):
# Thread model: posix
# InstalledDir:
# /opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin
- version_argument = '--version'
- version_regex = r'Arm C\/C\+\+\/Fortran Compiler version ([\d\.]+) '\
- r'\(build number (\d+)\) '
+ version_argument = "--version"
+ version_regex = r"Arm C\/C\+\+\/Fortran Compiler version ([\d\.]+) " r"\(build number (\d+)\) "
@classmethod
def extract_version_from_output(cls, output):
"""Extracts the version from compiler's output."""
match = re.search(cls.version_regex, output)
- temp = 'unknown'
+ temp = "unknown"
if match:
- if match.group(1).count('.') == 1:
+ if match.group(1).count(".") == 1:
temp = match.group(1) + ".0." + match.group(2)
else:
temp = match.group(1) + "." + match.group(2)
@@ -59,7 +60,7 @@ class Arm(spack.compiler.Compiler):
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast"]
@property
def openmp_flag(self):
@@ -101,7 +102,7 @@ class Arm(spack.compiler.Compiler):
def fc_pic_flag(self):
return "-fPIC"
- required_libs = ['libclang', 'libflang']
+ required_libs = ["libclang", "libflang"]
@classmethod
def fc_version(cls, fc):
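The arm.py hunk keeps the two-group version regex and the logic that splices the build number into the reported version. Illustrative only; the banner text below is assumed:

import re

regex = r"Arm C\/C\+\+\/Fortran Compiler version ([\d\.]+) " r"\(build number (\d+)\) "
banner = "Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n"
match = re.search(regex, banner)
if match:
    version, build = match.groups()
    # a single dot means X.Y, so pad to X.Y.0.build as in the hunk above
    print(version + ".0." + build if version.count(".") == 1 else version + "." + build)
    # prints 19.0.0.73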
diff --git a/lib/spack/spack/compilers/cce.py b/lib/spack/spack/compilers/cce.py
index b0a4fa1613..d572a31ff4 100644
--- a/lib/spack/spack/compilers/cce.py
+++ b/lib/spack/spack/compilers/cce.py
@@ -11,41 +11,44 @@ from spack.version import ver
class Cce(Compiler):
"""Cray compiler environment compiler."""
+
# Subclasses use possible names of C compiler
- cc_names = ['cc']
+ cc_names = ["cc"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['CC']
+ cxx_names = ["CC"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['ftn']
+ f77_names = ["ftn"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['ftn']
+ fc_names = ["ftn"]
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
- suffixes = [r'-mp-\d\.\d']
+ suffixes = [r"-mp-\d\.\d"]
- PrgEnv = 'PrgEnv-cray'
- PrgEnv_compiler = 'cce'
+ PrgEnv = "PrgEnv-cray"
+ PrgEnv_compiler = "cce"
- link_paths = {'cc': os.path.join('cce', 'cc'),
- 'cxx': os.path.join('cce', 'case-insensitive', 'CC'),
- 'f77': os.path.join('cce', 'ftn'),
- 'fc': os.path.join('cce', 'ftn')}
+ link_paths = {
+ "cc": os.path.join("cce", "cc"),
+ "cxx": os.path.join("cce", "case-insensitive", "CC"),
+ "f77": os.path.join("cce", "ftn"),
+ "fc": os.path.join("cce", "ftn"),
+ }
@property
def is_clang_based(self):
version = self._real_version or self.version
- return version >= ver('9.0') and 'classic' not in str(version)
+ return version >= ver("9.0") and "classic" not in str(version)
@property
def version_argument(self):
if self.is_clang_based:
- return '--version'
- return '-V'
+ return "--version"
+ return "-V"
- version_regex = r'[Vv]ersion.*?(\d+(\.\d+)+)'
+ version_regex = r"[Vv]ersion.*?(\d+(\.\d+)+)"
@property
def verbose_flag(self):
@@ -53,49 +56,43 @@ class Cce(Compiler):
@property
def debug_flags(self):
- return ['-g', '-G0', '-G1', '-G2', '-Gfast']
+ return ["-g", "-G0", "-G1", "-G2", "-Gfast"]
@property
def openmp_flag(self):
if self.is_clang_based:
- return '-fopenmp'
+ return "-fopenmp"
return "-h omp"
@property
def cxx11_flag(self):
if self.is_clang_based:
- return '-std=c++11'
+ return "-std=c++11"
return "-h std=c++11"
@property
def cxx14_flag(self):
if self.is_clang_based:
- return '-std=c++14'
+ return "-std=c++14"
return "-h std=c++14"
@property
def c99_flag(self):
if self.is_clang_based:
- return '-std=c99'
- elif self.real_version >= ver('8.4'):
- return '-h std=c99,noconform,gnu'
- elif self.real_version >= ver('8.1'):
- return '-h c99,noconform,gnu'
- raise UnsupportedCompilerFlag(self,
- 'the C99 standard',
- 'c99_flag',
- '< 8.1')
+ return "-std=c99"
+ elif self.real_version >= ver("8.4"):
+ return "-h std=c99,noconform,gnu"
+ elif self.real_version >= ver("8.1"):
+ return "-h c99,noconform,gnu"
+ raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag", "< 8.1")
@property
def c11_flag(self):
if self.is_clang_based:
- return '-std=c11'
- elif self.real_version >= ver('8.5'):
- return '-h std=c11,noconform,gnu'
- raise UnsupportedCompilerFlag(self,
- 'the C11 standard',
- 'c11_flag',
- '< 8.5')
+ return "-std=c11"
+ elif self.real_version >= ver("8.5"):
+ return "-h std=c11,noconform,gnu"
+ raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 8.5")
@property
def cc_pic_flag(self):
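The cce.py hunks above compress the version-gated standard-flag properties onto single lines. A minimal standalone sketch of that selection logic, with plain tuples standing in for Spack version objects and an assumed function name:

def c99_flag(is_clang_based, real_version):
    """Mirror the branch order shown in the hunk above for the CCE C99 flag."""
    if is_clang_based:
        return "-std=c99"
    elif real_version >= (8, 4):
        return "-h std=c99,noconform,gnu"
    elif real_version >= (8, 1):
        return "-h c99,noconform,gnu"
    raise ValueError("the C99 standard is unsupported before CCE 8.1")

print(c99_flag(False, (8, 5)))   # -h std=c99,noconform,gnu
print(c99_flag(True, (9, 0)))    # -std=c99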
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index c9982d1440..9e5ce8cec9 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -14,71 +14,81 @@ from spack.version import ver
#: compiler symlink mappings for mixed f77 compilers
f77_mapping = [
- ('gfortran', os.path.join('clang', 'gfortran')),
- ('xlf_r', os.path.join('xl_r', 'xlf_r')),
- ('xlf', os.path.join('xl', 'xlf')),
- ('pgfortran', os.path.join('pgi', 'pgfortran')),
- ('ifort', os.path.join('intel', 'ifort'))
+ ("gfortran", os.path.join("clang", "gfortran")),
+ ("xlf_r", os.path.join("xl_r", "xlf_r")),
+ ("xlf", os.path.join("xl", "xlf")),
+ ("pgfortran", os.path.join("pgi", "pgfortran")),
+ ("ifort", os.path.join("intel", "ifort")),
]
#: compiler symlink mappings for mixed f90/fc compilers
fc_mapping = [
- ('gfortran', os.path.join('clang', 'gfortran')),
- ('xlf90_r', os.path.join('xl_r', 'xlf90_r')),
- ('xlf90', os.path.join('xl', 'xlf90')),
- ('pgfortran', os.path.join('pgi', 'pgfortran')),
- ('ifort', os.path.join('intel', 'ifort'))
+ ("gfortran", os.path.join("clang", "gfortran")),
+ ("xlf90_r", os.path.join("xl_r", "xlf90_r")),
+ ("xlf90", os.path.join("xl", "xlf90")),
+ ("pgfortran", os.path.join("pgi", "pgfortran")),
+ ("ifort", os.path.join("intel", "ifort")),
]
class Clang(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['clang']
+ cc_names = ["clang"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['clang++']
+ cxx_names = ["clang++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['flang', 'gfortran', 'xlf_r']
+ f77_names = ["flang", "gfortran", "xlf_r"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['flang', 'gfortran', 'xlf90_r']
+ fc_names = ["flang", "gfortran", "xlf90_r"]
- version_argument = '--version'
+ version_argument = "--version"
@property
def debug_flags(self):
- return ['-gcodeview', '-gdwarf-2', '-gdwarf-3', '-gdwarf-4',
- '-gdwarf-5', '-gline-tables-only', '-gmodules', '-gz', '-g']
+ return [
+ "-gcodeview",
+ "-gdwarf-2",
+ "-gdwarf-3",
+ "-gdwarf-4",
+ "-gdwarf-5",
+ "-gline-tables-only",
+ "-gmodules",
+ "-gz",
+ "-g",
+ ]
@property
def opt_flags(self):
- return ['-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os', '-Oz', '-Og',
- '-O', '-O4']
+ return ["-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz", "-Og", "-O", "-O4"]
# Clang has support for using different fortran compilers with the
# clang executable.
@property
def link_paths(self):
# clang links are always the same
- link_paths = {'cc': os.path.join('clang', 'clang'),
- 'cxx': os.path.join('clang', 'clang++')}
+ link_paths = {
+ "cc": os.path.join("clang", "clang"),
+ "cxx": os.path.join("clang", "clang++"),
+ }
# fortran links need to look at the actual compiler names from
# compilers.yaml to figure out which named symlink to use
for compiler_name, link_path in f77_mapping:
if self.f77 and compiler_name in self.f77:
- link_paths['f77'] = link_path
+ link_paths["f77"] = link_path
break
else:
- link_paths['f77'] = os.path.join('clang', 'flang')
+ link_paths["f77"] = os.path.join("clang", "flang")
for compiler_name, link_path in fc_mapping:
if self.fc and compiler_name in self.fc:
- link_paths['fc'] = link_path
+ link_paths["fc"] = link_path
break
else:
- link_paths['fc'] = os.path.join('clang', 'flang')
+ link_paths["fc"] = os.path.join("clang", "flang")
return link_paths
@@ -90,45 +100,36 @@ class Clang(Compiler):
@property
def cxx11_flag(self):
- if self.real_version < ver('3.3'):
- raise UnsupportedCompilerFlag(
- self, "the C++11 standard", "cxx11_flag", "< 3.3"
- )
+ if self.real_version < ver("3.3"):
+ raise UnsupportedCompilerFlag(self, "the C++11 standard", "cxx11_flag", "< 3.3")
return "-std=c++11"
@property
def cxx14_flag(self):
- if self.real_version < ver('3.4'):
- raise UnsupportedCompilerFlag(
- self, "the C++14 standard", "cxx14_flag", "< 3.5"
- )
- elif self.real_version < ver('3.5'):
+ if self.real_version < ver("3.4"):
+ raise UnsupportedCompilerFlag(self, "the C++14 standard", "cxx14_flag", "< 3.5")
+ elif self.real_version < ver("3.5"):
return "-std=c++1y"
return "-std=c++14"
@property
def cxx17_flag(self):
- if self.real_version < ver('3.5'):
- raise UnsupportedCompilerFlag(
- self, "the C++17 standard", "cxx17_flag", "< 3.5"
- )
- elif self.real_version < ver('5.0'):
+ if self.real_version < ver("3.5"):
+ raise UnsupportedCompilerFlag(self, "the C++17 standard", "cxx17_flag", "< 3.5")
+ elif self.real_version < ver("5.0"):
return "-std=c++1z"
return "-std=c++17"
@property
def c99_flag(self):
- return '-std=c99'
+ return "-std=c99"
@property
def c11_flag(self):
- if self.real_version < ver('6.1.0'):
- raise UnsupportedCompilerFlag(self,
- "the C11 standard",
- "c11_flag",
- "< 6.1.0")
+ if self.real_version < ver("6.1.0"):
+ raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 6.1.0")
else:
return "-std=c11"
@@ -148,23 +149,22 @@ class Clang(Compiler):
def fc_pic_flag(self):
return "-fPIC"
- required_libs = ['libclang']
+ required_libs = ["libclang"]
@classmethod
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
- ver = 'unknown'
- if ('Apple' in output) or ('AMD' in output):
+ ver = "unknown"
+ if ("Apple" in output) or ("AMD" in output):
return ver
match = re.search(
# Normal clang compiler versions are left as-is
- r'clang version ([^ )\n]+)-svn[~.\w\d-]*|'
+ r"clang version ([^ )\n]+)-svn[~.\w\d-]*|"
# Don't include hyphenated patch numbers in the version
# (see https://github.com/spack/spack/pull/14365 for details)
- r'clang version ([^ )\n]+?)-[~.\w\d-]*|'
- r'clang version ([^ )\n]+)',
- output
+ r"clang version ([^ )\n]+?)-[~.\w\d-]*|" r"clang version ([^ )\n]+)",
+ output,
)
if match:
ver = match.group(match.lastindex)
@@ -174,8 +174,8 @@ class Clang(Compiler):
def fc_version(cls, fc):
# We could map from gcc/gfortran version to clang version, but on macOS
# we normally mix any version of gfortran with any version of clang.
- if sys.platform == 'darwin':
- return cls.default_version('clang')
+ if sys.platform == "darwin":
+ return cls.default_version("clang")
else:
return cls.default_version(fc)
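The link_paths reflow above leans on Python's for/break/else idiom to fall back to flang when no Fortran mapping matches; a minimal standalone sketch of that lookup, using a hypothetical subset of f77_mapping rather than Spack's real Compiler class:

import os

# Hypothetical subset of the f77_mapping table shown in the diff above.
f77_mapping = [
    ("gfortran", os.path.join("clang", "gfortran")),
    ("ifort", os.path.join("intel", "ifort")),
]

def pick_f77_link(f77_path):
    # The else branch of the loop runs only if no break occurred,
    # i.e. no mapping entry matched the configured f77 path.
    for compiler_name, link_path in f77_mapping:
        if f77_path and compiler_name in f77_path:
            chosen = link_path
            break
    else:
        chosen = os.path.join("clang", "flang")
    return chosen

print(pick_f77_link("/usr/bin/gfortran"))  # clang/gfortran
print(pick_f77_link(None))                 # clang/flang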
diff --git a/lib/spack/spack/compilers/dpcpp.py b/lib/spack/spack/compilers/dpcpp.py
index 1458e8f019..176f4cb795 100644
--- a/lib/spack/spack/compilers/dpcpp.py
+++ b/lib/spack/spack/compilers/dpcpp.py
@@ -21,11 +21,14 @@ class Dpcpp(spack.compilers.oneapi.Oneapi):
See also: https://www.intel.com/content/www/us/en/develop/documentation/oneapi-dpcpp-cpp-compiler-dev-guide-and-reference/top/compiler-setup/using-the-command-line/invoking-the-compiler.html
"""
+
# Subclasses use possible names of C++ compiler
- cxx_names = ['dpcpp']
+ cxx_names = ["dpcpp"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('oneapi', 'icx'),
- 'cxx': os.path.join('oneapi', 'dpcpp'),
- 'f77': os.path.join('oneapi', 'ifx'),
- 'fc': os.path.join('oneapi', 'ifx')}
+ link_paths = {
+ "cc": os.path.join("oneapi", "icx"),
+ "cxx": os.path.join("oneapi", "dpcpp"),
+ "f77": os.path.join("oneapi", "ifx"),
+ "fc": os.path.join("oneapi", "ifx"),
+ }
diff --git a/lib/spack/spack/compilers/fj.py b/lib/spack/spack/compilers/fj.py
index a12efd2208..b3dbd40ee4 100644
--- a/lib/spack/spack/compilers/fj.py
+++ b/lib/spack/spack/compilers/fj.py
@@ -10,27 +10,29 @@ import spack.compiler
class Fj(spack.compiler.Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['fcc']
+ cc_names = ["fcc"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['FCC']
+ cxx_names = ["FCC"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['frt']
+ f77_names = ["frt"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['frt']
+ fc_names = ["frt"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('fj', 'fcc'),
- 'cxx': os.path.join('fj', 'case-insensitive', 'FCC'),
- 'f77': os.path.join('fj', 'frt'),
- 'fc': os.path.join('fj', 'frt')}
+ link_paths = {
+ "cc": os.path.join("fj", "fcc"),
+ "cxx": os.path.join("fj", "case-insensitive", "FCC"),
+ "f77": os.path.join("fj", "frt"),
+ "fc": os.path.join("fj", "frt"),
+ }
- version_argument = '--version'
- version_regex = r'\((?:FCC|FRT)\) ([a-z\d.]+)'
+ version_argument = "--version"
+ version_regex = r"\((?:FCC|FRT)\) ([a-z\d.]+)"
- required_libs = ['libfj90i', 'libfj90f', 'libfjsrcinfo']
+ required_libs = ["libfj90i", "libfj90f", "libfjsrcinfo"]
@property
def verbose_flag(self):
@@ -42,7 +44,7 @@ class Fj(spack.compiler.Compiler):
@property
def opt_flags(self):
- return ['-O0', '-O1', '-O2', '-O3', '-Ofast']
+ return ["-O0", "-O1", "-O2", "-O3", "-Ofast"]
@property
def openmp_flag(self):
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 40794103f6..011646c408 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -13,30 +13,32 @@ from spack.version import ver
class Gcc(spack.compiler.Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['gcc']
+ cc_names = ["gcc"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['g++']
+ cxx_names = ["g++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['gfortran']
+ f77_names = ["gfortran"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['gfortran']
+ fc_names = ["gfortran"]
# MacPorts builds gcc versions with prefixes and -mp-X or -mp-X.Y suffixes.
# Homebrew and Linuxbrew may build gcc with -X, -X.Y suffixes.
# Old compatibility versions may contain XY suffixes.
- suffixes = [r'-mp-\d+(?:\.\d+)?', r'-\d+(?:\.\d+)?', r'\d\d']
+ suffixes = [r"-mp-\d+(?:\.\d+)?", r"-\d+(?:\.\d+)?", r"\d\d"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('gcc', 'gcc'),
- 'cxx': os.path.join('gcc', 'g++'),
- 'f77': os.path.join('gcc', 'gfortran'),
- 'fc': os.path.join('gcc', 'gfortran')}
+ link_paths = {
+ "cc": os.path.join("gcc", "gcc"),
+ "cxx": os.path.join("gcc", "g++"),
+ "f77": os.path.join("gcc", "gfortran"),
+ "fc": os.path.join("gcc", "gfortran"),
+ }
- PrgEnv = 'PrgEnv-gnu'
- PrgEnv_compiler = 'gcc'
+ PrgEnv = "PrgEnv-gnu"
+ PrgEnv_compiler = "gcc"
@property
def verbose_flag(self):
@@ -44,11 +46,11 @@ class Gcc(spack.compiler.Compiler):
@property
def debug_flags(self):
- return ['-g', '-gstabs+', '-gstabs', '-gxcoff+', '-gxcoff', '-gvms']
+ return ["-g", "-gstabs+", "-gstabs", "-gxcoff+", "-gxcoff", "-gvms"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-Os', '-Ofast', '-Og']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-Os", "-Ofast", "-Og"]
@property
def openmp_flag(self):
@@ -56,53 +58,58 @@ class Gcc(spack.compiler.Compiler):
@property
def cxx98_flag(self):
- if self.real_version < ver('6.0'):
+ if self.real_version < ver("6.0"):
return ""
else:
return "-std=c++98"
@property
def cxx11_flag(self):
- if self.real_version < ver('4.3'):
+ if self.real_version < ver("4.3"):
raise spack.compiler.UnsupportedCompilerFlag(
- self, "the C++11 standard", "cxx11_flag", " < 4.3")
- elif self.real_version < ver('4.7'):
+ self, "the C++11 standard", "cxx11_flag", " < 4.3"
+ )
+ elif self.real_version < ver("4.7"):
return "-std=c++0x"
else:
return "-std=c++11"
@property
def cxx14_flag(self):
- if self.real_version < ver('4.8'):
+ if self.real_version < ver("4.8"):
raise spack.compiler.UnsupportedCompilerFlag(
- self, "the C++14 standard", "cxx14_flag", "< 4.8")
- elif self.real_version < ver('4.9'):
+ self, "the C++14 standard", "cxx14_flag", "< 4.8"
+ )
+ elif self.real_version < ver("4.9"):
return "-std=c++1y"
else:
return "-std=c++14"
@property
def cxx17_flag(self):
- if self.real_version < ver('5.0'):
+ if self.real_version < ver("5.0"):
raise spack.compiler.UnsupportedCompilerFlag(
- self, "the C++17 standard", "cxx17_flag", "< 5.0")
- elif self.real_version < ver('6.0'):
+ self, "the C++17 standard", "cxx17_flag", "< 5.0"
+ )
+ elif self.real_version < ver("6.0"):
return "-std=c++1z"
else:
return "-std=c++17"
@property
def c99_flag(self):
- if self.real_version < ver('4.5'):
+ if self.real_version < ver("4.5"):
raise spack.compiler.UnsupportedCompilerFlag(
- self, "the C99 standard", "c99_flag", "< 4.5")
+ self, "the C99 standard", "c99_flag", "< 4.5"
+ )
return "-std=c99"
@property
def c11_flag(self):
- if self.real_version < ver('4.7'):
+ if self.real_version < ver("4.7"):
raise spack.compiler.UnsupportedCompilerFlag(
- self, "the C11 standard", "c11_flag", "< 4.7")
+ self, "the C11 standard", "c11_flag", "< 4.7"
+ )
return "-std=c11"
@property
@@ -121,7 +128,7 @@ class Gcc(spack.compiler.Compiler):
def fc_pic_flag(self):
return "-fPIC"
- required_libs = ['libgcc', 'libgfortran']
+ required_libs = ["libgcc", "libgfortran"]
@classmethod
def default_version(cls, cc):
@@ -143,14 +150,12 @@ class Gcc(spack.compiler.Compiler):
# Apple's gcc is actually apple clang, so skip it. Returning
# "unknown" ensures this compiler is not detected by default.
# Users can add it manually to compilers.yaml at their own risk.
- if apple_clang.AppleClang.default_version(cc) != 'unknown':
- return 'unknown'
+ if apple_clang.AppleClang.default_version(cc) != "unknown":
+ return "unknown"
version = super(Gcc, cls).default_version(cc)
- if ver(version) >= ver('7'):
- output = spack.compiler.get_compiler_version_output(
- cc, '-dumpfullversion'
- )
+ if ver(version) >= ver("7"):
+ output = spack.compiler.get_compiler_version_output(cc, "-dumpfullversion")
version = cls.extract_version_from_output(output)
return version
@@ -176,13 +181,11 @@ class Gcc(spack.compiler.Compiler):
7.2.0
"""
- output = spack.compiler.get_compiler_version_output(fc, '-dumpversion')
- match = re.search(r'(?:GNU Fortran \(GCC\) )?([\d.]+)', output)
- version = match.group(match.lastindex) if match else 'unknown'
- if ver(version) >= ver('7'):
- output = spack.compiler.get_compiler_version_output(
- fc, '-dumpfullversion'
- )
+ output = spack.compiler.get_compiler_version_output(fc, "-dumpversion")
+ match = re.search(r"(?:GNU Fortran \(GCC\) )?([\d.]+)", output)
+ version = match.group(match.lastindex) if match else "unknown"
+ if ver(version) >= ver("7"):
+ output = spack.compiler.get_compiler_version_output(fc, "-dumpfullversion")
version = cls.extract_version_from_output(output)
return version
@@ -192,4 +195,4 @@ class Gcc(spack.compiler.Compiler):
@property
def stdcxx_libs(self):
- return ('-lstdc++', )
+ return ("-lstdc++",)
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 2101c75bd4..e675361f60 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -12,66 +12,65 @@ from spack.version import ver
class Intel(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['icc']
+ cc_names = ["icc"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['icpc']
+ cxx_names = ["icpc"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['ifort']
+ f77_names = ["ifort"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['ifort']
+ fc_names = ["ifort"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('intel', 'icc'),
- 'cxx': os.path.join('intel', 'icpc'),
- 'f77': os.path.join('intel', 'ifort'),
- 'fc': os.path.join('intel', 'ifort')}
-
- PrgEnv = 'PrgEnv-intel'
- PrgEnv_compiler = 'intel'
-
- if sys.platform == 'win32':
- version_argument = '/QV'
+ link_paths = {
+ "cc": os.path.join("intel", "icc"),
+ "cxx": os.path.join("intel", "icpc"),
+ "f77": os.path.join("intel", "ifort"),
+ "fc": os.path.join("intel", "ifort"),
+ }
+
+ PrgEnv = "PrgEnv-intel"
+ PrgEnv_compiler = "intel"
+
+ if sys.platform == "win32":
+ version_argument = "/QV"
else:
- version_argument = '--version'
+ version_argument = "--version"
- if sys.platform == 'win32':
- version_regex = r'([1-9][0-9]*\.[0-9]*\.[0-9]*)'
+ if sys.platform == "win32":
+ version_regex = r"([1-9][0-9]*\.[0-9]*\.[0-9]*)"
else:
- version_regex = r'\((?:IFORT|ICC)\) ([^ ]+)'
+ version_regex = r"\((?:IFORT|ICC)\) ([^ ]+)"
@property
def verbose_flag(self):
return "-v"
- required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng']
+ required_libs = ["libirc", "libifcore", "libifcoremt", "libirng"]
@property
def debug_flags(self):
- return ['-debug', '-g', '-g0', '-g1', '-g2', '-g3']
+ return ["-debug", "-g", "-g0", "-g1", "-g2", "-g3"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os"]
@property
def openmp_flag(self):
- if self.real_version < ver('16.0'):
+ if self.real_version < ver("16.0"):
return "-openmp"
else:
return "-qopenmp"
@property
def cxx11_flag(self):
- if self.real_version < ver('11.1'):
- raise UnsupportedCompilerFlag(self,
- "the C++11 standard",
- "cxx11_flag",
- "< 11.1")
+ if self.real_version < ver("11.1"):
+ raise UnsupportedCompilerFlag(self, "the C++11 standard", "cxx11_flag", "< 11.1")
- elif self.real_version < ver('13'):
+ elif self.real_version < ver("13"):
return "-std=c++0x"
else:
return "-std=c++11"
@@ -79,33 +78,24 @@ class Intel(Compiler):
@property
def cxx14_flag(self):
# Adapted from CMake's Intel-CXX rules.
- if self.real_version < ver('15'):
- raise UnsupportedCompilerFlag(self,
- "the C++14 standard",
- "cxx14_flag",
- "< 15")
- elif self.real_version < ver('15.0.2'):
+ if self.real_version < ver("15"):
+ raise UnsupportedCompilerFlag(self, "the C++14 standard", "cxx14_flag", "< 15")
+ elif self.real_version < ver("15.0.2"):
return "-std=c++1y"
else:
return "-std=c++14"
@property
def c99_flag(self):
- if self.real_version < ver('12'):
- raise UnsupportedCompilerFlag(self,
- "the C99 standard",
- "c99_flag",
- "< 12")
+ if self.real_version < ver("12"):
+ raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag", "< 12")
else:
return "-std=c99"
@property
def c11_flag(self):
- if self.real_version < ver('16'):
- raise UnsupportedCompilerFlag(self,
- "the C11 standard",
- "c11_flag",
- "< 16")
+ if self.real_version < ver("16"):
+ raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 16")
else:
return "-std=c1x"
@@ -127,4 +117,4 @@ class Intel(Compiler):
@property
def stdcxx_libs(self):
- return ('-cxxlib', )
+ return ("-cxxlib",)
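The standard-flag properties above all share the same version-gating shape; a schematic sketch of that pattern using plain tuples in place of Spack's ver() and UnsupportedCompilerFlag (illustrative only, not the real Compiler API):

def cxx11_flag(real_version):
    # Same thresholds as Intel.cxx11_flag in the hunk above:
    # unsupported before 11.1, -std=c++0x before 13, -std=c++11 afterwards.
    if real_version < (11, 1):
        raise ValueError("the C++11 standard needs icpc >= 11.1")
    if real_version < (13,):
        return "-std=c++0x"
    return "-std=c++11"

print(cxx11_flag((12, 0)))    # -std=c++0x
print(cxx11_flag((2021, 3)))  # -std=c++11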
diff --git a/lib/spack/spack/compilers/msvc.py b/lib/spack/spack/compilers/msvc.py
index a3c5a7c752..5cf0b1356c 100644
--- a/lib/spack/spack/compilers/msvc.py
+++ b/lib/spack/spack/compilers/msvc.py
@@ -19,15 +19,15 @@ avail_fc_version = set() # type: Set[str]
fc_path = dict() # type: Dict[str, str]
fortran_mapping = {
- '2021.3.0': '19.29.30133',
- '2021.2.1': '19.28.29913',
- '2021.2.0': '19.28.29334',
- '2021.1.0': '19.28.29333',
+ "2021.3.0": "19.29.30133",
+ "2021.2.1": "19.28.29913",
+ "2021.2.0": "19.28.29334",
+ "2021.1.0": "19.28.29333",
}
def get_valid_fortran_pth(comp_ver):
- cl_ver = str(comp_ver).split('@')[1]
+ cl_ver = str(comp_ver).split("@")[1]
sort_fn = lambda fc_ver: StrictVersion(fc_ver)
sort_fc_ver = sorted(list(avail_fc_version), key=sort_fn)
for ver in sort_fc_ver:
@@ -39,16 +39,16 @@ def get_valid_fortran_pth(comp_ver):
class Msvc(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['cl.exe']
+ cc_names = ["cl.exe"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['cl.exe']
+ cxx_names = ["cl.exe"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['ifx.exe'] # type: List[str]
+ f77_names = ["ifx.exe"] # type: List[str]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['ifx.exe'] # type: List[str]
+ fc_names = ["ifx.exe"] # type: List[str]
# Named wrapper links within build_env_path
# Due to the challenges of supporting compiler wrappers
@@ -56,20 +56,17 @@ class Msvc(Compiler):
# based on proper versions of MSVC from there
# pending acceptance of #28117 for full support using
# compiler wrappers
- link_paths = {'cc': '',
- 'cxx': '',
- 'f77': '',
- 'fc': ''}
+ link_paths = {"cc": "", "cxx": "", "f77": "", "fc": ""}
#: Compiler argument that produces version information
- version_argument = ''
+ version_argument = ""
# For getting ifx's version, call it with version_argument
# and ignore the error code
ignore_version_errors = [1]
#: Regex used to extract version from compiler's output
- version_regex = r'([1-9][0-9]*\.[0-9]*\.[0-9]*)'
+ version_regex = r"([1-9][0-9]*\.[0-9]*\.[0-9]*)"
# Initialize, deferring to base class but then adding the vcvarsallfile
# file based on compiler executable path.
@@ -80,8 +77,7 @@ class Msvc(Compiler):
super(Msvc, self).__init__(*args, **kwargs)
if os.getenv("ONEAPI_ROOT"):
            # If this is found, it sets all the vars
- self.setvarsfile = os.path.join(
- os.getenv("ONEAPI_ROOT"), "setvars.bat")
+ self.setvarsfile = os.path.join(os.getenv("ONEAPI_ROOT"), "setvars.bat")
else:
# To use the MSVC compilers, VCVARS must be invoked
            # VCVARS is located at a fixed location, referenceable
@@ -90,15 +86,13 @@ class Msvc(Compiler):
# Spack first finds the compilers via VSWHERE
# and stores their path, but their respective VCVARS
            # file must be invoked before usage.
- self.setvarsfile = os.path.abspath(
- os.path.join(self.cc, '../../../../../../..'))
- self.setvarsfile = os.path.join(
- self.setvarsfile, 'Auxiliary', 'Build', 'vcvars64.bat')
+ self.setvarsfile = os.path.abspath(os.path.join(self.cc, "../../../../../../.."))
+ self.setvarsfile = os.path.join(self.setvarsfile, "Auxiliary", "Build", "vcvars64.bat")
@property
def msvc_version(self):
ver = re.search(Msvc.version_regex, self.cc).group(1)
- ver = "".join(ver.split('.')[:2])[:-1]
+ ver = "".join(ver.split(".")[:2])[:-1]
return "MSVC" + ver
def setup_custom_environment(self, pkg, env):
@@ -112,24 +106,27 @@ class Msvc(Compiler):
# output, sort into dictionary, use that to make the build
# environment.
out = subprocess.check_output( # novermin
- 'cmd /u /c "{}" {} && set'.format(self.setvarsfile, 'amd64'),
- stderr=subprocess.STDOUT)
+ 'cmd /u /c "{}" {} && set'.format(self.setvarsfile, "amd64"),
+ stderr=subprocess.STDOUT,
+ )
if sys.version_info[0] >= 3:
- out = out.decode('utf-16le', errors='replace') # novermin
-
- int_env = dict((key.lower(), value) for key, _, value in
- (line.partition('=') for line in out.splitlines())
- if key and value)
-
- if 'path' in int_env:
- env.set_path('PATH', int_env['path'].split(';'))
- env.set_path('INCLUDE', int_env.get('include', '').split(';'))
- env.set_path('LIB', int_env.get('lib', '').split(';'))
-
- env.set('CC', self.cc)
- env.set('CXX', self.cxx)
- env.set('FC', self.fc)
- env.set('F77', self.f77)
+ out = out.decode("utf-16le", errors="replace") # novermin
+
+ int_env = dict(
+ (key.lower(), value)
+ for key, _, value in (line.partition("=") for line in out.splitlines())
+ if key and value
+ )
+
+ if "path" in int_env:
+ env.set_path("PATH", int_env["path"].split(";"))
+ env.set_path("INCLUDE", int_env.get("include", "").split(";"))
+ env.set_path("LIB", int_env.get("lib", "").split(";"))
+
+ env.set("CC", self.cc)
+ env.set("CXX", self.cxx)
+ env.set("FC", self.fc)
+ env.set("F77", self.f77)
else:
# Should not this be an exception?
print("Cannot pull msvc compiler information in Python 2.6 or below")
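The environment import in setup_custom_environment hinges on splitting each line of `set` output at the first '='; a standalone sketch of that dict construction (the sample output string is invented):

# Pretend this is the decoded output of `cmd /u /c vcvars64.bat && set`.
out = "Path=C:\\msvc\\bin;C:\\tools\nINCLUDE=C:\\msvc\\include\nnot-a-var\n"

# Same comprehension shape as in the hunk above: lower-case the key and
# keep only lines that really are KEY=VALUE pairs.
int_env = dict(
    (key.lower(), value)
    for key, _, value in (line.partition("=") for line in out.splitlines())
    if key and value
)

print(int_env["path"].split(";"))         # ['C:\\msvc\\bin', 'C:\\tools']
print(int_env.get("lib", "").split(";"))  # ['']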
diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py
index 2e0fccab8e..3e73ee0333 100644
--- a/lib/spack/spack/compilers/nag.py
+++ b/lib/spack/spack/compilers/nag.py
@@ -17,21 +17,22 @@ class Nag(spack.compiler.Compiler):
cxx_names = [] # type: List[str]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['nagfor']
+ f77_names = ["nagfor"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['nagfor']
+ fc_names = ["nagfor"]
# Named wrapper links within build_env_path
# Use default wrappers for C and C++, in case provided in compilers.yaml
link_paths = {
- 'cc': 'cc',
- 'cxx': 'c++',
- 'f77': os.path.join('nag', 'nagfor'),
- 'fc': os.path.join('nag', 'nagfor')}
+ "cc": "cc",
+ "cxx": "c++",
+ "f77": os.path.join("nag", "nagfor"),
+ "fc": os.path.join("nag", "nagfor"),
+ }
- version_argument = '-V'
- version_regex = r'NAG Fortran Compiler Release ([0-9.]+)'
+ version_argument = "-V"
+ version_regex = r"NAG Fortran Compiler Release ([0-9.]+)"
@property
def verbose_flag(self):
@@ -69,11 +70,11 @@ class Nag(spack.compiler.Compiler):
@property
def debug_flags(self):
- return ['-g', '-gline', '-g90']
+ return ["-g", "-gline", "-g90"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"]
@property
def cxx11_flag(self):
@@ -94,22 +95,22 @@ class Nag(spack.compiler.Compiler):
# options with '-Wl,-Wl,,'
@property
def f77_rpath_arg(self):
- return '-Wl,-Wl,,-rpath,,'
+ return "-Wl,-Wl,,-rpath,,"
@property
def fc_rpath_arg(self):
- return '-Wl,-Wl,,-rpath,,'
+ return "-Wl,-Wl,,-rpath,,"
@property
def linker_arg(self):
- return '-Wl,-Wl,,'
+ return "-Wl,-Wl,,"
@property
def disable_new_dtags(self):
# Disable RPATH/RUNPATH forcing for NAG/GCC mixed toolchains:
- return ''
+ return ""
@property
def enable_new_dtags(self):
# Disable RPATH/RUNPATH forcing for NAG/GCC mixed toolchains:
- return ''
+ return ""
diff --git a/lib/spack/spack/compilers/nvhpc.py b/lib/spack/spack/compilers/nvhpc.py
index bb4c28c695..e578558e10 100644
--- a/lib/spack/spack/compilers/nvhpc.py
+++ b/lib/spack/spack/compilers/nvhpc.py
@@ -10,28 +10,30 @@ from spack.compiler import Compiler
class Nvhpc(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['nvc']
+ cc_names = ["nvc"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['nvc++']
+ cxx_names = ["nvc++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['nvfortran']
+ f77_names = ["nvfortran"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['nvfortran']
+ fc_names = ["nvfortran"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('nvhpc', 'nvc'),
- 'cxx': os.path.join('nvhpc', 'nvc++'),
- 'f77': os.path.join('nvhpc', 'nvfortran'),
- 'fc': os.path.join('nvhpc', 'nvfortran')}
+ link_paths = {
+ "cc": os.path.join("nvhpc", "nvc"),
+ "cxx": os.path.join("nvhpc", "nvc++"),
+ "f77": os.path.join("nvhpc", "nvfortran"),
+ "fc": os.path.join("nvhpc", "nvfortran"),
+ }
- PrgEnv = 'PrgEnv-nvhpc'
- PrgEnv_compiler = 'nvhpc'
+ PrgEnv = "PrgEnv-nvhpc"
+ PrgEnv_compiler = "nvhpc"
- version_argument = '--version'
- version_regex = r'nv[^ ]* (?:[^ ]+ Dev-r)?([0-9.]+)(?:-[0-9]+)?'
+ version_argument = "--version"
+ version_regex = r"nv[^ ]* (?:[^ ]+ Dev-r)?([0-9.]+)(?:-[0-9]+)?"
@property
def verbose_flag(self):
@@ -39,11 +41,11 @@ class Nvhpc(Compiler):
@property
def debug_flags(self):
- return ['-g', '-gopt']
+ return ["-g", "-gopt"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"]
@property
def openmp_flag(self):
@@ -67,26 +69,26 @@ class Nvhpc(Compiler):
@property
def c99_flag(self):
- return '-c99'
+ return "-c99"
@property
def c11_flag(self):
- return '-c11'
+ return "-c11"
@property
def cxx11_flag(self):
- return '--c++11'
+ return "--c++11"
@property
def cxx14_flag(self):
- return '--c++14'
+ return "--c++14"
@property
def cxx17_flag(self):
- return '--c++17'
+ return "--c++17"
@property
def stdcxx_libs(self):
- return ('-c++libs', )
+ return ("-c++libs",)
- required_libs = ['libnvc', 'libnvf']
+ required_libs = ["libnvc", "libnvf"]
diff --git a/lib/spack/spack/compilers/oneapi.py b/lib/spack/spack/compilers/oneapi.py
index 61657e96ca..d74cbc6b0d 100644
--- a/lib/spack/spack/compilers/oneapi.py
+++ b/lib/spack/spack/compilers/oneapi.py
@@ -11,48 +11,59 @@ from spack.compiler import Compiler
class Oneapi(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['icx']
+ cc_names = ["icx"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['icpx']
+ cxx_names = ["icpx"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['ifx']
+ f77_names = ["ifx"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['ifx']
+ fc_names = ["ifx"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('oneapi', 'icx'),
- 'cxx': os.path.join('oneapi', 'icpx'),
- 'f77': os.path.join('oneapi', 'ifx'),
- 'fc': os.path.join('oneapi', 'ifx')}
+ link_paths = {
+ "cc": os.path.join("oneapi", "icx"),
+ "cxx": os.path.join("oneapi", "icpx"),
+ "f77": os.path.join("oneapi", "ifx"),
+ "fc": os.path.join("oneapi", "ifx"),
+ }
- PrgEnv = 'PrgEnv-oneapi'
- PrgEnv_compiler = 'oneapi'
+ PrgEnv = "PrgEnv-oneapi"
+ PrgEnv_compiler = "oneapi"
- version_argument = '--version'
- version_regex = r'(?:(?:oneAPI DPC\+\+(?:\/C\+\+)? Compiler)|(?:\(IFORT\))) (\S+)'
+ version_argument = "--version"
+ version_regex = r"(?:(?:oneAPI DPC\+\+(?:\/C\+\+)? Compiler)|(?:\(IFORT\))) (\S+)"
@property
def verbose_flag(self):
return "-v"
- required_libs = ['libirc', 'libifcore', 'libifcoremt', 'libirng',
- 'libsvml', 'libintlc', 'libimf', 'libsycl',
- 'libOpenCL']
+ required_libs = [
+ "libirc",
+ "libifcore",
+ "libifcoremt",
+ "libirng",
+ "libsvml",
+ "libintlc",
+ "libimf",
+ "libsycl",
+ "libOpenCL",
+ ]
@property
def debug_flags(self):
- return ['-debug', '-g', '-g0', '-g1', '-g2', '-g3']
+ return ["-debug", "-g", "-g0", "-g1", "-g2", "-g3"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os"]
@property
def openmp_flag(self):
return "-fiopenmp"
+
# There may be some additional options here for offload, e.g. :
# -fopenmp-simd Emit OpenMP code only for SIMD-based constructs.
# -fopenmp-targets=<value>
@@ -115,7 +126,7 @@ class Oneapi(Compiler):
@property
def stdcxx_libs(self):
- return ('-cxxlib', )
+ return ("-cxxlib",)
def setup_custom_environment(self, pkg, env):
# workaround bug in icpx driver where it requires sycl-post-link is on the PATH
@@ -123,4 +134,4 @@ class Oneapi(Compiler):
# clang++: error: unable to execute command:
# Executable "sycl-post-link" doesn't exist!
if self.cxx:
- env.prepend_path('PATH', dirname(self.cxx))
+ env.prepend_path("PATH", dirname(self.cxx))
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index 36fcaf5398..3aa850236d 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -11,29 +11,31 @@ from spack.version import ver
class Pgi(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['pgcc']
+ cc_names = ["pgcc"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['pgc++', 'pgCC']
+ cxx_names = ["pgc++", "pgCC"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['pgfortran', 'pgf77']
+ f77_names = ["pgfortran", "pgf77"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['pgfortran', 'pgf95', 'pgf90']
+ fc_names = ["pgfortran", "pgf95", "pgf90"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('pgi', 'pgcc'),
- 'cxx': os.path.join('pgi', 'pgc++'),
- 'f77': os.path.join('pgi', 'pgfortran'),
- 'fc': os.path.join('pgi', 'pgfortran')}
+ link_paths = {
+ "cc": os.path.join("pgi", "pgcc"),
+ "cxx": os.path.join("pgi", "pgc++"),
+ "f77": os.path.join("pgi", "pgfortran"),
+ "fc": os.path.join("pgi", "pgfortran"),
+ }
- PrgEnv = 'PrgEnv-pgi'
- PrgEnv_compiler = 'pgi'
+ PrgEnv = "PrgEnv-pgi"
+ PrgEnv_compiler = "pgi"
- version_argument = '-V'
+ version_argument = "-V"
ignore_version_errors = [2] # `pgcc -V` on PowerPC annoyingly returns 2
- version_regex = r'pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on '
+ version_regex = r"pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on "
@property
def verbose_flag(self):
@@ -41,11 +43,11 @@ class Pgi(Compiler):
@property
def debug_flags(self):
- return ['-g', '-gopt']
+ return ["-g", "-gopt"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"]
@property
def openmp_flag(self):
@@ -71,26 +73,20 @@ class Pgi(Compiler):
def fc_pic_flag(self):
return "-fpic"
- required_libs = ['libpgc', 'libpgf90']
+ required_libs = ["libpgc", "libpgf90"]
@property
def c99_flag(self):
- if self.real_version >= ver('12.10'):
- return '-c99'
- raise UnsupportedCompilerFlag(self,
- 'the C99 standard',
- 'c99_flag',
- '< 12.10')
+ if self.real_version >= ver("12.10"):
+ return "-c99"
+ raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag", "< 12.10")
@property
def c11_flag(self):
- if self.real_version >= ver('15.3'):
- return '-c11'
- raise UnsupportedCompilerFlag(self,
- 'the C11 standard',
- 'c11_flag',
- '< 15.3')
+ if self.real_version >= ver("15.3"):
+ return "-c11"
+ raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 15.3")
@property
def stdcxx_libs(self):
- return ('-pgc++libs',)
+ return ("-pgc++libs",)
diff --git a/lib/spack/spack/compilers/rocmcc.py b/lib/spack/spack/compilers/rocmcc.py
index 9cb11be869..6c9781d92e 100644
--- a/lib/spack/spack/compilers/rocmcc.py
+++ b/lib/spack/spack/compilers/rocmcc.py
@@ -12,26 +12,28 @@ import spack.compilers.clang
class Rocmcc(spack.compilers.clang.Clang):
# Subclasses use possible names of C compiler
- cc_names = ['amdclang']
+ cc_names = ["amdclang"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['amdclang++']
+ cxx_names = ["amdclang++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['amdflang']
+ f77_names = ["amdflang"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['amdflang']
+ fc_names = ["amdflang"]
- PrgEnv = 'PrgEnv-amd'
- PrgEnv_compiler = 'amd'
+ PrgEnv = "PrgEnv-amd"
+ PrgEnv_compiler = "amd"
@property
def link_paths(self):
- link_paths = {'cc': 'rocmcc/amdclang',
- 'cxx': 'rocmcc/amdclang++',
- 'f77': 'rocmcc/amdflang',
- 'fc': 'rocmcc/amdflang'}
+ link_paths = {
+ "cc": "rocmcc/amdclang",
+ "cxx": "rocmcc/amdclang++",
+ "f77": "rocmcc/amdflang",
+ "fc": "rocmcc/amdflang",
+ }
return link_paths
@@ -49,7 +51,7 @@ class Rocmcc(spack.compilers.clang.Clang):
@property
def c99_flag(self):
- return '-std=c99'
+ return "-std=c99"
@property
def c11_flag(self):
@@ -58,12 +60,9 @@ class Rocmcc(spack.compilers.clang.Clang):
@classmethod
@llnl.util.lang.memoized
def extract_version_from_output(cls, output):
- match = re.search(
- r'llvm-project roc-(\d+)[._](\d+)[._](\d+)',
- output
- )
+ match = re.search(r"llvm-project roc-(\d+)[._](\d+)[._](\d+)", output)
if match:
- return '.'.join(match.groups())
+ return ".".join(match.groups())
@classmethod
def fc_version(cls, fortran_compiler):
@@ -75,4 +74,4 @@ class Rocmcc(spack.compilers.clang.Clang):
@property
def stdcxx_libs(self):
- return ('-lstdc++', )
+ return ("-lstdc++",)
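The memoized version extraction above keys on the `llvm-project roc-X.Y.Z` banner in the compiler's --version output; a minimal standalone sketch of that regex (the sample output line is made up):

import re

output = "AMD clang version 14.0.0 (... llvm-project roc-5.2.0 22204 ...)"
match = re.search(r"llvm-project roc-(\d+)[._](\d+)[._](\d+)", output)
if match:
    print(".".join(match.groups()))  # 5.2.0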
diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py
index 2d5d307ef6..c87f28e78d 100644
--- a/lib/spack/spack/compilers/xl.py
+++ b/lib/spack/spack/compilers/xl.py
@@ -11,25 +11,27 @@ from spack.version import ver
class Xl(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['xlc']
+ cc_names = ["xlc"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['xlC', 'xlc++']
+ cxx_names = ["xlC", "xlc++"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['xlf']
+ f77_names = ["xlf"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['xlf90', 'xlf95', 'xlf2003', 'xlf2008']
+ fc_names = ["xlf90", "xlf95", "xlf2003", "xlf2008"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('xl', 'xlc'),
- 'cxx': os.path.join('xl', 'xlc++'),
- 'f77': os.path.join('xl', 'xlf'),
- 'fc': os.path.join('xl', 'xlf90')}
+ link_paths = {
+ "cc": os.path.join("xl", "xlc"),
+ "cxx": os.path.join("xl", "xlc++"),
+ "f77": os.path.join("xl", "xlf"),
+ "fc": os.path.join("xl", "xlf90"),
+ }
- version_argument = '-qversion'
- version_regex = r'([0-9]?[0-9]\.[0-9])'
+ version_argument = "-qversion"
+ version_regex = r"([0-9]?[0-9]\.[0-9])"
@property
def verbose_flag(self):
@@ -37,11 +39,11 @@ class Xl(Compiler):
@property
def debug_flags(self):
- return ['-g', '-g0', '-g1', '-g2', '-g8', '-g9']
+ return ["-g", "-g0", "-g1", "-g2", "-g8", "-g9"]
@property
def opt_flags(self):
- return ['-O', '-O0', '-O1', '-O2', '-O3', '-O4', '-O5', '-Ofast']
+ return ["-O", "-O0", "-O1", "-O2", "-O3", "-O4", "-O5", "-Ofast"]
@property
def openmp_flag(self):
@@ -49,35 +51,26 @@ class Xl(Compiler):
@property
def cxx11_flag(self):
- if self.real_version < ver('13.1'):
- raise UnsupportedCompilerFlag(self,
- "the C++11 standard",
- "cxx11_flag",
- "< 13.1")
+ if self.real_version < ver("13.1"):
+ raise UnsupportedCompilerFlag(self, "the C++11 standard", "cxx11_flag", "< 13.1")
else:
return "-qlanglvl=extended0x"
@property
def c99_flag(self):
- if self.real_version >= ver('13.1.1'):
- return '-std=gnu99'
- if self.real_version >= ver('10.1'):
- return '-qlanglvl=extc99'
- raise UnsupportedCompilerFlag(self,
- 'the C99 standard',
- 'c99_flag',
- '< 10.1')
+ if self.real_version >= ver("13.1.1"):
+ return "-std=gnu99"
+ if self.real_version >= ver("10.1"):
+ return "-qlanglvl=extc99"
+ raise UnsupportedCompilerFlag(self, "the C99 standard", "c99_flag", "< 10.1")
@property
def c11_flag(self):
- if self.real_version >= ver('13.1.2'):
- return '-std=gnu11'
- if self.real_version >= ver('12.1'):
- return '-qlanglvl=extc1x'
- raise UnsupportedCompilerFlag(self,
- 'the C11 standard',
- 'c11_flag',
- '< 12.1')
+ if self.real_version >= ver("13.1.2"):
+ return "-std=gnu11"
+ if self.real_version >= ver("12.1"):
+ return "-qlanglvl=extc1x"
+ raise UnsupportedCompilerFlag(self, "the C11 standard", "c11_flag", "< 12.1")
@property
def cc_pic_flag(self):
diff --git a/lib/spack/spack/compilers/xl_r.py b/lib/spack/spack/compilers/xl_r.py
index cf3e482729..b790b42f70 100644
--- a/lib/spack/spack/compilers/xl_r.py
+++ b/lib/spack/spack/compilers/xl_r.py
@@ -10,19 +10,21 @@ import spack.compilers.xl
class XlR(spack.compilers.xl.Xl):
# Subclasses use possible names of C compiler
- cc_names = ['xlc_r']
+ cc_names = ["xlc_r"]
# Subclasses use possible names of C++ compiler
- cxx_names = ['xlC_r', 'xlc++_r']
+ cxx_names = ["xlC_r", "xlc++_r"]
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['xlf_r']
+ f77_names = ["xlf_r"]
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['xlf90_r', 'xlf95_r', 'xlf2003_r', 'xlf2008_r']
+ fc_names = ["xlf90_r", "xlf95_r", "xlf2003_r", "xlf2008_r"]
# Named wrapper links within build_env_path
- link_paths = {'cc': os.path.join('xl_r', 'xlc_r'),
- 'cxx': os.path.join('xl_r', 'xlc++_r'),
- 'f77': os.path.join('xl_r', 'xlf_r'),
- 'fc': os.path.join('xl_r', 'xlf90_r')}
+ link_paths = {
+ "cc": os.path.join("xl_r", "xlc_r"),
+ "cxx": os.path.join("xl_r", "xlc++_r"),
+ "f77": os.path.join("xl_r", "xlf_r"),
+ "fc": os.path.join("xl_r", "xlf90_r"),
+ }
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 520b003f74..8174855de2 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -51,9 +51,10 @@ _abi = llnl.util.lang.Singleton(lambda: spack.abi.ABI())
class reverse_order(object):
"""Helper for creating key functions.
- This is a wrapper that inverts the sense of the natural
- comparisons on the object.
+ This is a wrapper that inverts the sense of the natural
+ comparisons on the object.
"""
+
def __init__(self, value):
self.value = value
@@ -66,8 +67,9 @@ class reverse_order(object):
class Concretizer(object):
"""You can subclass this class to override some of the default
- concretization strategies, or you can override all of them.
+ concretization strategies, or you can override all of them.
"""
+
#: Controls whether we check that compiler versions actually exist
#: during concretization. Used for testing and for mirror creation
check_for_compiler_existence = None
@@ -75,7 +77,7 @@ class Concretizer(object):
def __init__(self, abstract_spec=None):
if Concretizer.check_for_compiler_existence is None:
Concretizer.check_for_compiler_existence = not config.get(
- 'config:install_missing_compilers', False
+ "config:install_missing_compilers", False
)
self.abstract_spec = abstract_spec
self._adjust_target_answer_generator = None
@@ -89,23 +91,22 @@ class Concretizer(object):
if not dev_info:
return False
- path = os.path.normpath(os.path.join(env.path, dev_info['path']))
+ path = os.path.normpath(os.path.join(env.path, dev_info["path"]))
- if 'dev_path' in spec.variants:
- assert spec.variants['dev_path'].value == path
+ if "dev_path" in spec.variants:
+ assert spec.variants["dev_path"].value == path
changed = False
else:
- spec.variants.setdefault(
- 'dev_path', vt.SingleValuedVariant('dev_path', path))
+ spec.variants.setdefault("dev_path", vt.SingleValuedVariant("dev_path", path))
changed = True
- changed |= spec.constrain(dev_info['spec'])
+ changed |= spec.constrain(dev_info["spec"])
return changed
def _valid_virtuals_and_externals(self, spec):
"""Returns a list of candidate virtual dep providers and external
- packages that coiuld be used to concretize a spec.
+        packages that could be used to concretize a spec.
- Preferred specs come first in the list.
+ Preferred specs come first in the list.
"""
# First construct a list of concrete candidates to replace spec with.
candidates = [spec]
@@ -114,17 +115,15 @@ class Concretizer(object):
if spec.virtual:
candidates = spack.repo.path.providers_for(spec)
if not candidates:
- raise spack.error.UnsatisfiableProviderSpecError(
- candidates[0], spec)
+ raise spack.error.UnsatisfiableProviderSpecError(candidates[0], spec)
# Find nearest spec in the DAG (up then down) that has prefs.
spec_w_prefs = find_spec(
- spec, lambda p: PackagePrefs.has_preferred_providers(
- p.name, spec.name),
- spec) # default to spec itself.
+ spec, lambda p: PackagePrefs.has_preferred_providers(p.name, spec.name), spec
+ ) # default to spec itself.
# Create a key to sort candidates by the prefs we found
- pref_key = PackagePrefs(spec_w_prefs.name, 'providers', spec.name)
+ pref_key = PackagePrefs(spec_w_prefs.name, "providers", spec.name)
# For each candidate package, if it has externals, add those
# to the usable list. if it's not buildable, then *only* add
@@ -145,17 +144,20 @@ class Concretizer(object):
raise NoBuildError(spec)
# Use a sort key to order the results
- return sorted(usable, key=lambda spec: (
- not spec.external, # prefer externals
- pref_key(spec), # respect prefs
- spec.name, # group by name
- reverse_order(spec.versions), # latest version
- spec # natural order
- ))
+ return sorted(
+ usable,
+ key=lambda spec: (
+ not spec.external, # prefer externals
+ pref_key(spec), # respect prefs
+ spec.name, # group by name
+ reverse_order(spec.versions), # latest version
+ spec, # natural order
+ ),
+ )
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
- find one that is most ABI compatible.
+ find one that is most ABI compatible.
"""
candidates = self._valid_virtuals_and_externals(spec)
if not candidates:
@@ -170,25 +172,28 @@ class Concretizer(object):
# Sort candidates from most to least compatibility.
# We reverse because True > False.
# Sort is stable, so candidates keep their order.
- return sorted(candidates,
- reverse=True,
- key=lambda spec: (
- _abi.compatible(spec, abi_exemplar, loose=True),
- _abi.compatible(spec, abi_exemplar)))
+ return sorted(
+ candidates,
+ reverse=True,
+ key=lambda spec: (
+ _abi.compatible(spec, abi_exemplar, loose=True),
+ _abi.compatible(spec, abi_exemplar),
+ ),
+ )
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
- the preferred version from spackconfig, and default to the package's
- version if there are no available versions.
-
- TODO: In many cases we probably want to look for installed
- versions of each package and use an installed version
- if we can link to it. The policy implemented here will
- tend to rebuild a lot of stuff becasue it will prefer
- a compiler in the spec to any compiler already-
- installed things were built with. There is likely
- some better policy that finds some middle ground
- between these two extremes.
+ the preferred version from spackconfig, and default to the package's
+ version if there are no available versions.
+
+ TODO: In many cases we probably want to look for installed
+ versions of each package and use an installed version
+ if we can link to it. The policy implemented here will
+          tend to rebuild a lot of stuff because it will prefer
+ a compiler in the spec to any compiler already-
+ installed things were built with. There is likely
+ some better policy that finds some middle ground
+ between these two extremes.
"""
# return if already concrete.
if spec.versions.concrete:
@@ -196,10 +201,9 @@ class Concretizer(object):
# List of versions we could consider, in sorted order
pkg_versions = spec.package_class.versions
- usable = [v for v in pkg_versions
- if any(v.satisfies(sv) for sv in spec.versions)]
+ usable = [v for v in pkg_versions if any(v.satisfies(sv) for sv in spec.versions)]
- yaml_prefs = PackagePrefs(spec.name, 'version')
+ yaml_prefs = PackagePrefs(spec.name, "version")
# The keys below show the order of precedence of factors used
# to select a version when concretizing. The item with
@@ -212,21 +216,19 @@ class Concretizer(object):
# ------- Special direction from the user
# Respect order listed in packages.yaml
-yaml_prefs(v),
-
# The preferred=True flag (packages or packages.yaml or both?)
- pkg_versions.get(Version(v)).get('preferred', False),
-
+ pkg_versions.get(Version(v)).get("preferred", False),
# ------- Regular case: use latest non-develop version by default.
# Avoid @develop version, which would otherwise be the "largest"
# in straight version comparisons
not v.isdevelop(),
-
# Compare the version itself
# This includes the logic:
# a) develop > everything (disabled by "not v.isdevelop() above)
# b) numeric > non-numeric
# c) Numeric or string comparison
- v)
+ v,
+ )
usable.sort(key=keyfn, reverse=True)
if usable:
@@ -238,7 +240,7 @@ class Concretizer(object):
# Someone else can raise an error if this happens,
# e.g. when we go to fetch it and don't know how. But it
# *might* work.
- if not spec.versions or spec.versions == VersionList([':']):
+ if not spec.versions or spec.versions == VersionList([":"]):
raise NoValidVersionError(spec)
else:
last = spec.versions[-1]
@@ -250,7 +252,7 @@ class Concretizer(object):
else:
spec.versions = ver([last])
- return True # Things changed
+ return True # Things changed
def concretize_architecture(self, spec):
"""If the spec is empty provide the defaults of the platform. If the
@@ -278,9 +280,7 @@ class Concretizer(object):
# Else if anyone else has a platform, take the closest one
# Search up, then down, along build/link deps first
# Then any nearest. Algorithm from compilerspec search
- platform_spec = find_spec(
- spec, lambda x: x.architecture and x.architecture.platform
- )
+ platform_spec = find_spec(spec, lambda x: x.architecture and x.architecture.platform)
if platform_spec:
new_plat = spack.platforms.by_name(platform_spec.architecture.platform)
else:
@@ -294,14 +294,17 @@ class Concretizer(object):
new_os = spec.architecture.os
else:
new_os_spec = find_spec(
- spec, lambda x: (x.architecture and
- x.architecture.platform == str(new_plat) and
- x.architecture.os)
+ spec,
+ lambda x: (
+ x.architecture
+ and x.architecture.platform == str(new_plat)
+ and x.architecture.os
+ ),
)
if new_os_spec:
new_os = new_os_spec.architecture.os
else:
- new_os = new_plat.operating_system('default_os')
+ new_os = new_plat.operating_system("default_os")
# Get the nearest spec with relevant platform and a target
# Generally, same algorithm as finding os
@@ -312,18 +315,21 @@ class Concretizer(object):
new_target = spec.architecture.target
else:
new_target_spec = find_spec(
- spec, lambda x: (x.architecture and
- x.architecture.platform == str(new_plat) and
- x.architecture.target and
- x.architecture.target != curr_target)
+ spec,
+ lambda x: (
+ x.architecture
+ and x.architecture.platform == str(new_plat)
+ and x.architecture.target
+ and x.architecture.target != curr_target
+ ),
)
if new_target_spec:
if curr_target:
# constrain one target by the other
new_target_arch = spack.spec.ArchSpec(
- (None, None, new_target_spec.architecture.target))
- curr_target_arch = spack.spec.ArchSpec(
- (None, None, curr_target))
+ (None, None, new_target_spec.architecture.target)
+ )
+ curr_target_arch = spack.spec.ArchSpec((None, None, curr_target))
curr_target_arch.constrain(new_target_arch)
new_target = curr_target_arch.target
else:
@@ -333,20 +339,18 @@ class Concretizer(object):
if PackagePrefs.has_preferred_targets(spec.name):
new_target = self.target_from_package_preferences(spec)
else:
- new_target = new_plat.target('default_target')
+ new_target = new_plat.target("default_target")
if curr_target:
# convert to ArchSpec to compare satisfaction
- new_target_arch = spack.spec.ArchSpec(
- (None, None, str(new_target)))
- curr_target_arch = spack.spec.ArchSpec(
- (None, None, str(curr_target)))
+ new_target_arch = spack.spec.ArchSpec((None, None, str(new_target)))
+ curr_target_arch = spack.spec.ArchSpec((None, None, str(curr_target)))
if not new_target_arch.satisfies(curr_target_arch):
# new_target is an incorrect guess based on preferences
# and/or default
- valid_target_ranges = str(curr_target).split(',')
+ valid_target_ranges = str(curr_target).split(",")
for target_range in valid_target_ranges:
- t_min, t_sep, t_max = target_range.partition(':')
+ t_min, t_sep, t_max = target_range.partition(":")
if not t_sep:
new_target = t_min
break
@@ -372,9 +376,8 @@ class Concretizer(object):
Args:
spec: abstract spec to be concretized
"""
- target_prefs = PackagePrefs(spec.name, 'target')
- target_specs = [spack.spec.Spec('target=%s' % tname)
- for tname in archspec.cpu.TARGETS]
+ target_prefs = PackagePrefs(spec.name, "target")
+ target_specs = [spack.spec.Spec("target=%s" % tname) for tname in archspec.cpu.TARGETS]
def tspec_filter(s):
# Filter target specs by whether the architecture
@@ -394,8 +397,8 @@ class Concretizer(object):
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
- the user preferences from packages.yaml or the default variants from
- the package specification.
+ the user preferences from packages.yaml or the default variants from
+ the package specification.
"""
changed = False
preferred_variants = PackagePrefs.preferred_variants(spec.name)
@@ -403,36 +406,34 @@ class Concretizer(object):
for name, entry in pkg_cls.variants.items():
variant, when = entry
var = spec.variants.get(name, None)
- if var and '*' in var:
+ if var and "*" in var:
# remove variant wildcard before concretizing
# wildcard cannot be combined with other variables in a
# multivalue variant, a concrete variant cannot have the value
# wildcard, and a wildcard does not constrain a variant
spec.variants.pop(name)
- if name not in spec.variants and any(spec.satisfies(w)
- for w in when):
+ if name not in spec.variants and any(spec.satisfies(w) for w in when):
changed = True
if name in preferred_variants:
spec.variants[name] = preferred_variants.get(name)
else:
spec.variants[name] = variant.make_default()
- if name in spec.variants and not any(spec.satisfies(w)
- for w in when):
+ if name in spec.variants and not any(spec.satisfies(w) for w in when):
raise vt.InvalidVariantForSpecError(name, when, spec)
return changed
def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take
- the compiler used for the nearest ancestor with a compiler
- spec and use that. If the ancestor's compiler is not
- concrete, then used the preferred compiler as specified in
- spackconfig.
-
- Intuition: Use the spackconfig default if no package that depends on
- this one has a strict compiler requirement. Otherwise, try to
- build with the compiler that will be used by libraries that
- link to this one, to maximize compatibility.
+ the compiler used for the nearest ancestor with a compiler
+ spec and use that. If the ancestor's compiler is not
+        concrete, then use the preferred compiler as specified in
+ spackconfig.
+
+ Intuition: Use the spackconfig default if no package that depends on
+ this one has a strict compiler requirement. Otherwise, try to
+ build with the compiler that will be used by libraries that
+ link to this one, to maximize compatibility.
"""
# Pass on concretizing the compiler if the target or operating system
# is not yet determined
@@ -447,59 +448,54 @@ class Concretizer(object):
# compiler_for_spec Should think whether this can be more
# efficient
def _proper_compiler_style(cspec, aspec):
- compilers = spack.compilers.compilers_for_spec(
- cspec, arch_spec=aspec
- )
+ compilers = spack.compilers.compilers_for_spec(cspec, arch_spec=aspec)
# If the spec passed as argument is concrete we want to check
# the versions match exactly
- if (cspec.concrete and compilers and
- cspec.version not in [c.version for c in compilers]):
+ if (
+ cspec.concrete
+ and compilers
+ and cspec.version not in [c.version for c in compilers]
+ ):
return []
return compilers
if spec.compiler and spec.compiler.concrete:
- if (self.check_for_compiler_existence and not
- _proper_compiler_style(spec.compiler, spec.architecture)):
- _compiler_concretization_failure(
- spec.compiler, spec.architecture)
+ if self.check_for_compiler_existence and not _proper_compiler_style(
+ spec.compiler, spec.architecture
+ ):
+ _compiler_concretization_failure(spec.compiler, spec.architecture)
return False
# Find another spec that has a compiler, or the root if none do
- other_spec = spec if spec.compiler else find_spec(
- spec, lambda x: x.compiler, spec.root)
+ other_spec = spec if spec.compiler else find_spec(spec, lambda x: x.compiler, spec.root)
other_compiler = other_spec.compiler
assert other_spec
# Check if the compiler is already fully specified
if other_compiler and other_compiler.concrete:
- if (self.check_for_compiler_existence and not
- _proper_compiler_style(other_compiler, spec.architecture)):
- _compiler_concretization_failure(
- other_compiler, spec.architecture)
+ if self.check_for_compiler_existence and not _proper_compiler_style(
+ other_compiler, spec.architecture
+ ):
+ _compiler_concretization_failure(other_compiler, spec.architecture)
spec.compiler = other_compiler
return True
if other_compiler: # Another node has abstract compiler information
- compiler_list = spack.compilers.find_specs_by_arch(
- other_compiler, spec.architecture
- )
+ compiler_list = spack.compilers.find_specs_by_arch(other_compiler, spec.architecture)
if not compiler_list:
# We don't have a matching compiler installed
if not self.check_for_compiler_existence:
# Concretize compiler spec versions as a package to build
- cpkg_spec = spack.compilers.pkg_spec_for_compiler(
- other_compiler
- )
+ cpkg_spec = spack.compilers.pkg_spec_for_compiler(other_compiler)
self.concretize_version(cpkg_spec)
spec.compiler = spack.spec.CompilerSpec(
- other_compiler.name, cpkg_spec.versions)
+ other_compiler.name, cpkg_spec.versions
+ )
return True
else:
# No compiler with a satisfactory spec was found
- raise UnavailableCompilerVersionError(
- other_compiler, spec.architecture
- )
+ raise UnavailableCompilerVersionError(other_compiler, spec.architecture)
else:
# We have no hints to go by, grab any compiler
compiler_list = spack.compilers.all_compiler_specs()
@@ -508,20 +504,18 @@ class Concretizer(object):
raise spack.compilers.NoCompilersError()
# By default, prefer later versions of compilers
- compiler_list = sorted(
- compiler_list, key=lambda x: (x.name, x.version), reverse=True)
- ppk = PackagePrefs(other_spec.name, 'compiler')
+ compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
+ ppk = PackagePrefs(other_spec.name, "compiler")
matches = sorted(compiler_list, key=ppk)
# copy concrete version into other_compiler
try:
spec.compiler = next(
- c for c in matches
- if _proper_compiler_style(c, spec.architecture)).copy()
+ c for c in matches if _proper_compiler_style(c, spec.architecture)
+ ).copy()
except StopIteration:
# No compiler with a satisfactory spec has a suitable arch
- _compiler_concretization_failure(
- other_compiler, spec.architecture)
+ _compiler_concretization_failure(other_compiler, spec.architecture)
assert spec.compiler.concrete
return True # things changed.
@@ -541,24 +535,23 @@ class Concretizer(object):
return True
compiler_match = lambda other: (
- spec.compiler == other.compiler and
- spec.architecture == other.architecture)
+ spec.compiler == other.compiler and spec.architecture == other.architecture
+ )
ret = False
for flag in spack.spec.FlagMap.valid_compiler_flags():
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = list()
try:
- nearest = next(p for p in spec.traverse(direction='parents')
- if (compiler_match(p) and
- (p is not spec) and
- flag in p.compiler_flags))
+ nearest = next(
+ p
+ for p in spec.traverse(direction="parents")
+ if (compiler_match(p) and (p is not spec) and flag in p.compiler_flags)
+ )
nearest_flags = nearest.compiler_flags.get(flag, [])
flags = spec.compiler_flags.get(flag, [])
if set(nearest_flags) - set(flags):
- spec.compiler_flags[flag] = list(
- llnl.util.lang.dedupe(nearest_flags + flags)
- )
+ spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(nearest_flags + flags))
ret = True
except StopIteration:
pass
@@ -567,8 +560,7 @@ class Concretizer(object):
# This ensures that spack will detect conflicts that stem from a change
# in default compiler flags.
try:
- compiler = spack.compilers.compiler_for_spec(
- spec.compiler, spec.architecture)
+ compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
except spack.compilers.NoCompilerForSpecError:
if self.check_for_compiler_existence:
raise
@@ -576,9 +568,7 @@ class Concretizer(object):
for flag in compiler.flags:
config_flags = compiler.flags.get(flag, [])
flags = spec.compiler_flags.get(flag, [])
- spec.compiler_flags[flag] = list(
- llnl.util.lang.dedupe(config_flags + flags)
- )
+ spec.compiler_flags[flag] = list(llnl.util.lang.dedupe(config_flags + flags))
if set(config_flags) - set(flags):
ret = True
@@ -634,14 +624,15 @@ class Concretizer(object):
current_target = spec.architecture.target
current_platform = spack.platforms.by_name(spec.architecture.platform)
- default_target = current_platform.target('default_target')
+ default_target = current_platform.target("default_target")
if PackagePrefs.has_preferred_targets(spec.name):
default_target = self.target_from_package_preferences(spec)
if current_target != default_target or (
- self.abstract_spec and
- self.abstract_spec.architecture and
- self.abstract_spec.architecture.concrete):
+ self.abstract_spec
+ and self.abstract_spec.architecture
+ and self.abstract_spec.architecture.concrete
+ ):
return False
try:
@@ -657,9 +648,11 @@ class Concretizer(object):
continue
if candidate is not None:
- msg = ('{0.name}@{0.version} cannot build optimized '
- 'binaries for "{1}". Using best target possible: '
- '"{2}"')
+ msg = (
+ "{0.name}@{0.version} cannot build optimized "
+ 'binaries for "{1}". Using best target possible: '
+ '"{2}"'
+ )
msg = msg.format(spec.compiler, current_target, candidate)
tty.warn(msg)
spec.architecture.target = candidate
@@ -688,12 +681,13 @@ def enable_compiler_existence_check():
def find_spec(spec, condition, default=None):
"""Searches the dag from spec in an intelligent order and looks
- for a spec that matches a condition"""
+ for a spec that matches a condition"""
# First search parents, then search children
- deptype = ('build', 'link')
+ deptype = ("build", "link")
dagiter = chain(
- spec.traverse(direction='parents', deptype=deptype, root=False),
- spec.traverse(direction='children', deptype=deptype, root=False))
+ spec.traverse(direction="parents", deptype=deptype, root=False),
+ spec.traverse(direction="children", deptype=deptype, root=False),
+ )
visited = set()
for relative in dagiter:
if condition(relative):
@@ -701,7 +695,7 @@ def find_spec(spec, condition, default=None):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
- for relative in spec.root.traverse(deptype='all'):
+ for relative in spec.root.traverse(deptype="all"):
if relative is spec:
continue
if id(relative) in visited:
@@ -713,7 +707,7 @@ def find_spec(spec, condition, default=None):
if condition(spec):
return spec
- return default # Nothing matched the condition; return default.
+ return default # Nothing matched the condition; return default.
def _compiler_concretization_failure(compiler_spec, arch):
@@ -722,8 +716,8 @@ def _compiler_concretization_failure(compiler_spec, arch):
# there are no compilers for the arch at all
if not spack.compilers.compilers_for_arch(arch):
available_os_targets = set(
- (c.operating_system, c.target) for c in
- spack.compilers.all_compilers())
+ (c.operating_system, c.target) for c in spack.compilers.all_compilers()
+ )
raise NoCompilersForArchError(arch, available_os_targets)
else:
raise UnavailableCompilerVersionError(compiler_spec, arch)
@@ -741,7 +735,7 @@ def concretize_specs_together(*abstract_specs, **kwargs):
Returns:
List of concretized specs
"""
- if spack.config.get('config:concretizer') == 'original':
+ if spack.config.get("config:concretizer") == "original":
return _concretize_specs_together_original(*abstract_specs, **kwargs)
return _concretize_specs_together_new(*abstract_specs, **kwargs)
@@ -750,7 +744,7 @@ def _concretize_specs_together_new(*abstract_specs, **kwargs):
import spack.solver.asp
solver = spack.solver.asp.Solver()
- solver.tests = kwargs.get('tests', False)
+ solver.tests = kwargs.get("tests", False)
result = solver.solve(abstract_specs)
result.raise_if_unsat()
@@ -765,22 +759,22 @@ def _concretize_specs_together_original(*abstract_specs, **kwargs):
tmpdir = tempfile.mkdtemp()
repo_path, _ = spack.repo.create_repo(tmpdir)
- debug_msg = '[CONCRETIZATION]: Creating helper repository in {0}'
+ debug_msg = "[CONCRETIZATION]: Creating helper repository in {0}"
tty.debug(debug_msg.format(repo_path))
- pkg_dir = os.path.join(repo_path, 'packages', 'concretizationroot')
+ pkg_dir = os.path.join(repo_path, "packages", "concretizationroot")
fs.mkdirp(pkg_dir)
environment = spack.tengine.make_environment()
- template = environment.get_template('misc/coconcretization.pyt')
+ template = environment.get_template("misc/coconcretization.pyt")
# Split recursive specs, as it seems the concretizer has issues
# respecting conditions on dependents expressed like
# depends_on('foo ^bar@1.0'), see issue #11160
- split_specs = [dep.copy(deps=False)
- for spec in abstract_specs
- for dep in spec.traverse(root=True)]
+ split_specs = [
+ dep.copy(deps=False) for spec in abstract_specs for dep in spec.traverse(root=True)
+ ]
- with open(os.path.join(pkg_dir, 'package.py'), 'w') as f:
+ with open(os.path.join(pkg_dir, "package.py"), "w") as f:
f.write(template.render(specs=[str(s) for s in split_specs]))
return spack.repo.Repo(repo_path)
@@ -790,81 +784,84 @@ def _concretize_specs_together_original(*abstract_specs, **kwargs):
with spack.repo.additional_repository(concretization_repository):
# Spec from a helper package that depends on all the abstract_specs
- concretization_root = spack.spec.Spec('concretizationroot')
+ concretization_root = spack.spec.Spec("concretizationroot")
concretization_root.concretize(tests=kwargs.get("tests", False))
# Retrieve the direct dependencies
- concrete_specs = [
- concretization_root[spec.name].copy() for spec in abstract_specs
- ]
+ concrete_specs = [concretization_root[spec.name].copy() for spec in abstract_specs]
return concrete_specs
class NoCompilersForArchError(spack.error.SpackError):
def __init__(self, arch, available_os_targets):
- err_msg = ("No compilers found"
- " for operating system %s and target %s."
- "\nIf previous installations have succeeded, the"
- " operating system may have been updated." %
- (arch.os, arch.target))
+ err_msg = (
+ "No compilers found"
+ " for operating system %s and target %s."
+ "\nIf previous installations have succeeded, the"
+ " operating system may have been updated." % (arch.os, arch.target)
+ )
available_os_target_strs = list()
for operating_system, t in available_os_targets:
- os_target_str = "%s-%s" % (operating_system, t) if t \
- else operating_system
+ os_target_str = "%s-%s" % (operating_system, t) if t else operating_system
available_os_target_strs.append(os_target_str)
err_msg += (
"\nCompilers are defined for the following"
- " operating systems and targets:\n\t" +
- "\n\t".join(available_os_target_strs))
+ " operating systems and targets:\n\t" + "\n\t".join(available_os_target_strs)
+ )
super(NoCompilersForArchError, self).__init__(
- err_msg, "Run 'spack compiler find' to add compilers.")
+ err_msg, "Run 'spack compiler find' to add compilers."
+ )
class UnavailableCompilerVersionError(spack.error.SpackError):
"""Raised when there is no available compiler that satisfies a
- compiler spec."""
+ compiler spec."""
def __init__(self, compiler_spec, arch=None):
err_msg = "No compilers with spec {0} found".format(compiler_spec)
if arch:
- err_msg += " for operating system {0} and target {1}.".format(
- arch.os, arch.target
- )
+ err_msg += " for operating system {0} and target {1}.".format(arch.os, arch.target)
super(UnavailableCompilerVersionError, self).__init__(
- err_msg, "Run 'spack compiler find' to add compilers or "
+ err_msg,
+ "Run 'spack compiler find' to add compilers or "
"'spack compilers' to see which compilers are already recognized"
- " by spack.")
+ " by spack.",
+ )
class NoValidVersionError(spack.error.SpackError):
"""Raised when there is no way to have a concrete version for a
- particular spec."""
+ particular spec."""
def __init__(self, spec):
super(NoValidVersionError, self).__init__(
- "There are no valid versions for %s that match '%s'"
- % (spec.name, spec.versions))
+ "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions)
+ )
class InsufficientArchitectureInfoError(spack.error.SpackError):
"""Raised when details on architecture cannot be collected from the
- system"""
+ system"""
def __init__(self, spec, archs):
super(InsufficientArchitectureInfoError, self).__init__(
"Cannot determine necessary architecture information for '%s': %s"
- % (spec.name, str(archs)))
+ % (spec.name, str(archs))
+ )
class NoBuildError(spack.error.SpecError):
"""Raised when a package is configured with the buildable option False, but
no satisfactory external versions can be found
"""
+
def __init__(self, spec):
- msg = ("The spec\n '%s'\n is configured as not buildable, "
- "and no matching external installs were found")
+ msg = (
+ "The spec\n '%s'\n is configured as not buildable, "
+ "and no matching external installs were found"
+ )
super(NoBuildError, self).__init__(msg % spec)
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 2eb603780a..208daf77f7 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -69,51 +69,48 @@ from spack.util.cpus import cpus_available
#: Dict from section names -> schema for that section
section_schemas = {
- 'compilers': spack.schema.compilers.schema,
- 'concretizer': spack.schema.concretizer.schema,
- 'mirrors': spack.schema.mirrors.schema,
- 'repos': spack.schema.repos.schema,
- 'packages': spack.schema.packages.schema,
- 'modules': spack.schema.modules.schema,
- 'config': spack.schema.config.schema,
- 'upstreams': spack.schema.upstreams.schema,
- 'bootstrap': spack.schema.bootstrap.schema
+ "compilers": spack.schema.compilers.schema,
+ "concretizer": spack.schema.concretizer.schema,
+ "mirrors": spack.schema.mirrors.schema,
+ "repos": spack.schema.repos.schema,
+ "packages": spack.schema.packages.schema,
+ "modules": spack.schema.modules.schema,
+ "config": spack.schema.config.schema,
+ "upstreams": spack.schema.upstreams.schema,
+ "bootstrap": spack.schema.bootstrap.schema,
}
# Same as above, but including keys for environments
# this allows us to unify config reading between configs and environments
all_schemas = copy.deepcopy(section_schemas)
-all_schemas.update(dict((key, spack.schema.env.schema)
- for key in spack.schema.env.keys))
+all_schemas.update(dict((key, spack.schema.env.schema) for key in spack.schema.env.keys))
#: Path to the default configuration
-configuration_defaults_path = (
- 'defaults', os.path.join(spack.paths.etc_path, 'defaults')
-)
+configuration_defaults_path = ("defaults", os.path.join(spack.paths.etc_path, "defaults"))
#: Hard-coded default values for some key configuration options.
#: This ensures that Spack will still work even if config.yaml in
#: the defaults scope is removed.
config_defaults = {
- 'config': {
- 'debug': False,
- 'connect_timeout': 10,
- 'verify_ssl': True,
- 'checksum': True,
- 'dirty': False,
- 'build_jobs': min(16, cpus_available()),
- 'build_stage': '$tempdir/spack-stage',
- 'concretizer': 'clingo',
- 'license_dir': spack.paths.default_license_dir,
+ "config": {
+ "debug": False,
+ "connect_timeout": 10,
+ "verify_ssl": True,
+ "checksum": True,
+ "dirty": False,
+ "build_jobs": min(16, cpus_available()),
+ "build_stage": "$tempdir/spack-stage",
+ "concretizer": "clingo",
+ "license_dir": spack.paths.default_license_dir,
}
}
#: metavar to use for commands that accept scopes
#: this is shorter and more readable than listing all choices
-scopes_metavar = '{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT'
+scopes_metavar = "{defaults,system,site,user}[/PLATFORM] or env:ENVIRONMENT"
#: Base name for the (internal) overrides scope.
-overrides_base_name = 'overrides-'
+overrides_base_name = "overrides-"
def first_existing(dictionary, keys):
@@ -127,13 +124,13 @@ def first_existing(dictionary, keys):
class ConfigScope(object):
"""This class represents a configuration scope.
- A scope is one directory containing named configuration files.
- Each file is a config "section" (e.g., mirrors, compilers, etc).
+ A scope is one directory containing named configuration files.
+ Each file is a config "section" (e.g., mirrors, compilers, etc).
"""
def __init__(self, name, path):
- self.name = name # scope name.
- self.path = path # path to directory containing configs.
+ self.name = name # scope name.
+ self.path = path # path to directory containing configs.
self.sections = syaml.syaml_dict() # sections read from config files.
@property
@@ -146,9 +143,9 @@ class ConfigScope(object):
def get_section(self, section):
if section not in self.sections:
- path = self.get_section_filename(section)
+ path = self.get_section_filename(section)
schema = section_schemas[section]
- data = read_config_file(path, schema)
+ data = read_config_file(path, schema)
self.sections[section] = data
return self.sections[section]
@@ -162,22 +159,22 @@ class ConfigScope(object):
try:
mkdirp(self.path)
- with open(filename, 'w') as f:
+ with open(filename, "w") as f:
syaml.dump_config(data, stream=f, default_flow_style=False)
except (yaml.YAMLError, IOError) as e:
- raise ConfigFileError(
- "Error writing to config file: '%s'" % str(e))
+ raise ConfigFileError("Error writing to config file: '%s'" % str(e))
def clear(self):
"""Empty cached config information."""
self.sections = syaml.syaml_dict()
def __repr__(self):
- return '<ConfigScope: %s: %s>' % (self.name, self.path)
+ return "<ConfigScope: %s: %s>" % (self.name, self.path)
class SingleFileScope(ConfigScope):
"""This class represents a configuration scope in a single YAML file."""
+
def __init__(self, name, path, schema, yaml_path=None):
"""Similar to ``ConfigScope`` but can be embedded in another schema.
@@ -289,18 +286,16 @@ class SingleFileScope(ConfigScope):
parent = os.path.dirname(self.path)
mkdirp(parent)
- tmp = os.path.join(parent, '.%s.tmp' % os.path.basename(self.path))
- with open(tmp, 'w') as f:
- syaml.dump_config(data_to_write, stream=f,
- default_flow_style=False)
+ tmp = os.path.join(parent, ".%s.tmp" % os.path.basename(self.path))
+ with open(tmp, "w") as f:
+ syaml.dump_config(data_to_write, stream=f, default_flow_style=False)
rename(tmp, self.path)
except (yaml.YAMLError, IOError) as e:
- raise ConfigFileError(
- "Error writing to config file: '%s'" % str(e))
+ raise ConfigFileError("Error writing to config file: '%s'" % str(e))
def __repr__(self):
- return '<SingleFileScope: %s: %s>' % (self.name, self.path)
+ return "<SingleFileScope: %s: %s>" % (self.name, self.path)
class ImmutableConfigScope(ConfigScope):
@@ -313,7 +308,7 @@ class ImmutableConfigScope(ConfigScope):
raise ConfigError("Cannot write to immutable scope %s" % self)
def __repr__(self):
- return '<ImmutableConfigScope: %s: %s>' % (self.name, self.path)
+ return "<ImmutableConfigScope: %s: %s>" % (self.name, self.path)
class InternalConfigScope(ConfigScope):
@@ -323,6 +318,7 @@ class InternalConfigScope(ConfigScope):
config file settings are accessed the same way, and Spack can easily
override settings from files.
"""
+
def __init__(self, name, data=None):
super(InternalConfigScope, self).__init__(name, None)
self.sections = syaml.syaml_dict()
@@ -332,12 +328,10 @@ class InternalConfigScope(ConfigScope):
for section in data:
dsec = data[section]
validate({section: dsec}, section_schemas[section])
- self.sections[section] = _mark_internal(
- syaml.syaml_dict({section: dsec}), name)
+ self.sections[section] = _mark_internal(syaml.syaml_dict({section: dsec}), name)
def get_section_filename(self, section):
- raise NotImplementedError(
- "Cannot get filename for InternalConfigScope.")
+ raise NotImplementedError("Cannot get filename for InternalConfigScope.")
def get_section(self, section):
"""Just reads from an internal dictionary."""
@@ -353,7 +347,7 @@ class InternalConfigScope(ConfigScope):
self.sections[section] = _mark_internal(data, self.name)
def __repr__(self):
- return '<InternalConfigScope: %s>' % self.name
+ return "<InternalConfigScope: %s>" % self.name
def clear(self):
# no cache to clear here.
@@ -364,15 +358,14 @@ class InternalConfigScope(ConfigScope):
"""Turn a trailing `:' in a key name into an override attribute."""
result = {}
for sk, sv in iteritems(data):
- if sk.endswith(':'):
+ if sk.endswith(":"):
key = syaml.syaml_str(sk[:-1])
key.override = True
else:
key = sk
if isinstance(sv, dict):
- result[key]\
- = InternalConfigScope._process_dict_keyname_overrides(sv)
+ result[key] = InternalConfigScope._process_dict_keyname_overrides(sv)
else:
result[key] = copy.copy(sv)
@@ -384,10 +377,12 @@ def _config_mutator(method):
that mutate the underlying configuration. Used to clear the
memoization cache.
"""
+
@functools.wraps(method)
def _method(self, *args, **kwargs):
self._get_config_memoized.cache.clear()
return method(self, *args, **kwargs)
+
return _method
@@ -416,14 +411,14 @@ class Configuration(object):
cmd_line_scope = None
if self.scopes:
highest_precedence_scope = list(self.scopes.values())[-1]
- if highest_precedence_scope.name == 'command_line':
+ if highest_precedence_scope.name == "command_line":
# If the command-line scope is present, it should always
# be the scope of highest precedence
cmd_line_scope = self.pop_scope()
self.scopes[scope.name] = scope
if cmd_line_scope:
- self.scopes['command_line'] = cmd_line_scope
+ self.scopes["command_line"] = cmd_line_scope
@_config_mutator
def pop_scope(self):
@@ -439,9 +434,11 @@ class Configuration(object):
@property
def file_scopes(self):
"""List of writable scopes with an associated file."""
- return [s for s in self.scopes.values()
- if (type(s) == ConfigScope
- or type(s) == SingleFileScope)]
+ return [
+ s
+ for s in self.scopes.values()
+ if (type(s) == ConfigScope or type(s) == SingleFileScope)
+ ]
def highest_precedence_scope(self):
"""Non-internal scope with highest precedence."""
@@ -487,12 +484,12 @@ class Configuration(object):
return self.scopes[scope]
else:
- raise ValueError("Invalid config scope: '%s'. Must be one of %s"
- % (scope, self.scopes.keys()))
+ raise ValueError(
+ "Invalid config scope: '%s'. Must be one of %s" % (scope, self.scopes.keys())
+ )
def get_config_filename(self, scope, section):
- """For some scope and section, get the name of the configuration file.
- """
+ """For some scope and section, get the name of the configuration file."""
scope = self._validate_scope(scope)
return scope.get_section_filename(section)
@@ -525,12 +522,14 @@ class Configuration(object):
force (str): force the update
"""
if self.format_updates.get(section) and not force:
- msg = ('The "{0}" section of the configuration needs to be written'
- ' to disk, but is currently using a deprecated format. '
- 'Please update it using:\n\n'
- '\tspack config [--scope=<scope>] update {0}\n\n'
- 'Note that previous versions of Spack will not be able to '
- 'use the updated configuration.')
+ msg = (
+ 'The "{0}" section of the configuration needs to be written'
+ " to disk, but is currently using a deprecated format. "
+ "Please update it using:\n\n"
+ "\tspack config [--scope=<scope>] update {0}\n\n"
+ "Note that previous versions of Spack will not be able to "
+ "use the updated configuration."
+ )
msg = msg.format(section)
raise RuntimeError(msg)
@@ -538,19 +537,16 @@ class Configuration(object):
scope = self._validate_scope(scope) # get ConfigScope object
# manually preserve comments
- need_comment_copy = (section in scope.sections and
- scope.sections[section] is not None)
+ need_comment_copy = section in scope.sections and scope.sections[section] is not None
if need_comment_copy:
- comments = getattr(scope.sections[section][section],
- yaml.comments.Comment.attrib,
- None)
+ comments = getattr(
+ scope.sections[section][section], yaml.comments.Comment.attrib, None
+ )
# read only the requested section's data.
scope.sections[section] = syaml.syaml_dict({section: update_data})
if need_comment_copy and comments:
- setattr(scope.sections[section][section],
- yaml.comments.Comment.attrib,
- comments)
+ setattr(scope.sections[section][section], yaml.comments.Comment.attrib, comments)
scope._write_section(section)
@@ -634,7 +630,7 @@ class Configuration(object):
spack.config.get('config:dirty')
We use ``:`` as the separator, like YAML objects.
- """
+ """
# TODO: Currently only handles maps. Think about lists if needed.
parts = process_config_path(path)
section = parts.pop(0)
@@ -657,7 +653,7 @@ class Configuration(object):
Accepts the path syntax described in ``get()``.
"""
- if ':' not in path:
+ if ":" not in path:
# handle bare section name as path
self.update_config(path, value, scope=scope)
return
@@ -702,8 +698,7 @@ class Configuration(object):
try:
data = syaml.syaml_dict()
data[section] = self.get_config(section)
- syaml.dump_config(
- data, stream=sys.stdout, default_flow_style=False, blame=blame)
+ syaml.dump_config(data, stream=sys.stdout, default_flow_style=False, blame=blame)
except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section)
@@ -727,11 +722,10 @@ def override(path_or_scope, value=None):
else:
base_name = overrides_base_name
# Ensure the new override gets a unique scope name
- current_overrides = [s.name for s in
- config.matching_scopes(r'^{0}'.format(base_name))]
+ current_overrides = [s.name for s in config.matching_scopes(r"^{0}".format(base_name))]
num_overrides = len(current_overrides)
while True:
- scope_name = '{0}{1}'.format(base_name, num_overrides)
+ scope_name = "{0}{1}".format(base_name, num_overrides)
if scope_name in current_overrides:
num_overrides += 1
else:
@@ -775,7 +769,7 @@ def _add_command_line_scopes(cfg, command_line_scopes):
raise ConfigError("config scope is not readable: '%s'" % path)
# name based on order on the command line
- name = 'cmd_scope_%d' % i
+ name = "cmd_scope_%d" % i
cfg.push_scope(ImmutableConfigScope(name, path))
_add_platform_scope(cfg, ImmutableConfigScope, name, path)
@@ -794,7 +788,7 @@ def _config():
cfg = Configuration()
# first do the builtin, hardcoded defaults
- builtin = InternalConfigScope('_builtin', config_defaults)
+ builtin = InternalConfigScope("_builtin", config_defaults)
cfg.push_scope(builtin)
# Builtin paths to configuration files in Spack
@@ -810,21 +804,19 @@ def _config():
# This is disabled if user asks for no local configuration.
if not disable_local_config:
configuration_paths.append(
- ('system', spack.paths.system_config_path),
+ ("system", spack.paths.system_config_path),
)
# Site configuration is per spack instance, for sites or projects
# No site-level configs should be checked into spack by default.
configuration_paths.append(
- ('site', os.path.join(spack.paths.etc_path)),
+ ("site", os.path.join(spack.paths.etc_path)),
)
# User configuration can override both spack defaults and site config
# This is disabled if user asks for no local configuration.
if not disable_local_config:
- configuration_paths.append(
- ('user', spack.paths.user_config_path)
- )
+ configuration_paths.append(("user", spack.paths.user_config_path))
# add each scope and its platform-specific directory
for name, path in configuration_paths:
@@ -838,7 +830,7 @@ def _config():
# we make a special scope for spack commands so that they can
# override configuration options.
- cfg.push_scope(InternalConfigScope('command_line'))
+ cfg.push_scope(InternalConfigScope("command_line"))
return cfg
@@ -848,8 +840,7 @@ config = llnl.util.lang.Singleton(_config)
def add_from_file(filename, scope=None):
- """Add updates to a config from a filename
- """
+ """Add updates to a config from a filename"""
import spack.environment as ev
# Get file as config dict
@@ -881,13 +872,13 @@ def add(fullpath, scope=None):
components = process_config_path(fullpath)
has_existing_value = True
- path = ''
+ path = ""
override = False
for idx, name in enumerate(components[:-1]):
# First handle double colons in constructing path
- colon = '::' if override else ':' if path else ''
+ colon = "::" if override else ":" if path else ""
path += colon + name
- if getattr(name, 'override', False):
+ if getattr(name, "override", False):
override = True
else:
override = False
@@ -904,12 +895,12 @@ def add(fullpath, scope=None):
# construct value from this point down
value = syaml.load_config(components[-1])
- for component in reversed(components[idx + 1:-1]):
+ for component in reversed(components[idx + 1 : -1]):
value = {component: value}
break
if has_existing_value:
- path, _, value = fullpath.rpartition(':')
+ path, _, value = fullpath.rpartition(":")
value = syaml.load_config(value)
existing = get(path, scope=scope)
@@ -936,7 +927,7 @@ def set(path, value, scope=None):
def add_default_platform_scope(platform):
- plat_name = os.path.join('defaults', platform)
+ plat_name = os.path.join("defaults", platform)
plat_path = os.path.join(configuration_defaults_path[1], platform)
config.push_scope(ConfigScope(plat_name, plat_path))
@@ -951,7 +942,8 @@ def _validate_section_name(section):
if section not in section_schemas:
raise ConfigSectionError(
"Invalid config section: '%s'. Options are: %s"
- % (section, " ".join(section_schemas.keys())))
+ % (section, " ".join(section_schemas.keys()))
+ )
def validate(data, schema, filename=None):
@@ -973,16 +965,16 @@ def validate(data, schema, filename=None):
if isinstance(test_data, yaml.comments.CommentedMap):
# HACK to fully copy ruamel CommentedMap that doesn't provide copy
# method. Especially necessary for environments
- setattr(test_data,
- yaml.comments.Comment.attrib,
- getattr(data,
- yaml.comments.Comment.attrib,
- yaml.comments.Comment()))
+ setattr(
+ test_data,
+ yaml.comments.Comment.attrib,
+ getattr(data, yaml.comments.Comment.attrib, yaml.comments.Comment()),
+ )
try:
spack.schema.Validator(schema).validate(test_data)
except jsonschema.ValidationError as e:
- if hasattr(e.instance, 'lc'):
+ if hasattr(e.instance, "lc"):
line_number = e.instance.lc.line + 1
else:
line_number = None
@@ -1007,8 +999,7 @@ def read_config_file(filename, schema=None):
return None
elif not os.path.isfile(filename):
- raise ConfigFileError(
- "Invalid configuration. %s exists but is not a file." % filename)
+ raise ConfigFileError("Invalid configuration. %s exists but is not a file." % filename)
elif not os.access(filename, os.R_OK):
raise ConfigFileError("Config file is not readable: %s" % filename)
@@ -1026,16 +1017,13 @@ def read_config_file(filename, schema=None):
return data
except StopIteration:
- raise ConfigFileError(
- "Config file is empty or is not a valid YAML dict: %s" % filename)
+ raise ConfigFileError("Config file is empty or is not a valid YAML dict: %s" % filename)
except MarkedYAMLError as e:
- raise ConfigFileError(
- "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
+ raise ConfigFileError("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
except IOError as e:
- raise ConfigFileError(
- "Error reading configuration file %s: %s" % (filename, str(e)))
+ raise ConfigFileError("Error reading configuration file %s: %s" % (filename, str(e)))
def _override(string):
@@ -1046,7 +1034,7 @@ def _override(string):
configs instead of merging into them.
"""
- return hasattr(string, 'override') and string.override
+ return hasattr(string, "override") and string.override
def _mark_internal(data, name):
@@ -1055,8 +1043,9 @@ def _mark_internal(data, name):
This is used by `spack config blame` to show where config lines came from.
"""
if isinstance(data, dict):
- d = syaml.syaml_dict((_mark_internal(k, name), _mark_internal(v, name))
- for k, v in data.items())
+ d = syaml.syaml_dict(
+ (_mark_internal(k, name), _mark_internal(v, name)) for k, v in data.items()
+ )
elif isinstance(data, list):
d = syaml.syaml_list(_mark_internal(e, name) for e in data)
else:
@@ -1078,12 +1067,12 @@ def get_valid_type(path):
``int``, ``float``.
"""
types = {
- 'array': list,
- 'object': syaml.syaml_dict,
- 'string': str,
- 'boolean': bool,
- 'integer': int,
- 'number': float
+ "array": list,
+ "object": syaml.syaml_dict,
+ "string": str,
+ "boolean": bool,
+ "integer": int,
+ "number": float,
}
components = process_config_path(path)
@@ -1098,11 +1087,11 @@ def get_valid_type(path):
validate(test_data, section_schemas[section])
except (ConfigFormatError, AttributeError) as e:
jsonschema_error = e.validation_error
- if jsonschema_error.validator == 'type':
+ if jsonschema_error.validator == "type":
return types[jsonschema_error.validator_value]()
- elif jsonschema_error.validator in ('anyOf', 'oneOf'):
+ elif jsonschema_error.validator in ("anyOf", "oneOf"):
for subschema in jsonschema_error.validator_value:
- schema_type = subschema.get('type')
+ schema_type = subschema.get("type")
if schema_type is not None:
return types[schema_type]()
else:
@@ -1127,6 +1116,7 @@ def merge_yaml(dest, source):
with `::` instead of `:`, and the key will override that of the
parent instead of merging.
"""
+
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
@@ -1176,18 +1166,17 @@ def merge_yaml(dest, source):
#
def process_config_path(path):
result = []
- if path.startswith(':'):
- raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".
- format(path), '')
+ if path.startswith(":"):
+ raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".format(path), "")
seen_override_in_path = False
while path:
- front, sep, path = path.partition(':')
- if (sep and not path) or path.startswith(':'):
+ front, sep, path = path.partition(":")
+ if (sep and not path) or path.startswith(":"):
if seen_override_in_path:
- raise syaml.SpackYAMLError("Meaningless second override"
- " indicator `::' in path `{0}'".
- format(path), '')
- path = path.lstrip(':')
+ raise syaml.SpackYAMLError(
+ "Meaningless second override" " indicator `::' in path `{0}'".format(path), ""
+ )
+ path = path.lstrip(":")
front = syaml.syaml_str(front)
front.override = True
seen_override_in_path = True
@@ -1198,7 +1187,7 @@ def process_config_path(path):
#
# Settings for commands that modify configuration
#
-def default_modify_scope(section='config'):
+def default_modify_scope(section="config"):
"""Return the config scope that commands should modify by default.
Commands that modify configuration by default modify the *highest*
@@ -1208,7 +1197,7 @@ def default_modify_scope(section='config'):
section (bool): Section for which to get the default scope.
If this is not 'compilers', a general (non-platform) scope is used.
"""
- if section == 'compilers':
+ if section == "compilers":
return spack.config.config.highest_precedence_scope().name
else:
return spack.config.config.highest_precedence_non_platform_scope().name
@@ -1253,7 +1242,7 @@ def ensure_latest_format_fn(section):
# The line below is based on the fact that every module we need
# is already imported at the top level
section_module = getattr(spack.schema, section)
- update_fn = getattr(section_module, 'update', lambda x: False)
+ update_fn = getattr(section_module, "update", lambda x: False)
return update_fn
@@ -1334,21 +1323,21 @@ class ConfigFormatError(ConfigError):
self.filename = filename # record this for ruamel.yaml
# construct location
- location = '<unknown file>'
+ location = "<unknown file>"
if filename:
- location = '%s' % filename
+ location = "%s" % filename
if line is not None:
- location += ':%d' % line
+ location += ":%d" % line
- message = '%s: %s' % (location, validation_error.message)
+ message = "%s: %s" % (location, validation_error.message)
super(ConfigError, self).__init__(message)
def _get_mark(self, validation_error, data):
- """Get the file/line mark fo a validation error from a Spack YAML file.
- """
+ """Get the file/line mark fo a validation error from a Spack YAML file."""
+
def _get_mark_or_first_member_mark(obj):
# mark of object itself
- mark = getattr(obj, '_start_mark', None)
+ mark = getattr(obj, "_start_mark", None)
if mark:
return mark
@@ -1356,7 +1345,7 @@ class ConfigFormatError(ConfigError):
if isinstance(obj, (list, dict)):
first_member = next(iter(obj), None)
if first_member:
- mark = getattr(first_member, '_start_mark', None)
+ mark = getattr(first_member, "_start_mark", None)
if mark:
return mark
@@ -1384,7 +1373,7 @@ class ConfigFormatError(ConfigError):
elif isinstance(parent, list):
keylist = parent
idx = keylist.index(path[-1])
- mark = getattr(keylist[idx], '_start_mark', None)
+ mark = getattr(keylist[idx], "_start_mark", None)
if mark:
return mark
diff --git a/lib/spack/spack/container/__init__.py b/lib/spack/spack/container/__init__.py
index 196a405dc4..648e245eda 100644
--- a/lib/spack/spack/container/__init__.py
+++ b/lib/spack/spack/container/__init__.py
@@ -13,7 +13,7 @@ import spack.util.spack_yaml as syaml
from .writers import recipe
-__all__ = ['validate', 'recipe']
+__all__ = ["validate", "recipe"]
def validate(configuration_file):
@@ -32,50 +32,56 @@ def validate(configuration_file):
A sanitized copy of the configuration stored in the input file
"""
import jsonschema
+
with open(configuration_file) as f:
config = syaml.load(f)
# Ensure we have a "container" attribute with sensible defaults set
env_dict = ev.config_dict(config)
- env_dict.setdefault('container', {
- 'format': 'docker',
- 'images': {'os': 'ubuntu:18.04', 'spack': 'develop'}
- })
- env_dict['container'].setdefault('format', 'docker')
- env_dict['container'].setdefault(
- 'images', {'os': 'ubuntu:18.04', 'spack': 'develop'}
+ env_dict.setdefault(
+ "container", {"format": "docker", "images": {"os": "ubuntu:18.04", "spack": "develop"}}
)
+ env_dict["container"].setdefault("format", "docker")
+ env_dict["container"].setdefault("images", {"os": "ubuntu:18.04", "spack": "develop"})
# Remove attributes that are not needed / allowed in the
# container recipe
- for subsection in ('cdash', 'gitlab_ci', 'modules'):
+ for subsection in ("cdash", "gitlab_ci", "modules"):
if subsection in env_dict:
- msg = ('the subsection "{0}" in "{1}" is not used when generating'
- ' container recipes and will be discarded')
+ msg = (
+ 'the subsection "{0}" in "{1}" is not used when generating'
+ " container recipes and will be discarded"
+ )
warnings.warn(msg.format(subsection, configuration_file))
env_dict.pop(subsection)
# Set the default value of the concretization strategy to unify and
# warn if the user explicitly set another value
- env_dict.setdefault('concretizer', {'unify': True})
- if not env_dict['concretizer']['unify'] is True:
- warnings.warn('"concretizer:unify" is not set to "true", which means the '
- 'generated image may contain different variants of the same '
- 'packages. Set to "true" to get a consistent set of packages.')
+ env_dict.setdefault("concretizer", {"unify": True})
+ if not env_dict["concretizer"]["unify"] is True:
+ warnings.warn(
+ '"concretizer:unify" is not set to "true", which means the '
+ "generated image may contain different variants of the same "
+ 'packages. Set to "true" to get a consistent set of packages.'
+ )
# Check if the install tree was explicitly set to a custom value and warn
# that it will be overridden
- environment_config = env_dict.get('config', {})
- if environment_config.get('install_tree', None):
- msg = ('the "config:install_tree" attribute has been set explicitly '
- 'and will be overridden in the container image')
+ environment_config = env_dict.get("config", {})
+ if environment_config.get("install_tree", None):
+ msg = (
+ 'the "config:install_tree" attribute has been set explicitly '
+ "and will be overridden in the container image"
+ )
warnings.warn(msg)
# Likewise for the view
- environment_view = env_dict.get('view', None)
+ environment_view = env_dict.get("view", None)
if environment_view:
- msg = ('the "view" attribute has been set explicitly '
- 'and will be overridden in the container image')
+ msg = (
+ 'the "view" attribute has been set explicitly '
+ "and will be overridden in the container image"
+ )
warnings.warn(msg)
jsonschema.validate(config, schema=env.schema)
diff --git a/lib/spack/spack/container/images.py b/lib/spack/spack/container/images.py
index 32c95c950a..de3c686bae 100644
--- a/lib/spack/spack/container/images.py
+++ b/lib/spack/spack/container/images.py
@@ -25,7 +25,7 @@ def data():
global _data
if not _data:
json_dir = os.path.abspath(os.path.dirname(__file__))
- json_file = os.path.join(json_dir, 'images.json')
+ json_file = os.path.join(json_dir, "images.json")
with open(json_file) as f:
_data = json.load(f)
return _data
@@ -45,12 +45,12 @@ def build_info(image, spack_version):
# Don't handle error here, as a wrong image should have been
# caught by the JSON schema
image_data = data()["images"][image]
- build_image = image_data.get('build', None)
+ build_image = image_data.get("build", None)
if not build_image:
return None, None
# Translate version from git to docker if necessary
- build_tag = image_data['build_tags'].get(spack_version, spack_version)
+ build_tag = image_data["build_tags"].get(spack_version, spack_version)
return build_image, build_tag
@@ -72,7 +72,7 @@ def os_package_manager_for(image):
def all_bootstrap_os():
"""Return a list of all the OS that can be used to bootstrap Spack"""
- return list(data()['images'])
+ return list(data()["images"])
def commands_for(package_manager):
@@ -86,7 +86,7 @@ def commands_for(package_manager):
A tuple of (update, install, clean) commands.
"""
info = data()["os_package_managers"][package_manager]
- return info['update'], info['install'], info['clean']
+ return info["update"], info["install"], info["clean"]
def bootstrap_template_for(image):
@@ -97,11 +97,12 @@ def _verify_ref(url, ref, enforce_sha):
# Do a checkout in a temporary directory
msg = 'Cloning "{0}" to verify ref "{1}"'.format(url, ref)
tty.info(msg, stream=sys.stderr)
- git = executable.which('git', required=True)
+ git = executable.which("git", required=True)
with fs.temporary_dir():
- git('clone', '-q', url, '.')
- sha = git('rev-parse', '-q', ref + '^{commit}',
- output=str, error=os.devnull, fail_on_error=False)
+ git("clone", "-q", url, ".")
+ sha = git(
+ "rev-parse", "-q", ref + "^{commit}", output=str, error=os.devnull, fail_on_error=False
+ )
if git.returncode:
msg = '"{0}" is not a valid reference for "{1}"'
raise RuntimeError(msg.format(sha, url))
@@ -121,14 +122,16 @@ def checkout_command(url, ref, enforce_sha, verify):
enforce_sha (bool): if true turns every
verify (bool):
"""
- url = url or 'https://github.com/spack/spack.git'
- ref = ref or 'develop'
+ url = url or "https://github.com/spack/spack.git"
+ ref = ref or "develop"
enforce_sha, verify = bool(enforce_sha), bool(verify)
# If we want to enforce a sha or verify the ref we need
# to checkout the repository locally
if enforce_sha or verify:
ref = _verify_ref(url, ref, enforce_sha)
- command = ('git clone {0} . && git fetch origin {1}:container_branch &&'
- ' git checkout container_branch ').format(url, ref)
+ command = (
+ "git clone {0} . && git fetch origin {1}:container_branch &&"
+ " git checkout container_branch "
+ ).format(url, ref)
return command
diff --git a/lib/spack/spack/container/writers/__init__.py b/lib/spack/spack/container/writers/__init__.py
index 47282763cc..b8ac3dfbe3 100644
--- a/lib/spack/spack/container/writers/__init__.py
+++ b/lib/spack/spack/container/writers/__init__.py
@@ -32,9 +32,11 @@ def writer(name):
properly configured writer that, when called, prints the
corresponding recipe.
"""
+
def _decorator(factory):
_writer_factory[name] = factory
return factory
+
return _decorator
@@ -45,7 +47,7 @@ def create(configuration, last_phase=None):
configuration (dict): how to generate the current recipe
last_phase (str): last phase to be printed or None to print them all
"""
- name = ev.config_dict(configuration)['container']['format']
+ name = ev.config_dict(configuration)["container"]["format"]
return _writer_factory[name](configuration, last_phase)
@@ -66,62 +68,57 @@ def _stage_base_images(images_config):
images_config (dict): configuration under container:images
"""
# If we have custom base images, just return them verbatim.
- build_stage = images_config.get('build', None)
+ build_stage = images_config.get("build", None)
if build_stage:
- final_stage = images_config['final']
+ final_stage = images_config["final"]
return None, build_stage, final_stage
# Check the operating system: this will be the base of the bootstrap
# stage, if there, and of the final stage.
- operating_system = images_config.get('os', None)
+ operating_system = images_config.get("os", None)
# Check the OS is mentioned in the internal data stored in a JSON file
- images_json = data()['images']
+ images_json = data()["images"]
if not any(os_name == operating_system for os_name in images_json):
- msg = ('invalid operating system name "{0}". '
- '[Allowed values are {1}]')
- msg = msg.format(operating_system, ', '.join(data()['images']))
+ msg = 'invalid operating system name "{0}". ' "[Allowed values are {1}]"
+ msg = msg.format(operating_system, ", ".join(data()["images"]))
raise ValueError(msg)
# Retrieve the build stage
- spack_info = images_config['spack']
+ spack_info = images_config["spack"]
if isinstance(spack_info, dict):
- build_stage = 'bootstrap'
+ build_stage = "bootstrap"
else:
- spack_version = images_config['spack']
+ spack_version = images_config["spack"]
image_name, tag = build_info(operating_system, spack_version)
- build_stage = 'bootstrap'
+ build_stage = "bootstrap"
if image_name:
- build_stage = ':'.join([image_name, tag])
+ build_stage = ":".join([image_name, tag])
# Retrieve the bootstrap stage
bootstrap_stage = None
- if build_stage == 'bootstrap':
- bootstrap_stage = images_json[operating_system]['bootstrap'].get(
- 'image', operating_system
- )
+ if build_stage == "bootstrap":
+ bootstrap_stage = images_json[operating_system]["bootstrap"].get("image", operating_system)
# Retrieve the final stage
- final_stage = images_json[operating_system].get(
- 'final', {'image': operating_system}
- )['image']
+ final_stage = images_json[operating_system].get("final", {"image": operating_system})["image"]
return bootstrap_stage, build_stage, final_stage
def _spack_checkout_config(images_config):
- spack_info = images_config['spack']
+ spack_info = images_config["spack"]
- url = 'https://github.com/spack/spack.git'
- ref = 'develop'
+ url = "https://github.com/spack/spack.git"
+ ref = "develop"
resolve_sha, verify = False, False
# Config specific values may override defaults
if isinstance(spack_info, dict):
- url = spack_info.get('url', url)
- ref = spack_info.get('ref', ref)
- resolve_sha = spack_info.get('resolve_sha', resolve_sha)
- verify = spack_info.get('verify', verify)
+ url = spack_info.get("url", url)
+ ref = spack_info.get("ref", ref)
+ resolve_sha = spack_info.get("resolve_sha", resolve_sha)
+ verify = spack_info.get("verify", verify)
else:
ref = spack_info
@@ -133,16 +130,15 @@ class PathContext(tengine.Context):
install software in a common location and make it available
directly via PATH.
"""
+
def __init__(self, config, last_phase):
self.config = ev.config_dict(config)
- self.container_config = self.config['container']
+ self.container_config = self.config["container"]
# Operating system tag as written in the configuration file
- self.operating_system_key = self.container_config['images'].get('os')
+ self.operating_system_key = self.container_config["images"].get("os")
# Get base images and verify the OS
- bootstrap, build, final = _stage_base_images(
- self.container_config['images']
- )
+ bootstrap, build, final = _stage_base_images(self.container_config["images"])
self.bootstrap_image = bootstrap
self.build_image = build
self.final_image = final
@@ -153,31 +149,29 @@ class PathContext(tengine.Context):
@tengine.context_property
def run(self):
"""Information related to the run image."""
- Run = collections.namedtuple('Run', ['image'])
+ Run = collections.namedtuple("Run", ["image"])
return Run(image=self.final_image)
@tengine.context_property
def build(self):
"""Information related to the build image."""
- Build = collections.namedtuple('Build', ['image'])
+ Build = collections.namedtuple("Build", ["image"])
return Build(image=self.build_image)
@tengine.context_property
def strip(self):
"""Whether or not to strip binaries in the image"""
- return self.container_config.get('strip', True)
+ return self.container_config.get("strip", True)
@tengine.context_property
def paths(self):
"""Important paths in the image"""
- Paths = collections.namedtuple('Paths', [
- 'environment', 'store', 'hidden_view', 'view'
- ])
+ Paths = collections.namedtuple("Paths", ["environment", "store", "hidden_view", "view"])
return Paths(
- environment='/opt/spack-environment',
- store='/opt/software',
- hidden_view='/opt/._view',
- view='/opt/view'
+ environment="/opt/spack-environment",
+ store="/opt/software",
+ hidden_view="/opt/._view",
+ view="/opt/view",
)
@tengine.context_property
@@ -187,13 +181,13 @@ class PathContext(tengine.Context):
# Copy in the part of spack.yaml prescribed in the configuration file
manifest = copy.deepcopy(self.config)
- manifest.pop('container')
+ manifest.pop("container")
# Ensure that a few paths are where they need to be
- manifest.setdefault('config', syaml.syaml_dict())
- manifest['config']['install_tree'] = self.paths.store
- manifest['view'] = self.paths.view
- manifest = {'spack': manifest}
+ manifest.setdefault("config", syaml.syaml_dict())
+ manifest["config"]["install_tree"] = self.paths.store
+ manifest["view"] = self.paths.view
+ manifest = {"spack": manifest}
# Validate the manifest file
jsonschema.validate(manifest, schema=spack.schema.env.schema)
@@ -203,21 +197,21 @@ class PathContext(tengine.Context):
@tengine.context_property
def os_packages_final(self):
"""Additional system packages that are needed at run-time."""
- return self._os_packages_for_stage('final')
+ return self._os_packages_for_stage("final")
@tengine.context_property
def os_packages_build(self):
"""Additional system packages that are needed at build-time."""
- return self._os_packages_for_stage('build')
+ return self._os_packages_for_stage("build")
@tengine.context_property
def os_package_update(self):
"""Whether or not to update the OS package manager cache."""
- os_packages = self.container_config.get('os_packages', {})
- return os_packages.get('update', True)
+ os_packages = self.container_config.get("os_packages", {})
+ return os_packages.get("update", True)
def _os_packages_for_stage(self, stage):
- os_packages = self.container_config.get('os_packages', {})
+ os_packages = self.container_config.get("os_packages", {})
package_list = os_packages.get(stage, None)
return self._package_info_from(package_list)
@@ -235,65 +229,53 @@ class PathContext(tengine.Context):
if not package_list:
return package_list
- image_config = self.container_config['images']
- image = image_config.get('build', None)
+ image_config = self.container_config["images"]
+ image = image_config.get("build", None)
if image is None:
- os_pkg_manager = os_package_manager_for(image_config['os'])
+ os_pkg_manager = os_package_manager_for(image_config["os"])
else:
- os_pkg_manager = self.container_config['os_packages']['command']
+ os_pkg_manager = self.container_config["os_packages"]["command"]
update, install, clean = commands_for(os_pkg_manager)
- Packages = collections.namedtuple(
- 'Packages', ['update', 'install', 'list', 'clean']
- )
- return Packages(update=update, install=install,
- list=package_list, clean=clean)
+ Packages = collections.namedtuple("Packages", ["update", "install", "list", "clean"])
+ return Packages(update=update, install=install, list=package_list, clean=clean)
@tengine.context_property
def extra_instructions(self):
- Extras = collections.namedtuple('Extra', ['build', 'final'])
- extras = self.container_config.get('extra_instructions', {})
- build, final = extras.get('build', None), extras.get('final', None)
+ Extras = collections.namedtuple("Extra", ["build", "final"])
+ extras = self.container_config.get("extra_instructions", {})
+ build, final = extras.get("build", None), extras.get("final", None)
return Extras(build=build, final=final)
@tengine.context_property
def labels(self):
- return self.container_config.get('labels', {})
+ return self.container_config.get("labels", {})
@tengine.context_property
def bootstrap(self):
"""Information related to the build image."""
- images_config = self.container_config['images']
+ images_config = self.container_config["images"]
bootstrap_recipe = None
if self.bootstrap_image:
config_args = _spack_checkout_config(images_config)
command = checkout_command(*config_args)
template_path = bootstrap_template_for(self.operating_system_key)
env = tengine.make_environment()
- context = {"bootstrap": {
- "image": self.bootstrap_image,
- "spack_checkout": command
- }}
+ context = {"bootstrap": {"image": self.bootstrap_image, "spack_checkout": command}}
bootstrap_recipe = env.get_template(template_path).render(**context)
- Bootstrap = collections.namedtuple('Bootstrap', ['image', 'recipe'])
+ Bootstrap = collections.namedtuple("Bootstrap", ["image", "recipe"])
return Bootstrap(image=self.bootstrap_image, recipe=bootstrap_recipe)
@tengine.context_property
def render_phase(self):
render_bootstrap = bool(self.bootstrap_image)
- render_build = not (self.last_phase == 'bootstrap')
- render_final = self.last_phase in (None, 'final')
- Render = collections.namedtuple(
- 'Render', ['bootstrap', 'build', 'final']
- )
- return Render(
- bootstrap=render_bootstrap,
- build=render_build,
- final=render_final
- )
+ render_build = not (self.last_phase == "bootstrap")
+ render_final = self.last_phase in (None, "final")
+ Render = collections.namedtuple("Render", ["bootstrap", "build", "final"])
+ return Render(bootstrap=render_bootstrap, build=render_build, final=render_final)
def __call__(self):
"""Returns the recipe as a string"""
@@ -302,7 +284,8 @@ class PathContext(tengine.Context):
return t.render(**self.to_dict())
+import spack.container.writers.docker # noqa: E402
+
# Import after function definition all the modules in this package,
# so that registration of writers will happen automatically
import spack.container.writers.singularity # noqa: E402
-import spack.container.writers.docker # noqa: E402
diff --git a/lib/spack/spack/container/writers/docker.py b/lib/spack/spack/container/writers/docker.py
index 3c641c464b..9c6e95adb2 100644
--- a/lib/spack/spack/container/writers/docker.py
+++ b/lib/spack/spack/container/writers/docker.py
@@ -7,11 +7,12 @@ import spack.tengine as tengine
from . import PathContext, writer
-@writer('docker')
+@writer("docker")
class DockerContext(PathContext):
"""Context used to instantiate a Dockerfile"""
+
#: Name of the template used for Dockerfiles
- template_name = 'container/Dockerfile'
+ template_name = "container/Dockerfile"
@tengine.context_property
def manifest(self):
@@ -19,12 +20,12 @@ class DockerContext(PathContext):
# Docker doesn't support HEREDOC so we need to resort to
# a horrible echo trick to have the manifest in the Dockerfile
echoed_lines = []
- for idx, line in enumerate(manifest_str.split('\n')):
+ for idx, line in enumerate(manifest_str.split("\n")):
if idx == 0:
echoed_lines.append('&& (echo "' + line + '" \\')
continue
echoed_lines.append('&& echo "' + line + '" \\')
- echoed_lines[-1] = echoed_lines[-1].replace(' \\', ')')
+ echoed_lines[-1] = echoed_lines[-1].replace(" \\", ")")
- return '\n'.join(echoed_lines)
+ return "\n".join(echoed_lines)
diff --git a/lib/spack/spack/container/writers/singularity.py b/lib/spack/spack/container/writers/singularity.py
index 7fb3813f73..ad5c85b7a9 100644
--- a/lib/spack/spack/container/writers/singularity.py
+++ b/lib/spack/spack/container/writers/singularity.py
@@ -7,28 +7,29 @@ import spack.tengine as tengine
from . import PathContext, writer
-@writer('singularity')
+@writer("singularity")
class SingularityContext(PathContext):
"""Context used to instantiate a Singularity definition file"""
+
#: Name of the template used for Singularity definition files
- template_name = 'container/singularity.def'
+ template_name = "container/singularity.def"
@property
def singularity_config(self):
- return self.container_config.get('singularity', {})
+ return self.container_config.get("singularity", {})
@tengine.context_property
def runscript(self):
- return self.singularity_config.get('runscript', '')
+ return self.singularity_config.get("runscript", "")
@tengine.context_property
def startscript(self):
- return self.singularity_config.get('startscript', '')
+ return self.singularity_config.get("startscript", "")
@tengine.context_property
def test(self):
- return self.singularity_config.get('test', '')
+ return self.singularity_config.get("test", "")
@tengine.context_property
def help(self):
- return self.singularity_config.get('help', '')
+ return self.singularity_config.get("help", "")
diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py
index fa9c061d0c..15b0d9293b 100644
--- a/lib/spack/spack/cray_manifest.py
+++ b/lib/spack/spack/cray_manifest.py
@@ -16,11 +16,11 @@ from spack.schema.cray_manifest import schema as manifest_schema
#: Cray systems can store a Spack-compatible description of system
#: packages here.
-default_path = '/opt/cray/pe/cpe-descriptive-manifest/'
+default_path = "/opt/cray/pe/cpe-descriptive-manifest/"
compiler_name_translation = {
- 'nvidia': 'nvhpc',
- 'rocm': 'rocmcc',
+ "nvidia": "nvhpc",
+ "rocm": "rocmcc",
}
@@ -40,97 +40,93 @@ def translated_compiler_name(manifest_compiler_name):
return manifest_compiler_name
else:
raise spack.compilers.UnknownCompilerError(
- "Manifest parsing - unknown compiler: {0}"
- .format(manifest_compiler_name))
+ "Manifest parsing - unknown compiler: {0}".format(manifest_compiler_name)
+ )
def compiler_from_entry(entry):
- compiler_name = translated_compiler_name(entry['name'])
- paths = entry['executables']
- version = entry['version']
- arch = entry['arch']
- operating_system = arch['os']
- target = arch['target']
+ compiler_name = translated_compiler_name(entry["name"])
+ paths = entry["executables"]
+ version = entry["version"]
+ arch = entry["arch"]
+ operating_system = arch["os"]
+ target = arch["target"]
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
spec = spack.spec.CompilerSpec(compiler_cls.name, version)
- paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
- return compiler_cls(
- spec, operating_system, target, paths
- )
+ paths = [paths.get(x, None) for x in ("cc", "cxx", "f77", "fc")]
+ return compiler_cls(spec, operating_system, target, paths)
def spec_from_entry(entry):
arch_str = ""
- if 'arch' in entry:
+ if "arch" in entry:
arch_format = "arch={platform}-{os}-{target}"
arch_str = arch_format.format(
- platform=entry['arch']['platform'],
- os=entry['arch']['platform_os'],
- target=entry['arch']['target']['name']
+ platform=entry["arch"]["platform"],
+ os=entry["arch"]["platform_os"],
+ target=entry["arch"]["target"]["name"],
)
compiler_str = ""
- if 'compiler' in entry:
- compiler_format = "%{name}@{version}"
+ if "compiler" in entry:
+ compiler_format = "%{name}@{version}"
compiler_str = compiler_format.format(
- name=translated_compiler_name(entry['compiler']['name']),
- version=entry['compiler']['version']
+ name=translated_compiler_name(entry["compiler"]["name"]),
+ version=entry["compiler"]["version"],
)
spec_format = "{name}@{version} {compiler} {arch}"
spec_str = spec_format.format(
- name=entry['name'],
- version=entry['version'],
- compiler=compiler_str,
- arch=arch_str
+ name=entry["name"], version=entry["version"], compiler=compiler_str, arch=arch_str
)
- pkg_cls = spack.repo.path.get_pkg_class(entry['name'])
+ pkg_cls = spack.repo.path.get_pkg_class(entry["name"])
- if 'parameters' in entry:
+ if "parameters" in entry:
variant_strs = list()
- for name, value in entry['parameters'].items():
+ for name, value in entry["parameters"].items():
# TODO: also ensure that the variant value is valid?
if not (name in pkg_cls.variants):
- tty.debug("Omitting variant {0} for entry {1}/{2}"
- .format(name, entry['name'], entry['hash'][:7]))
+ tty.debug(
+ "Omitting variant {0} for entry {1}/{2}".format(
+ name, entry["name"], entry["hash"][:7]
+ )
+ )
continue
# Value could be a list (of strings), boolean, or string
if isinstance(value, six.string_types):
- variant_strs.append('{0}={1}'.format(name, value))
+ variant_strs.append("{0}={1}".format(name, value))
else:
try:
iter(value)
- variant_strs.append(
- '{0}={1}'.format(name, ','.join(value)))
+ variant_strs.append("{0}={1}".format(name, ",".join(value)))
continue
except TypeError:
# Not an iterable
pass
# At this point not a string or collection, check for boolean
if value in [True, False]:
- bool_symbol = '+' if value else '~'
- variant_strs.append('{0}{1}'.format(bool_symbol, name))
+ bool_symbol = "+" if value else "~"
+ variant_strs.append("{0}{1}".format(bool_symbol, name))
else:
raise ValueError(
"Unexpected value for {0} ({1}): {2}".format(
name, str(type(value)), str(value)
)
)
- spec_str += ' ' + ' '.join(variant_strs)
+ spec_str += " " + " ".join(variant_strs)
- spec, = spack.cmd.parse_specs(spec_str.split())
+ (spec,) = spack.cmd.parse_specs(spec_str.split())
- for ht in [hash_types.dag_hash, hash_types.build_hash,
- hash_types.full_hash]:
- setattr(spec, ht.attr, entry['hash'])
+ for ht in [hash_types.dag_hash, hash_types.build_hash, hash_types.full_hash]:
+ setattr(spec, ht.attr, entry["hash"])
spec._concrete = True
spec._hashes_final = True
- spec.external_path = entry['prefix']
- spec.origin = 'external-db'
+ spec.external_path = entry["prefix"]
+ spec.origin = "external-db"
spack.spec.Spec.ensure_valid_variants(spec)
return spec
@@ -143,22 +139,21 @@ def entries_to_specs(entries):
spec = spec_from_entry(entry)
spec_dict[spec._hash] = spec
except spack.repo.UnknownPackageError:
- tty.debug("Omitting package {0}: no corresponding repo package"
- .format(entry['name']))
+ tty.debug("Omitting package {0}: no corresponding repo package".format(entry["name"]))
except spack.error.SpackError:
raise
except Exception:
tty.warn("Could not parse entry: " + str(entry))
- for entry in filter(lambda x: 'dependencies' in x, entries):
- dependencies = entry['dependencies']
+ for entry in filter(lambda x: "dependencies" in x, entries):
+ dependencies = entry["dependencies"]
for name, properties in dependencies.items():
- dep_hash = properties['hash']
- deptypes = properties['type']
+ dep_hash = properties["hash"]
+ deptypes = properties["type"]
if dep_hash in spec_dict:
- if entry['hash'] not in spec_dict:
+ if entry["hash"] not in spec_dict:
continue
- parent_spec = spec_dict[entry['hash']]
+ parent_spec = spec_dict[entry["hash"]]
dep_spec = spec_dict[dep_hash]
parent_spec._add_dependency(dep_spec, deptypes)
@@ -166,27 +161,21 @@ def entries_to_specs(entries):
def read(path, apply_updates):
- with open(path, 'r') as json_file:
+ with open(path, "r") as json_file:
json_data = json.load(json_file)
jsonschema.validate(json_data, manifest_schema)
- specs = entries_to_specs(json_data['specs'])
- tty.debug("{0}: {1} specs read from manifest".format(
- path,
- str(len(specs))))
+ specs = entries_to_specs(json_data["specs"])
+ tty.debug("{0}: {1} specs read from manifest".format(path, str(len(specs))))
compilers = list()
- if 'compilers' in json_data:
- compilers.extend(compiler_from_entry(x)
- for x in json_data['compilers'])
- tty.debug("{0}: {1} compilers read from manifest".format(
- path,
- str(len(compilers))))
+ if "compilers" in json_data:
+ compilers.extend(compiler_from_entry(x) for x in json_data["compilers"])
+ tty.debug("{0}: {1} compilers read from manifest".format(path, str(len(compilers))))
# Filter out the compilers that already appear in the configuration
compilers = spack.compilers.select_new_compilers(compilers)
if apply_updates and compilers:
- spack.compilers.add_compilers_to_config(
- compilers, init_config=False)
+ spack.compilers.add_compilers_to_config(compilers, init_config=False)
if apply_updates:
for spec in specs.values():
spack.store.db.add(spec, directory_layout=None)
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index c56e07ca0a..c8943d70ad 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -32,6 +32,7 @@ import six
try:
import uuid
+
_use_uuid = True
except ImportError:
_use_uuid = False
@@ -57,12 +58,12 @@ from spack.version import Version
# TODO: clearing a failure.
# DB goes in this directory underneath the root
-_db_dirname = '.spack-db'
+_db_dirname = ".spack-db"
# DB version. This is stuck in the DB file to track changes in format.
# Increment by one when the database format changes.
# Versions before 5 were not integers.
-_db_version = Version('6')
+_db_version = Version("6")
# For any version combinations here, skip reindex when upgrading.
# Reindexing can take considerable time and is not always necessary.
@@ -72,8 +73,8 @@ _skip_reindex = [
# only difference is that v5 can contain "deprecated_for"
# fields. So, skip the reindex for this transition. The new
# version is saved to disk the first time the DB is written.
- (Version('0.9.3'), Version('5')),
- (Version('5'), Version('6'))
+ (Version("0.9.3"), Version("5")),
+ (Version("5"), Version("6")),
]
# Default timeout for spack database locks in seconds or None (no timeout).
@@ -96,13 +97,13 @@ _tracked_deps = ht.dag_hash.deptype
# Default list of fields written for each install record
default_install_record_fields = [
- 'spec',
- 'ref_count',
- 'path',
- 'installed',
- 'explicit',
- 'installation_time',
- 'deprecated_for',
+ "spec",
+ "ref_count",
+ "path",
+ "installed",
+ "explicit",
+ "installation_time",
+ "deprecated_for",
]
@@ -113,7 +114,7 @@ def _now():
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
- function to a Spec."""
+ function to a Spec."""
def converter(self, spec_like, *args, **kwargs):
if not isinstance(spec_like, spack.spec.Spec):
@@ -128,9 +129,9 @@ class InstallStatus(str):
class InstallStatuses(object):
- INSTALLED = InstallStatus('installed')
- DEPRECATED = InstallStatus('deprecated')
- MISSING = InstallStatus('missing')
+ INSTALLED = InstallStatus("installed")
+ DEPRECATED = InstallStatus("deprecated")
+ MISSING = InstallStatus("missing")
@classmethod
def canonicalize(cls, query_arg):
@@ -148,8 +149,9 @@ class InstallStatuses(object):
raise TypeError
except TypeError:
raise TypeError(
- 'installation query must be `any`, boolean, '
- 'InstallStatus, or iterable of InstallStatus')
+ "installation query must be `any`, boolean, "
+ "InstallStatus, or iterable of InstallStatus"
+ )
return query_arg
@@ -179,16 +181,16 @@ class InstallRecord(object):
"""
def __init__(
- self,
- spec,
- path,
- installed,
- ref_count=0,
- explicit=False,
- installation_time=None,
- deprecated_for=None,
- in_buildcache=False,
- origin=None
+ self,
+ spec,
+ path,
+ installed,
+ ref_count=0,
+ explicit=False,
+ installation_time=None,
+ deprecated_for=None,
+ in_buildcache=False,
+ origin=None,
):
self.spec = spec
self.path = str(path) if path else None
@@ -213,29 +215,29 @@ class InstallRecord(object):
rec_dict = {}
for field_name in include_fields:
- if field_name == 'spec':
- rec_dict.update({'spec': self.spec.node_dict_with_hashes()})
- elif field_name == 'deprecated_for' and self.deprecated_for:
- rec_dict.update({'deprecated_for': self.deprecated_for})
+ if field_name == "spec":
+ rec_dict.update({"spec": self.spec.node_dict_with_hashes()})
+ elif field_name == "deprecated_for" and self.deprecated_for:
+ rec_dict.update({"deprecated_for": self.deprecated_for})
else:
rec_dict.update({field_name: getattr(self, field_name)})
if self.origin:
- rec_dict['origin'] = self.origin
+ rec_dict["origin"] = self.origin
return rec_dict
@classmethod
def from_dict(cls, spec, dictionary):
d = dict(dictionary.items())
- d.pop('spec', None)
+ d.pop("spec", None)
# Old databases may have "None" for path for externals
- if 'path' not in d or d['path'] == 'None':
- d['path'] = None
+ if "path" not in d or d["path"] == "None":
+ d["path"] = None
- if 'installed' not in d:
- d['installed'] = False
+ if "installed" not in d:
+ d["installed"] = False
return InstallRecord(spec, **d)
@@ -246,8 +248,7 @@ class ForbiddenLockError(SpackError):
class ForbiddenLock(object):
def __getattribute__(self, name):
- raise ForbiddenLockError(
- "Cannot access attribute '{0}' of lock".format(name))
+ raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
_query_docstring = """
@@ -302,14 +303,21 @@ _query_docstring = """
class Database(object):
"""Per-process lock objects for each install prefix."""
+
_prefix_locks = {} # type: Dict[str, lk.Lock]
"""Per-process failure (lock) objects for each install prefix."""
_prefix_failures = {} # type: Dict[str, lk.Lock]
- def __init__(self, root, db_dir=None, upstream_dbs=None,
- is_upstream=False, enable_transaction_locking=True,
- record_fields=default_install_record_fields):
+ def __init__(
+ self,
+ root,
+ db_dir=None,
+ upstream_dbs=None,
+ is_upstream=False,
+ enable_transaction_locking=True,
+ record_fields=default_install_record_fields,
+ ):
"""Create a Database for Spack installations under ``root``.
A Database is a cache of Specs data from ``$prefix/spec.yaml``
@@ -340,20 +348,20 @@ class Database(object):
self._db_dir = db_dir or os.path.join(self.root, _db_dirname)
# Set up layout of database files within the db dir
- self._index_path = os.path.join(self._db_dir, 'index.json')
- self._verifier_path = os.path.join(self._db_dir, 'index_verifier')
- self._lock_path = os.path.join(self._db_dir, 'lock')
+ self._index_path = os.path.join(self._db_dir, "index.json")
+ self._verifier_path = os.path.join(self._db_dir, "index_verifier")
+ self._lock_path = os.path.join(self._db_dir, "lock")
# This is for other classes to use to lock prefix directories.
- self.prefix_lock_path = os.path.join(self._db_dir, 'prefix_lock')
+ self.prefix_lock_path = os.path.join(self._db_dir, "prefix_lock")
# Ensure a persistent location for dealing with parallel installation
# failures (e.g., across near-concurrent processes).
- self._failure_dir = os.path.join(self._db_dir, 'failures')
+ self._failure_dir = os.path.join(self._db_dir, "failures")
# Support special locks for handling parallel installation failures
# of a spec.
- self.prefix_fail_path = os.path.join(self._db_dir, 'prefix_failures')
+ self.prefix_fail_path = os.path.join(self._db_dir, "prefix_failures")
# Create needed directories and files
if not is_upstream and not os.path.exists(self._db_dir):
@@ -363,7 +371,7 @@ class Database(object):
fs.mkdirp(self._failure_dir)
self.is_upstream = is_upstream
- self.last_seen_verifier = ''
+ self.last_seen_verifier = ""
# Failed write transactions (interrupted by exceptions) will alert
# _write. When that happens, we set this flag to indicate that
# future read/write transactions should re-read the DB. Normally it
@@ -374,24 +382,24 @@ class Database(object):
self._state_is_inconsistent = False
# initialize rest of state.
- self.db_lock_timeout = (
- spack.config.get('config:db_lock_timeout') or _db_lock_timeout)
+ self.db_lock_timeout = spack.config.get("config:db_lock_timeout") or _db_lock_timeout
self.package_lock_timeout = (
- spack.config.get('config:package_lock_timeout') or
- _pkg_lock_timeout)
- tty.debug('DATABASE LOCK TIMEOUT: {0}s'.format(
- str(self.db_lock_timeout)))
- timeout_format_str = ('{0}s'.format(str(self.package_lock_timeout))
- if self.package_lock_timeout else 'No timeout')
- tty.debug('PACKAGE LOCK TIMEOUT: {0}'.format(
- str(timeout_format_str)))
+ spack.config.get("config:package_lock_timeout") or _pkg_lock_timeout
+ )
+ tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
+ timeout_format_str = (
+ "{0}s".format(str(self.package_lock_timeout))
+ if self.package_lock_timeout
+ else "No timeout"
+ )
+ tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))
if self.is_upstream:
self.lock = ForbiddenLock()
else:
- self.lock = lk.Lock(self._lock_path,
- default_timeout=self.db_lock_timeout,
- desc='database')
+ self.lock = lk.Lock(
+ self._lock_path, default_timeout=self.db_lock_timeout, desc="database"
+ )
self._data = {}
# For every installed spec we keep track of its install prefix, so that
@@ -420,8 +428,7 @@ class Database(object):
def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
- return self._write_transaction_impl(
- self.lock, acquire=self._read, release=self._write)
+ return self._write_transaction_impl(self.lock, acquire=self._read, release=self._write)
def read_transaction(self):
"""Get a read lock context manager for use in a `with` block."""
@@ -430,28 +437,27 @@ class Database(object):
def _failed_spec_path(self, spec):
"""Return the path to the spec's failure file, which may not exist."""
if not spec.concrete:
- raise ValueError('Concrete spec required for failure path for {0}'
- .format(spec.name))
+ raise ValueError("Concrete spec required for failure path for {0}".format(spec.name))
- return os.path.join(self._failure_dir,
- '{0}-{1}'.format(spec.name, spec.dag_hash()))
+ return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))
def clear_all_failures(self):
"""Force remove install failure tracking files."""
- tty.debug('Releasing prefix failure locks')
+ tty.debug("Releasing prefix failure locks")
for pkg_id in list(self._prefix_failures.keys()):
lock = self._prefix_failures.pop(pkg_id, None)
if lock:
lock.release_write()
# Remove all failure markings (aka files)
- tty.debug('Removing prefix failure tracking files')
+ tty.debug("Removing prefix failure tracking files")
for fail_mark in os.listdir(self._failure_dir):
try:
os.remove(os.path.join(self._failure_dir, fail_mark))
except OSError as exc:
- tty.warn('Unable to remove failure marking file {0}: {1}'
- .format(fail_mark, str(exc)))
+ tty.warn(
+ "Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
+ )
def clear_failure(self, spec, force=False):
"""
@@ -469,13 +475,11 @@ class Database(object):
"""
failure_locked = self.prefix_failure_locked(spec)
if failure_locked and not force:
- tty.msg('Retaining failure marking for {0} due to lock'
- .format(spec.name))
+ tty.msg("Retaining failure marking for {0} due to lock".format(spec.name))
return
if failure_locked:
- tty.warn('Removing failure marking despite lock for {0}'
- .format(spec.name))
+ tty.warn("Removing failure marking despite lock for {0}".format(spec.name))
lock = self._prefix_failures.pop(spec.prefix, None)
if lock:
@@ -484,11 +488,14 @@ class Database(object):
if self.prefix_failure_marked(spec):
try:
path = self._failed_spec_path(spec)
- tty.debug('Removing failure marking for {0}'.format(spec.name))
+ tty.debug("Removing failure marking for {0}".format(spec.name))
os.remove(path)
except OSError as err:
- tty.warn('Unable to remove failure marking for {0} ({1}): {2}'
- .format(spec.name, path, str(err)))
+ tty.warn(
+ "Unable to remove failure marking for {0} ({1}): {2}".format(
+ spec.name, path, str(err)
+ )
+ )
def mark_failed(self, spec):
"""
@@ -507,12 +514,12 @@ class Database(object):
"""
# Dump the spec to the failure file for (manual) debugging purposes
path = self._failed_spec_path(spec)
- with open(path, 'w') as f:
+ with open(path, "w") as f:
spec.to_json(f)
# Also ensure a failure lock is taken to prevent cleanup removal
# of failure status information during a concurrent parallel build.
- err = 'Unable to mark {0.name} as failed.'
+ err = "Unable to mark {0.name} as failed."
prefix = spec.prefix
if prefix not in self._prefix_failures:
@@ -520,15 +527,18 @@ class Database(object):
self.prefix_fail_path,
start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
length=1,
- default_timeout=self.package_lock_timeout, desc=spec.name)
+ default_timeout=self.package_lock_timeout,
+ desc=spec.name,
+ )
try:
mark.acquire_write()
except lk.LockTimeoutError:
# Unlikely that another process failed to install at the same
# time but log it anyway.
- tty.debug('PID {0} failed to mark install failure for {1}'
- .format(os.getpid(), spec.name))
+ tty.debug(
+ "PID {0} failed to mark install failure for {1}".format(os.getpid(), spec.name)
+ )
tty.warn(err.format(spec))
# Whether we or another process marked it as a failure, track it
@@ -558,7 +568,9 @@ class Database(object):
self.prefix_fail_path,
start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
length=1,
- default_timeout=self.package_lock_timeout, desc=spec.name)
+ default_timeout=self.package_lock_timeout,
+ desc=spec.name,
+ )
return check.is_write_locked()
@@ -588,7 +600,9 @@ class Database(object):
self.prefix_lock_path,
start=spec.dag_hash_bit_prefix(bit_length(sys.maxsize)),
length=1,
- default_timeout=timeout, desc=spec.name)
+ default_timeout=timeout,
+ desc=spec.name,
+ )
elif timeout != self._prefix_locks[prefix].default_timeout:
self._prefix_locks[prefix].default_timeout = timeout
@@ -635,8 +649,9 @@ class Database(object):
This function does not do any locking or transactions.
"""
# map from per-spec hash code to installation record.
- installs = dict((k, v.to_dict(include_fields=self._record_fields))
- for k, v in self._data.items())
+ installs = dict(
+ (k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
+ )
# database includes installation list and version.
@@ -645,13 +660,12 @@ class Database(object):
# different paths, it can't differentiate.
# TODO: fix this before we support multiple install locations.
database = {
- 'database': {
+ "database": {
# TODO: move this to a top-level _meta section if we ever
# TODO: bump the DB version to 7
- 'version': str(_db_version),
-
+ "version": str(_db_version),
# dictionary of installation records, keyed by DAG hash
- 'installs': installs,
+ "installs": installs,
}
}
@@ -665,14 +679,14 @@ class Database(object):
Does not do any locking.
"""
- spec_dict = installs[hash_key]['spec']
+ spec_dict = installs[hash_key]["spec"]
# Install records don't include hash with spec, so we add it in here
# to ensure it is read properly.
- if 'name' not in spec_dict.keys():
+ if "name" not in spec_dict.keys():
# old format, can't update format here
for name in spec_dict:
- spec_dict[name]['hash'] = hash_key
+ spec_dict[name]["hash"] = hash_key
else:
# new format, already a singleton
spec_dict[hash.name] = hash_key
@@ -713,14 +727,13 @@ class Database(object):
# Add dependencies from other records in the install DB to
# form a full spec.
spec = data[hash_key].spec
- spec_node_dict = installs[hash_key]['spec']
- if 'name' not in spec_node_dict:
+ spec_node_dict = installs[hash_key]["spec"]
+ if "name" not in spec_node_dict:
# old format
spec_node_dict = spec_node_dict[spec.name]
- if 'dependencies' in spec_node_dict:
- yaml_deps = spec_node_dict['dependencies']
- for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(
- yaml_deps):
+ if "dependencies" in spec_node_dict:
+ yaml_deps = spec_node_dict["dependencies"]
+ for dname, dhash, dtypes, _ in spack.spec.Spec.read_yaml_dep_specs(yaml_deps):
# It is important that we always check upstream installations
# in the same order, and that we always check the local
# installation first: if a downstream Spack installs a package
@@ -733,10 +746,11 @@ class Database(object):
child = record.spec if record else None
if not child:
- msg = ("Missing dependency not in database: "
- "%s needs %s-%s" % (
- spec.cformat('{name}{/hash:7}'),
- dname, dhash[:7]))
+ msg = "Missing dependency not in database: " "%s needs %s-%s" % (
+ spec.cformat("{name}{/hash:7}"),
+ dname,
+ dhash[:7],
+ )
if self._fail_when_missing_deps:
raise MissingDependenciesError(msg)
tty.warn(msg)
@@ -751,7 +765,7 @@ class Database(object):
Does not do any locking.
"""
try:
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
fdata = sjson.load(f)
except Exception as e:
raise six.raise_from(
@@ -764,27 +778,23 @@ class Database(object):
def check(cond, msg):
if not cond:
- raise CorruptDatabaseError(
- "Spack database is corrupt: %s" % msg, self._index_path)
+ raise CorruptDatabaseError("Spack database is corrupt: %s" % msg, self._index_path)
- check('database' in fdata, "no 'database' attribute in JSON DB.")
+ check("database" in fdata, "no 'database' attribute in JSON DB.")
# High-level file checks
- db = fdata['database']
- check('installs' in db, "no 'installs' in JSON DB.")
- check('version' in db, "no 'version' in JSON DB.")
+ db = fdata["database"]
+ check("installs" in db, "no 'installs' in JSON DB.")
+ check("version" in db, "no 'version' in JSON DB.")
- installs = db['installs']
+ installs = db["installs"]
# TODO: better version checking semantics.
- version = Version(db['version'])
+ version = Version(db["version"])
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
- if not any(
- old == version and new == _db_version
- for old, new in _skip_reindex
- ):
+ if not any(old == version and new == _db_version for old, new in _skip_reindex):
tty.warn(
"Spack database version changed from %s to %s. Upgrading."
% (version, _db_version)
@@ -797,8 +807,7 @@ class Database(object):
)
def invalid_record(hash_key, error):
- msg = ("Invalid record in Spack database: "
- "hash: %s, cause: %s: %s")
+ msg = "Invalid record in Spack database: " "hash: %s, cause: %s: %s"
msg %= (hash_key, type(error).__name__, str(error))
raise CorruptDatabaseError(msg, self._index_path)
@@ -825,8 +834,8 @@ class Database(object):
# this?
data[hash_key] = InstallRecord.from_dict(spec, rec)
- if not spec.external and 'installed' in rec and rec['installed']:
- installed_prefixes.add(rec['path'])
+ if not spec.external and "installed" in rec and rec["installed"]:
+ installed_prefixes.add(rec["path"])
except Exception as e:
invalid_record(hash_key, e)
@@ -856,8 +865,7 @@ class Database(object):
Locks the DB if it isn't locked already.
"""
if self.is_upstream:
- raise UpstreamDatabaseLockingError(
- "Cannot reindex an upstream database")
+ raise UpstreamDatabaseLockingError("Cannot reindex an upstream database")
# Special transaction to avoid recursive reindex calls and to
# ignore errors if we need to rebuild a corrupt database.
@@ -876,32 +884,27 @@ class Database(object):
with transaction:
if self._error:
- tty.warn(
- "Spack database was corrupt. Will rebuild. Error was:",
- str(self._error)
- )
+ tty.warn("Spack database was corrupt. Will rebuild. Error was:", str(self._error))
self._error = None
old_data = self._data
old_installed_prefixes = self._installed_prefixes
try:
- self._construct_from_directory_layout(
- directory_layout, old_data)
+ self._construct_from_directory_layout(directory_layout, old_data)
except BaseException:
# If anything explodes, restore old data, skip write.
self._data = old_data
self._installed_prefixes = old_installed_prefixes
raise
- def _construct_entry_from_directory_layout(self, directory_layout,
- old_data, spec,
- deprecator=None):
+ def _construct_entry_from_directory_layout(
+ self, directory_layout, old_data, spec, deprecator=None
+ ):
# Try to recover explicit value from old DB, but
# default it to True if DB was corrupt. This is
# just to be conservative in case a command like
# "autoremove" is run by the user after a reindex.
- tty.debug(
- 'RECONSTRUCTING FROM SPEC.YAML: {0}'.format(spec))
+ tty.debug("RECONSTRUCTING FROM SPEC.YAML: {0}".format(spec))
explicit = True
inst_time = os.stat(spec.prefix).st_ctime
if old_data is not None:
@@ -910,10 +913,7 @@ class Database(object):
explicit = old_info.explicit
inst_time = old_info.installation_time
- extra_args = {
- 'explicit': explicit,
- 'installation_time': inst_time
- }
+ extra_args = {"explicit": explicit, "installation_time": inst_time}
self._add(spec, directory_layout, **extra_args)
if deprecator:
self._deprecate(spec, deprecator)
@@ -933,22 +933,21 @@ class Database(object):
processed_specs = set()
for spec in directory_layout.all_specs():
- self._construct_entry_from_directory_layout(directory_layout,
- old_data, spec)
+ self._construct_entry_from_directory_layout(directory_layout, old_data, spec)
processed_specs.add(spec)
for spec, deprecator in directory_layout.all_deprecated_specs():
- self._construct_entry_from_directory_layout(directory_layout,
- old_data, spec,
- deprecator)
+ self._construct_entry_from_directory_layout(
+ directory_layout, old_data, spec, deprecator
+ )
processed_specs.add(spec)
for key, entry in old_data.items():
# We already took care of this spec using
# `spec.yaml` from its prefix.
if entry.spec in processed_specs:
- msg = 'SKIPPING RECONSTRUCTION FROM OLD DB: {0}'
- msg += ' [already reconstructed from spec.yaml]'
+ msg = "SKIPPING RECONSTRUCTION FROM OLD DB: {0}"
+ msg += " [already reconstructed from spec.yaml]"
tty.debug(msg.format(entry.spec))
continue
@@ -957,15 +956,14 @@ class Database(object):
# of other specs. This may be the case for externally
# installed compilers or externally installed
# applications.
- tty.debug(
- 'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
+ tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
try:
layout = None if entry.spec.external else spack.store.layout
kwargs = {
- 'spec': entry.spec,
- 'directory_layout': layout,
- 'explicit': entry.explicit,
- 'installation_time': entry.installation_time
+ "spec": entry.spec,
+ "directory_layout": layout,
+ "explicit": entry.explicit,
+ "installation_time": entry.installation_time,
}
self._add(**kwargs)
processed_specs.add(entry.spec)
@@ -1001,8 +999,9 @@ class Database(object):
found = rec.ref_count
if not expected == found:
raise AssertionError(
- "Invalid ref_count: %s: %d (expected %d), in DB %s" %
- (key, found, expected, self._index_path))
+ "Invalid ref_count: %s: %d (expected %d), in DB %s"
+ % (key, found, expected, self._index_path)
+ )
def _write(self, type, value, traceback):
"""Write the in-memory database index to its file path.
@@ -1023,17 +1022,16 @@ class Database(object):
self._state_is_inconsistent = True
return
- temp_file = self._index_path + (
- '.%s.%s.temp' % (socket.getfqdn(), os.getpid()))
+ temp_file = self._index_path + (".%s.%s.temp" % (socket.getfqdn(), os.getpid()))
# Write a temporary database file then move it into place
try:
- with open(temp_file, 'w') as f:
+ with open(temp_file, "w") as f:
self._write_to_file(f)
fs.rename(temp_file, self._index_path)
if _use_uuid:
- with open(self._verifier_path, 'w') as f:
+ with open(self._verifier_path, "w") as f:
new_verifier = str(uuid.uuid4())
f.write(new_verifier)
self.last_seen_verifier = new_verifier
@@ -1047,15 +1045,14 @@ class Database(object):
def _read(self):
"""Re-read Database from the data in the set location. This does no locking."""
if os.path.isfile(self._index_path):
- current_verifier = ''
+ current_verifier = ""
if _use_uuid:
try:
- with open(self._verifier_path, 'r') as f:
+ with open(self._verifier_path, "r") as f:
current_verifier = f.read()
except BaseException:
pass
- if ((current_verifier != self.last_seen_verifier) or
- (current_verifier == '')):
+ if (current_verifier != self.last_seen_verifier) or (current_verifier == ""):
self.last_seen_verifier = current_verifier
# Read from file if a database exists
self._read_from_file(self._index_path)
@@ -1064,15 +1061,9 @@ class Database(object):
self._state_is_inconsistent = False
return
elif self.is_upstream:
- tty.warn('upstream not found: {0}'.format(self._index_path))
-
- def _add(
- self,
- spec,
- directory_layout=None,
- explicit=False,
- installation_time=None
- ):
+ tty.warn("upstream not found: {0}".format(self._index_path))
+
+ def _add(self, spec, directory_layout=None, explicit=False, installation_time=None):
"""Add an install record for this spec to the database.
Assumes spec is installed in ``layout.path_for_spec(spec)``.
@@ -1098,8 +1089,7 @@ class Database(object):
"""
if not spec.concrete:
- raise NonConcreteSpecAddError(
- "Specs added to DB must be concrete.")
+ raise NonConcreteSpecAddError("Specs added to DB must be concrete.")
key = spec.dag_hash()
spec_pkg_hash = spec._package_hash
@@ -1113,10 +1103,7 @@ class Database(object):
for dep in spec.dependencies(deptype=_tracked_deps):
dkey = dep.dag_hash()
if dkey not in self._data:
- extra_args = {
- 'explicit': False,
- 'installation_time': installation_time
- }
+ extra_args = {"explicit": False, "installation_time": installation_time}
self._add(dep, directory_layout, **extra_args)
# Make sure the directory layout agrees whether the spec is installed
@@ -1128,9 +1115,11 @@ class Database(object):
installed = True
self._installed_prefixes.add(path)
except DirectoryLayoutError as e:
- msg = ("{0} is being {1} in the database with prefix {2}, "
- "but this directory does not contain an installation of "
- "the spec, due to: {3}")
+ msg = (
+ "{0} is being {1} in the database with prefix {2}, "
+ "but this directory does not contain an installation of "
+ "the spec, due to: {3}"
+ )
action = "updated" if key in self._data else "registered"
tty.warn(msg.format(spec.short_spec, action, path, str(e)))
elif spec.external_path:
@@ -1143,17 +1132,12 @@ class Database(object):
if key not in self._data:
# Create a new install record with no deps initially.
new_spec = spec.copy(deps=False)
- extra_args = {
- 'explicit': explicit,
- 'installation_time': installation_time
- }
+ extra_args = {"explicit": explicit, "installation_time": installation_time}
# Commands other than 'spack install' may add specs to the DB,
# we can record the source of an installed Spec with 'origin'
- if hasattr(spec, 'origin'):
- extra_args['origin'] = spec.origin
- self._data[key] = InstallRecord(
- new_spec, path, installed, ref_count=0, **extra_args
- )
+ if hasattr(spec, "origin"):
+ extra_args["origin"] = spec.origin
+ self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
# Connect dependencies from the DB to the new copy.
for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
@@ -1291,8 +1275,9 @@ class Database(object):
def specs_deprecated_by(self, spec):
"""Return all specs deprecated in favor of the given spec"""
with self.read_transaction():
- return [rec.spec for rec in self._data.values()
- if rec.deprecated_for == spec.dag_hash()]
+ return [
+ rec.spec for rec in self._data.values() if rec.deprecated_for == spec.dag_hash()
+ ]
def _deprecate(self, spec, deprecator):
spec_key = self._get_matching_spec_key(spec)
@@ -1328,18 +1313,16 @@ class Database(object):
return self._deprecate(spec, deprecator)
@_autospec
- def installed_relatives(self, spec, direction='children', transitive=True,
- deptype='all'):
+ def installed_relatives(self, spec, direction="children", transitive=True, deptype="all"):
"""Return installed specs related to this one."""
- if direction not in ('parents', 'children'):
+ if direction not in ("parents", "children"):
raise ValueError("Invalid direction: %s" % direction)
relatives = set()
for spec in self.query(spec):
if transitive:
- to_add = spec.traverse(
- direction=direction, root=False, deptype=deptype)
- elif direction == 'parents':
+ to_add = spec.traverse(direction=direction, root=False, deptype=deptype)
+ elif direction == "parents":
to_add = spec.dependents(deptype=deptype)
else: # direction == 'children'
to_add = spec.dependencies(deptype=deptype)
@@ -1348,10 +1331,12 @@ class Database(object):
hash_key = relative.dag_hash()
upstream, record = self.query_by_spec_hash(hash_key)
if not record:
- reltype = ('Dependent' if direction == 'parents'
- else 'Dependency')
- msg = ("Inconsistent state! %s %s of %s not in DB"
- % (reltype, hash_key, spec.dag_hash()))
+ reltype = "Dependent" if direction == "parents" else "Dependency"
+ msg = "Inconsistent state! %s %s of %s not in DB" % (
+ reltype,
+ hash_key,
+ spec.dag_hash(),
+ )
if self._fail_when_missing_deps:
raise MissingDependenciesError(msg)
tty.warn(msg)
@@ -1401,9 +1386,11 @@ class Database(object):
# check if hash is a prefix of some installed (or previously
# installed) spec.
- matches = [record.spec for h, record in self._data.items()
- if h.startswith(dag_hash) and
- record.install_type_matches(installed)]
+ matches = [
+ record.spec
+ for h, record in self._data.items()
+ if h.startswith(dag_hash) and record.install_type_matches(installed)
+ ]
if matches:
return matches
@@ -1459,30 +1446,28 @@ class Database(object):
"""
- spec = self.get_by_hash_local(
- dag_hash, default=default, installed=installed)
+ spec = self.get_by_hash_local(dag_hash, default=default, installed=installed)
if spec is not None:
return spec
for upstream_db in self.upstream_dbs:
- spec = upstream_db._get_by_hash_local(
- dag_hash, default=default, installed=installed)
+ spec = upstream_db._get_by_hash_local(dag_hash, default=default, installed=installed)
if spec is not None:
return spec
return default
def _query(
- self,
- query_spec=any,
- known=any,
- installed=True,
- explicit=any,
- start_date=None,
- end_date=None,
- hashes=None,
- in_buildcache=any,
- origin=None
+ self,
+ query_spec=any,
+ known=any,
+ installed=True,
+ explicit=any,
+ start_date=None,
+ end_date=None,
+ hashes=None,
+ in_buildcache=any,
+ origin=None,
):
"""Run a query on the database."""
@@ -1495,8 +1480,7 @@ class Database(object):
if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete:
# TODO: handling of hashes restriction is not particularly elegant.
hash_key = query_spec.dag_hash()
- if (hash_key in self._data and
- (not hashes or hash_key in hashes)):
+ if hash_key in self._data and (not hashes or hash_key in hashes):
return [self._data[hash_key].spec]
else:
return []
@@ -1523,19 +1507,15 @@ class Database(object):
if explicit is not any and rec.explicit != explicit:
continue
- if known is not any and spack.repo.path.exists(
- rec.spec.name) != known:
+ if known is not any and spack.repo.path.exists(rec.spec.name) != known:
continue
if start_date or end_date:
- inst_date = datetime.datetime.fromtimestamp(
- rec.installation_time
- )
+ inst_date = datetime.datetime.fromtimestamp(rec.installation_time)
if not (start_date < inst_date < end_date):
continue
- if (query_spec is any or
- rec.spec.satisfies(query_spec, strict=True)):
+ if query_spec is any or rec.spec.satisfies(query_spec, strict=True):
results.append(rec.spec)
return results
@@ -1564,8 +1544,7 @@ class Database(object):
local_results = set(self.query_local(*args, **kwargs))
- results = list(local_results) + list(
- x for x in upstream_results if x not in local_results)
+ results = list(local_results) + list(x for x in upstream_results if x not in local_results)
return sorted(results)
@@ -1580,8 +1559,7 @@ class Database(object):
query. Returns None if no installed package matches.
"""
- concrete_specs = self.query(
- query_spec, known=known, installed=installed)
+ concrete_specs = self.query(query_spec, known=known, installed=installed)
assert len(concrete_specs) <= 1
return concrete_specs[0] if concrete_specs else None
@@ -1614,8 +1592,9 @@ class Database(object):
for spec in rec.spec.traverse(visited=visited, deptype=("link", "run")):
needed.add(spec.dag_hash())
- unused = [rec.spec for key, rec in self._data.items()
- if key not in needed and rec.installed]
+ unused = [
+ rec.spec for key, rec in self._data.items() if key not in needed and rec.installed
+ ]
return unused
@@ -1632,8 +1611,8 @@ class Database(object):
rec = self.get_record(spec)
if explicit != rec.explicit:
with self.write_transaction():
- message = '{s.name}@{s.version} : marking the package {0}'
- status = 'explicit' if explicit else 'implicit'
+ message = "{s.name}@{s.version} : marking the package {0}"
+ status = "explicit" if explicit else "implicit"
tty.debug(message.format(status, s=spec))
rec.explicit = explicit
@@ -1655,10 +1634,8 @@ class MissingDependenciesError(SpackError):
class InvalidDatabaseVersionError(SpackError):
-
def __init__(self, expected, found):
super(InvalidDatabaseVersionError, self).__init__(
- "Expected database version %s but found version %s."
- % (expected, found),
- "`spack reindex` may fix this, or you may need a newer "
- "Spack version.")
+ "Expected database version %s but found version %s." % (expected, found),
+ "`spack reindex` may fix this, or you may need a newer " "Spack version.",
+ )
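For reference, a minimal standalone sketch of the write-then-rename pattern that `_write` applies to the index file above: dump to a temp file named with the host and PID, rename it into place, then record a fresh UUID in a separate verifier file so readers can detect changes. The names `write_index`, `INDEX_PATH`, and `VERIFIER_PATH` are placeholders for illustration, not Spack API.

    import os
    import socket
    import uuid

    INDEX_PATH = "index.json"          # stand-in for Database._index_path
    VERIFIER_PATH = "index_verifier"   # stand-in for Database._verifier_path

    def write_index(payload):
        # Write to a host/pid-suffixed temp file so concurrent writers never
        # clobber each other's partial output, then rename it into place.
        temp_file = INDEX_PATH + ".%s.%s.temp" % (socket.getfqdn(), os.getpid())
        try:
            with open(temp_file, "w") as f:
                f.write(payload)
            os.rename(temp_file, INDEX_PATH)
            with open(VERIFIER_PATH, "w") as f:
                f.write(str(uuid.uuid4()))
        finally:
            # Clean up the temp file only if the rename never happened.
            if os.path.exists(temp_file):
                os.remove(temp_file)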
diff --git a/lib/spack/spack/dependency.py b/lib/spack/spack/dependency.py
index ca0da06665..1b38cd5092 100644
--- a/lib/spack/spack/dependency.py
+++ b/lib/spack/spack/dependency.py
@@ -10,10 +10,10 @@ from six import string_types
import spack.spec
#: The types of dependency relationships that Spack understands.
-all_deptypes = ('build', 'link', 'run', 'test')
+all_deptypes = ("build", "link", "run", "test")
#: Default dependency type if none is specified
-default_deptype = ('build', 'link')
+default_deptype = ("build", "link")
def deptype_chars(*type_tuples):
@@ -33,7 +33,7 @@ def deptype_chars(*type_tuples):
if t:
types.update(t)
- return ''.join(t[0] if t in types else ' ' for t in all_deptypes)
+ return "".join(t[0] if t in types else " " for t in all_deptypes)
def canonical_deptype(deptype):
@@ -45,22 +45,21 @@ def canonical_deptype(deptype):
builtin function ``all`` or the string 'all', which result in
a tuple of all dependency types known to Spack.
"""
- if deptype in ('all', all):
+ if deptype in ("all", all):
return all_deptypes
elif isinstance(deptype, string_types):
if deptype not in all_deptypes:
- raise ValueError('Invalid dependency type: %s' % deptype)
+ raise ValueError("Invalid dependency type: %s" % deptype)
return (deptype,)
elif isinstance(deptype, (tuple, list, set)):
bad = [d for d in deptype if d not in all_deptypes]
if bad:
- raise ValueError(
- 'Invalid dependency types: %s' % ','.join(str(t) for t in bad))
+ raise ValueError("Invalid dependency types: %s" % ",".join(str(t) for t in bad))
return tuple(sorted(set(deptype)))
- raise ValueError('Invalid dependency type: %s' % repr(deptype))
+ raise ValueError("Invalid dependency type: %s" % repr(deptype))
class Dependency(object):
@@ -89,6 +88,7 @@ class Dependency(object):
the dependency package can coexist with the patched version.
"""
+
def __init__(self, pkg, spec, type=default_deptype):
"""Create a new Dependency.
@@ -125,13 +125,10 @@ class Dependency(object):
for cond, p in other.patches.items():
if cond in self.patches:
current_list = self.patches[cond]
- current_list.extend(
- p for p in other.patches[cond] if p not in current_list
- )
+ current_list.extend(p for p in other.patches[cond] if p not in current_list)
else:
self.patches[cond] = other.patches[cond]
def __repr__(self):
types = deptype_chars(self.type)
- return '<Dependency: %s -> %s [%s]>' % (
- self.pkg.name, self.spec, types)
+ return "<Dependency: %s -> %s [%s]>" % (self.pkg.name, self.spec, types)
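As the docstring in the hunk above describes, `canonical_deptype` accepts the string "all", the builtin `all`, a single type name, or a collection of type names, and normalizes them to a sorted tuple. A self-contained sketch of those rules (using plain `str` in place of `six.string_types`, with `ALL_DEPTYPES` standing in for `all_deptypes`):

    ALL_DEPTYPES = ("build", "link", "run", "test")

    def canonicalize(deptype):
        if deptype in ("all", all):  # the string 'all' or the builtin all
            return ALL_DEPTYPES
        if isinstance(deptype, str):
            if deptype not in ALL_DEPTYPES:
                raise ValueError("Invalid dependency type: %s" % deptype)
            return (deptype,)
        if isinstance(deptype, (tuple, list, set)):
            bad = [d for d in deptype if d not in ALL_DEPTYPES]
            if bad:
                raise ValueError("Invalid dependency types: %s" % ",".join(bad))
            return tuple(sorted(set(deptype)))
        raise ValueError("Invalid dependency type: %s" % repr(deptype))

    assert canonicalize("all") == ("build", "link", "run", "test")
    assert canonicalize(["run", "link"]) == ("link", "run")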
diff --git a/lib/spack/spack/detection/__init__.py b/lib/spack/spack/detection/__init__.py
index 586f39fd92..b9ff93d774 100644
--- a/lib/spack/spack/detection/__init__.py
+++ b/lib/spack/spack/detection/__init__.py
@@ -6,10 +6,10 @@ from .common import DetectedPackage, executable_prefix, update_configuration
from .path import by_executable, by_library, executables_in_path
__all__ = [
- 'DetectedPackage',
- 'by_library',
- 'by_executable',
- 'executables_in_path',
- 'executable_prefix',
- 'update_configuration'
+ "DetectedPackage",
+ "by_library",
+ "by_executable",
+ "executables_in_path",
+ "executable_prefix",
+ "update_configuration",
]
diff --git a/lib/spack/spack/detection/common.py b/lib/spack/spack/detection/common.py
index 813f235868..d79b62b872 100644
--- a/lib/spack/spack/detection/common.py
+++ b/lib/spack/spack/detection/common.py
@@ -28,20 +28,18 @@ import spack.config
import spack.spec
import spack.util.spack_yaml
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
#: Information on a package that has been detected
-DetectedPackage = collections.namedtuple(
- 'DetectedPackage', ['spec', 'prefix']
-)
+DetectedPackage = collections.namedtuple("DetectedPackage", ["spec", "prefix"])
def _externals_in_packages_yaml():
"""Return all the specs mentioned as externals in packages.yaml"""
- packages_yaml = spack.config.get('packages')
+ packages_yaml = spack.config.get("packages")
already_defined_specs = set()
for pkg_name, package_configuration in packages_yaml.items():
- for item in package_configuration.get('externals', []):
- already_defined_specs.add(spack.spec.Spec(item['spec']))
+ for item in package_configuration.get("externals", []):
+ already_defined_specs.add(spack.spec.Spec(item["spec"]))
return already_defined_specs
@@ -62,25 +60,25 @@ def _pkg_config_dict(external_pkg_entries):
}
"""
pkg_dict = spack.util.spack_yaml.syaml_dict()
- pkg_dict['externals'] = []
+ pkg_dict["externals"] = []
for e in external_pkg_entries:
if not _spec_is_valid(e.spec):
continue
- external_items = [('spec', str(e.spec)), ('prefix', e.prefix)]
+ external_items = [("spec", str(e.spec)), ("prefix", e.prefix)]
if e.spec.external_modules:
- external_items.append(('modules', e.spec.external_modules))
+ external_items.append(("modules", e.spec.external_modules))
if e.spec.extra_attributes:
external_items.append(
- ('extra_attributes',
- spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()))
+ (
+ "extra_attributes",
+ spack.util.spack_yaml.syaml_dict(e.spec.extra_attributes.items()),
+ )
)
# external_items.extend(e.spec.extra_attributes.items())
- pkg_dict['externals'].append(
- spack.util.spack_yaml.syaml_dict(external_items)
- )
+ pkg_dict["externals"].append(spack.util.spack_yaml.syaml_dict(external_items))
return pkg_dict
@@ -92,7 +90,7 @@ def _spec_is_valid(spec):
# It is assumed here that we can at least extract the package name from
# the spec so we can look up the implementation of
# determine_spec_details
- msg = 'Constructed spec for {0} does not have a string representation'
+ msg = "Constructed spec for {0} does not have a string representation"
llnl.util.tty.warn(msg.format(spec.name))
return False
@@ -100,9 +98,8 @@ def _spec_is_valid(spec):
spack.spec.Spec(str(spec))
except spack.error.SpackError:
llnl.util.tty.warn(
- 'Constructed spec has a string representation but the string'
- ' representation does not evaluate to a valid spec: {0}'
- .format(str(spec))
+ "Constructed spec has a string representation but the string"
+ " representation does not evaluate to a valid spec: {0}".format(str(spec))
)
return False
@@ -144,9 +141,9 @@ def executable_prefix(executable_dir):
assert os.path.isdir(executable_dir)
components = executable_dir.split(os.sep)
- if 'bin' not in components:
+ if "bin" not in components:
return executable_dir
- idx = components.index('bin')
+ idx = components.index("bin")
return os.sep.join(components[:idx])
@@ -163,11 +160,11 @@ def library_prefix(library_dir):
assert os.path.isdir(library_dir)
components = library_dir.split(os.sep)
- if 'lib64' in components:
- idx = components.index('lib64')
+ if "lib64" in components:
+ idx = components.index("lib64")
return os.sep.join(components[:idx])
- elif 'lib' in components:
- idx = components.index('lib')
+ elif "lib" in components:
+ idx = components.index("lib")
return os.sep.join(components[:idx])
else:
return library_dir
@@ -184,22 +181,18 @@ def update_configuration(detected_packages, scope=None, buildable=True):
predefined_external_specs = _externals_in_packages_yaml()
pkg_to_cfg, all_new_specs = {}, []
for package_name, entries in detected_packages.items():
- new_entries = [
- e for e in entries if (e.spec not in predefined_external_specs)
- ]
+ new_entries = [e for e in entries if (e.spec not in predefined_external_specs)]
pkg_config = _pkg_config_dict(new_entries)
- all_new_specs.extend([
- spack.spec.Spec(x['spec']) for x in pkg_config.get('externals', [])
- ])
+ all_new_specs.extend([spack.spec.Spec(x["spec"]) for x in pkg_config.get("externals", [])])
if buildable is False:
- pkg_config['buildable'] = False
+ pkg_config["buildable"] = False
pkg_to_cfg[package_name] = pkg_config
- pkgs_cfg = spack.config.get('packages', scope=scope)
+ pkgs_cfg = spack.config.get("packages", scope=scope)
pkgs_cfg = spack.config.merge_yaml(pkgs_cfg, pkg_to_cfg)
- spack.config.set('packages', pkgs_cfg, scope=scope)
+ spack.config.set("packages", pkgs_cfg, scope=scope)
return all_new_specs
@@ -209,18 +202,13 @@ def find_win32_additional_install_paths():
Return a list of other potential install locations.
"""
windows_search_ext = []
- cuda_re = r'CUDA_PATH[a-zA-Z1-9_]*'
+ cuda_re = r"CUDA_PATH[a-zA-Z1-9_]*"
# The list below should be expanded with other
# common Windows install locations as necessary
- path_ext_keys = ['I_MPI_ONEAPI_ROOT',
- 'MSMPI_BIN',
- 'MLAB_ROOT',
- 'NUGET_PACKAGES']
+ path_ext_keys = ["I_MPI_ONEAPI_ROOT", "MSMPI_BIN", "MLAB_ROOT", "NUGET_PACKAGES"]
user = os.environ["USERPROFILE"]
add_path = lambda key: re.search(cuda_re, key) or key in path_ext_keys
- windows_search_ext.extend([os.environ[key] for key
- in os.environ.keys() if
- add_path(key)])
+ windows_search_ext.extend([os.environ[key] for key in os.environ.keys() if add_path(key)])
# note windows paths are fine here as this method should only ever be invoked
# to interact with Windows
# Add search path for default Chocolatey (https://github.com/chocolatey/choco)
@@ -231,7 +219,7 @@ def find_win32_additional_install_paths():
windows_search_ext.extend(
spack.config.get("config:additional_external_search_paths", default=[])
)
- windows_search_ext.extend(spack.util.environment.get_path('PATH'))
+ windows_search_ext.extend(spack.util.environment.get_path("PATH"))
return windows_search_ext
@@ -247,8 +235,9 @@ def compute_windows_program_path_for_package(pkg):
return []
# note windows paths are fine here as this method should only ever be invoked
# to interact with Windows
- program_files = 'C:\\Program Files{}\\{}'
+ program_files = "C:\\Program Files{}\\{}"
- return[program_files.format(arch, name) for
- arch, name in itertools.product(("", " (x86)"),
- (pkg.name, pkg.name.capitalize()))]
+ return [
+ program_files.format(arch, name)
+ for arch, name in itertools.product(("", " (x86)"), (pkg.name, pkg.name.capitalize()))
+ ]
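A standalone sketch of the prefix derivation shown in `executable_prefix` and `library_prefix` above: strip everything from the first `bin` (or `lib64`, then `lib`) path component onward to recover the enclosing install prefix. The function names here are illustrative only, and the directory-existence assertions from the originals are omitted.

    import os

    def prefix_from_bin_dir(executable_dir):
        components = executable_dir.split(os.sep)
        if "bin" not in components:
            return executable_dir
        return os.sep.join(components[: components.index("bin")])

    def prefix_from_lib_dir(library_dir):
        # Check lib64 before lib, matching the order used above.
        components = library_dir.split(os.sep)
        for libdir in ("lib64", "lib"):
            if libdir in components:
                return os.sep.join(components[: components.index(libdir)])
        return library_dir

    # e.g. on POSIX: prefix_from_bin_dir("/opt/gcc-12.1.0/bin") == "/opt/gcc-12.1.0"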
diff --git a/lib/spack/spack/detection/path.py b/lib/spack/spack/detection/path.py
index c344b24cd7..b7a7f6702d 100644
--- a/lib/spack/spack/detection/path.py
+++ b/lib/spack/spack/detection/path.py
@@ -46,18 +46,20 @@ def executables_in_path(path_hints=None):
# If we're on a Windows box, run vswhere,
# steal the installationPath using windows_os.py logic,
# construct paths to CMake and Ninja, add to PATH
- path_hints = path_hints or spack.util.environment.get_path('PATH')
- if sys.platform == 'win32':
+ path_hints = path_hints or spack.util.environment.get_path("PATH")
+ if sys.platform == "win32":
msvc_paths = list(winOs.WindowsOs.vs_install_paths)
msvc_cmake_paths = [
- os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft",
- "CMake", "CMake", "bin")
- for path in msvc_paths]
+ os.path.join(
+ path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "CMake", "bin"
+ )
+ for path in msvc_paths
+ ]
path_hints = msvc_cmake_paths + path_hints
msvc_ninja_paths = [
- os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft",
- "CMake", "Ninja")
- for path in msvc_paths]
+ os.path.join(path, "Common7", "IDE", "CommonExtensions", "Microsoft", "CMake", "Ninja")
+ for path in msvc_paths
+ ]
path_hints = msvc_ninja_paths + path_hints
path_hints.extend(find_win32_additional_install_paths())
search_paths = llnl.util.filesystem.search_paths_for_executables(*path_hints)
@@ -90,11 +92,13 @@ def libraries_in_ld_library_path(path_hints=None):
DYLD_LIBRARY_PATH, and DYLD_FALLBACK_LIBRARY_PATH environment
variables.
"""
- path_hints = path_hints or \
- spack.util.environment.get_path('LIBRARY_PATH') + \
- spack.util.environment.get_path('LD_LIBRARY_PATH') + \
- spack.util.environment.get_path('DYLD_LIBRARY_PATH') + \
- spack.util.environment.get_path('DYLD_FALLBACK_LIBRARY_PATH')
+ path_hints = path_hints or spack.util.environment.get_path(
+ "LIBRARY_PATH"
+ ) + spack.util.environment.get_path("LD_LIBRARY_PATH") + spack.util.environment.get_path(
+ "DYLD_LIBRARY_PATH"
+ ) + spack.util.environment.get_path(
+ "DYLD_FALLBACK_LIBRARY_PATH"
+ )
search_paths = llnl.util.filesystem.search_paths_for_libraries(*path_hints)
path_to_lib = {}
@@ -135,7 +139,7 @@ def by_library(packages_to_check, path_hints=None):
path_to_lib_name = libraries_in_ld_library_path(path_hints=path_hints)
lib_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
- if hasattr(pkg, 'libraries'):
+ if hasattr(pkg, "libraries"):
for lib in pkg.libraries:
lib_pattern_to_pkgs[lib].append(pkg)
@@ -151,18 +155,17 @@ def by_library(packages_to_check, path_hints=None):
resolved_specs = {} # spec -> lib found for the spec
for pkg, libs in pkg_to_found_libs.items():
- if not hasattr(pkg, 'determine_spec_details'):
+ if not hasattr(pkg, "determine_spec_details"):
llnl.util.tty.warn(
"{0} must define 'determine_spec_details' in order"
" for Spack to detect externally-provided instances"
- " of the package.".format(pkg.name))
+ " of the package.".format(pkg.name)
+ )
continue
for prefix, libs_in_prefix in sorted(_group_by_prefix(libs)):
try:
- specs = _convert_to_iterable(
- pkg.determine_spec_details(prefix, libs_in_prefix)
- )
+ specs = _convert_to_iterable(pkg.determine_spec_details(prefix, libs_in_prefix))
except Exception as e:
specs = []
msg = 'error detecting "{0}" from prefix {1} [{2}]'
@@ -170,10 +173,10 @@ def by_library(packages_to_check, path_hints=None):
if not specs:
llnl.util.tty.debug(
- 'The following libraries in {0} were decidedly not '
- 'part of the package {1}: {2}'
- .format(prefix, pkg.name, ', '.join(
- _convert_to_iterable(libs_in_prefix)))
+ "The following libraries in {0} were decidedly not "
+ "part of the package {1}: {2}".format(
+ prefix, pkg.name, ", ".join(_convert_to_iterable(libs_in_prefix))
+ )
)
for spec in specs:
@@ -186,13 +189,12 @@ def by_library(packages_to_check, path_hints=None):
continue
if spec in resolved_specs:
- prior_prefix = ', '.join(
- _convert_to_iterable(resolved_specs[spec]))
+ prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
llnl.util.tty.debug(
"Libraries in {0} and {1} are both associated"
- " with the same spec {2}"
- .format(prefix, prior_prefix, str(spec)))
+ " with the same spec {2}".format(prefix, prior_prefix, str(spec))
+ )
continue
else:
resolved_specs[spec] = prefix
@@ -200,17 +202,17 @@ def by_library(packages_to_check, path_hints=None):
try:
spec.validate_detection()
except Exception as e:
- msg = ('"{0}" has been detected on the system but will '
- 'not be added to packages.yaml [reason={1}]')
+ msg = (
+ '"{0}" has been detected on the system but will '
+ "not be added to packages.yaml [reason={1}]"
+ )
llnl.util.tty.warn(msg.format(spec, str(e)))
continue
if spec.external_path:
pkg_prefix = spec.external_path
- pkg_to_entries[pkg.name].append(
- DetectedPackage(spec=spec, prefix=pkg_prefix)
- )
+ pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
return pkg_to_entries
@@ -227,7 +229,7 @@ def by_executable(packages_to_check, path_hints=None):
path_hints = [] if path_hints is None else path_hints
exe_pattern_to_pkgs = collections.defaultdict(list)
for pkg in packages_to_check:
- if hasattr(pkg, 'executables'):
+ if hasattr(pkg, "executables"):
for exe in pkg.platform_executables():
exe_pattern_to_pkgs[exe].append(pkg)
# Add Windows specific, package related paths to the search paths
@@ -246,11 +248,12 @@ def by_executable(packages_to_check, path_hints=None):
resolved_specs = {} # spec -> exe found for the spec
for pkg, exes in pkg_to_found_exes.items():
- if not hasattr(pkg, 'determine_spec_details'):
+ if not hasattr(pkg, "determine_spec_details"):
llnl.util.tty.warn(
"{0} must define 'determine_spec_details' in order"
" for Spack to detect externally-provided instances"
- " of the package.".format(pkg.name))
+ " of the package.".format(pkg.name)
+ )
continue
for prefix, exes_in_prefix in sorted(_group_by_prefix(exes)):
@@ -260,9 +263,7 @@ def by_executable(packages_to_check, path_hints=None):
# naming scheme which differentiates them), the spec won't be
# usable.
try:
- specs = _convert_to_iterable(
- pkg.determine_spec_details(prefix, exes_in_prefix)
- )
+ specs = _convert_to_iterable(pkg.determine_spec_details(prefix, exes_in_prefix))
except Exception as e:
specs = []
msg = 'error detecting "{0}" from prefix {1} [{2}]'
@@ -270,10 +271,10 @@ def by_executable(packages_to_check, path_hints=None):
if not specs:
llnl.util.tty.debug(
- 'The following executables in {0} were decidedly not '
- 'part of the package {1}: {2}'
- .format(prefix, pkg.name, ', '.join(
- _convert_to_iterable(exes_in_prefix)))
+ "The following executables in {0} were decidedly not "
+ "part of the package {1}: {2}".format(
+ prefix, pkg.name, ", ".join(_convert_to_iterable(exes_in_prefix))
+ )
)
for spec in specs:
@@ -285,13 +286,12 @@ def by_executable(packages_to_check, path_hints=None):
continue
if spec in resolved_specs:
- prior_prefix = ', '.join(
- _convert_to_iterable(resolved_specs[spec]))
+ prior_prefix = ", ".join(_convert_to_iterable(resolved_specs[spec]))
llnl.util.tty.debug(
"Executables in {0} and {1} are both associated"
- " with the same spec {2}"
- .format(prefix, prior_prefix, str(spec)))
+ " with the same spec {2}".format(prefix, prior_prefix, str(spec))
+ )
continue
else:
resolved_specs[spec] = prefix
@@ -299,16 +299,16 @@ def by_executable(packages_to_check, path_hints=None):
try:
spec.validate_detection()
except Exception as e:
- msg = ('"{0}" has been detected on the system but will '
- 'not be added to packages.yaml [reason={1}]')
+ msg = (
+ '"{0}" has been detected on the system but will '
+ "not be added to packages.yaml [reason={1}]"
+ )
llnl.util.tty.warn(msg.format(spec, str(e)))
continue
if spec.external_path:
pkg_prefix = spec.external_path
- pkg_to_entries[pkg.name].append(
- DetectedPackage(spec=spec, prefix=pkg_prefix)
- )
+ pkg_to_entries[pkg.name].append(DetectedPackage(spec=spec, prefix=pkg_prefix))
return pkg_to_entries
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 0801db6146..fa04641f38 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -48,11 +48,21 @@ from spack.fetch_strategy import from_kwargs
from spack.resource import Resource
from spack.version import GitVersion, Version, VersionChecksumError, VersionLookupError
-__all__ = ['DirectiveError', 'DirectiveMeta', 'version', 'conflicts', 'depends_on',
- 'extends', 'provides', 'patch', 'variant', 'resource']
+__all__ = [
+ "DirectiveError",
+ "DirectiveMeta",
+ "version",
+ "conflicts",
+ "depends_on",
+ "extends",
+ "provides",
+ "patch",
+ "variant",
+ "resource",
+]
#: These are variant names used by Spack internally; packages can't use them
-reserved_names = ['patches', 'dev_path']
+reserved_names = ["patches", "dev_path"]
#: Names of possible directives. This list is populated elsewhere in the file.
directive_names = []
@@ -121,37 +131,36 @@ class DirectiveMeta(type):
# commands:
# 1. in the order they were defined
# 2. following the MRO
- attr_dict['_directives_to_be_executed'] = []
+ attr_dict["_directives_to_be_executed"] = []
for base in reversed(bases):
try:
directive_from_base = base._directives_to_be_executed
- attr_dict['_directives_to_be_executed'].extend(
- directive_from_base)
+ attr_dict["_directives_to_be_executed"].extend(directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
# De-duplicates directives from base classes
- attr_dict['_directives_to_be_executed'] = [
- x for x in llnl.util.lang.dedupe(
- attr_dict['_directives_to_be_executed'])]
+ attr_dict["_directives_to_be_executed"] = [
+ x for x in llnl.util.lang.dedupe(attr_dict["_directives_to_be_executed"])
+ ]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMeta._directives_to_be_executed:
- attr_dict['_directives_to_be_executed'].extend(
- DirectiveMeta._directives_to_be_executed)
+ attr_dict["_directives_to_be_executed"].extend(
+ DirectiveMeta._directives_to_be_executed
+ )
DirectiveMeta._directives_to_be_executed = []
- return super(DirectiveMeta, cls).__new__(
- cls, name, bases, attr_dict)
+ return super(DirectiveMeta, cls).__new__(cls, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The instance is being initialized: if it is a package we must ensure
# that the directives are called to set it up.
- if 'spack.pkg' in cls.__module__:
+ if "spack.pkg" in cls.__module__:
# Ensure the presence of the dictionaries associated
# with the directives
for d in DirectiveMeta._directive_dict_names:
@@ -223,7 +232,7 @@ class DirectiveMeta(type):
global directive_names
if isinstance(dicts, six.string_types):
- dicts = (dicts, )
+ dicts = (dicts,)
if not isinstance(dicts, Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
@@ -242,24 +251,23 @@ class DirectiveMeta(type):
if DirectiveMeta._when_constraints_from_context:
# Check that directives not yet supporting the when= argument
# are not used inside the context manager
- if decorated_function.__name__ == 'version':
- msg = ('directive "{0}" cannot be used within a "when"'
- ' context since it does not support a "when=" '
- 'argument')
+ if decorated_function.__name__ == "version":
+ msg = (
+ 'directive "{0}" cannot be used within a "when"'
+ ' context since it does not support a "when=" '
+ "argument"
+ )
msg = msg.format(decorated_function.__name__)
raise DirectiveError(msg)
when_constraints = [
- spack.spec.Spec(x) for x in
- DirectiveMeta._when_constraints_from_context
+ spack.spec.Spec(x) for x in DirectiveMeta._when_constraints_from_context
]
- if kwargs.get('when'):
- when_constraints.append(spack.spec.Spec(kwargs['when']))
- when_spec = spack.spec.merge_abstract_anonymous_specs(
- *when_constraints
- )
+ if kwargs.get("when"):
+ when_constraints.append(spack.spec.Spec(kwargs["when"]))
+ when_spec = spack.spec.merge_abstract_anonymous_specs(*when_constraints)
- kwargs['when'] = when_spec
+ kwargs["when"] = when_spec
# If any of the arguments are executors returned by a
# directive passed as an argument, don't execute them
@@ -274,8 +282,7 @@ class DirectiveMeta(type):
remove_directives(a)
else:
# Remove directives args from the exec queue
- remove = next(
- (d for d in directives if d is arg), None)
+ remove = next((d for d in directives if d is arg), None)
if remove is not None:
directives.remove(remove)
@@ -291,13 +298,14 @@ class DirectiveMeta(type):
# ...so if it is not a sequence make it so
values = result
if not isinstance(values, Sequence):
- values = (values, )
+ values = (values,)
DirectiveMeta._directives_to_be_executed.extend(values)
# wrapped function returns same result as original so
# that we can nest directives
return result
+
return _wrapper
return _decorator
@@ -306,7 +314,7 @@ class DirectiveMeta(type):
directive = DirectiveMeta.directive
-@directive('versions')
+@directive("versions")
def version(ver, checksum=None, **kwargs):
"""Adds a version and, if appropriate, metadata for fetching its code.
@@ -320,27 +328,29 @@ def version(ver, checksum=None, **kwargs):
Keyword Arguments:
deprecated (bool): whether or not this version is deprecated
"""
+
def _execute_version(pkg):
if checksum is not None:
- if hasattr(pkg, 'has_code') and not pkg.has_code:
+ if hasattr(pkg, "has_code") and not pkg.has_code:
raise VersionChecksumError(
"{0}: Checksums not allowed in no-code packages"
- "(see '{1}' version).".format(pkg.name, ver))
+ "(see '{1}' version).".format(pkg.name, ver)
+ )
- kwargs['checksum'] = checksum
+ kwargs["checksum"] = checksum
# Store kwargs for the package to use later with a fetch_strategy.
version = Version(ver)
if isinstance(version, GitVersion):
- if not hasattr(pkg, 'git') and 'git' not in kwargs:
+ if not hasattr(pkg, "git") and "git" not in kwargs:
msg = "Spack version directives cannot include git hashes fetched from"
msg += " URLs. Error in package '%s'\n" % pkg.name
msg += " version('%s', " % version.string
- msg += ', '.join("%s='%s'" % (argname, value)
- for argname, value in kwargs.items())
+ msg += ", ".join("%s='%s'" % (argname, value) for argname, value in kwargs.items())
msg += ")"
raise VersionLookupError(msg)
pkg.versions[version] = kwargs
+
return _execute_version
@@ -351,8 +361,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
dep_spec = spack.spec.Spec(spec)
if pkg.name == dep_spec.name:
- raise CircularReferenceError(
- "Package '%s' cannot depend on itself." % pkg.name)
+ raise CircularReferenceError("Package '%s' cannot depend on itself." % pkg.name)
type = canonical_deptype(type)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
@@ -379,8 +388,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
patches = [patches]
# auto-call patch() directive on any strings in patch list
- patches = [patch(p) if isinstance(p, six.string_types) else p
- for p in patches]
+ patches = [patch(p) if isinstance(p, six.string_types) else p for p in patches]
assert all(callable(p) for p in patches)
# this is where we actually add the dependency to this package
@@ -397,7 +405,7 @@ def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
execute_patch(dependency)
-@directive('conflicts')
+@directive("conflicts")
def conflicts(conflict_spec, when=None, msg=None):
"""Allows a package to define a conflict.
@@ -417,6 +425,7 @@ def conflicts(conflict_spec, when=None, msg=None):
when (spack.spec.Spec): optional constraint that triggers the conflict
msg (str): optional user defined message
"""
+
def _execute_conflicts(pkg):
# If when is not specified the conflict always holds
when_spec = make_when_spec(when)
@@ -426,10 +435,11 @@ def conflicts(conflict_spec, when=None, msg=None):
# Save in a list the conflicts and the associated custom messages
when_spec_list = pkg.conflicts.setdefault(conflict_spec, [])
when_spec_list.append((when_spec, msg))
+
return _execute_conflicts
-@directive(('dependencies'))
+@directive(("dependencies"))
def depends_on(spec, when=None, type=default_deptype, patches=None):
"""Creates a dict of deps with specs defining when they apply.
@@ -446,13 +456,15 @@ def depends_on(spec, when=None, type=default_deptype, patches=None):
@see The section "Dependency specs" in the Spack Packaging Guide.
"""
+
def _execute_depends_on(pkg):
_depends_on(pkg, spec, when=when, type=type, patches=patches)
+
return _execute_depends_on
-@directive(('extendees', 'dependencies'))
-def extends(spec, type=('build', 'run'), **kwargs):
+@directive(("extendees", "dependencies"))
+def extends(spec, type=("build", "run"), **kwargs):
"""Same as depends_on, but allows symlinking into dependency's
prefix tree.
@@ -467,8 +479,9 @@ def extends(spec, type=('build', 'run'), **kwargs):
mechanism.
"""
+
def _execute_extends(pkg):
- when = kwargs.get('when')
+ when = kwargs.get("when")
when_spec = make_when_spec(when)
if not when_spec:
return
@@ -476,17 +489,19 @@ def extends(spec, type=('build', 'run'), **kwargs):
_depends_on(pkg, spec, when=when, type=type)
spec_obj = spack.spec.Spec(spec)
pkg.extendees[spec_obj.name] = (spec_obj, kwargs)
+
return _execute_extends
-@directive('provided')
+@directive("provided")
def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
- 'mpi', other packages can declare that they depend on "mpi", and spack
- can use the providing package to satisfy the dependency.
+ 'mpi', other packages can declare that they depend on "mpi", and spack
+ can use the providing package to satisfy the dependency.
"""
+
def _execute_provides(pkg):
- when = kwargs.get('when')
+ when = kwargs.get("when")
when_spec = make_when_spec(when)
if not when_spec:
return
@@ -498,16 +513,16 @@ def provides(*specs, **kwargs):
for string in specs:
for provided_spec in spack.spec.parse(string):
if pkg.name == provided_spec.name:
- raise CircularReferenceError(
- "Package '%s' cannot provide itself.")
+ raise CircularReferenceError("Package '%s' cannot provide itself.")
if provided_spec not in pkg.provided:
pkg.provided[provided_spec] = set()
pkg.provided[provided_spec].add(when_spec)
+
return _execute_provides
-@directive('patches')
+@directive("patches")
def patch(url_or_filename, level=1, when=None, working_dir=".", **kwargs):
"""Packages can declare patches to apply to source. You can
optionally provide a when spec to indicate that a particular
@@ -528,15 +543,16 @@ def patch(url_or_filename, level=1, when=None, working_dir=".", **kwargs):
is compressed (only required for compressed URL patches)
"""
+
def _execute_patch(pkg_or_dep):
pkg = pkg_or_dep
if isinstance(pkg, Dependency):
pkg = pkg.pkg
- if hasattr(pkg, 'has_code') and not pkg.has_code:
+ if hasattr(pkg, "has_code") and not pkg.has_code:
raise UnsupportedPackageDirective(
- 'Patches are not allowed in {0}: package has no code.'.
- format(pkg.name))
+ "Patches are not allowed in {0}: package has no code.".format(pkg.name)
+ )
when_spec = make_when_spec(when)
if not when_spec:
@@ -550,30 +566,30 @@ def patch(url_or_filename, level=1, when=None, working_dir=".", **kwargs):
ordering_key = (pkg.name, _patch_order_index)
_patch_order_index += 1
- if '://' in url_or_filename:
+ if "://" in url_or_filename:
patch = spack.patch.UrlPatch(
- pkg, url_or_filename, level, working_dir,
- ordering_key=ordering_key, **kwargs)
+ pkg, url_or_filename, level, working_dir, ordering_key=ordering_key, **kwargs
+ )
else:
patch = spack.patch.FilePatch(
- pkg, url_or_filename, level, working_dir,
- ordering_key=ordering_key)
+ pkg, url_or_filename, level, working_dir, ordering_key=ordering_key
+ )
cur_patches.append(patch)
return _execute_patch
-@directive('variants')
+@directive("variants")
def variant(
- name,
- default=None,
- description='',
- values=None,
- multi=None,
- validator=None,
- when=None,
- sticky=False
+ name,
+ default=None,
+ description="",
+ values=None,
+ multi=None,
+ validator=None,
+ when=None,
+ sticky=False,
):
"""Define a variant for the package. Packager can specify a default
value as well as a text description.
@@ -600,20 +616,23 @@ def variant(
Raises:
DirectiveError: if arguments passed to the directive are invalid
"""
+
def format_error(msg, pkg):
msg += " @*r{{[{0}, variant '{1}']}}"
return llnl.util.tty.color.colorize(msg.format(pkg.name, name))
if name in reserved_names:
+
def _raise_reserved_name(pkg):
msg = "The name '%s' is reserved by Spack" % name
raise DirectiveError(format_error(msg, pkg))
+
return _raise_reserved_name
# Ensure we have a sequence of allowed variant values, or a
# predicate for it.
if values is None:
- if str(default).upper() in ('TRUE', 'FALSE'):
+ if str(default).upper() in ("TRUE", "FALSE"):
values = (True, False)
else:
values = lambda x: True
@@ -621,35 +640,40 @@ def variant(
# The object defining variant values might supply its own defaults for
# all the other arguments. Ensure we have no conflicting definitions
# in place.
- for argument in ('default', 'multi', 'validator'):
+ for argument in ("default", "multi", "validator"):
# TODO: we can consider treating 'default' differently from other
# TODO: attributes and let a packager decide whether to use the fluent
# TODO: interface or the directive argument
if hasattr(values, argument) and locals()[argument] is not None:
+
def _raise_argument_error(pkg):
- msg = "Remove specification of {0} argument: it is handled " \
- "by an attribute of the 'values' argument"
+ msg = (
+ "Remove specification of {0} argument: it is handled "
+ "by an attribute of the 'values' argument"
+ )
raise DirectiveError(format_error(msg.format(argument), pkg))
+
return _raise_argument_error
# Allow for the object defining the allowed values to supply its own
# default value and group validator, say if it supports multiple values.
- default = getattr(values, 'default', default)
- validator = getattr(values, 'validator', validator)
- multi = getattr(values, 'multi', bool(multi))
+ default = getattr(values, "default", default)
+ validator = getattr(values, "validator", validator)
+ multi = getattr(values, "multi", bool(multi))
# Here we sanitize against a default value being either None
# or the empty string, as the former indicates that a default
# was not set while the latter will make the variant unparsable
# from the command line
- if default is None or default == '':
+ if default is None or default == "":
+
def _raise_default_not_set(pkg):
if default is None:
- msg = "either a default was not explicitly set, " \
- "or 'None' was used"
- elif default == '':
+ msg = "either a default was not explicitly set, " "or 'None' was used"
+ elif default == "":
msg = "the default cannot be an empty string"
raise DirectiveError(format_error(msg, pkg))
+
return _raise_default_not_set
description = str(description).strip()
@@ -659,7 +683,7 @@ def variant(
when_specs = [when_spec]
if not re.match(spack.spec.identifier_re, name):
- directive = 'variant'
+ directive = "variant"
msg = "Invalid variant name in {0}: '{1}'"
raise DirectiveError(directive, msg.format(pkg.name, name))
@@ -669,13 +693,15 @@ def variant(
_, orig_when = pkg.variants[name]
when_specs += orig_when
- pkg.variants[name] = (spack.variant.Variant(
- name, default, description, values, multi, validator, sticky
- ), when_specs)
+ pkg.variants[name] = (
+ spack.variant.Variant(name, default, description, values, multi, validator, sticky),
+ when_specs,
+ )
+
return _execute_variant
-@directive('resources')
+@directive("resources")
def resource(**kwargs):
"""Define an external resource to be fetched and staged when building the
package. Based on the keywords present in the dictionary the appropriate
@@ -692,38 +718,43 @@ def resource(**kwargs):
* 'placement' : (optional) fine-tunes how the resource is moved into the
main package stage area.
"""
+
def _execute_resource(pkg):
- when = kwargs.get('when')
+ when = kwargs.get("when")
when_spec = make_when_spec(when)
if not when_spec:
return
- destination = kwargs.get('destination', "")
- placement = kwargs.get('placement', None)
+ destination = kwargs.get("destination", "")
+ placement = kwargs.get("placement", None)
# Check if the path is relative
if os.path.isabs(destination):
- message = ('The destination keyword of a resource directive '
- 'can\'t be an absolute path.\n')
+ message = (
+ "The destination keyword of a resource directive " "can't be an absolute path.\n"
+ )
message += "\tdestination : '{dest}\n'".format(dest=destination)
raise RuntimeError(message)
# Check if the path falls within the main package stage area
- test_path = 'stage_folder_root'
+ test_path = "stage_folder_root"
normalized_destination = os.path.normpath(
os.path.join(test_path, destination)
) # Normalized absolute path
if test_path not in normalized_destination:
- message = ("The destination folder of a resource must fall "
- "within the main package stage directory.\n")
+ message = (
+ "The destination folder of a resource must fall "
+ "within the main package stage directory.\n"
+ )
message += "\tdestination : '{dest}'\n".format(dest=destination)
raise RuntimeError(message)
resources = pkg.resources.setdefault(when_spec, [])
- name = kwargs.get('name')
+ name = kwargs.get("name")
fetcher = from_kwargs(**kwargs)
resources.append(Resource(name, fetcher, destination, placement))
+
return _execute_resource
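For orientation, the directives reformatted above (version, depends_on, extends, provides, conflicts, patch, variant, resource) are the class-level API that package recipes call; DirectiveMeta collects the returned closures in _directives_to_be_executed and applies them when a package class under spack.pkg is initialized. A minimal, hypothetical recipe exercising a few of them might look like the sketch below; the package name, URLs, version, checksum, and patch file are invented placeholders, and the `from spack.package import *` preamble is assumed to be the usual one for recipes.

    from spack.package import *  # assumed recipe preamble; provides Package and the directives


    class Libfoo(Package):
        """Hypothetical package illustrating the directives above."""

        homepage = "https://example.com/libfoo"            # placeholder
        url = "https://example.com/libfoo-1.2.0.tar.gz"    # placeholder

        # version(ver, checksum=None, **kwargs): register a fetchable version
        version("1.2.0", checksum="0" * 64)  # placeholder digest, not a real checksum

        # variant(name, default=..., description=..., ...): declare a build option
        variant("shared", default=True, description="Build shared libraries")

        # depends_on(spec, when=..., type=..., patches=...): conditional dependency
        depends_on("zlib", when="+shared")

        # conflicts(conflict_spec, when=..., msg=...): forbid a combination
        conflicts("%gcc@:4.9", msg="requires a newer GCC")

        # patch(url_or_filename, level=1, when=..., working_dir=...): apply a local patch
        patch("fix-build.patch", when="@1.2.0")  # hypothetical patch file

        def install(self, spec, prefix):
            pass  # build and install logic elided

Each directive call returns an executor that is queued by DirectiveMeta and run later against the class, which is why the calls can sit at class scope with no explicit registration step.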
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 3ccec788d8..28f3caab9e 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -25,75 +25,80 @@ import spack.spec
import spack.util.spack_json as sjson
from spack.error import SpackError
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
# Note: Posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
-default_projections = {'all': posixpath.join(
- '{architecture}', '{compiler.name}-{compiler.version}',
- '{name}-{version}-{hash}')}
+default_projections = {
+ "all": posixpath.join(
+ "{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}"
+ )
+}
def _check_concrete(spec):
"""If the spec is not concrete, raise a ValueError"""
if not spec.concrete:
- raise ValueError('Specs passed to a DirectoryLayout must be concrete!')
+ raise ValueError("Specs passed to a DirectoryLayout must be concrete!")
class DirectoryLayout(object):
"""A directory layout is used to associate unique paths with specs.
- Different installations are going to want different layouts for their
- install, and they can use this to customize the nesting structure of
- spack installs. The default layout is:
+ Different installations are going to want different layouts for their
+ install, and they can use this to customize the nesting structure of
+ spack installs. The default layout is:
- * <install root>/
+ * <install root>/
- * <platform-os-target>/
+ * <platform-os-target>/
- * <compiler>-<compiler version>/
+ * <compiler>-<compiler version>/
- * <name>-<version>-<hash>
+ * <name>-<version>-<hash>
- The hash here is a SHA-1 hash for the full DAG plus the build
- spec.
+ The hash here is a SHA-1 hash for the full DAG plus the build
+ spec.
- The installation directory projections can be modified with the
- projections argument.
+ The installation directory projections can be modified with the
+ projections argument.
"""
def __init__(self, root, **kwargs):
self.root = root
self.check_upstream = True
- projections = kwargs.get('projections') or default_projections
- self.projections = dict((key, projection.lower())
- for key, projection in projections.items())
+ projections = kwargs.get("projections") or default_projections
+ self.projections = dict(
+ (key, projection.lower()) for key, projection in projections.items()
+ )
# apply hash length as appropriate
- self.hash_length = kwargs.get('hash_length', None)
+ self.hash_length = kwargs.get("hash_length", None)
if self.hash_length is not None:
for when_spec, projection in self.projections.items():
- if '{hash}' not in projection:
- if '{hash' in projection:
+ if "{hash}" not in projection:
+ if "{hash" in projection:
raise InvalidDirectoryLayoutParametersError(
- "Conflicting options for installation layout hash"
- " length")
+ "Conflicting options for installation layout hash" " length"
+ )
else:
raise InvalidDirectoryLayoutParametersError(
"Cannot specify hash length when the hash is not"
- " part of all install_tree projections")
+ " part of all install_tree projections"
+ )
self.projections[when_spec] = projection.replace(
- "{hash}", "{hash:%d}" % self.hash_length)
+ "{hash}", "{hash:%d}" % self.hash_length
+ )
# If any of these paths change, downstream databases may not be able to
# locate files in older upstream databases
- self.metadata_dir = '.spack'
- self.deprecated_dir = 'deprecated'
- self.spec_file_name = 'spec.json'
+ self.metadata_dir = ".spack"
+ self.deprecated_dir = "deprecated"
+ self.spec_file_name = "spec.json"
# Use for checking yaml and deprecated types
- self._spec_file_name_yaml = 'spec.yaml'
- self.extension_file_name = 'extensions.yaml'
- self.packages_dir = 'repos' # archive of package.py files
- self.manifest_file_name = 'install_manifest.json'
+ self._spec_file_name_yaml = "spec.yaml"
+ self.extension_file_name = "extensions.yaml"
+ self.packages_dir = "repos" # archive of package.py files
+ self.manifest_file_name = "install_manifest.json"
@property
def hidden_file_regexes(self):
@@ -109,7 +114,7 @@ class DirectoryLayout(object):
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
- with open(path, 'w') as f:
+ with open(path, "w") as f:
# The hash of the projection is the DAG hash which contains
# the full provenance, so it's available if we want it later
spec.to_json(f, hash=ht.dag_hash)
@@ -120,9 +125,10 @@ class DirectoryLayout(object):
easily access this information.
"""
from spack.util.environment import get_host_environment_metadata
+
env_file = self.env_metadata_path(spec)
environ = get_host_environment_metadata()
- with open(env_file, 'w') as fd:
+ with open(env_file, "w") as fd:
sjson.dump(environ, fd)
def read_spec(self, path):
@@ -130,19 +136,19 @@ class DirectoryLayout(object):
try:
with open(path) as f:
extension = os.path.splitext(path)[-1].lower()
- if extension == '.json':
+ if extension == ".json":
spec = spack.spec.Spec.from_json(f)
- elif extension == '.yaml':
+ elif extension == ".yaml":
# Too late for conversion; spec_file_path() already called.
spec = spack.spec.Spec.from_yaml(f)
else:
- raise SpecReadError('Did not recognize spec file extension:'
- ' {0}'.format(extension))
+ raise SpecReadError(
+ "Did not recognize spec file extension:" " {0}".format(extension)
+ )
except Exception as e:
- if spack.config.get('config:debug'):
+ if spack.config.get("config:debug"):
raise
- raise SpecReadError(
- 'Unable to read file: %s' % path, 'Cause: ' + str(e))
+ raise SpecReadError("Unable to read file: %s" % path, "Cause: " + str(e))
# Specs read from actual installations are always concrete
spec._mark_concrete()
@@ -153,15 +159,14 @@ class DirectoryLayout(object):
_check_concrete(spec)
# Attempts to convert to JSON if possible.
# Otherwise just returns the YAML.
- yaml_path = os.path.join(
- self.metadata_path(spec), self._spec_file_name_yaml)
+ yaml_path = os.path.join(self.metadata_path(spec), self._spec_file_name_yaml)
json_path = os.path.join(self.metadata_path(spec), self.spec_file_name)
if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
self.write_spec(spec, json_path)
try:
os.remove(yaml_path)
except OSError as err:
- tty.debug('Could not remove deprecated {0}'.format(yaml_path))
+ tty.debug("Could not remove deprecated {0}".format(yaml_path))
tty.debug(err)
elif os.path.exists(yaml_path):
return yaml_path
@@ -179,24 +184,32 @@ class DirectoryLayout(object):
# If deprecator spec is None, assume deprecated_spec already deprecated
# and use its link to find the file.
- base_dir = self.path_for_spec(
- deprecator_spec
- ) if deprecator_spec else os.readlink(deprecated_spec.prefix)
+ base_dir = (
+ self.path_for_spec(deprecator_spec)
+ if deprecator_spec
+ else os.readlink(deprecated_spec.prefix)
+ )
- yaml_path = os.path.join(base_dir, self.metadata_dir,
- self.deprecated_dir, deprecated_spec.dag_hash()
- + '_' + self._spec_file_name_yaml)
-
- json_path = os.path.join(base_dir, self.metadata_dir,
- self.deprecated_dir, deprecated_spec.dag_hash()
- + '_' + self.spec_file_name)
+ yaml_path = os.path.join(
+ base_dir,
+ self.metadata_dir,
+ self.deprecated_dir,
+ deprecated_spec.dag_hash() + "_" + self._spec_file_name_yaml,
+ )
+
+ json_path = os.path.join(
+ base_dir,
+ self.metadata_dir,
+ self.deprecated_dir,
+ deprecated_spec.dag_hash() + "_" + self.spec_file_name,
+ )
- if (os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path)):
+ if os.path.exists(yaml_path) and fs.can_write_to_dir(yaml_path):
self.write_spec(deprecated_spec, json_path)
try:
os.remove(yaml_path)
except (IOError, OSError) as err:
- tty.debug('Could not remove deprecated {0}'.format(yaml_path))
+ tty.debug("Could not remove deprecated {0}".format(yaml_path))
tty.debug(err)
elif os.path.exists(yaml_path):
return yaml_path
@@ -231,7 +244,7 @@ class DirectoryLayout(object):
group = get_package_group(spec)
perms = get_package_dir_permissions(spec)
- fs.mkdirp(spec.prefix, mode=perms, group=group, default_perms='parents')
+ fs.mkdirp(spec.prefix, mode=perms, group=group, default_perms="parents")
fs.mkdirp(self.metadata_path(spec), mode=perms, group=group) # in prefix
self.write_spec(spec, self.spec_file_path(spec))
@@ -249,17 +262,19 @@ class DirectoryLayout(object):
if not os.path.isdir(path):
raise InconsistentInstallDirectoryError(
- "Install prefix {0} does not exist.".format(path))
+ "Install prefix {0} does not exist.".format(path)
+ )
if not os.path.isfile(spec_file_path):
raise InconsistentInstallDirectoryError(
- 'Install prefix exists but contains no spec.json:',
- " " + path)
+ "Install prefix exists but contains no spec.json:", " " + path
+ )
installed_spec = self.read_spec(spec_file_path)
if installed_spec.dag_hash() != spec.dag_hash():
raise InconsistentInstallDirectoryError(
- 'Spec file in %s does not match hash!' % spec_file_path)
+ "Spec file in %s does not match hash!" % spec_file_path
+ )
def all_specs(self):
if not os.path.isdir(self.root):
@@ -269,11 +284,11 @@ class DirectoryLayout(object):
for _, path_scheme in self.projections.items():
path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
# NOTE: Does not validate filename extension; should happen later
- path_elems += [self.metadata_dir, 'spec.json']
+ path_elems += [self.metadata_dir, "spec.json"]
pattern = os.path.join(self.root, *path_elems)
spec_files = glob.glob(pattern)
if not spec_files: # we're probably looking at legacy yaml...
- path_elems += [self.metadata_dir, 'spec.yaml']
+ path_elems += [self.metadata_dir, "spec.yaml"]
pattern = os.path.join(self.root, *path_elems)
spec_files = glob.glob(pattern)
specs.extend([self.read_spec(s) for s in spec_files])
@@ -287,15 +302,19 @@ class DirectoryLayout(object):
for _, path_scheme in self.projections.items():
path_elems = ["*"] * len(path_scheme.split(posixpath.sep))
# NOTE: Does not validate filename extension; should happen later
- path_elems += [self.metadata_dir, self.deprecated_dir,
- '*_spec.*'] # + self.spec_file_name]
+ path_elems += [
+ self.metadata_dir,
+ self.deprecated_dir,
+ "*_spec.*",
+ ] # + self.spec_file_name]
pattern = os.path.join(self.root, *path_elems)
spec_files = glob.glob(pattern)
get_depr_spec_file = lambda x: os.path.join(
- os.path.dirname(os.path.dirname(x)), self.spec_file_name)
- deprecated_specs |= set((self.read_spec(s),
- self.read_spec(get_depr_spec_file(s)))
- for s in spec_files)
+ os.path.dirname(os.path.dirname(x)), self.spec_file_name
+ )
+ deprecated_specs |= set(
+ (self.read_spec(s), self.read_spec(get_depr_spec_file(s))) for s in spec_files
+ )
return deprecated_specs
def specs_by_hash(self):
@@ -311,29 +330,31 @@ class DirectoryLayout(object):
if spec.external:
return spec.external_path
if self.check_upstream:
- upstream, record = spack.store.db.query_by_spec_hash(
- spec.dag_hash())
+ upstream, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
if upstream:
raise SpackError(
"Internal error: attempted to call path_for_spec on"
- " upstream-installed package.")
+ " upstream-installed package."
+ )
path = self.relative_path_for_spec(spec)
- assert(not path.startswith(self.root))
+ assert not path.startswith(self.root)
return os.path.join(self.root, path)
def remove_install_directory(self, spec, deprecated=False):
"""Removes a prefix and any empty parent directories from the root.
- Raises RemoveFailedError if something goes wrong.
+ Raises RemoveFailedError if something goes wrong.
"""
path = self.path_for_spec(spec)
- assert(path.startswith(self.root))
+ assert path.startswith(self.root)
# Windows readonly files cannot be removed by Python
# directly, change permissions before attempting to remove
if is_windows:
- kwargs = {'ignore_errors': False,
- 'onerror': fs.readonly_file_handler(ignore_errors=False)}
+ kwargs = {
+ "ignore_errors": False,
+ "onerror": fs.readonly_file_handler(ignore_errors=False),
+ }
else:
kwargs = {} # the default value for ignore_errors is false
@@ -370,12 +391,13 @@ class DirectoryLayout(object):
class ExtensionsLayout(object):
"""A directory layout is used to associate unique paths with specs for
- package extensions.
- Keeps track of which extensions are activated for what package.
- Depending on the use case, this can mean globally activated extensions
- directly in the installation folder - or extensions activated in
- filesystem views.
+ package extensions.
+ Keeps track of which extensions are activated for what package.
+ Depending on the use case, this can mean globally activated extensions
+ directly in the installation folder - or extensions activated in
+ filesystem views.
"""
+
def __init__(self, view, **kwargs):
self.view = view
@@ -386,23 +408,23 @@ class ExtensionsLayout(object):
def check_activated(self, spec, ext_spec):
"""Ensure that ext_spec can be removed from spec.
- If not, raise NoSuchExtensionError.
+ If not, raise NoSuchExtensionError.
"""
raise NotImplementedError()
def check_extension_conflict(self, spec, ext_spec):
"""Ensure that ext_spec can be activated in spec.
- If not, raise ExtensionAlreadyInstalledError or
- ExtensionConflictError.
+ If not, raise ExtensionAlreadyInstalledError or
+ ExtensionConflictError.
"""
raise NotImplementedError()
def extension_map(self, spec):
"""Get a dict of currently installed extension packages for a spec.
- Dict maps { name : extension_spec }
- Modifying dict does not affect internals of this layout.
+ Dict maps { name : extension_spec }
+ Modifying dict does not affect internals of this layout.
"""
raise NotImplementedError()
@@ -417,15 +439,15 @@ class ExtensionsLayout(object):
class YamlViewExtensionsLayout(ExtensionsLayout):
- """Maintain extensions within a view.
- """
+ """Maintain extensions within a view."""
+
def __init__(self, view, layout):
"""layout is the corresponding YamlDirectoryLayout object for which
- we implement extensions.
+ we implement extensions.
"""
super(YamlViewExtensionsLayout, self).__init__(view)
self.layout = layout
- self.extension_file_name = 'extensions.yaml'
+ self.extension_file_name = "extensions.yaml"
# Cache of already written/read extension maps.
self._extension_maps = {}
@@ -458,23 +480,25 @@ class YamlViewExtensionsLayout(ExtensionsLayout):
def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file, which
- keeps track of all the extensions for that package which have been
- added to this view.
+ keeps track of all the extensions for that package which have been
+ added to this view.
"""
_check_concrete(spec)
- normalize_path = lambda p: (
- os.path.abspath(p).rstrip(os.path.sep))
+ normalize_path = lambda p: (os.path.abspath(p).rstrip(os.path.sep))
view_prefix = self.view.get_projection_for_spec(spec)
if normalize_path(spec.prefix) == normalize_path(view_prefix):
# For backwards compatibility, when the view is the extended
# package's installation directory, do not include the spec name
# as a subdirectory.
- components = [view_prefix, self.layout.metadata_dir,
- self.extension_file_name]
+ components = [view_prefix, self.layout.metadata_dir, self.extension_file_name]
else:
- components = [view_prefix, self.layout.metadata_dir, spec.name,
- self.extension_file_name]
+ components = [
+ view_prefix,
+ self.layout.metadata_dir,
+ spec.name,
+ self.extension_file_name,
+ ]
return os.path.join(*components)
@@ -497,7 +521,7 @@ class YamlViewExtensionsLayout(ExtensionsLayout):
def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currently
- installed for this package."""
+ installed for this package."""
_check_concrete(spec)
if spec not in self._extension_maps:
@@ -510,20 +534,22 @@ class YamlViewExtensionsLayout(ExtensionsLayout):
exts = {}
with open(path) as ext_file:
yaml_file = yaml.load(ext_file)
- for entry in yaml_file['extensions']:
+ for entry in yaml_file["extensions"]:
name = next(iter(entry))
- dag_hash = entry[name]['hash']
- prefix = entry[name]['path']
+ dag_hash = entry[name]["hash"]
+ prefix = entry[name]["path"]
if dag_hash not in by_hash:
raise InvalidExtensionSpecError(
- "Spec %s not found in %s" % (dag_hash, prefix))
+ "Spec %s not found in %s" % (dag_hash, prefix)
+ )
ext_spec = by_hash[dag_hash]
if prefix != ext_spec.prefix:
raise InvalidExtensionSpecError(
"Prefix %s does not match spec hash %s: %s"
- % (prefix, dag_hash, ext_spec))
+ % (prefix, dag_hash, ext_spec)
+ )
exts[ext_spec.name] = ext_spec
self._extension_maps[spec] = exts
@@ -542,18 +568,21 @@ class YamlViewExtensionsLayout(ExtensionsLayout):
dirname, basename = os.path.split(path)
fs.mkdirp(dirname)
- tmp = tempfile.NamedTemporaryFile(
- prefix=basename, dir=dirname, delete=False)
+ tmp = tempfile.NamedTemporaryFile(prefix=basename, dir=dirname, delete=False)
# write tmp file
with tmp:
- yaml.dump({
- 'extensions': [
- {ext.name: {
- 'hash': ext.dag_hash(),
- 'path': str(ext.prefix)
- }} for ext in sorted(extensions.values())]
- }, tmp, default_flow_style=False, encoding='utf-8')
+ yaml.dump(
+ {
+ "extensions": [
+ {ext.name: {"hash": ext.dag_hash(), "path": str(ext.prefix)}}
+ for ext in sorted(extensions.values())
+ ]
+ },
+ tmp,
+ default_flow_style=False,
+ encoding="utf-8",
+ )
# Atomic update by moving tmpfile on top of old one.
fs.rename(tmp.name, path)
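The block above uses the standard atomic-update idiom: write the new extensions.yaml to a NamedTemporaryFile created in the same directory, then rename it over the old file so a reader never observes a half-written map. A generic, standalone sketch of the same idiom (using plain os.replace rather than Spack's fs.rename, with an invented file name) is:

    import os
    import tempfile


    def atomic_write_text(path, text):
        dirname, basename = os.path.split(path)
        # The temporary file must live in the destination directory so the
        # final rename stays on one filesystem and is therefore atomic.
        fd, tmp_path = tempfile.mkstemp(prefix=basename, dir=dirname or ".")
        try:
            with os.fdopen(fd, "w") as tmp:
                tmp.write(text)
            os.replace(tmp_path, path)  # atomically supersede the old file
        except BaseException:
            os.unlink(tmp_path)
            raise


    atomic_write_text("extensions-demo.yaml", "extensions: []\n")  # invented example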
@@ -571,8 +600,8 @@ class RemoveFailedError(DirectoryLayoutError):
def __init__(self, installed_spec, prefix, error):
super(RemoveFailedError, self).__init__(
- 'Could not remove prefix %s for %s : %s'
- % (prefix, installed_spec.short_spec, error))
+ "Could not remove prefix %s for %s : %s" % (prefix, installed_spec.short_spec, error)
+ )
self.cause = error
@@ -580,8 +609,7 @@ class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
def __init__(self, message, long_msg=None):
- super(InconsistentInstallDirectoryError, self).__init__(
- message, long_msg)
+ super(InconsistentInstallDirectoryError, self).__init__(message, long_msg)
class SpecReadError(DirectoryLayoutError):
@@ -592,8 +620,7 @@ class InvalidDirectoryLayoutParametersError(DirectoryLayoutError):
"""Raised when a invalid directory layout parameters are supplied"""
def __init__(self, message, long_msg=None):
- super(InvalidDirectoryLayoutParametersError, self).__init__(
- message, long_msg)
+ super(InvalidDirectoryLayoutParametersError, self).__init__(message, long_msg)
class InvalidExtensionSpecError(DirectoryLayoutError):
@@ -605,8 +632,8 @@ class ExtensionAlreadyInstalledError(DirectoryLayoutError):
def __init__(self, spec, ext_spec):
super(ExtensionAlreadyInstalledError, self).__init__(
- "%s is already installed in %s"
- % (ext_spec.short_spec, spec.short_spec))
+ "%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec)
+ )
class ExtensionConflictError(DirectoryLayoutError):
@@ -615,7 +642,8 @@ class ExtensionConflictError(DirectoryLayoutError):
def __init__(self, spec, ext_spec, conflict):
super(ExtensionConflictError, self).__init__(
"%s cannot be installed in %s because it conflicts with %s"
- % (ext_spec.short_spec, spec.short_spec, conflict.short_spec))
+ % (ext_spec.short_spec, spec.short_spec, conflict.short_spec)
+ )
class NoSuchExtensionError(DirectoryLayoutError):
@@ -624,4 +652,5 @@ class NoSuchExtensionError(DirectoryLayoutError):
def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__(
"%s cannot be removed from %s because it's not activated."
- % (ext_spec.short_spec, spec.short_spec))
+ % (ext_spec.short_spec, spec.short_spec)
+ )
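To make the default_projections template near the top of this file's diff concrete: relative_path_for_spec expands it (via Spec.format) into a prefix relative to the install root, and hash_length rewrites "{hash}" into "{hash:N}" so the DAG hash is truncated. The toy expansion below only mimics that behaviour; the substitution helper is a stand-in for the real Spec.format, and every spec value is invented.

    import posixpath
    import re


    def expand(projection, values):
        # Minimal stand-in for spack.spec.Spec.format: handles "{field}" and
        # "{field:N}" truncation only; the real formatter is far richer.
        def substitute(match):
            field, _, length = match.group(1).partition(":")
            text = str(values[field])
            return text[: int(length)] if length else text

        return re.sub(r"\{([^{}]+)\}", substitute, projection)


    projection = posixpath.join(
        "{architecture}", "{compiler.name}-{compiler.version}", "{name}-{version}-{hash}"
    )
    projection = projection.replace("{hash}", "{hash:%d}" % 7)  # as with hash_length = 7

    values = {  # invented example values
        "architecture": "linux-ubuntu20.04-x86_64",
        "compiler.name": "gcc",
        "compiler.version": "11.2.0",
        "name": "zlib",
        "version": "1.2.12",
        "hash": "abcdef1234567890abcdef1234567890",
    }

    print(expand(projection, values))
    # -> linux-ubuntu20.04-x86_64/gcc-11.2.0/zlib-1.2.12-abcdef1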
diff --git a/lib/spack/spack/environment/__init__.py b/lib/spack/spack/environment/__init__.py
index add58261e6..e8c45148ec 100644
--- a/lib/spack/spack/environment/__init__.py
+++ b/lib/spack/spack/environment/__init__.py
@@ -359,30 +359,30 @@ from .environment import (
)
__all__ = [
- 'Environment',
- 'SpackEnvironmentError',
- 'SpackEnvironmentViewError',
- 'activate',
- 'active',
- 'active_environment',
- 'all_environment_names',
- 'all_environments',
- 'config_dict',
- 'create',
- 'deactivate',
- 'default_manifest_yaml',
- 'default_view_name',
- 'display_specs',
- 'exists',
- 'installed_specs',
- 'is_env_dir',
- 'is_latest_format',
- 'lockfile_name',
- 'manifest_file',
- 'manifest_name',
- 'no_active_environment',
- 'read',
- 'root',
- 'spack_env_var',
- 'update_yaml',
+ "Environment",
+ "SpackEnvironmentError",
+ "SpackEnvironmentViewError",
+ "activate",
+ "active",
+ "active_environment",
+ "all_environment_names",
+ "all_environments",
+ "config_dict",
+ "create",
+ "deactivate",
+ "default_manifest_yaml",
+ "default_view_name",
+ "display_specs",
+ "exists",
+ "installed_specs",
+ "is_env_dir",
+ "is_latest_format",
+ "lockfile_name",
+ "manifest_file",
+ "manifest_name",
+ "no_active_environment",
+ "read",
+ "root",
+ "spack_env_var",
+ "update_yaml",
]
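The __all__ list above is the public surface of spack.environment. A hypothetical interactive session using a handful of those names (the environment name "demo" is invented, and this assumes Spack's lib/spack directory is importable) might look like:

    import spack.environment as ev  # assumes Spack's libraries are on sys.path

    # All names used below appear in __all__ above; the values are invented.
    if not ev.exists("demo"):
        ev.create("demo")                # create a managed environment

    env = ev.read("demo")                # Environment instance for "demo"
    ev.activate(env)                     # make it the active environment

    print(ev.active_environment().name)  # -> "demo"
    print("demo" in ev.all_environment_names())

    ev.deactivate()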
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index 83c33905aa..105b60642f 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -56,7 +56,7 @@ from spack.util.path import substitute_path_variables
from spack.variant import UnknownVariantError
#: environment variable used to indicate the active environment
-spack_env_var = 'SPACK_ENV'
+spack_env_var = "SPACK_ENV"
#: currently activated environment
@@ -64,19 +64,19 @@ _active_environment = None
#: path where environments are stored in the spack tree
-env_path = os.path.join(spack.paths.var_path, 'environments')
+env_path = os.path.join(spack.paths.var_path, "environments")
#: Name of the input yaml file for an environment
-manifest_name = 'spack.yaml'
+manifest_name = "spack.yaml"
#: Name of the lock file for an environment
-lockfile_name = 'spack.lock'
+lockfile_name = "spack.lock"
#: Name of the directory where environments store repos, logs, views
-env_subdir_name = '.spack-env'
+env_subdir_name = ".spack-env"
def default_manifest_yaml():
@@ -92,22 +92,24 @@ spack:
view: true
concretizer:
unify: {}
-""".format('true' if spack.config.get('concretizer:unify') else 'false')
+""".format(
+ "true" if spack.config.get("concretizer:unify") else "false"
+ )
#: regex for validating environment names
-valid_environment_name_re = r'^\w[\w-]*$'
+valid_environment_name_re = r"^\w[\w-]*$"
#: version of the lockfile format. Must increase monotonically.
lockfile_format_version = 4
# Magic names
# The name of the standalone spec list in the manifest yaml
-user_speclist_name = 'specs'
+user_speclist_name = "specs"
# The name of the default view (the view loaded on env.activate)
-default_view_name = 'default'
+default_view_name = "default"
# Default behavior to link all packages into views (vs. only root packages)
-default_view_link = 'all'
+default_view_link = "all"
def installed_specs():
@@ -126,9 +128,13 @@ def valid_env_name(name):
def validate_env_name(name):
if not valid_env_name(name):
- raise ValueError((
- "'%s': names must start with a letter, and only contain "
- "letters, numbers, _, and -.") % name)
+ raise ValueError(
+ (
+ "'%s': names must start with a letter, and only contain "
+ "letters, numbers, _, and -."
+ )
+ % name
+ )
return name
@@ -152,11 +158,11 @@ def activate(env, use_env_repo=False):
# Check if we need to reinitialize the store due to pushing the configuration
# below.
- install_tree_before = spack.config.get('config:install_tree')
- upstreams_before = spack.config.get('upstreams')
+ install_tree_before = spack.config.get("config:install_tree")
+ upstreams_before = spack.config.get("upstreams")
prepare_config_scope(env)
- install_tree_after = spack.config.get('config:install_tree')
- upstreams_after = spack.config.get('upstreams')
+ install_tree_after = spack.config.get("config:install_tree")
+ upstreams_after = spack.config.get("upstreams")
if install_tree_before != install_tree_after or upstreams_before != upstreams_after:
# Hack to store the state of the store before activation
env.store_token = spack.store.reinitialize()
@@ -179,9 +185,9 @@ def deactivate():
# If we attached a store token on activation, restore the previous state
# and consume the token
- if hasattr(_active_environment, 'store_token'):
+ if hasattr(_active_environment, "store_token"):
spack.store.restore(_active_environment.store_token)
- delattr(_active_environment, 'store_token')
+ delattr(_active_environment, "store_token")
deactivate_config_scope(_active_environment)
# use _repo so we only remove if a repo was actually constructed
@@ -223,8 +229,7 @@ def active(name):
def is_env_dir(path):
"""Whether a directory contains a spack environment."""
- return os.path.isdir(path) and os.path.exists(
- os.path.join(path, manifest_name))
+ return os.path.isdir(path) and os.path.exists(os.path.join(path, manifest_name))
def read(name):
@@ -274,15 +279,14 @@ def all_environments():
def _read_yaml(str_or_file):
"""Read YAML from a file for round-trip parsing."""
data = syaml.load_config(str_or_file)
- filename = getattr(str_or_file, 'name', None)
- default_data = spack.config.validate(
- data, spack.schema.env.schema, filename)
+ filename = getattr(str_or_file, "name", None)
+ default_data = spack.config.validate(data, spack.schema.env.schema, filename)
return (data, default_data)
def _write_yaml(data, str_or_file):
"""Write YAML to a file preserving comments and dict order."""
- filename = getattr(str_or_file, 'name', None)
+ filename = getattr(str_or_file, "name", None)
spack.config.validate(data, spack.schema.env.schema, filename)
syaml.dump_config(data, str_or_file, default_flow_style=False)
@@ -290,10 +294,12 @@ def _write_yaml(data, str_or_file):
def _eval_conditional(string):
"""Evaluate conditional definitions using restricted variable scope."""
valid_variables = spack.util.environment.get_host_environment()
- valid_variables.update({
- 're': re,
- 'env': os.environ,
- })
+ valid_variables.update(
+ {
+ "re": re,
+ "env": os.environ,
+ }
+ )
return eval(string, valid_variables)
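_eval_conditional above is what turns the optional when: string attached to each definitions entry in spack.yaml (see _read_manifest below, which calls it with entry.pop("when", "True")) into a boolean, exposing only the host-environment metadata plus re and env to the expression. A standalone approximation, restricted to the two names the snippet injects explicitly and fed an invented expression, is:

    import os
    import re


    def eval_conditional(string):
        # Approximation of _eval_conditional: evaluate against a restricted
        # namespace containing only "re" and "env" (the real code also adds
        # host metadata from get_host_environment()).
        scope = {"re": re, "env": os.environ}
        return eval(string, scope)


    os.environ.setdefault("HOSTNAME", "login1.cluster.example")  # invented host name
    print(eval_conditional("re.search('cluster', env.get('HOSTNAME', '')) is not None"))
    # -> True for the invented host name above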
@@ -302,7 +308,7 @@ def _is_dev_spec_and_has_changed(spec):
last installation"""
# First check if this is a dev build and in the process already try to get
# the dev_path
- dev_path_var = spec.variants.get('dev_path', None)
+ dev_path_var = spec.variants.get("dev_path", None)
if not dev_path_var:
return False
@@ -328,15 +334,14 @@ def _spec_needs_overwrite(spec, changed_dev_specs):
return True
# if spec and all deps aren't dev builds, we don't need to overwrite it
- if not any(spec.satisfies(c)
- for c in ('dev_path=*', '^dev_path=*')):
+ if not any(spec.satisfies(c) for c in ("dev_path=*", "^dev_path=*")):
return False
# If any dep needs overwrite, or any dep is missing and is a dev build then
# overwrite this package
if any(
- ((not dep.installed) and dep.satisfies('dev_path=*')) or
- _spec_needs_overwrite(dep, changed_dev_specs)
+ ((not dep.installed) and dep.satisfies("dev_path=*"))
+ or _spec_needs_overwrite(dep, changed_dev_specs)
for dep in spec.traverse(root=False)
):
return True
@@ -362,12 +367,21 @@ def _error_on_nonempty_view_dir(new_root):
raise SpackEnvironmentViewError(
"Failed to generate environment view, because the target {} already "
"exists or is not empty. To update the view, remove this path, and run "
- "`spack env view regenerate`".format(new_root))
+ "`spack env view regenerate`".format(new_root)
+ )
class ViewDescriptor(object):
- def __init__(self, base_path, root, projections={}, select=[], exclude=[],
- link=default_view_link, link_type='symlink'):
+ def __init__(
+ self,
+ base_path,
+ root,
+ projections={},
+ select=[],
+ exclude=[],
+ link=default_view_link,
+ link_type="symlink",
+ ):
self.base = base_path
self.root = spack.util.path.canonicalize_path(root)
self.projections = projections
@@ -383,45 +397,47 @@ class ViewDescriptor(object):
return not any(spec.satisfies(e) for e in self.exclude)
def __eq__(self, other):
- return all([self.root == other.root,
- self.projections == other.projections,
- self.select == other.select,
- self.exclude == other.exclude,
- self.link == other.link,
- self.link_type == other.link_type])
+ return all(
+ [
+ self.root == other.root,
+ self.projections == other.projections,
+ self.select == other.select,
+ self.exclude == other.exclude,
+ self.link == other.link,
+ self.link_type == other.link_type,
+ ]
+ )
def to_dict(self):
- ret = syaml.syaml_dict([('root', self.root)])
+ ret = syaml.syaml_dict([("root", self.root)])
if self.projections:
# projections guaranteed to be ordered dict if true-ish
# for python2.6, may be syaml or ruamel.yaml implementation
# so we have to check for both
- types = (
- collections.OrderedDict,
- syaml.syaml_dict,
- yaml.comments.CommentedMap
- )
+ types = (collections.OrderedDict, syaml.syaml_dict, yaml.comments.CommentedMap)
assert isinstance(self.projections, types)
- ret['projections'] = self.projections
+ ret["projections"] = self.projections
if self.select:
- ret['select'] = self.select
+ ret["select"] = self.select
if self.exclude:
- ret['exclude'] = self.exclude
+ ret["exclude"] = self.exclude
if self.link_type:
- ret['link_type'] = inverse_view_func_parser(self.link_type)
+ ret["link_type"] = inverse_view_func_parser(self.link_type)
if self.link != default_view_link:
- ret['link'] = self.link
+ ret["link"] = self.link
return ret
@staticmethod
def from_dict(base_path, d):
- return ViewDescriptor(base_path,
- d['root'],
- d.get('projections', {}),
- d.get('select', []),
- d.get('exclude', []),
- d.get('link', default_view_link),
- d.get('link_type', 'symlink'))
+ return ViewDescriptor(
+ base_path,
+ d["root"],
+ d.get("projections", {}),
+ d.get("select", []),
+ d.get("exclude", []),
+ d.get("link", default_view_link),
+ d.get("link_type", "symlink"),
+ )
@property
def _current_root(self):
@@ -439,13 +455,15 @@ class ViewDescriptor(object):
content_hash = self.content_hash(specs)
root_dir = os.path.dirname(self.root)
root_name = os.path.basename(self.root)
- return os.path.join(root_dir, '._%s' % root_name, content_hash)
+ return os.path.join(root_dir, "._%s" % root_name, content_hash)
def content_hash(self, specs):
- d = syaml.syaml_dict([
- ('descriptor', self.to_dict()),
- ('specs', [(spec.dag_hash(), spec.prefix) for spec in sorted(specs)])
- ])
+ d = syaml.syaml_dict(
+ [
+ ("descriptor", self.to_dict()),
+ ("specs", [(spec.dag_hash(), spec.prefix) for spec in sorted(specs)]),
+ ]
+ )
contents = sjson.dump(d)
return spack.util.hash.b32_hash(contents)
@@ -478,13 +496,18 @@ class ViewDescriptor(object):
root = new if new else self._current_root
if not root:
# This can only be hit if we write a future bug
- msg = ("Attempting to get nonexistent view from environment. "
- "View root is at %s" % self.root)
+ msg = (
+ "Attempting to get nonexistent view from environment. "
+ "View root is at %s" % self.root
+ )
raise SpackEnvironmentViewError(msg)
- return SimpleFilesystemView(root, spack.store.layout,
- ignore_conflicts=True,
- projections=self.projections,
- link=self.link_type)
+ return SimpleFilesystemView(
+ root,
+ spack.store.layout,
+ ignore_conflicts=True,
+ projections=self.projections,
+ link=self.link_type,
+ )
def __contains__(self, spec):
"""Is the spec described by the view descriptor
@@ -511,10 +534,10 @@ class ViewDescriptor(object):
specs = []
for s in concretized_root_specs:
- if self.link == 'all':
- specs.extend(s.traverse(deptype=('link', 'run')))
- elif self.link == 'run':
- specs.extend(s.traverse(deptype=('run')))
+ if self.link == "all":
+ specs.extend(s.traverse(deptype=("link", "run")))
+ elif self.link == "run":
+ specs.extend(s.traverse(deptype=("run")))
else:
specs.append(s)
@@ -557,7 +580,7 @@ class ViewDescriptor(object):
view = self.view(new=new_root)
root_dirname = os.path.dirname(self.root)
- tmp_symlink_name = os.path.join(root_dirname, '._view_link')
+ tmp_symlink_name = os.path.join(root_dirname, "._view_link")
# Create a new view
try:
@@ -584,9 +607,9 @@ class ViewDescriptor(object):
# against removal of an arbitrary path when the original symlink in self.root
# was not created by the environment, but by the user.
if (
- old_root and
- os.path.exists(old_root) and
- os.path.samefile(os.path.dirname(new_root), os.path.dirname(old_root))
+ old_root
+ and os.path.exists(old_root)
+ and os.path.samefile(os.path.dirname(new_root), os.path.dirname(old_root))
):
try:
shutil.rmtree(old_root)
@@ -636,7 +659,7 @@ class Environment(object):
# need to lock, because there are no Spack operations that alter
# the init file.
with fs.open_if_filename(init_file) as f:
- if hasattr(f, 'name') and f.name.endswith('.lock'):
+ if hasattr(f, "name") and f.name.endswith(".lock"):
self._read_manifest(default_manifest_yaml())
self._read_lockfile(f)
self._set_user_specs_from_lockfile()
@@ -645,8 +668,7 @@ class Environment(object):
# Rewrite relative develop paths when initializing a new
# environment in a different location from the spack.yaml file.
- if not keep_relative and hasattr(f, 'name') and \
- f.name.endswith('.yaml'):
+ if not keep_relative and hasattr(f, "name") and f.name.endswith(".yaml"):
init_file_dir = os.path.abspath(os.path.dirname(f.name))
self._rewrite_relative_paths_on_relocation(init_file_dir)
else:
@@ -656,45 +678,38 @@ class Environment(object):
if with_view is False:
self.views = {}
elif with_view is True:
- self.views = {
- default_view_name: ViewDescriptor(self.path,
- self.view_path_default)}
+ self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(with_view, six.string_types):
- self.views = {default_view_name: ViewDescriptor(self.path,
- with_view)}
+ self.views = {default_view_name: ViewDescriptor(self.path, with_view)}
# If with_view is None, then defer to the view settings determined by
# the manifest file
def __reduce__(self):
- return _create_environment, (
- self.path, self.init_file, self.with_view, self.keep_relative
- )
+ return _create_environment, (self.path, self.init_file, self.with_view, self.keep_relative)
def _rewrite_relative_paths_on_relocation(self, init_file_dir):
"""When initializing the environment from a manifest file and we plan
- to store the environment in a different directory, we have to rewrite
- relative paths to absolute ones."""
+ to store the environment in a different directory, we have to rewrite
+ relative paths to absolute ones."""
if init_file_dir == self.path:
return
for name, entry in self.dev_specs.items():
- dev_path = entry['path']
- expanded_path = os.path.normpath(os.path.join(
- init_file_dir, entry['path']))
+ dev_path = entry["path"]
+ expanded_path = os.path.normpath(os.path.join(init_file_dir, entry["path"]))
# Skip if the expanded path is the same (e.g. when absolute)
if dev_path == expanded_path:
continue
- tty.debug("Expanding develop path for {0} to {1}".format(
- name, expanded_path))
+ tty.debug("Expanding develop path for {0} to {1}".format(name, expanded_path))
- self.dev_specs[name]['path'] = expanded_path
+ self.dev_specs[name]["path"] = expanded_path
def _re_read(self):
"""Reinitialize the environment object if it has been written (this
- may not be true if the environment was just created in this running
- instance of Spack)."""
+ may not be true if the environment was just created in this running
+ instance of Spack)."""
if not os.path.exists(self.manifest_path):
return
@@ -720,7 +735,9 @@ class Environment(object):
if read_lock_version == 1:
tty.debug(
"Storing backup of old lockfile {0} at {1}".format(
- self.lock_path, self._lock_backup_v1_path))
+ self.lock_path, self._lock_backup_v1_path
+ )
+ )
shutil.copy(self.lock_path, self._lock_backup_v1_path)
def write_transaction(self):
@@ -737,9 +754,9 @@ class Environment(object):
self.spec_lists = collections.OrderedDict()
- for item in config_dict(self.yaml).get('definitions', []):
+ for item in config_dict(self.yaml).get("definitions", []):
entry = copy.deepcopy(item)
- when = _eval_conditional(entry.pop('when', 'True'))
+ when = _eval_conditional(entry.pop("when", "True"))
assert len(entry) == 1
if when:
name, spec_list = next(iter(entry.items()))
@@ -750,23 +767,23 @@ class Environment(object):
self.spec_lists[name] = user_specs
spec_list = config_dict(self.yaml).get(user_speclist_name, [])
- user_specs = SpecList(user_speclist_name, [s for s in spec_list if s],
- self.spec_lists.copy())
+ user_specs = SpecList(
+ user_speclist_name, [s for s in spec_list if s], self.spec_lists.copy()
+ )
self.spec_lists[user_speclist_name] = user_specs
- enable_view = config_dict(self.yaml).get('view')
+ enable_view = config_dict(self.yaml).get("view")
# enable_view can be boolean, string, or None
if enable_view is True or enable_view is None:
- self.views = {
- default_view_name: ViewDescriptor(self.path,
- self.view_path_default)}
+ self.views = {default_view_name: ViewDescriptor(self.path, self.view_path_default)}
elif isinstance(enable_view, six.string_types):
- self.views = {default_view_name: ViewDescriptor(self.path,
- enable_view)}
+ self.views = {default_view_name: ViewDescriptor(self.path, enable_view)}
elif enable_view:
path = self.path
- self.views = dict((name, ViewDescriptor.from_dict(path, values))
- for name, values in enable_view.items())
+ self.views = dict(
+ (name, ViewDescriptor.from_dict(path, values))
+ for name, values in enable_view.items()
+ )
else:
self.views = {}
# Retrieve the current concretization strategy
@@ -775,20 +792,20 @@ class Environment(object):
# Let `concretization` overrule `concretizer:unify` config for now,
# but use a translation table to have internally a representation
# as if we were using the new configuration
- translation = {'separately': False, 'together': True}
+ translation = {"separately": False, "together": True}
try:
- self.unify = translation[configuration['concretization']]
+ self.unify = translation[configuration["concretization"]]
except KeyError:
- self.unify = spack.config.get('concretizer:unify', False)
+ self.unify = spack.config.get("concretizer:unify", False)
# Retrieve dev-build packages:
- self.dev_specs = configuration.get('develop', {})
+ self.dev_specs = configuration.get("develop", {})
for name, entry in self.dev_specs.items():
# spec must include a concrete version
- assert Spec(entry['spec']).version.concrete
+ assert Spec(entry["spec"]).version.concrete
# default path is the spec name
- if 'path' not in entry:
- self.dev_specs[name]['path'] = name
+ if "path" not in entry:
+ self.dev_specs[name]["path"] = name
@property
def user_specs(self):
@@ -798,8 +815,7 @@ class Environment(object):
"""Copy user_specs from a read-in lockfile."""
self.spec_lists = {
user_speclist_name: SpecList(
- user_speclist_name,
- [str(s) for s in self.concretized_user_specs]
+ user_speclist_name, [str(s) for s in self.concretized_user_specs]
)
}
@@ -813,16 +829,16 @@ class Environment(object):
environment.
"""
self.spec_lists = {user_speclist_name: SpecList()} # specs from yaml
- self.dev_specs = {} # dev-build specs from yaml
+ self.dev_specs = {} # dev-build specs from yaml
self.concretized_user_specs = [] # user specs from last concretize
- self.concretized_order = [] # roots of last concretize, in order
- self.specs_by_hash = {} # concretized specs by hash
- self._repo = None # RepoPath for this env (memoized)
- self._previous_active = None # previously active environment
+ self.concretized_order = [] # roots of last concretize, in order
+ self.specs_by_hash = {} # concretized specs by hash
+ self._repo = None # RepoPath for this env (memoized)
+ self._previous_active = None # previously active environment
if not re_read:
# things that cannot be recreated from file
- self.new_specs = [] # write packages for these on write()
- self.new_installs = [] # write modules for these on write()
+ self.new_specs = [] # write packages for these on write()
+ self.new_installs = [] # write modules for these on write()
@property
def internal(self):
@@ -856,7 +872,7 @@ class Environment(object):
"""The location of the lock file used to synchronize multiple
processes updating the same environment.
"""
- return os.path.join(self.env_subdir_path, 'transaction_lock')
+ return os.path.join(self.env_subdir_path, "transaction_lock")
@property
def lock_path(self):
@@ -866,7 +882,7 @@ class Environment(object):
@property
def _lock_backup_v1_path(self):
"""Path to backup of v1 lockfile before conversion to v2"""
- return self.lock_path + '.backup.v1'
+ return self.lock_path + ".backup.v1"
@property
def env_subdir_path(self):
@@ -875,16 +891,16 @@ class Environment(object):
@property
def repos_path(self):
- return os.path.join(self.path, env_subdir_name, 'repos')
+ return os.path.join(self.path, env_subdir_name, "repos")
@property
def log_path(self):
- return os.path.join(self.path, env_subdir_name, 'logs')
+ return os.path.join(self.path, env_subdir_name, "logs")
@property
def view_path_default(self):
# default path for environment views
- return os.path.join(self.env_subdir_path, 'view')
+ return os.path.join(self.env_subdir_path, "view")
@property
def repo(self):
@@ -906,7 +922,7 @@ class Environment(object):
# load config scopes added via 'include:', in reverse so that
# highest-precedence scopes are last.
- includes = config_dict(self.yaml).get('include', [])
+ includes = config_dict(self.yaml).get("include", [])
missing = []
for i, config_path in enumerate(reversed(includes)):
# allow paths to contain spack config/environment variables, etc.
@@ -919,14 +935,14 @@ class Environment(object):
if os.path.isdir(config_path):
# directories are treated as regular ConfigScopes
- config_name = 'env:%s:%s' % (
- self.name, os.path.basename(config_path))
+ config_name = "env:%s:%s" % (self.name, os.path.basename(config_path))
scope = spack.config.ConfigScope(config_name, config_path)
elif os.path.exists(config_path):
# files are assumed to be SingleFileScopes
- config_name = 'env:%s:%s' % (self.name, config_path)
+ config_name = "env:%s:%s" % (self.name, config_path)
scope = spack.config.SingleFileScope(
- config_name, config_path, spack.schema.merged.schema)
+ config_name, config_path, spack.schema.merged.schema
+ )
else:
missing.append(config_path)
continue
@@ -934,15 +950,15 @@ class Environment(object):
scopes.append(scope)
if missing:
- msg = 'Detected {0} missing include path(s):'.format(len(missing))
- msg += '\n {0}'.format('\n '.join(missing))
- tty.die('{0}\nPlease correct and try again.'.format(msg))
+ msg = "Detected {0} missing include path(s):".format(len(missing))
+ msg += "\n {0}".format("\n ".join(missing))
+ tty.die("{0}\nPlease correct and try again.".format(msg))
return scopes
def env_file_config_scope_name(self):
"""Name of the config scope of this environment's manifest file."""
- return 'env:%s' % self.name
+ return "env:%s" % self.name
def env_file_config_scope(self):
"""Get the configuration scope for the environment's manifest file."""
@@ -951,8 +967,8 @@ class Environment(object):
config_name,
self.manifest_path,
spack.schema.env.schema,
- [spack.config.first_existing(self.raw_yaml,
- spack.schema.env.keys)])
+ [spack.config.first_existing(self.raw_yaml, spack.schema.env.keys)],
+ )
def config_scopes(self):
"""A list of all configuration scopes for this environment."""
@@ -971,10 +987,9 @@ class Environment(object):
# spec_lists is an OrderedDict, all list entries after the modified
# list may refer to the modified list. Update stale references
for i, (name, speclist) in enumerate(
- list(self.spec_lists.items())[index + 1:], index + 1
+ list(self.spec_lists.items())[index + 1 :], index + 1
):
- new_reference = dict((n, self.spec_lists[n])
- for n in list(self.spec_lists.keys())[:i])
+ new_reference = dict((n, self.spec_lists[n]) for n in list(self.spec_lists.keys())[:i])
speclist.update_reference(new_reference)
def add(self, user_spec, list_name=user_speclist_name):
@@ -989,17 +1004,16 @@ class Environment(object):
if list_name not in self.spec_lists:
raise SpackEnvironmentError(
- 'No list %s exists in environment %s' % (list_name, self.name)
+ "No list %s exists in environment %s" % (list_name, self.name)
)
if list_name == user_speclist_name:
if not spec.name:
- raise SpackEnvironmentError(
- 'cannot add anonymous specs to an environment!')
+ raise SpackEnvironmentError("cannot add anonymous specs to an environment!")
elif not spack.repo.path.exists(spec.name):
virtuals = spack.repo.path.provider_index.providers.keys()
if spec.name not in virtuals:
- msg = 'no such package: %s' % spec.name
+ msg = "no such package: %s" % spec.name
raise SpackEnvironmentError(msg)
list_to_change = self.spec_lists[list_name]
@@ -1023,18 +1037,12 @@ class Environment(object):
if not matches:
# concrete specs match against concrete specs in the env
# by dag hash.
- specs_hashes = zip(
- self.concretized_user_specs, self.concretized_order
- )
+ specs_hashes = zip(self.concretized_user_specs, self.concretized_order)
- matches = [
- s for s, h in specs_hashes
- if query_spec.dag_hash() == h
- ]
+ matches = [s for s, h in specs_hashes if query_spec.dag_hash() == h]
if not matches:
- raise SpackEnvironmentError(
- "Not found: {0}".format(query_spec))
+ raise SpackEnvironmentError("Not found: {0}".format(query_spec))
old_specs = set(self.user_specs)
new_specs = set()
@@ -1084,18 +1092,16 @@ class Environment(object):
spec = spec.copy() # defensive copy since we access cached attributes
if not spec.versions.concrete:
- raise SpackEnvironmentError(
- 'Cannot develop spec %s without a concrete version' % spec)
+ raise SpackEnvironmentError("Cannot develop spec %s without a concrete version" % spec)
for name, entry in self.dev_specs.items():
if name == spec.name:
- e_spec = Spec(entry['spec'])
- e_path = entry['path']
+ e_spec = Spec(entry["spec"])
+ e_path = entry["path"]
if e_spec == spec:
if path == e_path:
- tty.msg("Spec %s already configured for development" %
- spec)
+ tty.msg("Spec %s already configured for development" % spec)
return False
else:
tty.msg("Updating development path for spec %s" % spec)
@@ -1106,8 +1112,7 @@ class Environment(object):
tty.msg(msg)
break
else:
- tty.msg("Configuring spec %s for development at path %s" %
- (spec, path))
+ tty.msg("Configuring spec %s for development at path %s" % (spec, path))
if clone:
# "steal" the source code via staging API
@@ -1122,7 +1127,7 @@ class Environment(object):
pkg_cls(spec).stage.steal_source(abspath)
# If it wasn't already in the list, append it
- self.dev_specs[spec.name] = {'path': path, 'spec': str(spec)}
+ self.dev_specs[spec.name] = {"path": path, "spec": str(spec)}
return True
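develop() records each development package as a small dictionary keyed by package name. A sketch of what self.dev_specs might look like after two calls (package names, versions, and paths are invented); entries whose path equals the package name have the redundant "path" key dropped again when the manifest is written back, as the write hunk further below shows:

    # Illustrative shape of self.dev_specs only; all values are hypothetical.
    dev_specs = {
        "zlib": {"path": "zlib", "spec": "zlib@1.2.13"},
        "hdf5": {"path": "/home/me/src/hdf5", "spec": "hdf5@1.12.2"},
    }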
def undevelop(self, spec):
@@ -1165,7 +1170,7 @@ class Environment(object):
self.specs_by_hash = {}
# Pick the right concretization strategy
- if self.unify == 'when_possible':
+ if self.unify == "when_possible":
return self._concretize_together_where_possible(tests=tests)
if self.unify is True:
@@ -1174,7 +1179,7 @@ class Environment(object):
if self.unify is False:
return self._concretize_separately(tests=tests)
- msg = 'concretization strategy not implemented [{0}]'
+ msg = "concretization strategy not implemented [{0}]"
raise SpackEnvironmentError(msg.format(self.unify))
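The dispatch above accepts exactly three values for self.unify. A compact summary as a sketch; the target of the True branch is not visible in this hunk, so its name below is an assumption based on the other two:

    # Which concretization routine handles each unify value (any other value raises).
    dispatch = {
        "when_possible": "_concretize_together_where_possible",
        True: "_concretize_together",     # assumed name; the call sits outside this hunk
        False: "_concretize_separately",
    }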
def _concretize_together_where_possible(self, tests=False):
@@ -1226,9 +1231,11 @@ class Environment(object):
duplicates.append(name)
if duplicates:
- msg = ('environment that are configured to concretize specs'
- ' together cannot contain more than one spec for each'
- ' package [{0}]'.format(', '.join(duplicates)))
+ msg = (
+ "environment that are configured to concretize specs"
+ " together cannot contain more than one spec for each"
+ " package [{0}]".format(", ".join(duplicates))
+ )
raise SpackEnvironmentError(msg)
# Proceed with concretization
@@ -1236,9 +1243,7 @@ class Environment(object):
self.concretized_order = []
self.specs_by_hash = {}
- concrete_specs = spack.concretize.concretize_specs_together(
- *self.user_specs, tests=tests
- )
+ concrete_specs = spack.concretize.concretize_specs_together(*self.user_specs, tests=tests)
concretized_specs = [x for x in zip(self.user_specs, concrete_specs)]
for abstract, concrete in concretized_specs:
self._add_concrete_spec(abstract, concrete)
@@ -1264,15 +1269,13 @@ class Environment(object):
# Concretize any new user specs that we haven't concretized yet
arguments, root_specs = [], []
- for uspec, uspec_constraints in zip(
- self.user_specs, self.user_specs.specs_as_constraints
- ):
+ for uspec, uspec_constraints in zip(self.user_specs, self.user_specs.specs_as_constraints):
if uspec not in old_concretized_user_specs:
root_specs.append(uspec)
arguments.append((uspec_constraints, tests))
# Ensure we don't try to bootstrap clingo in parallel
- if spack.config.get('config:concretizer') == 'clingo':
+ if spack.config.get("config:concretizer") == "clingo":
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_clingo_importable_or_raise()
@@ -1293,26 +1296,22 @@ class Environment(object):
# Solve the environment in parallel on Linux
start = time.time()
- max_processes = min(
- len(arguments), # Number of specs
- 16 # Cap on 16 cores
- )
+ max_processes = min(len(arguments), 16)  # number of specs, capped at 16 processes
# TODO: revisit this print as soon as darwin is parallel too
- msg = 'Starting concretization'
- if sys.platform != 'darwin':
+ msg = "Starting concretization"
+ if sys.platform != "darwin":
pool_size = spack.util.parallel.num_processes(max_processes=max_processes)
if pool_size > 1:
- msg = msg + ' pool with {0} processes'.format(pool_size)
+ msg = msg + " pool with {0} processes".format(pool_size)
tty.msg(msg)
concretized_root_specs = spack.util.parallel.parallel_map(
- _concretize_task, arguments, max_processes=max_processes,
- debug=tty.is_debug()
+ _concretize_task, arguments, max_processes=max_processes, debug=tty.is_debug()
)
finish = time.time()
- tty.msg('Environment concretized in %.2f seconds.' % (finish - start))
+ tty.msg("Environment concretized in %.2f seconds." % (finish - start))
by_hash = {}
for abstract, concrete in zip(root_specs, concretized_root_specs):
self._add_concrete_spec(abstract, concrete)
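The solver pool above is capped at min(number of pending specs, 16), and the adjoining TODO notes that macOS still concretizes serially. A minimal sketch, not Spack's spack.util.parallel implementation, of what a bounded parallel map of that shape looks like:

    import multiprocessing

    def bounded_parallel_map(fn, arguments, max_processes=16):
        # fn must be a picklable, top-level function (as _concretize_task is).
        pool_size = min(len(arguments), max_processes)
        if pool_size <= 1:
            return [fn(a) for a in arguments]
        with multiprocessing.Pool(pool_size) as pool:
            return pool.map(fn, arguments)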
@@ -1328,17 +1327,17 @@ class Environment(object):
for h in self.specs_by_hash:
current_spec, computed_spec = self.specs_by_hash[h], by_hash[h]
for node in computed_spec.traverse():
- test_deps = node.dependencies(deptype='test')
+ test_deps = node.dependencies(deptype="test")
for test_dependency in test_deps:
if test_dependency in current_spec[node.name]:
continue
current_spec[node.name].add_dependency_edge(
- test_dependency.copy(), deptype='test'
+ test_dependency.copy(), deptype="test"
)
results = [
- (abstract, self.specs_by_hash[h]) for abstract, h in
- zip(self.concretized_user_specs, self.concretized_order)
+ (abstract, self.specs_by_hash[h])
+ for abstract, h in zip(self.concretized_user_specs, self.concretized_order)
]
return results
@@ -1356,10 +1355,12 @@ class Environment(object):
result of concretizing the provided ``user_spec``
"""
if self.unify is True:
- msg = 'cannot install a single spec in an environment that is ' \
- 'configured to be concretized together. Run instead:\n\n' \
- ' $ spack add <spec>\n' \
- ' $ spack install\n'
+ msg = (
+ "cannot install a single spec in an environment that is "
+ "configured to be concretized together. Run instead:\n\n"
+ " $ spack add <spec>\n"
+ " $ spack install\n"
+ )
raise SpackEnvironmentError(msg)
spec = Spec(user_spec)
@@ -1370,9 +1371,7 @@ class Environment(object):
else:
# spec might be in the user_specs, but not installed.
# TODO: Redo name-based comparison for old style envs
- spec = next(
- s for s in self.user_specs if s.satisfies(user_spec)
- )
+ spec = next(s for s in self.user_specs if s.satisfies(user_spec))
concrete = self.specs_by_hash.get(spec.dag_hash())
if not concrete:
concrete = spec.concretized(tests=tests)
@@ -1383,12 +1382,12 @@ class Environment(object):
@property
def default_view(self):
if not self.views:
- raise SpackEnvironmentError(
- "{0} does not have a view enabled".format(self.name))
+ raise SpackEnvironmentError("{0} does not have a view enabled".format(self.name))
if default_view_name not in self.views:
raise SpackEnvironmentError(
- "{0} does not have a default view enabled".format(self.name))
+ "{0} does not have a default view enabled".format(self.name)
+ )
return self.views[default_view_name]
@@ -1407,8 +1406,7 @@ class Environment(object):
def regenerate_views(self):
if not self.views:
- tty.debug("Skip view update, this environment does not"
- " maintain a view")
+ tty.debug("Skip view update, this environment does not" " maintain a view")
return
concretized_root_specs = [s for _, s in self.concretized_specs()]
@@ -1426,13 +1424,12 @@ class Environment(object):
msg = '{0} in view "{1}"'
tty.debug(msg.format(spec.name, view_name))
- except (spack.repo.UnknownPackageError,
- spack.repo.UnknownNamespaceError) as e:
+ except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
tty.warn(
- 'Environment %s includes out of date packages or repos. '
- 'Loading the environment view will require reconcretization.'
- % self.name)
+ "Environment %s includes out of date packages or repos. "
+ "Loading the environment view will require reconcretization." % self.name
+ )
def _env_modifications_for_default_view(self, reverse=False):
all_mods = spack.util.environment.EnvironmentModifications()
@@ -1441,9 +1438,8 @@ class Environment(object):
errors = []
for _, root_spec in self.concretized_specs():
- if (root_spec in self.default_view and
- root_spec.installed and root_spec.package):
- for spec in root_spec.traverse(deptype='run', root=True):
+ if root_spec in self.default_view and root_spec.installed and root_spec.package:
+ for spec in root_spec.traverse(deptype="run", root=True):
if spec.name in visited:
# It is expected that only one instance of the package
# can be added to the environment - do not attempt to
@@ -1459,11 +1455,11 @@ class Environment(object):
visited.add(spec.name)
try:
- mods = uenv.environment_modifications_for_spec(
- spec, self.default_view)
+ mods = uenv.environment_modifications_for_spec(spec, self.default_view)
except Exception as e:
- msg = ("couldn't get environment settings for %s"
- % spec.format("{name}@{version} /{hash:7}"))
+ msg = "couldn't get environment settings for %s" % spec.format(
+ "{name}@{version} /{hash:7}"
+ )
errors.append((msg, str(e)))
continue
@@ -1484,8 +1480,7 @@ class Environment(object):
# No default view to add to shell
return env_mod
- env_mod.extend(uenv.unconditional_environment_modifications(
- self.default_view))
+ env_mod.extend(uenv.unconditional_environment_modifications(self.default_view))
mods, errors = self._env_modifications_for_default_view()
env_mod.extend(mods)
@@ -1512,8 +1507,7 @@ class Environment(object):
# No default view to add to shell
return env_mod
- env_mod.extend(uenv.unconditional_environment_modifications(
- self.default_view).reversed())
+ env_mod.extend(uenv.unconditional_environment_modifications(self.default_view).reversed())
mods, _ = self._env_modifications_for_default_view(reverse=True)
env_mod.extend(mods)
@@ -1553,11 +1547,11 @@ class Environment(object):
root_spec = self.specs_by_hash[dag_hash]
specs_to_check.update(root_spec.traverse(root=True))
- changed_dev_specs = set(s for s in specs_to_check if
- _is_dev_spec_and_has_changed(s))
+ changed_dev_specs = set(s for s in specs_to_check if _is_dev_spec_and_has_changed(s))
- return [s.dag_hash() for s in specs_to_check if
- _spec_needs_overwrite(s, changed_dev_specs)]
+ return [
+ s.dag_hash() for s in specs_to_check if _spec_needs_overwrite(s, changed_dev_specs)
+ ]
def _install_log_links(self, spec):
if not spec.external:
@@ -1568,7 +1562,8 @@ class Environment(object):
with fs.working_dir(self.path):
# Link the resulting log file into logs dir
build_log_link = os.path.join(
- log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7)))
+ log_path, "%s-%s.log" % (spec.name, spec.dag_hash(7))
+ )
if os.path.lexists(build_log_link):
os.remove(build_log_link)
symlink(spec.package.build_log_path, build_log_link)
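_install_log_links() symlinks each installed package's build log into the environment's logs directory under a "<name>-<dag hash prefix>.log" file name. A tiny example of the link path, assuming the environment subdirectory is ".spack-env" and using an invented hash:

    import os

    log_path = os.path.join(".spack-env", "logs")  # assumed env_subdir_name value
    name, hash7 = "zlib", "abc1234"                # hypothetical package and hash prefix
    print(os.path.join(log_path, "%s-%s.log" % (name, hash7)))
    # -> .spack-env/logs/zlib-abc1234.log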
@@ -1583,8 +1578,7 @@ class Environment(object):
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
if not spec.installed or (
- spec.satisfies('dev_path=*') or
- spec.satisfies('^dev_path=*')
+ spec.satisfies("dev_path=*") or spec.satisfies("^dev_path=*")
):
uninstalled.append(spec)
else:
@@ -1608,7 +1602,7 @@ class Environment(object):
self.install_specs(None, **install_args)
def install_specs(self, specs=None, **install_args):
- tty.debug('Assessing installation status of environment packages')
+ tty.debug("Assessing installation status of environment packages")
# If "spack install" is invoked repeatedly for a large environment
# where all specs are already installed, the operation can take
# a large amount of time due to repeatedly acquiring and releasing
@@ -1630,21 +1624,19 @@ class Environment(object):
spack.store.db.update_explicit(spec, True)
if not specs_to_install:
- tty.msg('All of the packages are already installed')
+ tty.msg("All of the packages are already installed")
else:
- tty.debug('Processing {0} uninstalled specs'.format(len(specs_to_install)))
+ tty.debug("Processing {0} uninstalled specs".format(len(specs_to_install)))
specs_to_overwrite = self._get_overwrite_specs()
- tty.debug('{0} specs need to be overwritten'.format(
- len(specs_to_overwrite)))
+ tty.debug("{0} specs need to be overwritten".format(len(specs_to_overwrite)))
- install_args['overwrite'] = install_args.get(
- 'overwrite', []) + specs_to_overwrite
+ install_args["overwrite"] = install_args.get("overwrite", []) + specs_to_overwrite
installs = []
for spec in specs_to_install:
pkg_install_args = install_args.copy()
- pkg_install_args['explicit'] = spec in self.roots()
+ pkg_install_args["explicit"] = spec in self.roots()
installs.append((spec.package, pkg_install_args))
try:
@@ -1658,8 +1650,9 @@ class Environment(object):
try:
self._install_log_links(spec)
except OSError as e:
- tty.warn('Could not install log links for {0}: {1}'
- .format(spec.name, str(e)))
+ tty.warn(
+ "Could not install log links for {0}: {1}".format(spec.name, str(e))
+ )
with self.write_transaction():
self.regenerate_views()
@@ -1672,8 +1665,8 @@ class Environment(object):
spec = self.specs_by_hash[h]
except KeyError:
tty.warn(
- 'Environment %s appears to be corrupt: missing spec '
- '"%s"' % (self.name, h))
+ "Environment %s appears to be corrupt: missing spec " '"%s"' % (self.name, h)
+ )
continue
all_specs.update(spec.traverse())
@@ -1769,9 +1762,9 @@ class Environment(object):
elif len(matches) == 1:
return list(matches.keys())[0]
- root_matches = dict((concrete, abstract)
- for concrete, abstract in matches.items()
- if abstract)
+ root_matches = dict(
+ (concrete, abstract) for concrete, abstract in matches.items() if abstract
+ )
if len(root_matches) == 1:
return list(root_matches.items())[0][0]
@@ -1782,22 +1775,23 @@ class Environment(object):
# spec will most-succinctly summarize the difference between them
# (and the user can enter one of these to disambiguate)
match_strings = []
- fmt_str = '{hash:7} ' + spack.spec.default_format
+ fmt_str = "{hash:7} " + spack.spec.default_format
for concrete, abstract in matches.items():
if abstract:
- s = 'Root spec %s\n %s' % (abstract, concrete.format(fmt_str))
+ s = "Root spec %s\n %s" % (abstract, concrete.format(fmt_str))
else:
- s = 'Dependency spec\n %s' % concrete.format(fmt_str)
+ s = "Dependency spec\n %s" % concrete.format(fmt_str)
match_strings.append(s)
- matches_str = '\n'.join(match_strings)
+ matches_str = "\n".join(match_strings)
- msg = ("{0} matches multiple specs in the environment {1}: \n"
- "{2}".format(str(spec), self.name, matches_str))
+ msg = "{0} matches multiple specs in the environment {1}: \n" "{2}".format(
+ str(spec), self.name, matches_str
+ )
raise SpackEnvironmentError(msg)
def removed_specs(self):
"""Tuples of (user spec, concrete spec) for all specs that will be
- removed on nexg concretize."""
+ removed on next concretize."""
needed = set()
for s, c in self.concretized_specs():
if s in self.user_specs:
@@ -1820,8 +1814,7 @@ class Environment(object):
for spec_hash in self.concretized_order:
spec = self.specs_by_hash[spec_hash]
- specs = (spec.traverse(deptype=('link', 'run'))
- if recurse_dependencies else (spec,))
+ specs = spec.traverse(deptype=("link", "run")) if recurse_dependencies else (spec,)
spec_list.extend(specs)
@@ -1839,26 +1832,20 @@ class Environment(object):
spec_dict[ht.dag_hash.name] = s.dag_hash()
concrete_specs[dag_hash] = spec_dict
- hash_spec_list = zip(
- self.concretized_order, self.concretized_user_specs)
+ hash_spec_list = zip(self.concretized_order, self.concretized_user_specs)
# this is the lockfile we'll write out
data = {
# metadata about the format
- '_meta': {
- 'file-type': 'spack-lockfile',
- 'lockfile-version': lockfile_format_version,
- 'specfile-version': spack.spec.specfile_format_version
+ "_meta": {
+ "file-type": "spack-lockfile",
+ "lockfile-version": lockfile_format_version,
+ "specfile-version": spack.spec.specfile_format_version,
},
-
# users specs + hashes are the 'roots' of the environment
- 'roots': [{
- 'hash': h,
- 'spec': str(s)
- } for h, s in hash_spec_list],
-
+ "roots": [{"hash": h, "spec": str(s)} for h, s in hash_spec_list],
# Concrete specs by hash, including dependencies
- 'concrete_specs': concrete_specs,
+ "concrete_specs": concrete_specs,
}
return data
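The data dictionary returned above is what gets serialized into the environment's spack.lock file. A rough sketch of its shape with placeholder values (the real concrete_specs entries are full per-spec node dictionaries, and the version numbers come from lockfile_format_version and spack.spec.specfile_format_version):

    example_lock = {
        "_meta": {
            "file-type": "spack-lockfile",
            "lockfile-version": 4,   # placeholder; the real value is lockfile_format_version
            "specfile-version": 3,   # placeholder; spack.spec.specfile_format_version
        },
        "roots": [
            {"hash": "aaaaaaaaaaaaaaaa", "spec": "zlib@1.2.13"},  # invented hash and spec
        ],
        "concrete_specs": {
            "aaaaaaaaaaaaaaaa": {},  # full node dictionary for each concrete spec
        },
    }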
@@ -1867,16 +1854,16 @@ class Environment(object):
"""Read a lockfile from a file or from a raw string."""
lockfile_dict = sjson.load(file_or_json)
self._read_lockfile_dict(lockfile_dict)
- return lockfile_dict['_meta']['lockfile-version']
+ return lockfile_dict["_meta"]["lockfile-version"]
def _read_lockfile_dict(self, d):
"""Read a lockfile dictionary into this environment."""
self.specs_by_hash = {}
- roots = d['roots']
- self.concretized_user_specs = [Spec(r['spec']) for r in roots]
- self.concretized_order = [r['hash'] for r in roots]
- json_specs_by_hash = d['concrete_specs']
+ roots = d["roots"]
+ self.concretized_user_specs = [Spec(r["spec"]) for r in roots]
+ self.concretized_order = [r["hash"] for r in roots]
+ json_specs_by_hash = d["concrete_specs"]
# Track specs by their lockfile key. Currently spack uses the finest
# grained hash as the lockfile key, while older formats used the build
@@ -1898,10 +1885,8 @@ class Environment(object):
# Second pass: For each spec, get its dependencies from the node dict
# and add them to the spec
for lockfile_key, node_dict in json_specs_by_hash.items():
- for _, dep_hash, deptypes, _ in (
- Spec.dependencies_from_node_dict(node_dict)):
- specs_by_hash[lockfile_key]._add_dependency(
- specs_by_hash[dep_hash], deptypes)
+ for _, dep_hash, deptypes, _ in Spec.dependencies_from_node_dict(node_dict):
+ specs_by_hash[lockfile_key]._add_dependency(specs_by_hash[dep_hash], deptypes)
# Traverse the root specs one at a time in the order they appear.
# The first time we see each DAG hash, that's the one we want to
@@ -1916,8 +1901,9 @@ class Environment(object):
# Now make sure concretized_order and our internal specs dict
# contains the keys used by modern spack (i.e. the dag_hash
# that includes build deps and package hash).
- self.concretized_order = [specs_by_hash[h_key].dag_hash()
- for h_key in self.concretized_order]
+ self.concretized_order = [
+ specs_by_hash[h_key].dag_hash() for h_key in self.concretized_order
+ ]
for spec_dag_hash in self.concretized_order:
self.specs_by_hash[spec_dag_hash] = first_seen[spec_dag_hash]
@@ -1935,12 +1921,14 @@ class Environment(object):
"""
# Warn that environments are not in the latest format.
if not is_latest_format(self.manifest_path):
- ver = '.'.join(str(s) for s in spack.spack_version_info[:2])
- msg = ('The environment "{}" is written to disk in a deprecated format. '
- 'Please update it using:\n\n'
- '\tspack env update {}\n\n'
- 'Note that versions of Spack older than {} may not be able to '
- 'use the updated configuration.')
+ ver = ".".join(str(s) for s in spack.spack_version_info[:2])
+ msg = (
+ 'The environment "{}" is written to disk in a deprecated format. '
+ "Please update it using:\n\n"
+ "\tspack env update {}\n\n"
+ "Note that versions of Spack older than {} may not be able to "
+ "use the updated configuration."
+ )
tty.warn(msg.format(self.name, self.name, ver))
# ensure path in var/spack/environments
@@ -1956,8 +1944,9 @@ class Environment(object):
for spec in self.new_specs:
for dep in spec.traverse():
if not dep.concrete:
- raise ValueError('specs passed to environment.write() '
- 'must be concrete!')
+ raise ValueError(
+ "specs passed to environment.write() " "must be concrete!"
+ )
root = os.path.join(self.repos_path, dep.namespace)
repo = spack.repo.create_or_construct(root, dep.namespace)
@@ -2006,60 +1995,64 @@ class Environment(object):
# The primary list is handled differently
continue
- active_yaml_lists = [x for x in yaml_dict.get('definitions', [])
- if name in x and
- _eval_conditional(x.get('when', 'True'))]
+ active_yaml_lists = [
+ x
+ for x in yaml_dict.get("definitions", [])
+ if name in x and _eval_conditional(x.get("when", "True"))
+ ]
# Remove any specs in yaml that are not in internal representation
for ayl in active_yaml_lists:
# If it's not a string, it's a matrix. Those can't have changed
# If it is a string that starts with '$', it's a reference.
# Those also can't have changed.
- ayl[name][:] = [s for s in ayl.setdefault(name, [])
- if (not isinstance(s, six.string_types)) or
- s.startswith('$') or Spec(s) in speclist.specs]
+ ayl[name][:] = [
+ s
+ for s in ayl.setdefault(name, [])
+ if (not isinstance(s, six.string_types))
+ or s.startswith("$")
+ or Spec(s) in speclist.specs
+ ]
# Put the new specs into the first active list from the yaml
- new_specs = [entry for entry in speclist.yaml_list
- if isinstance(entry, six.string_types) and
- not any(entry in ayl[name]
- for ayl in active_yaml_lists)]
+ new_specs = [
+ entry
+ for entry in speclist.yaml_list
+ if isinstance(entry, six.string_types)
+ and not any(entry in ayl[name] for ayl in active_yaml_lists)
+ ]
list_for_new_specs = active_yaml_lists[0].setdefault(name, [])
list_for_new_specs[:] = list_for_new_specs + new_specs
# put the new user specs in the YAML.
# This can be done directly because there can't be multiple definitions
# nor when clauses for `specs` list.
- yaml_spec_list = yaml_dict.setdefault(user_speclist_name,
- [])
+ yaml_spec_list = yaml_dict.setdefault(user_speclist_name, [])
yaml_spec_list[:] = self.user_specs.yaml_list
# Construct YAML representation of view
default_name = default_view_name
if self.views and len(self.views) == 1 and default_name in self.views:
path = self.default_view.root
- if self.default_view == ViewDescriptor(self.path,
- self.view_path_default):
+ if self.default_view == ViewDescriptor(self.path, self.view_path_default):
view = True
elif self.default_view == ViewDescriptor(self.path, path):
view = path
else:
- view = dict((name, view.to_dict())
- for name, view in self.views.items())
+ view = dict((name, view.to_dict()) for name, view in self.views.items())
elif self.views:
- view = dict((name, view.to_dict())
- for name, view in self.views.items())
+ view = dict((name, view.to_dict()) for name, view in self.views.items())
else:
view = False
- yaml_dict['view'] = view
+ yaml_dict["view"] = view
if self.dev_specs:
# Remove entries that are mirroring defaults
write_dev_specs = copy.deepcopy(self.dev_specs)
for name, entry in write_dev_specs.items():
- if entry['path'] == name:
- del entry['path']
- yaml_dict['develop'] = write_dev_specs
+ if entry["path"] == name:
+ del entry["path"]
+ yaml_dict["develop"] = write_dev_specs
else:
- yaml_dict.pop('develop', None)
+ yaml_dict.pop("develop", None)
# Remove yaml sections that are shadowing defaults
# construct garbage path to ensure we don't find a manifest by accident
@@ -2095,8 +2088,7 @@ class Environment(object):
def yaml_equivalent(first, second):
- """Returns whether two spack yaml items are equivalent, including overrides
- """
+ """Returns whether two spack yaml items are equivalent, including overrides"""
if isinstance(first, dict):
return isinstance(second, dict) and _equiv_dict(first, second)
elif isinstance(first, list):
@@ -2106,24 +2098,21 @@ def yaml_equivalent(first, second):
def _equiv_list(first, second):
- """Returns whether two spack yaml lists are equivalent, including overrides
- """
+ """Returns whether two spack yaml lists are equivalent, including overrides"""
if len(first) != len(second):
return False
return all(yaml_equivalent(f, s) for f, s in zip(first, second))
def _equiv_dict(first, second):
- """Returns whether two spack yaml dicts are equivalent, including overrides
- """
+ """Returns whether two spack yaml dicts are equivalent, including overrides"""
if len(first) != len(second):
return False
- same_values = all(yaml_equivalent(fv, sv)
- for fv, sv in zip(first.values(), second.values()))
+ same_values = all(yaml_equivalent(fv, sv) for fv, sv in zip(first.values(), second.values()))
same_keys_with_same_overrides = all(
- fk == sk and getattr(fk, 'override', False) == getattr(sk, 'override',
- False)
- for fk, sk in zip(first.keys(), second.keys()))
+ fk == sk and getattr(fk, "override", False) == getattr(sk, "override", False)
+ for fk, sk in zip(first.keys(), second.keys())
+ )
return same_values and same_keys_with_same_overrides
@@ -2134,16 +2123,19 @@ def display_specs(concretized_specs):
concretized_specs (list): list of specs returned by
`Environment.concretize()`
"""
+
def _tree_to_display(spec):
return spec.tree(
recurse_dependencies=True,
status_fn=spack.spec.Spec.install_status,
- hashlen=7, hashes=True)
+ hashlen=7,
+ hashes=True,
+ )
for user_spec, concrete_spec in concretized_specs:
- tty.msg('Concretized {0}'.format(user_spec))
+ tty.msg("Concretized {0}".format(user_spec))
sys.stdout.write(_tree_to_display(concrete_spec))
- print('')
+ print("")
def _concretize_from_constraints(spec_constraints, tests=False):
@@ -2151,9 +2143,9 @@ def _concretize_from_constraints(spec_constraints, tests=False):
# Get the named spec even if out of order
root_spec = [s for s in spec_constraints if s.name]
if len(root_spec) != 1:
- m = 'The constraints %s are not a valid spec ' % spec_constraints
- m += 'concretization target. all specs must have a single name '
- m += 'constraint for concretization.'
+ m = "The constraints %s are not a valid spec " % spec_constraints
+ m += "concretization target. all specs must have a single name "
+ m += "constraint for concretization."
raise InvalidSpecConstraintError(m)
spec_constraints.remove(root_spec[0])
@@ -2167,18 +2159,20 @@ def _concretize_from_constraints(spec_constraints, tests=False):
try:
return s.concretized(tests=tests)
except spack.spec.InvalidDependencyError as e:
- invalid_deps_string = ['^' + d for d in e.invalid_deps]
- invalid_deps = [c for c in spec_constraints
- if any(c.satisfies(invd, strict=True)
- for invd in invalid_deps_string)]
+ invalid_deps_string = ["^" + d for d in e.invalid_deps]
+ invalid_deps = [
+ c
+ for c in spec_constraints
+ if any(c.satisfies(invd, strict=True) for invd in invalid_deps_string)
+ ]
if len(invalid_deps) != len(invalid_deps_string):
raise e
invalid_constraints.extend(invalid_deps)
except UnknownVariantError as e:
invalid_variants = e.unknown_variants
- inv_variant_constraints = [c for c in spec_constraints
- if any(name in c.variants
- for name in invalid_variants)]
+ inv_variant_constraints = [
+ c for c in spec_constraints if any(name in c.variants for name in invalid_variants)
+ ]
if len(inv_variant_constraints) != len(invalid_variants):
raise e
invalid_constraints.extend(inv_variant_constraints)
@@ -2265,12 +2259,14 @@ def update_yaml(manifest, backup_file):
return False
# Copy environment to a backup file and update it
- msg = ('backup file "{0}" already exists on disk. Check its content '
- 'and remove it before trying to update again.')
+ msg = (
+ 'backup file "{0}" already exists on disk. Check its content '
+ "and remove it before trying to update again."
+ )
assert not os.path.exists(backup_file), msg.format(backup_file)
shutil.copy(manifest, backup_file)
- with open(manifest, 'w') as f:
+ with open(manifest, "w") as f:
syaml.dump_config(data, f)
return True
@@ -2284,12 +2280,11 @@ def _top_level_key(data):
Returns:
Either 'spack' or 'env'
"""
- msg = ('cannot find top level attribute "spack" or "env"'
- 'in the environment')
- assert any(x in data for x in ('spack', 'env')), msg
- if 'spack' in data:
- return 'spack'
- return 'env'
+ msg = 'cannot find top level attribute "spack" or "env" ' "in the environment"
+ assert any(x in data for x in ("spack", "env")), msg
+ if "spack" in data:
+ return "spack"
+ return "env"
def is_latest_format(manifest):
@@ -2311,7 +2306,7 @@ def is_latest_format(manifest):
@contextlib.contextmanager
def no_active_environment():
"""Deactivate the active environment for the duration of the context. Has no
- effect when there is no active environment."""
+ effect when there is no active environment."""
env = active_environment()
try:
deactivate()
diff --git a/lib/spack/spack/environment/shell.py b/lib/spack/spack/environment/shell.py
index 377c760930..42d997ec96 100644
--- a/lib/spack/spack/environment/shell.py
+++ b/lib/spack/spack/environment/shell.py
@@ -15,83 +15,83 @@ from spack.util.environment import EnvironmentModifications
def activate_header(env, shell, prompt=None):
# Construct the commands to run
- cmds = ''
- if shell == 'csh':
+ cmds = ""
+ if shell == "csh":
# TODO: figure out how to make color work for csh
- cmds += 'setenv SPACK_ENV %s;\n' % env.path
+ cmds += "setenv SPACK_ENV %s;\n" % env.path
cmds += 'alias despacktivate "spack env deactivate";\n'
if prompt:
- cmds += 'if (! $?SPACK_OLD_PROMPT ) '
+ cmds += "if (! $?SPACK_OLD_PROMPT ) "
cmds += 'setenv SPACK_OLD_PROMPT "${prompt}";\n'
cmds += 'set prompt="%s ${prompt}";\n' % prompt
- elif shell == 'fish':
- if 'color' in os.getenv('TERM', '') and prompt:
- prompt = colorize('@G{%s} ' % prompt, color=True)
-
- cmds += 'set -gx SPACK_ENV %s;\n' % env.path
- cmds += 'function despacktivate;\n'
- cmds += ' spack env deactivate;\n'
- cmds += 'end;\n'
+ elif shell == "fish":
+ if "color" in os.getenv("TERM", "") and prompt:
+ prompt = colorize("@G{%s} " % prompt, color=True)
+
+ cmds += "set -gx SPACK_ENV %s;\n" % env.path
+ cmds += "function despacktivate;\n"
+ cmds += " spack env deactivate;\n"
+ cmds += "end;\n"
#
# NOTE: We're not changing the fish_prompt function (which is fish's
# solution to the PS1 variable) here. This is a bit fiddly, and easy to
# screw up => spend time researching a solution. Feedback welcome.
#
- elif shell == 'bat':
+ elif shell == "bat":
# TODO: Color
cmds += 'set "SPACK_ENV=%s"\n' % env.path
# TODO: despacktivate
# TODO: prompt
else:
- if 'color' in os.getenv('TERM', '') and prompt:
- prompt = colorize('@G{%s}' % prompt, color=True)
+ if "color" in os.getenv("TERM", "") and prompt:
+ prompt = colorize("@G{%s}" % prompt, color=True)
- cmds += 'export SPACK_ENV=%s;\n' % env.path
+ cmds += "export SPACK_ENV=%s;\n" % env.path
cmds += "alias despacktivate='spack env deactivate';\n"
if prompt:
- cmds += 'if [ -z ${SPACK_OLD_PS1+x} ]; then\n'
- cmds += ' if [ -z ${PS1+x} ]; then\n'
+ cmds += "if [ -z ${SPACK_OLD_PS1+x} ]; then\n"
+ cmds += " if [ -z ${PS1+x} ]; then\n"
cmds += " PS1='$$$$';\n"
- cmds += ' fi;\n'
+ cmds += " fi;\n"
cmds += ' export SPACK_OLD_PS1="${PS1}";\n'
- cmds += 'fi;\n'
+ cmds += "fi;\n"
cmds += 'export PS1="%s ${PS1}";\n' % prompt
return cmds
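activate_header() emits small shell snippets for csh, fish, bat, or a POSIX default, setting SPACK_ENV, defining despacktivate, and optionally adjusting the prompt. A hedged usage sketch; the environment object is a stand-in, and the expected output shown is the POSIX branch with no prompt:

    from spack.environment.shell import activate_header  # assumes Spack's lib dir is importable

    class FakeEnv(object):  # minimal stand-in for an Environment, illustration only
        path = "/tmp/myenv"

    print(activate_header(FakeEnv(), shell="sh", prompt=None))
    # export SPACK_ENV=/tmp/myenv;
    # alias despacktivate='spack env deactivate';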
def deactivate_header(shell):
- cmds = ''
- if shell == 'csh':
- cmds += 'unsetenv SPACK_ENV;\n'
- cmds += 'if ( $?SPACK_OLD_PROMPT ) '
+ cmds = ""
+ if shell == "csh":
+ cmds += "unsetenv SPACK_ENV;\n"
+ cmds += "if ( $?SPACK_OLD_PROMPT ) "
cmds += 'set prompt="$SPACK_OLD_PROMPT" && '
- cmds += 'unsetenv SPACK_OLD_PROMPT;\n'
- cmds += 'unalias despacktivate;\n'
- elif shell == 'fish':
- cmds += 'set -e SPACK_ENV;\n'
- cmds += 'functions -e despacktivate;\n'
+ cmds += "unsetenv SPACK_OLD_PROMPT;\n"
+ cmds += "unalias despacktivate;\n"
+ elif shell == "fish":
+ cmds += "set -e SPACK_ENV;\n"
+ cmds += "functions -e despacktivate;\n"
#
# NOTE: Not changing fish_prompt (above) => no need to restore it here.
#
- elif shell == 'bat':
+ elif shell == "bat":
# TODO: Color
cmds += 'set "SPACK_ENV="\n'
# TODO: despacktivate
# TODO: prompt
else:
- cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
- cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
- cmds += 'fi;\n'
- cmds += 'alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n'
- cmds += 'if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n'
- cmds += ' if [ "$SPACK_OLD_PS1" = \'$$$$\' ]; then\n'
- cmds += ' unset PS1; export PS1;\n'
- cmds += ' else\n'
+ cmds += "if [ ! -z ${SPACK_ENV+x} ]; then\n"
+ cmds += "unset SPACK_ENV; export SPACK_ENV;\n"
+ cmds += "fi;\n"
+ cmds += "alias despacktivate > /dev/null 2>&1 && unalias despacktivate;\n"
+ cmds += "if [ ! -z ${SPACK_OLD_PS1+x} ]; then\n"
+ cmds += " if [ \"$SPACK_OLD_PS1\" = '$$$$' ]; then\n"
+ cmds += " unset PS1; export PS1;\n"
+ cmds += " else\n"
cmds += ' export PS1="$SPACK_OLD_PS1";\n'
- cmds += ' fi;\n'
- cmds += ' unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n'
- cmds += 'fi;\n'
+ cmds += " fi;\n"
+ cmds += " unset SPACK_OLD_PS1; export SPACK_OLD_PS1;\n"
+ cmds += "fi;\n"
return cmds
@@ -128,16 +128,15 @@ def activate(env, use_env_repo=False, add_view=True):
if add_view and ev.default_view_name in env.views:
with spack.store.db.read_transaction():
env.add_default_view_to_env(env_mods)
- except (spack.repo.UnknownPackageError,
- spack.repo.UnknownNamespaceError) as e:
+ except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.error(e)
tty.die(
- 'Environment view is broken due to a missing package or repo.\n',
- ' To activate without views enabled, activate with:\n',
- ' spack env activate -V {0}\n'.format(env.name),
- ' To remove it and resolve the issue, '
- 'force concretize with the command:\n',
- ' spack -e {0} concretize --force'.format(env.name))
+ "Environment view is broken due to a missing package or repo.\n",
+ " To activate without views enabled, activate with:\n",
+ " spack env activate -V {0}\n".format(env.name),
+ " To remove it and resolve the issue, " "force concretize with the command:\n",
+ " spack -e {0} concretize --force".format(env.name),
+ )
return env_mods
@@ -164,11 +163,12 @@ def deactivate():
try:
with spack.store.db.read_transaction():
active.rm_default_view_from_env(env_mods)
- except (spack.repo.UnknownPackageError,
- spack.repo.UnknownNamespaceError) as e:
+ except (spack.repo.UnknownPackageError, spack.repo.UnknownNamespaceError) as e:
tty.warn(e)
- tty.warn('Could not fully deactivate view due to missing package '
- 'or repo, shell environment may be corrupt.')
+ tty.warn(
+ "Could not fully deactivate view due to missing package "
+ "or repo, shell environment may be corrupt."
+ )
ev.deactivate()
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index bcb2aeb218..c35ce5c99a 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -17,7 +17,7 @@ debug = 0
class SpackError(Exception):
"""This is the superclass for all Spack errors.
- Subclasses can be found in the modules they have to do with.
+ Subclasses can be found in the modules they have to do with.
"""
def __init__(self, message, long_message=None):
@@ -54,7 +54,7 @@ class SpackError(Exception):
tty.error(self.message)
if self.long_message:
sys.stderr.write(self.long_message)
- sys.stderr.write('\n')
+ sys.stderr.write("\n")
# stack trace, etc. in debug mode.
if debug:
@@ -81,10 +81,9 @@ class SpackError(Exception):
def __repr__(self):
args = [repr(self.message), repr(self.long_message)]
- args = ','.join(args)
- qualified_name = inspect.getmodule(
- self).__name__ + '.' + type(self).__name__
- return qualified_name + '(' + args + ')'
+ args = ",".join(args)
+ qualified_name = inspect.getmodule(self).__name__ + "." + type(self).__name__
+ return qualified_name + "(" + args + ")"
def __reduce__(self):
return type(self), (self.message, self.long_message)
@@ -102,9 +101,9 @@ class NoLibrariesError(SpackError):
def __init__(self, message_or_name, prefix=None):
super(NoLibrariesError, self).__init__(
- message_or_name if prefix is None else
- 'Unable to locate {0} libraries in {1}'.format(
- message_or_name, prefix)
+ message_or_name
+ if prefix is None
+ else "Unable to locate {0} libraries in {1}".format(message_or_name, prefix)
)
@@ -123,10 +122,12 @@ class UnsatisfiableSpecError(SpecError):
For original concretizer, provide the requirement that was violated when
raising.
"""
+
def __init__(self, provided, required, constraint_type):
# This is only the entrypoint for old concretizer errors
super(UnsatisfiableSpecError, self).__init__(
- "%s does not satisfy %s" % (provided, required))
+ "%s does not satisfy %s" % (provided, required)
+ )
self.provided = provided
self.required = required
diff --git a/lib/spack/spack/extensions.py b/lib/spack/spack/extensions.py
index 8aa99ec8b5..7aed7ff6c8 100644
--- a/lib/spack/spack/extensions.py
+++ b/lib/spack/spack/extensions.py
@@ -17,14 +17,14 @@ import spack.config
import spack.error
import spack.util.path
-_extension_regexp = re.compile(r'spack-(\w[-\w]*)$')
+_extension_regexp = re.compile(r"spack-(\w[-\w]*)$")
# TODO: For consistency we should use spack.cmd.python_name(), but
# currently this would create a circular relationship between
# spack.cmd and spack.extensions.
def _python_name(cmd_name):
- return cmd_name.replace('-', '_')
+ return cmd_name.replace("-", "_")
def extension_name(path):
@@ -40,8 +40,7 @@ def extension_name(path):
ExtensionNamingError: if path does not match the expected format
for a Spack command extension.
"""
- regexp_match = re.search(_extension_regexp,
- os.path.basename(os.path.normpath(path)))
+ regexp_match = re.search(_extension_regexp, os.path.basename(os.path.normpath(path)))
if not regexp_match:
raise ExtensionNamingError(path)
return regexp_match.group(1)
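extension_name() pulls the extension's name out of a directory whose basename matches "spack-<name>", and _python_name() then turns dashes into underscores for the module path. A small example with an invented path:

    import os
    import re

    _extension_regexp = re.compile(r"spack-(\w[-\w]*)$")

    path = "/home/me/code/spack-my-ext"  # hypothetical extension checkout
    match = re.search(_extension_regexp, os.path.basename(os.path.normpath(path)))
    print(match.group(1))                    # -> my-ext
    print(match.group(1).replace("-", "_"))  # -> my_ext (what _python_name produces)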
@@ -61,26 +60,26 @@ def load_command_extension(command, path):
extension = _python_name(extension_name(path))
# Compute the name of the module we search, exit early if already imported
- cmd_package = '{0}.{1}.cmd'.format(__name__, extension)
+ cmd_package = "{0}.{1}.cmd".format(__name__, extension)
python_name = _python_name(command)
- module_name = '{0}.{1}'.format(cmd_package, python_name)
+ module_name = "{0}.{1}".format(cmd_package, python_name)
if module_name in sys.modules:
return sys.modules[module_name]
# Compute the absolute path of the file to be loaded, along with the
# name of the python module where it will be stored
- cmd_path = os.path.join(path, extension, 'cmd', python_name + '.py')
+ cmd_path = os.path.join(path, extension, "cmd", python_name + ".py")
# Short circuit if the command source file does not exist
if not os.path.exists(cmd_path):
return None
def ensure_package_creation(name):
- package_name = '{0}.{1}'.format(__name__, name)
+ package_name = "{0}.{1}".format(__name__, name)
if package_name in sys.modules:
return
- parts = [path] + name.split('.') + ['__init__.py']
+ parts = [path] + name.split(".") + ["__init__.py"]
init_file = os.path.join(*parts)
if os.path.exists(init_file):
m = llnl.util.lang.load_module_from_file(package_name, init_file)
@@ -98,7 +97,7 @@ def load_command_extension(command, path):
# Create a searchable package for both the root folder of the extension
# and the subfolder containing the commands
ensure_package_creation(extension)
- ensure_package_creation(extension + '.cmd')
+ ensure_package_creation(extension + ".cmd")
module = importlib.import_module(module_name)
sys.modules[module_name] = module
@@ -107,10 +106,8 @@ def load_command_extension(command, path):
def get_extension_paths():
- """Return the list of canonicalized extension paths from config:extensions.
-
- """
- extension_paths = spack.config.get('config:extensions') or []
+ """Return the list of canonicalized extension paths from config:extensions."""
+ extension_paths = spack.config.get("config:extensions") or []
paths = [spack.util.path.canonicalize_path(p) for p in extension_paths]
return paths
@@ -122,7 +119,7 @@ def get_command_paths():
for path in extension_paths:
extension = _python_name(extension_name(path))
- command_paths.append(os.path.join(path, extension, 'cmd'))
+ command_paths.append(os.path.join(path, extension, "cmd"))
return command_paths
@@ -169,7 +166,7 @@ def get_template_dirs():
in extensions.
"""
extension_dirs = get_extension_paths()
- extensions = [os.path.join(x, 'templates') for x in extension_dirs]
+ extensions = [os.path.join(x, "templates") for x in extension_dirs]
return extensions
@@ -177,17 +174,20 @@ class CommandNotFoundError(spack.error.SpackError):
"""Exception class thrown when a requested command is not recognized as
such.
"""
+
def __init__(self, cmd_name):
super(CommandNotFoundError, self).__init__(
- '{0} is not a recognized Spack command or extension command;'
- ' check with `spack commands`.'.format(cmd_name))
+ "{0} is not a recognized Spack command or extension command;"
+ " check with `spack commands`.".format(cmd_name)
+ )
class ExtensionNamingError(spack.error.SpackError):
"""Exception class thrown when a configured extension does not follow
the expected naming convention.
"""
+
def __init__(self, path):
super(ExtensionNamingError, self).__init__(
- '{0} does not match the format for a Spack extension path.'
- .format(path))
+ "{0} does not match the format for a Spack extension path.".format(path)
+ )
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 4fc2d3b449..18230d9ee5 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -60,23 +60,25 @@ from spack.util.string import comma_and, quote
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE = (
"The contents of {subject} look like {content_type}. Either the URL"
" you are trying to use does not exist or you have an internet gateway"
" issue. You can remove the bad archive using 'spack clean"
- " <package>', then try again using the correct URL.")
+ " <package>', then try again using the correct URL."
+)
-def warn_content_type_mismatch(subject, content_type='HTML'):
- tty.warn(CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE.format(
- subject=subject, content_type=content_type))
+def warn_content_type_mismatch(subject, content_type="HTML"):
+ tty.warn(
+ CONTENT_TYPE_MISMATCH_WARNING_TEMPLATE.format(subject=subject, content_type=content_type)
+ )
def _needs_stage(fun):
"""Many methods on fetch strategies require a stage to be set
- using set_stage(). This decorator adds a check for self.stage."""
+ using set_stage(). This decorator adds a check for self.stage."""
@functools.wraps(fun)
def wrapper(self, *args, **kwargs):
@@ -102,6 +104,7 @@ def fetcher(cls):
class FetchStrategy(object):
"""Superclass of all fetch strategies."""
+
#: The URL attribute must be specified either at the package class
#: level, or as a keyword argument to ``version()``. It is used to
#: distinguish fetchers for different versions in the package DSL.
@@ -119,7 +122,7 @@ class FetchStrategy(object):
self.stage = None
# Enable or disable caching for this strategy based on
# 'no_cache' option from version directive.
- self.cache_enabled = not kwargs.pop('no_cache', False)
+ self.cache_enabled = not kwargs.pop("no_cache", False)
self.package = None
@@ -211,10 +214,11 @@ class BundleFetchStrategy(FetchStrategy):
TODO: Remove this class by refactoring resource handling and the link
between composite stages and composite fetch strategies (see #11981).
"""
+
#: There is no associated URL keyword in ``version()`` for no-code
#: packages but this property is required for some strategy-related
#: functions (e.g., check_pkg_attributes).
- url_attr = ''
+ url_attr = ""
def fetch(self):
"""Simply report success -- there is no code to fetch."""
@@ -227,22 +231,21 @@ class BundleFetchStrategy(FetchStrategy):
def source_id(self):
"""BundlePackages don't have a source id."""
- return ''
+ return ""
def mirror_id(self):
"""BundlePackages don't have a mirror id."""
class FetchStrategyComposite(pattern.Composite):
- """Composite for a FetchStrategy object.
- """
+ """Composite for a FetchStrategy object."""
+
matches = FetchStrategy.matches
def __init__(self):
- super(FetchStrategyComposite, self).__init__([
- 'fetch', 'check', 'expand', 'reset', 'archive', 'cachable',
- 'mirror_id'
- ])
+ super(FetchStrategyComposite, self).__init__(
+ ["fetch", "check", "expand", "reset", "archive", "cachable", "mirror_id"]
+ )
def source_id(self):
component_ids = tuple(i.source_id() for i in self)
@@ -261,31 +264,32 @@ class URLFetchStrategy(FetchStrategy):
The destination for the resulting file(s) is the standard stage path.
"""
- url_attr = 'url'
+
+ url_attr = "url"
# these are checksum types. The generic 'checksum' is deprecated for
# specific hash names, but we need it for backward compatibility
- optional_attrs = list(crypto.hashes.keys()) + ['checksum']
+ optional_attrs = list(crypto.hashes.keys()) + ["checksum"]
def __init__(self, url=None, checksum=None, **kwargs):
super(URLFetchStrategy, self).__init__(**kwargs)
# Prefer values in kwargs to the positionals.
- self.url = kwargs.get('url', url)
- self.mirrors = kwargs.get('mirrors', [])
+ self.url = kwargs.get("url", url)
+ self.mirrors = kwargs.get("mirrors", [])
# digest can be set as the first argument, or from an explicit
# kwarg by the hash name.
- self.digest = kwargs.get('checksum', checksum)
+ self.digest = kwargs.get("checksum", checksum)
for h in self.optional_attrs:
if h in kwargs:
self.digest = kwargs[h]
- self.expand_archive = kwargs.get('expand', True)
- self.extra_options = kwargs.get('fetch_options', {})
+ self.expand_archive = kwargs.get("expand", True)
+ self.extra_options = kwargs.get("fetch_options", {})
self._curl = None
- self.extension = kwargs.get('extension', None)
+ self.extension = kwargs.get("extension", None)
if not self.url:
raise ValueError("URLFetchStrategy requires a url for fetching.")
@@ -294,7 +298,7 @@ class URLFetchStrategy(FetchStrategy):
def curl(self):
if not self._curl:
try:
- self._curl = which('curl', required=True)
+ self._curl = which("curl", required=True)
except CommandNotFoundError as exc:
tty.error(str(exc))
return self._curl
@@ -308,8 +312,7 @@ class URLFetchStrategy(FetchStrategy):
# The filename is the digest. A directory is also created based on
# truncating the digest to avoid creating a directory with too many
# entries
- return os.path.sep.join(
- ['archive', self.digest[:2], self.digest])
+ return os.path.sep.join(["archive", self.digest[:2], self.digest])
@property
def candidate_urls(self):
@@ -318,9 +321,9 @@ class URLFetchStrategy(FetchStrategy):
for url in [self.url] + (self.mirrors or []):
# This must be skipped on Windows due to URL encoding
# of ':' characters on filepaths on Windows
- if sys.platform != "win32" and url.startswith('file://'):
- path = urllib_parse.quote(url[len('file://'):])
- url = 'file://' + path
+ if sys.platform != "win32" and url.startswith("file://"):
+ path = urllib_parse.quote(url[len("file://") :])
+ url = "file://" + path
urls.append(url)
return urls
@@ -328,7 +331,7 @@ class URLFetchStrategy(FetchStrategy):
@_needs_stage
def fetch(self):
if self.archive_file:
- tty.debug('Already downloaded {0}'.format(self.archive_file))
+ tty.debug("Already downloaded {0}".format(self.archive_file))
return
url = None
@@ -350,15 +353,15 @@ class URLFetchStrategy(FetchStrategy):
raise FailedDownloadError(url)
def _existing_url(self, url):
- tty.debug('Checking existence of {0}'.format(url))
+ tty.debug("Checking existence of {0}".format(url))
- if spack.config.get('config:url_fetch_method') == 'curl':
+ if spack.config.get("config:url_fetch_method") == "curl":
curl = self.curl
# Telling curl to fetch the first byte (-r 0-0) is supposed to be
# portable.
- curl_args = ['--stderr', '-', '-s', '-f', '-r', '0-0', url]
- if not spack.config.get('config:verify_ssl'):
- curl_args.append('-k')
+ curl_args = ["--stderr", "-", "-s", "-f", "-r", "0-0", url]
+ if not spack.config.get("config:verify_ssl"):
+ curl_args.append("-k")
_ = curl(*curl_args, fail_on_error=False, output=os.devnull)
return curl.returncode == 0
else:
@@ -367,12 +370,14 @@ class URLFetchStrategy(FetchStrategy):
url, headers, response = spack.util.web.read_from_url(url)
except spack.util.web.SpackWebError as werr:
msg = "Urllib fetch failed to verify url\
- {0}\n with error {1}".format(url, werr)
+ {0}\n with error {1}".format(
+ url, werr
+ )
raise FailedDownloadError(url, msg)
- return (response.getcode() is None or response.getcode() == 200)
+ return response.getcode() is None or response.getcode() == 200
def _fetch_from_url(self, url):
- if spack.config.get('config:url_fetch_method') == 'curl':
+ if spack.config.get("config:url_fetch_method") == "curl":
return self._fetch_curl(url)
else:
return self._fetch_urllib(url)
@@ -381,15 +386,14 @@ class URLFetchStrategy(FetchStrategy):
# Check if we somehow got an HTML file rather than the archive we
# asked for. We only look at the last content type, to handle
# redirects properly.
- content_types = re.findall(r'Content-Type:[^\r\n]+', headers,
- flags=re.IGNORECASE)
- if content_types and 'text/html' in content_types[-1]:
+ content_types = re.findall(r"Content-Type:[^\r\n]+", headers, flags=re.IGNORECASE)
+ if content_types and "text/html" in content_types[-1]:
warn_content_type_mismatch(self.archive_file or "the archive")
@_needs_stage
def _fetch_urllib(self, url):
save_file = self.stage.save_filename
- tty.msg('Fetching {0}'.format(url))
+ tty.msg("Fetching {0}".format(url))
# Run urllib but grab the mime type from the http headers
try:
@@ -400,13 +404,13 @@ class URLFetchStrategy(FetchStrategy):
os.remove(self.archive_file)
if os.path.lexists(save_file):
os.remove(save_file)
- msg = 'urllib failed to fetch with error {0}'.format(e)
+ msg = "urllib failed to fetch with error {0}".format(e)
raise FailedDownloadError(url, msg)
if os.path.lexists(save_file):
os.remove(save_file)
- with open(save_file, 'wb') as _open_file:
+ with open(save_file, "wb") as _open_file:
shutil.copyfileobj(response, _open_file)
self._check_headers(str(headers))
@@ -417,48 +421,50 @@ class URLFetchStrategy(FetchStrategy):
partial_file = None
if self.stage.save_filename:
save_file = self.stage.save_filename
- partial_file = self.stage.save_filename + '.part'
- tty.msg('Fetching {0}'.format(url))
+ partial_file = self.stage.save_filename + ".part"
+ tty.msg("Fetching {0}".format(url))
if partial_file:
- save_args = ['-C',
- '-', # continue partial downloads
- '-o',
- partial_file] # use a .part file
+ save_args = [
+ "-C",
+ "-", # continue partial downloads
+ "-o",
+ partial_file,
+ ] # use a .part file
else:
- save_args = ['-O']
+ save_args = ["-O"]
curl_args = save_args + [
- '-f', # fail on >400 errors
- '-D',
- '-', # print out HTML headers
- '-L', # resolve 3xx redirects
+ "-f", # fail on >400 errors
+ "-D",
+ "-", # print out HTML headers
+ "-L", # resolve 3xx redirects
url,
]
- if not spack.config.get('config:verify_ssl'):
- curl_args.append('-k')
+ if not spack.config.get("config:verify_ssl"):
+ curl_args.append("-k")
if sys.stdout.isatty() and tty.msg_enabled():
- curl_args.append('-#') # status bar when using a tty
+ curl_args.append("-#") # status bar when using a tty
else:
- curl_args.append('-sS') # show errors if fail
+ curl_args.append("-sS") # show errors if fail
- connect_timeout = spack.config.get('config:connect_timeout', 10)
+ connect_timeout = spack.config.get("config:connect_timeout", 10)
if self.extra_options:
- cookie = self.extra_options.get('cookie')
+ cookie = self.extra_options.get("cookie")
if cookie:
- curl_args.append('-j') # junk cookies
- curl_args.append('-b') # specify cookie
+ curl_args.append("-j") # junk cookies
+ curl_args.append("-b") # specify cookie
curl_args.append(cookie)
- timeout = self.extra_options.get('timeout')
+ timeout = self.extra_options.get("timeout")
if timeout:
connect_timeout = max(connect_timeout, int(timeout))
if connect_timeout > 0:
# Timeout if can't establish a connection after n sec.
- curl_args.extend(['--connect-timeout', str(connect_timeout)])
+ curl_args.extend(["--connect-timeout", str(connect_timeout)])
# Run curl but grab the mime type from the http headers
curl = self.curl
@@ -475,8 +481,7 @@ class URLFetchStrategy(FetchStrategy):
if curl.returncode == 22:
# This is a 404. Curl will print the error.
- raise FailedDownloadError(
- url, "URL %s was not found!" % url)
+ raise FailedDownloadError(url, "URL %s was not found!" % url)
elif curl.returncode == 60:
# This is a certificate error. Suggest spack -k
@@ -487,14 +492,13 @@ class URLFetchStrategy(FetchStrategy):
"configuration is bad. If you believe your SSL "
"configuration is bad, you can try running spack -k, "
"which will not check SSL certificates."
- "Use this at your own risk.")
+ "Use this at your own risk.",
+ )
else:
# This is some other curl error. Curl will print the
# error, but print a spack message too
- raise FailedDownloadError(
- url,
- "Curl failed with error %d" % curl.returncode)
+ raise FailedDownloadError(url, "Curl failed with error %d" % curl.returncode)
self._check_headers(headers)
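The curl path above honors two per-fetch options, a cookie (sent with -j and -b) and a timeout that raises the connect timeout. They reach the strategy through the fetch_options keyword read in __init__ earlier in this file. A hedged sketch constructing the strategy directly, with invented values:

    from spack.fetch_strategy import URLFetchStrategy  # assumes Spack's lib dir is importable

    fetcher = URLFetchStrategy(
        url="https://example.com/demo-1.0.tar.gz",  # invented URL
        sha256="0" * 64,                            # placeholder digest
        fetch_options={
            "timeout": 60,                     # raises the curl --connect-timeout
            "cookie": "license_accepted=yes",  # forwarded to curl via -j / -b
        },
    )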
@@ -514,28 +518,30 @@ class URLFetchStrategy(FetchStrategy):
@_needs_stage
def expand(self):
if not self.expand_archive:
- tty.debug('Staging unexpanded archive {0} in {1}'
- .format(self.archive_file, self.stage.source_path))
+ tty.debug(
+ "Staging unexpanded archive {0} in {1}".format(
+ self.archive_file, self.stage.source_path
+ )
+ )
if not self.stage.expanded:
mkdirp(self.stage.source_path)
- dest = os.path.join(self.stage.source_path,
- os.path.basename(self.archive_file))
+ dest = os.path.join(self.stage.source_path, os.path.basename(self.archive_file))
shutil.move(self.archive_file, dest)
return
- tty.debug('Staging archive: {0}'.format(self.archive_file))
+ tty.debug("Staging archive: {0}".format(self.archive_file))
if not self.archive_file:
raise NoArchiveFileError(
- "Couldn't find archive file",
- "Failed on expand() for URL %s" % self.url)
+ "Couldn't find archive file", "Failed on expand() for URL %s" % self.url
+ )
# TODO: replace this by mime check.
if not self.extension:
self.extension = spack.url.determine_url_file_extension(self.url)
if self.stage.expanded:
- tty.debug('Source already staged to %s' % self.stage.source_path)
+ tty.debug("Source already staged to %s" % self.stage.source_path)
return
decompress = decompressor_for(self.archive_file, self.extension)
@@ -550,25 +556,21 @@ class URLFetchStrategy(FetchStrategy):
if not self.archive_file:
raise NoArchiveFileError("Cannot call archive() before fetching.")
- spack.util.web.push_to_url(
- self.archive_file,
- destination,
- keep_original=True)
+ spack.util.web.push_to_url(self.archive_file, destination, keep_original=True)
@_needs_stage
def check(self):
"""Check the downloaded archive against a checksum digest.
- No-op if this stage checks code out of a repository."""
+ No-op if this stage checks code out of a repository."""
if not self.digest:
- raise NoDigestError(
- "Attempt to check URLFetchStrategy with no digest.")
+ raise NoDigestError("Attempt to check URLFetchStrategy with no digest.")
checker = crypto.Checker(self.digest)
if not checker.check(self.archive_file):
raise ChecksumError(
- "%s checksum failed for %s" %
- (checker.hash_name, self.archive_file),
- "Expected %s but got %s" % (self.digest, checker.sum))
+ "%s checksum failed for %s" % (checker.hash_name, self.archive_file),
+ "Expected %s but got %s" % (self.digest, checker.sum),
+ )
@_needs_stage
def reset(self):
@@ -578,7 +580,8 @@ class URLFetchStrategy(FetchStrategy):
if not self.archive_file:
raise NoArchiveFileError(
"Tried to reset URLFetchStrategy before fetching",
- "Failed on reset() for URL %s" % self.url)
+ "Failed on reset() for URL %s" % self.url,
+ )
# Remove everything but the archive from the stage
for filename in os.listdir(self.stage.path):
@@ -606,12 +609,12 @@ class CacheURLFetchStrategy(URLFetchStrategy):
@_needs_stage
def fetch(self):
- reg_str = r'^file://'
- path = re.sub(reg_str, '', self.url)
+ reg_str = r"^file://"
+ path = re.sub(reg_str, "", self.url)
# check whether the cache file exists.
if not os.path.isfile(path):
- raise NoCacheError('No cache of %s' % path)
+ raise NoCacheError("No cache of %s" % path)
# remove old symlink if one is there.
filename = self.stage.save_filename
@@ -631,7 +634,7 @@ class CacheURLFetchStrategy(URLFetchStrategy):
raise
# Notify the user how we fetched.
- tty.msg('Using cached archive: {0}'.format(path))
+ tty.msg("Using cached archive: {0}".format(path))
class VCSFetchStrategy(FetchStrategy):
@@ -653,35 +656,32 @@ class VCSFetchStrategy(FetchStrategy):
# Set a URL based on the type of fetch strategy.
self.url = kwargs.get(self.url_attr, None)
if not self.url:
- raise ValueError(
- "%s requires %s argument." % (self.__class__, self.url_attr))
+ raise ValueError("%s requires %s argument." % (self.__class__, self.url_attr))
for attr in self.optional_attrs:
setattr(self, attr, kwargs.get(attr, None))
@_needs_stage
def check(self):
- tty.debug('No checksum needed when fetching with {0}'
- .format(self.url_attr))
+ tty.debug("No checksum needed when fetching with {0}".format(self.url_attr))
@_needs_stage
def expand(self):
- tty.debug(
- "Source fetched with %s is already expanded." % self.url_attr)
+ tty.debug("Source fetched with %s is already expanded." % self.url_attr)
@_needs_stage
def archive(self, destination, **kwargs):
- assert (extension(destination) == 'tar.gz')
- assert (self.stage.source_path.startswith(self.stage.path))
+ assert extension(destination) == "tar.gz"
+ assert self.stage.source_path.startswith(self.stage.path)
- tar = which('tar', required=True)
+ tar = which("tar", required=True)
- patterns = kwargs.get('exclude', None)
+ patterns = kwargs.get("exclude", None)
if patterns is not None:
if isinstance(patterns, six.string_types):
patterns = [patterns]
for p in patterns:
- tar.add_default_arg('--exclude=%s' % p)
+ tar.add_default_arg("--exclude=%s" % p)
with working_dir(self.stage.path):
if self.stage.srcdir:
@@ -690,10 +690,9 @@ class VCSFetchStrategy(FetchStrategy):
# directory that is included in the archive, but they differ
# based on OS, so we temporarily rename the repo
with temp_rename(self.stage.source_path, self.stage.srcdir):
- tar('-czf', destination, self.stage.srcdir)
+ tar("-czf", destination, self.stage.srcdir)
else:
- tar('-czf', destination,
- os.path.basename(self.stage.source_path))
+ tar("-czf", destination, os.path.basename(self.stage.source_path))
def __str__(self):
return "VCS: %s" % self.url
@@ -716,48 +715,48 @@ class GoFetchStrategy(VCSFetchStrategy):
The fetched source will be moved to the standard stage sourcepath directory
during the expand step.
"""
- url_attr = 'go'
+
+ url_attr = "go"
def __init__(self, **kwargs):
# Discards the keywords in kwargs that may conflict with the next
# call to __init__
forwarded_args = copy.copy(kwargs)
- forwarded_args.pop('name', None)
+ forwarded_args.pop("name", None)
super(GoFetchStrategy, self).__init__(**forwarded_args)
self._go = None
@property
def go_version(self):
- vstring = self.go('version', output=str).split(' ')[2]
+ vstring = self.go("version", output=str).split(" ")[2]
return spack.version.Version(vstring)
@property
def go(self):
if not self._go:
- self._go = which('go', required=True)
+ self._go = which("go", required=True)
return self._go
@_needs_stage
def fetch(self):
- tty.debug('Getting go resource: {0}'.format(self.url))
+ tty.debug("Getting go resource: {0}".format(self.url))
with working_dir(self.stage.path):
try:
- os.mkdir('go')
+ os.mkdir("go")
except OSError:
pass
env = dict(os.environ)
- env['GOPATH'] = os.path.join(os.getcwd(), 'go')
- self.go('get', '-v', '-d', self.url, env=env)
+ env["GOPATH"] = os.path.join(os.getcwd(), "go")
+ self.go("get", "-v", "-d", self.url, env=env)
def archive(self, destination):
- super(GoFetchStrategy, self).archive(destination, exclude='.git')
+ super(GoFetchStrategy, self).archive(destination, exclude=".git")
@_needs_stage
def expand(self):
- tty.debug(
- "Source fetched with %s is already expanded." % self.url_attr)
+ tty.debug("Source fetched with %s is already expanded." % self.url_attr)
# Move the directory to the well-known stage source path
repo_root = _ensure_one_stage_entry(self.stage.path)
@@ -766,7 +765,7 @@ class GoFetchStrategy(VCSFetchStrategy):
@_needs_stage
def reset(self):
with working_dir(self.stage.source_path):
- self.go('clean')
+ self.go("clean")
def __str__(self):
return "[go] %s" % self.url
@@ -794,23 +793,30 @@ class GitFetchStrategy(VCSFetchStrategy):
Repositories are cloned into the standard stage source path directory.
"""
- url_attr = 'git'
- optional_attrs = ['tag', 'branch', 'commit', 'submodules',
- 'get_full_repo', 'submodules_delete']
- git_version_re = r'git version (\S+)'
+ url_attr = "git"
+ optional_attrs = [
+ "tag",
+ "branch",
+ "commit",
+ "submodules",
+ "get_full_repo",
+ "submodules_delete",
+ ]
+
+ git_version_re = r"git version (\S+)"
def __init__(self, **kwargs):
# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
- forwarded_args.pop('name', None)
+ forwarded_args.pop("name", None)
super(GitFetchStrategy, self).__init__(**forwarded_args)
self._git = None
- self.submodules = kwargs.get('submodules', False)
- self.submodules_delete = kwargs.get('submodules_delete', False)
- self.get_full_repo = kwargs.get('get_full_repo', False)
+ self.submodules = kwargs.get("submodules", False)
+ self.submodules_delete = kwargs.get("submodules_delete", False)
+ self.get_full_repo = kwargs.get("get_full_repo", False)
@property
def git_version(self):
@@ -819,27 +825,27 @@ class GitFetchStrategy(VCSFetchStrategy):
@staticmethod
def version_from_git(git_exe):
"""Given a git executable, return the Version (this will fail if
- the output cannot be parsed into a valid Version).
+ the output cannot be parsed into a valid Version).
"""
- version_output = git_exe('--version', output=str)
+ version_output = git_exe("--version", output=str)
m = re.search(GitFetchStrategy.git_version_re, version_output)
return spack.version.Version(m.group(1))
@property
def git(self):
if not self._git:
- self._git = which('git', required=True)
+ self._git = which("git", required=True)
# Disable advice for a quieter fetch
# https://github.com/git/git/blob/master/Documentation/RelNotes/1.7.2.txt
- if self.git_version >= spack.version.Version('1.7.2'):
- self._git.add_default_arg('-c')
- self._git.add_default_arg('advice.detachedHead=false')
+ if self.git_version >= spack.version.Version("1.7.2"):
+ self._git.add_default_arg("-c")
+ self._git.add_default_arg("advice.detachedHead=false")
# If the user asked for insecure fetching, make that work
# with git as well.
- if not spack.config.get('config:verify_ssl'):
- self._git.add_default_env('GIT_SSL_NO_VERIFY', 'true')
+ if not spack.config.get("config:verify_ssl"):
+ self._git.add_default_env("GIT_SSL_NO_VERIFY", "true")
return self._git
@@ -854,25 +860,25 @@ class GitFetchStrategy(VCSFetchStrategy):
repo_ref = self.commit or self.tag or self.branch
if repo_ref:
repo_path = url_util.parse(self.url).path
- result = os.path.sep.join(['git', repo_path, repo_ref])
+ result = os.path.sep.join(["git", repo_path, repo_ref])
return result
def _repo_info(self):
- args = ''
+ args = ""
if self.commit:
- args = ' at commit {0}'.format(self.commit)
+ args = " at commit {0}".format(self.commit)
elif self.tag:
- args = ' at tag {0}'.format(self.tag)
+ args = " at tag {0}".format(self.tag)
elif self.branch:
- args = ' on branch {0}'.format(self.branch)
+ args = " on branch {0}".format(self.branch)
- return '{0}{1}'.format(self.url, args)
+ return "{0}{1}".format(self.url, args)
@_needs_stage
def fetch(self):
if self.stage.expanded:
- tty.debug('Already fetched {0}'.format(self.stage.source_path))
+ tty.debug("Already fetched {0}".format(self.stage.source_path))
return
self.clone(commit=self.commit, branch=self.branch, tag=self.tag)
@@ -894,69 +900,71 @@ class GitFetchStrategy(VCSFetchStrategy):
"""
# Default to spack source path
dest = dest or self.stage.source_path
- tty.debug('Cloning git repository: {0}'.format(self._repo_info()))
+ tty.debug("Cloning git repository: {0}".format(self._repo_info()))
git = self.git
- debug = spack.config.get('config:debug')
+ debug = spack.config.get("config:debug")
if bare:
# We don't need to worry about which commit/branch/tag is checked out
- clone_args = ['clone', '--bare']
+ clone_args = ["clone", "--bare"]
if not debug:
- clone_args.append('--quiet')
+ clone_args.append("--quiet")
clone_args.extend([self.url, dest])
git(*clone_args)
elif commit:
# Need to do a regular clone and check out everything if
# they asked for a particular commit.
- clone_args = ['clone', self.url]
+ clone_args = ["clone", self.url]
if not debug:
- clone_args.insert(1, '--quiet')
+ clone_args.insert(1, "--quiet")
with temp_cwd():
git(*clone_args)
- repo_name = get_single_file('.')
+ repo_name = get_single_file(".")
if self.stage:
self.stage.srcdir = repo_name
shutil.move(repo_name, dest)
with working_dir(dest):
- checkout_args = ['checkout', commit]
+ checkout_args = ["checkout", commit]
if not debug:
- checkout_args.insert(1, '--quiet')
+ checkout_args.insert(1, "--quiet")
git(*checkout_args)
else:
# Can be more efficient if not checking out a specific commit.
- args = ['clone']
+ args = ["clone"]
if not debug:
- args.append('--quiet')
+ args.append("--quiet")
# If we want a particular branch ask for it.
if branch:
- args.extend(['--branch', branch])
- elif tag and self.git_version >= spack.version.ver('1.8.5.2'):
- args.extend(['--branch', tag])
+ args.extend(["--branch", branch])
+ elif tag and self.git_version >= spack.version.ver("1.8.5.2"):
+ args.extend(["--branch", tag])
# Try to be efficient if we're using a new enough git.
# This checks out only one branch's history
- if self.git_version >= spack.version.ver('1.7.10'):
+ if self.git_version >= spack.version.ver("1.7.10"):
if self.get_full_repo:
- args.append('--no-single-branch')
+ args.append("--no-single-branch")
else:
- args.append('--single-branch')
+ args.append("--single-branch")
with temp_cwd():
# Yet more efficiency: only download a 1-commit deep
# tree, if the in-use git and protocol permit it.
- if (not self.get_full_repo) and \
- self.git_version >= spack.version.ver('1.7.1') and \
- self.protocol_supports_shallow_clone():
- args.extend(['--depth', '1'])
+ if (
+ (not self.get_full_repo)
+ and self.git_version >= spack.version.ver("1.7.1")
+ and self.protocol_supports_shallow_clone()
+ ):
+ args.extend(["--depth", "1"])
args.extend([self.url])
git(*args)
- repo_name = get_single_file('.')
+ repo_name = get_single_file(".")
if self.stage:
self.stage.srcdir = repo_name
shutil.move(repo_name, dest)
@@ -965,15 +973,15 @@ class GitFetchStrategy(VCSFetchStrategy):
# For tags, be conservative and check them out AFTER
# cloning. Later git versions can do this with clone
# --branch, but older ones fail.
- if tag and self.git_version < spack.version.ver('1.8.5.2'):
+ if tag and self.git_version < spack.version.ver("1.8.5.2"):
# pull --tags returns a "special" error code of 1 in
# older versions that we have to ignore.
# see: https://github.com/git/git/commit/19d122b
- pull_args = ['pull', '--tags']
- co_args = ['checkout', self.tag]
- if not spack.config.get('config:debug'):
- pull_args.insert(1, '--quiet')
- co_args.insert(1, '--quiet')
+ pull_args = ["pull", "--tags"]
+ co_args = ["checkout", self.tag]
+ if not spack.config.get("config:debug"):
+ pull_args.insert(1, "--quiet")
+ co_args.insert(1, "--quiet")
git(*pull_args, ignore_errors=1)
git(*co_args)
@@ -981,9 +989,9 @@ class GitFetchStrategy(VCSFetchStrategy):
if self.submodules_delete:
with working_dir(dest):
for submodule_to_delete in self.submodules_delete:
- args = ['rm', submodule_to_delete]
- if not spack.config.get('config:debug'):
- args.insert(1, '--quiet')
+ args = ["rm", submodule_to_delete]
+ if not spack.config.get("config:debug"):
+ args.insert(1, "--quiet")
git(*args)
# Init submodules if the user asked for them.
@@ -992,7 +1000,7 @@ class GitFetchStrategy(VCSFetchStrategy):
if callable(submodules):
submodules = list(submodules(self.package))
git_commands.append(["submodule", "init", "--"] + submodules)
- git_commands.append(['submodule', 'update', '--recursive'])
+ git_commands.append(["submodule", "update", "--recursive"])
elif submodules:
git_commands.append(["submodule", "update", "--init", "--recursive"])
@@ -1001,21 +1009,21 @@ class GitFetchStrategy(VCSFetchStrategy):
with working_dir(dest):
for args in git_commands:
- if not spack.config.get('config:debug'):
- args.insert(1, '--quiet')
+ if not spack.config.get("config:debug"):
+ args.insert(1, "--quiet")
git(*args)
def archive(self, destination):
- super(GitFetchStrategy, self).archive(destination, exclude='.git')
+ super(GitFetchStrategy, self).archive(destination, exclude=".git")
@_needs_stage
def reset(self):
with working_dir(self.stage.source_path):
- co_args = ['checkout', '.']
- clean_args = ['clean', '-f']
- if spack.config.get('config:debug'):
- co_args.insert(1, '--quiet')
- clean_args.insert(1, '--quiet')
+ co_args = ["checkout", "."]
+ clean_args = ["clean", "-f"]
+ if spack.config.get("config:debug"):
+ co_args.insert(1, "--quiet")
+ clean_args.insert(1, "--quiet")
self.git(*co_args)
self.git(*clean_args)
@@ -1024,11 +1032,10 @@ class GitFetchStrategy(VCSFetchStrategy):
"""Shallow clone operations (--depth #) are not supported by the basic
HTTP protocol or by no-protocol file specifications.
Use (e.g.) https:// or file:// instead."""
- return not (self.url.startswith('http://') or
- self.url.startswith('/'))
+ return not (self.url.startswith("http://") or self.url.startswith("/"))
def __str__(self):
- return '[git] {0}'.format(self._repo_info())
+ return "[git] {0}".format(self._repo_info())
@fetcher
@@ -1047,14 +1054,15 @@ class CvsFetchStrategy(VCSFetchStrategy):
Repositories are checked out into the standard stage source path directory.
"""
- url_attr = 'cvs'
- optional_attrs = ['branch', 'date']
+
+ url_attr = "cvs"
+ optional_attrs = ["branch", "date"]
def __init__(self, **kwargs):
# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
- forwarded_args.pop('name', None)
+ forwarded_args.pop("name", None)
super(CvsFetchStrategy, self).__init__(**forwarded_args)
self._cvs = None
@@ -1066,7 +1074,7 @@ class CvsFetchStrategy(VCSFetchStrategy):
@property
def cvs(self):
if not self._cvs:
- self._cvs = which('cvs', required=True)
+ self._cvs = which("cvs", required=True)
return self._cvs
@property
@@ -1077,11 +1085,11 @@ class CvsFetchStrategy(VCSFetchStrategy):
if not (self.branch or self.date):
# We need a branch or a date to make a checkout reproducible
return None
- id = 'id'
+ id = "id"
if self.branch:
- id += '-branch=' + self.branch
+ id += "-branch=" + self.branch
if self.date:
- id += '-date=' + self.date
+ id += "-date=" + self.date
return id
def mirror_id(self):
@@ -1089,59 +1097,59 @@ class CvsFetchStrategy(VCSFetchStrategy):
# We need a branch or a date to make a checkout reproducible
return None
# Special-case handling because this is not actually a URL
- elements = self.url.split(':')
+ elements = self.url.split(":")
final = elements[-1]
- elements = final.split('/')
+ elements = final.split("/")
# Everything before the first slash is a port number
elements = elements[1:]
- result = os.path.sep.join(['cvs'] + elements)
+ result = os.path.sep.join(["cvs"] + elements)
if self.branch:
- result += '%branch=' + self.branch
+ result += "%branch=" + self.branch
if self.date:
- result += '%date=' + self.date
+ result += "%date=" + self.date
return result
@_needs_stage
def fetch(self):
if self.stage.expanded:
- tty.debug('Already fetched {0}'.format(self.stage.source_path))
+ tty.debug("Already fetched {0}".format(self.stage.source_path))
return
- tty.debug('Checking out CVS repository: {0}'.format(self.url))
+ tty.debug("Checking out CVS repository: {0}".format(self.url))
with temp_cwd():
- url, module = self.url.split('%module=')
+ url, module = self.url.split("%module=")
# Check out files
- args = ['-z9', '-d', url, 'checkout']
+ args = ["-z9", "-d", url, "checkout"]
if self.branch is not None:
- args.extend(['-r', self.branch])
+ args.extend(["-r", self.branch])
if self.date is not None:
- args.extend(['-D', self.date])
+ args.extend(["-D", self.date])
args.append(module)
self.cvs(*args)
# Rename repo
- repo_name = get_single_file('.')
+ repo_name = get_single_file(".")
self.stage.srcdir = repo_name
shutil.move(repo_name, self.stage.source_path)
def _remove_untracked_files(self):
"""Removes untracked files in a CVS repository."""
with working_dir(self.stage.source_path):
- status = self.cvs('-qn', 'update', output=str)
- for line in status.split('\n'):
- if re.match(r'^[?]', line):
+ status = self.cvs("-qn", "update", output=str)
+ for line in status.split("\n"):
+ if re.match(r"^[?]", line):
path = line[2:].strip()
if os.path.isfile(path):
os.unlink(path)
def archive(self, destination):
- super(CvsFetchStrategy, self).archive(destination, exclude='CVS')
+ super(CvsFetchStrategy, self).archive(destination, exclude="CVS")
@_needs_stage
def reset(self):
self._remove_untracked_files()
with working_dir(self.stage.source_path):
- self.cvs('update', '-C', '.')
+ self.cvs("update", "-C", ".")
def __str__(self):
return "[cvs] %s" % self.url
@@ -1162,14 +1170,15 @@ class SvnFetchStrategy(VCSFetchStrategy):
Repositories are checked out into the standard stage source path directory.
"""
- url_attr = 'svn'
- optional_attrs = ['revision']
+
+ url_attr = "svn"
+ optional_attrs = ["revision"]
def __init__(self, **kwargs):
# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
- forwarded_args.pop('name', None)
+ forwarded_args.pop("name", None)
super(SvnFetchStrategy, self).__init__(**forwarded_args)
self._svn = None
@@ -1179,7 +1188,7 @@ class SvnFetchStrategy(VCSFetchStrategy):
@property
def svn(self):
if not self._svn:
- self._svn = which('svn', required=True)
+ self._svn = which("svn", required=True)
return self._svn
@property
@@ -1192,35 +1201,35 @@ class SvnFetchStrategy(VCSFetchStrategy):
def mirror_id(self):
if self.revision:
repo_path = url_util.parse(self.url).path
- result = os.path.sep.join(['svn', repo_path, self.revision])
+ result = os.path.sep.join(["svn", repo_path, self.revision])
return result
@_needs_stage
def fetch(self):
if self.stage.expanded:
- tty.debug('Already fetched {0}'.format(self.stage.source_path))
+ tty.debug("Already fetched {0}".format(self.stage.source_path))
return
- tty.debug('Checking out subversion repository: {0}'.format(self.url))
+ tty.debug("Checking out subversion repository: {0}".format(self.url))
- args = ['checkout', '--force', '--quiet']
+ args = ["checkout", "--force", "--quiet"]
if self.revision:
- args += ['-r', self.revision]
+ args += ["-r", self.revision]
args.extend([self.url])
with temp_cwd():
self.svn(*args)
- repo_name = get_single_file('.')
+ repo_name = get_single_file(".")
self.stage.srcdir = repo_name
shutil.move(repo_name, self.stage.source_path)
def _remove_untracked_files(self):
"""Removes untracked files in an svn repository."""
with working_dir(self.stage.source_path):
- status = self.svn('status', '--no-ignore', output=str)
- self.svn('status', '--no-ignore')
- for line in status.split('\n'):
- if not re.match('^[I?]', line):
+ status = self.svn("status", "--no-ignore", output=str)
+ self.svn("status", "--no-ignore")
+ for line in status.split("\n"):
+ if not re.match("^[I?]", line):
continue
path = line[8:].strip()
if os.path.isfile(path):
@@ -1229,13 +1238,13 @@ class SvnFetchStrategy(VCSFetchStrategy):
shutil.rmtree(path, ignore_errors=True)
def archive(self, destination):
- super(SvnFetchStrategy, self).archive(destination, exclude='.svn')
+ super(SvnFetchStrategy, self).archive(destination, exclude=".svn")
@_needs_stage
def reset(self):
self._remove_untracked_files()
with working_dir(self.stage.source_path):
- self.svn('revert', '.', '-R')
+ self.svn("revert", ".", "-R")
def __str__(self):
return "[svn] %s" % self.url
@@ -1264,14 +1273,15 @@ class HgFetchStrategy(VCSFetchStrategy):
Repositories are cloned into the standard stage source path directory.
"""
- url_attr = 'hg'
- optional_attrs = ['revision']
+
+ url_attr = "hg"
+ optional_attrs = ["revision"]
def __init__(self, **kwargs):
# Discards the keywords in kwargs that may conflict with the next call
# to __init__
forwarded_args = copy.copy(kwargs)
- forwarded_args.pop('name', None)
+ forwarded_args.pop("name", None)
super(HgFetchStrategy, self).__init__(**forwarded_args)
self._hg = None
@@ -1283,12 +1293,12 @@ class HgFetchStrategy(VCSFetchStrategy):
Executable: the hg executable
"""
if not self._hg:
- self._hg = which('hg', required=True)
+ self._hg = which("hg", required=True)
# When building PythonPackages, Spack automatically sets
# PYTHONPATH. This can interfere with hg, which is a Python
# script. Unset PYTHONPATH while running hg.
- self._hg.add_default_env('PYTHONPATH', '')
+ self._hg.add_default_env("PYTHONPATH", "")
return self._hg
@@ -1302,39 +1312,38 @@ class HgFetchStrategy(VCSFetchStrategy):
def mirror_id(self):
if self.revision:
repo_path = url_util.parse(self.url).path
- result = os.path.sep.join(['hg', repo_path, self.revision])
+ result = os.path.sep.join(["hg", repo_path, self.revision])
return result
@_needs_stage
def fetch(self):
if self.stage.expanded:
- tty.debug('Already fetched {0}'.format(self.stage.source_path))
+ tty.debug("Already fetched {0}".format(self.stage.source_path))
return
args = []
if self.revision:
- args.append('at revision %s' % self.revision)
- tty.debug('Cloning mercurial repository: {0} {1}'
- .format(self.url, args))
+ args.append("at revision %s" % self.revision)
+ tty.debug("Cloning mercurial repository: {0} {1}".format(self.url, args))
- args = ['clone']
+ args = ["clone"]
- if not spack.config.get('config:verify_ssl'):
- args.append('--insecure')
+ if not spack.config.get("config:verify_ssl"):
+ args.append("--insecure")
if self.revision:
- args.extend(['-r', self.revision])
+ args.extend(["-r", self.revision])
args.extend([self.url])
with temp_cwd():
self.hg(*args)
- repo_name = get_single_file('.')
+ repo_name = get_single_file(".")
self.stage.srcdir = repo_name
shutil.move(repo_name, self.stage.source_path)
def archive(self, destination):
- super(HgFetchStrategy, self).archive(destination, exclude='.hg')
+ super(HgFetchStrategy, self).archive(destination, exclude=".hg")
@_needs_stage
def reset(self):
@@ -1342,9 +1351,9 @@ class HgFetchStrategy(VCSFetchStrategy):
source_path = self.stage.source_path
scrubbed = "scrubbed-source-tmp"
- args = ['clone']
+ args = ["clone"]
if self.revision:
- args += ['-r', self.revision]
+ args += ["-r", self.revision]
args += [source_path, scrubbed]
self.hg(*args)
@@ -1358,46 +1367,45 @@ class HgFetchStrategy(VCSFetchStrategy):
@fetcher
class S3FetchStrategy(URLFetchStrategy):
"""FetchStrategy that pulls from an S3 bucket."""
- url_attr = 's3'
+
+ url_attr = "s3"
def __init__(self, *args, **kwargs):
try:
super(S3FetchStrategy, self).__init__(*args, **kwargs)
except ValueError:
- if not kwargs.get('url'):
- raise ValueError(
- "S3FetchStrategy requires a url for fetching.")
+ if not kwargs.get("url"):
+ raise ValueError("S3FetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
if self.archive_file:
- tty.debug('Already downloaded {0}'.format(self.archive_file))
+ tty.debug("Already downloaded {0}".format(self.archive_file))
return
parsed_url = url_util.parse(self.url)
- if parsed_url.scheme != 's3':
- raise FetchError(
- 'S3FetchStrategy can only fetch from s3:// urls.')
+ if parsed_url.scheme != "s3":
+ raise FetchError("S3FetchStrategy can only fetch from s3:// urls.")
- tty.debug('Fetching {0}'.format(self.url))
+ tty.debug("Fetching {0}".format(self.url))
basename = os.path.basename(parsed_url.path)
with working_dir(self.stage.path):
_, headers, stream = spack.util.web.read_from_url(self.url)
- with open(basename, 'wb') as f:
+ with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
- content_type = spack.util.web.get_header(headers, 'Content-type')
+ content_type = spack.util.web.get_header(headers, "Content-type")
- if content_type == 'text/html':
+ if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
llnl.util.filesystem.rename(
- os.path.join(self.stage.path, basename),
- self.stage.save_filename)
+ os.path.join(self.stage.path, basename), self.stage.save_filename
+ )
if not self.archive_file:
raise FailedDownloadError(self.url)
@@ -1406,47 +1414,45 @@ class S3FetchStrategy(URLFetchStrategy):
@fetcher
class GCSFetchStrategy(URLFetchStrategy):
"""FetchStrategy that pulls from a GCS bucket."""
- url_attr = 'gs'
+
+ url_attr = "gs"
def __init__(self, *args, **kwargs):
try:
super(GCSFetchStrategy, self).__init__(*args, **kwargs)
except ValueError:
- if not kwargs.get('url'):
- raise ValueError(
- "GCSFetchStrategy requires a url for fetching.")
+ if not kwargs.get("url"):
+ raise ValueError("GCSFetchStrategy requires a url for fetching.")
@_needs_stage
def fetch(self):
import spack.util.web as web_util
+
if self.archive_file:
- tty.debug('Already downloaded {0}'.format(self.archive_file))
+ tty.debug("Already downloaded {0}".format(self.archive_file))
return
parsed_url = url_util.parse(self.url)
- if parsed_url.scheme != 'gs':
- raise FetchError(
- 'GCSFetchStrategy can only fetch from gs:// urls.')
+ if parsed_url.scheme != "gs":
+ raise FetchError("GCSFetchStrategy can only fetch from gs:// urls.")
- tty.debug('Fetching {0}'.format(self.url))
+ tty.debug("Fetching {0}".format(self.url))
basename = os.path.basename(parsed_url.path)
with working_dir(self.stage.path):
_, headers, stream = web_util.read_from_url(self.url)
- with open(basename, 'wb') as f:
+ with open(basename, "wb") as f:
shutil.copyfileobj(stream, f)
- content_type = web_util.get_header(headers, 'Content-type')
+ content_type = web_util.get_header(headers, "Content-type")
- if content_type == 'text/html':
+ if content_type == "text/html":
warn_content_type_mismatch(self.archive_file or "the archive")
if self.stage.save_filename:
- os.rename(
- os.path.join(self.stage.path, basename),
- self.stage.save_filename)
+ os.rename(os.path.join(self.stage.path, basename), self.stage.save_filename)
if not self.archive_file:
raise FailedDownloadError(self.url)
@@ -1454,8 +1460,8 @@ class GCSFetchStrategy(URLFetchStrategy):
def stable_target(fetcher):
"""Returns whether the fetcher target is expected to have a stable
- checksum. This is only true if the target is a preexisting archive
- file."""
+ checksum. This is only true if the target is a preexisting archive
+ file."""
if isinstance(fetcher, URLFetchStrategy) and fetcher.cachable:
return True
return False
@@ -1463,10 +1469,10 @@ def stable_target(fetcher):
def from_url(url):
"""Given a URL, find an appropriate fetch strategy for it.
- Currently just gives you a URLFetchStrategy that uses curl.
+ Currently just gives you a URLFetchStrategy that uses curl.
- TODO: make this return appropriate fetch strategies for other
- types of URLs.
+ TODO: make this return appropriate fetch strategies for other
+ types of URLs.
"""
return URLFetchStrategy(url)
@@ -1500,16 +1506,16 @@ def check_pkg_attributes(pkg):
"""
# a single package cannot have URL attributes for multiple VCS fetch
# strategies *unless* they are the same attribute.
- conflicts = set([s.url_attr for s in all_strategies
- if hasattr(pkg, s.url_attr)])
+ conflicts = set([s.url_attr for s in all_strategies if hasattr(pkg, s.url_attr)])
# URL isn't a VCS fetch method. We can use it with a VCS method.
- conflicts -= set(['url'])
+ conflicts -= set(["url"])
if len(conflicts) > 1:
raise FetcherConflict(
- 'Package %s cannot specify %s together. Pick at most one.'
- % (pkg.name, comma_and(quote(conflicts))))
+ "Package %s cannot specify %s together. Pick at most one."
+ % (pkg.name, comma_and(quote(conflicts)))
+ )
def _check_version_attributes(fetcher, pkg, version):
@@ -1521,9 +1527,7 @@ def _check_version_attributes(fetcher, pkg, version):
all_optionals = set(a for s in all_strategies for a in s.optional_attrs)
args = pkg.versions[version]
- extra\
- = set(args) - set(fetcher.optional_attrs) - \
- set([fetcher.url_attr, 'no_cache'])
+ extra = set(args) - set(fetcher.optional_attrs) - set([fetcher.url_attr, "no_cache"])
extra.intersection_update(all_optionals)
if extra:
@@ -1532,43 +1536,42 @@ def _check_version_attributes(fetcher, pkg, version):
"%s version '%s' has extra arguments: %s"
% (pkg.name, version, comma_and(quote(extra))),
"Valid arguments for a %s fetcher are: \n %s"
- % (fetcher.url_attr, comma_and(quote(legal_attrs))))
+ % (fetcher.url_attr, comma_and(quote(legal_attrs))),
+ )
def _extrapolate(pkg, version):
"""Create a fetcher from an extrapolated URL for this version."""
try:
- return URLFetchStrategy(pkg.url_for_version(version),
- fetch_options=pkg.fetch_options)
+ return URLFetchStrategy(pkg.url_for_version(version), fetch_options=pkg.fetch_options)
except spack.package_base.NoURLError:
- msg = ("Can't extrapolate a URL for version %s "
- "because package %s defines no URLs")
+ msg = "Can't extrapolate a URL for version %s " "because package %s defines no URLs"
raise ExtrapolationError(msg % (version, pkg.name))
def _from_merged_attrs(fetcher, pkg, version):
"""Create a fetcher from merged package and version attributes."""
- if fetcher.url_attr == 'url':
+ if fetcher.url_attr == "url":
mirrors = pkg.all_urls_for_version(version)
url = mirrors[0]
mirrors = mirrors[1:]
- attrs = {fetcher.url_attr: url, 'mirrors': mirrors}
+ attrs = {fetcher.url_attr: url, "mirrors": mirrors}
else:
url = getattr(pkg, fetcher.url_attr)
attrs = {fetcher.url_attr: url}
- attrs['fetch_options'] = pkg.fetch_options
+ attrs["fetch_options"] = pkg.fetch_options
attrs.update(pkg.versions[version])
- if fetcher.url_attr == 'git' and hasattr(pkg, 'submodules'):
- attrs.setdefault('submodules', pkg.submodules)
+ if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
+ attrs.setdefault("submodules", pkg.submodules)
return fetcher(**attrs)
def for_package_version(pkg, version):
"""Determine a fetch strategy based on the arguments supplied to
- version() in the package description."""
+ version() in the package description."""
# No-code packages have a custom fetch strategy to work around issues
# with resource staging.
@@ -1584,8 +1587,7 @@ def for_package_version(pkg, version):
if isinstance(version, spack.version.GitVersion):
if not hasattr(pkg, "git"):
raise FetchError(
- "Cannot fetch git version for %s. Package has no 'git' attribute" %
- pkg.name
+ "Cannot fetch git version for %s. Package has no 'git' attribute" % pkg.name
)
# Populate the version with comparisons to other commits
version.generate_git_lookup(pkg.name)
@@ -1596,13 +1598,13 @@ def for_package_version(pkg, version):
# We call all non-commit refs tags in this context, at the cost of a slight
# performance hit for branches on older versions of git.
# Branches cannot be cached, so we tell the fetcher not to cache tags/branches
- ref_type = 'commit' if version.is_commit else 'tag'
+ ref_type = "commit" if version.is_commit else "tag"
kwargs = {
- 'git': pkg.git,
+ "git": pkg.git,
ref_type: version.ref,
- 'no_cache': True,
+ "no_cache": True,
}
- kwargs['submodules'] = getattr(pkg, 'submodules', False)
+ kwargs["submodules"] = getattr(pkg, "submodules", False)
fetcher = GitFetchStrategy(**kwargs)
return fetcher
@@ -1611,7 +1613,7 @@ def for_package_version(pkg, version):
return _extrapolate(pkg, version)
# Set package args first so version args can override them
- args = {'fetch_options': pkg.fetch_options}
+ args = {"fetch_options": pkg.fetch_options}
# Grab a dict of args out of the package version dict
args.update(pkg.versions[version])
@@ -1619,14 +1621,14 @@ def for_package_version(pkg, version):
for fetcher in all_strategies:
if fetcher.url_attr in args:
_check_version_attributes(fetcher, pkg, version)
- if fetcher.url_attr == 'git' and hasattr(pkg, 'submodules'):
- args.setdefault('submodules', pkg.submodules)
+ if fetcher.url_attr == "git" and hasattr(pkg, "submodules"):
+ args.setdefault("submodules", pkg.submodules)
return fetcher(**args)
# if a version's optional attributes imply a particular fetch
# strategy, and we have the `url_attr`, then use that strategy.
for fetcher in all_strategies:
- if hasattr(pkg, fetcher.url_attr) or fetcher.url_attr == 'url':
+ if hasattr(pkg, fetcher.url_attr) or fetcher.url_attr == "url":
optionals = fetcher.optional_attrs
if optionals and any(a in args for a in optionals):
_check_version_attributes(fetcher, pkg, version)
@@ -1645,38 +1647,36 @@ def for_package_version(pkg, version):
def from_url_scheme(url, *args, **kwargs):
"""Finds a suitable FetchStrategy by matching its url_attr with the scheme
- in the given url."""
+ in the given url."""
- url = kwargs.get('url', url)
- parsed_url = urllib_parse.urlparse(url, scheme='file')
+ url = kwargs.get("url", url)
+ parsed_url = urllib_parse.urlparse(url, scheme="file")
- scheme_mapping = (
- kwargs.get('scheme_mapping') or
- {
- 'file': 'url',
- 'http': 'url',
- 'https': 'url',
- 'ftp': 'url',
- 'ftps': 'url',
- })
+ scheme_mapping = kwargs.get("scheme_mapping") or {
+ "file": "url",
+ "http": "url",
+ "https": "url",
+ "ftp": "url",
+ "ftps": "url",
+ }
scheme = parsed_url.scheme
scheme = scheme_mapping.get(scheme, scheme)
for fetcher in all_strategies:
- url_attr = getattr(fetcher, 'url_attr', None)
+ url_attr = getattr(fetcher, "url_attr", None)
if url_attr and url_attr == scheme:
return fetcher(url, *args, **kwargs)
raise ValueError(
- 'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(
- SCHEME=parsed_url.scheme))
+ 'No FetchStrategy found for url with scheme: "{SCHEME}"'.format(SCHEME=parsed_url.scheme)
+ )
def from_list_url(pkg):
"""If a package provides a URL which lists URLs for resources by
- version, this can can create a fetcher for a URL discovered for
- the specified package's version."""
+    version, this can create a fetcher for a URL discovered for
+ the specified package's version."""
if pkg.list_url:
try:
@@ -1691,12 +1691,11 @@ def from_list_url(pkg):
if version in pkg.versions:
args = pkg.versions[version]
checksum = next(
- (v for k, v in args.items() if k in crypto.hashes),
- args.get('checksum'))
+ (v for k, v in args.items() if k in crypto.hashes), args.get("checksum")
+ )
# construct a fetcher
- return URLFetchStrategy(url_from_list, checksum,
- fetch_options=pkg.fetch_options)
+ return URLFetchStrategy(url_from_list, checksum, fetch_options=pkg.fetch_options)
except KeyError as e:
tty.debug(e)
tty.msg("Cannot find version %s in url_list" % pkg.version)
@@ -1708,7 +1707,6 @@ def from_list_url(pkg):
class FsCache(object):
-
def __init__(self, root):
self.root = os.path.abspath(root)
@@ -1743,14 +1741,14 @@ class NoCacheError(FetchError):
class FailedDownloadError(FetchError):
"""Raised when a download fails."""
+
def __init__(self, url, msg=""):
- super(FailedDownloadError, self).__init__(
- "Failed to fetch file from URL: %s" % url, msg)
+ super(FailedDownloadError, self).__init__("Failed to fetch file from URL: %s" % url, msg)
self.url = url
class NoArchiveFileError(FetchError):
- """"Raised when an archive file is expected but none exists."""
+ """ "Raised when an archive file is expected but none exists."""
class NoDigestError(FetchError):
@@ -1767,13 +1765,14 @@ class FetcherConflict(FetchError):
class InvalidArgsError(FetchError):
"""Raised when a version can't be deduced from a set of arguments."""
+
def __init__(self, pkg=None, version=None, **args):
msg = "Could not guess a fetch strategy"
if pkg:
- msg += ' for {pkg}'.format(pkg=pkg)
+ msg += " for {pkg}".format(pkg=pkg)
if version:
- msg += '@{version}'.format(version=version)
- long_msg = 'with arguments: {args}'.format(args=args)
+ msg += "@{version}".format(version=version)
+ long_msg = "with arguments: {args}".format(args=args)
super(InvalidArgsError, self).__init__(msg, long_msg)
@@ -1783,7 +1782,8 @@ class ChecksumError(FetchError):
class NoStageError(FetchError):
"""Raised when fetch operations are called before set_stage()."""
+
def __init__(self, method):
super(NoStageError, self).__init__(
- "Must call FetchStrategy.set_stage() before calling %s" %
- method.__name__)
+ "Must call FetchStrategy.set_stage() before calling %s" % method.__name__
+ )
diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py
index 1a4a492d2f..2373ec5e45 100644
--- a/lib/spack/spack/filesystem_view.py
+++ b/lib/spack/spack/filesystem_view.py
@@ -46,7 +46,7 @@ from spack.error import SpackError
__all__ = ["FilesystemView", "YamlFilesystemView"]
-_projections_path = '.spack/projections.yaml'
+_projections_path = ".spack/projections.yaml"
def view_symlink(src, dst, **kwargs):
@@ -83,34 +83,28 @@ def view_copy(src, dst, view, spec=None):
# Break a package include cycle
import spack.relocate
- orig_sbang = '#!/bin/bash {0}/bin/sbang'.format(spack.paths.spack_root)
+ orig_sbang = "#!/bin/bash {0}/bin/sbang".format(spack.paths.spack_root)
new_sbang = sbang.sbang_shebang_line()
- prefix_to_projection = collections.OrderedDict({
- spec.prefix: view.get_projection_for_spec(spec)})
+ prefix_to_projection = collections.OrderedDict(
+ {spec.prefix: view.get_projection_for_spec(spec)}
+ )
for dep in spec.traverse():
if not dep.external:
- prefix_to_projection[dep.prefix] = \
- view.get_projection_for_spec(dep)
+ prefix_to_projection[dep.prefix] = view.get_projection_for_spec(dep)
if spack.relocate.is_binary(dst):
- spack.relocate.relocate_text_bin(
- binaries=[dst],
- prefixes=prefix_to_projection
- )
+ spack.relocate.relocate_text_bin(binaries=[dst], prefixes=prefix_to_projection)
else:
prefix_to_projection[spack.store.layout.root] = view._root
prefix_to_projection[orig_sbang] = new_sbang
- spack.relocate.relocate_text(
- files=[dst],
- prefixes=prefix_to_projection
- )
+ spack.relocate.relocate_text(files=[dst], prefixes=prefix_to_projection)
try:
stat = os.stat(src)
os.chown(dst, stat.st_uid, stat.st_gid)
except OSError:
- tty.debug('Can\'t change the permissions for %s' % dst)
+ tty.debug("Can't change the permissions for %s" % dst)
def view_func_parser(parsed_name):
@@ -128,38 +122,38 @@ def view_func_parser(parsed_name):
def inverse_view_func_parser(view_type):
# get string based on view type
if view_type is view_hardlink:
- link_name = 'hardlink'
+ link_name = "hardlink"
elif view_type is view_copy:
- link_name = 'copy'
+ link_name = "copy"
else:
- link_name = 'symlink'
+ link_name = "symlink"
return link_name
class FilesystemView(object):
"""
- Governs a filesystem view that is located at certain root-directory.
+    Governs a filesystem view that is located at a certain root directory.
- Packages are linked from their install directories into a common file
- hierachy.
+ Packages are linked from their install directories into a common file
+    hierarchy.
- In distributed filesystems, loading each installed package seperately
- can lead to slow-downs due to too many directories being traversed.
- This can be circumvented by loading all needed modules into a common
- directory structure.
+    In distributed filesystems, loading each installed package separately
+ can lead to slow-downs due to too many directories being traversed.
+ This can be circumvented by loading all needed modules into a common
+ directory structure.
"""
def __init__(self, root, layout, **kwargs):
"""
- Initialize a filesystem view under the given `root` directory with
- corresponding directory `layout`.
+ Initialize a filesystem view under the given `root` directory with
+ corresponding directory `layout`.
- Files are linked by method `link` (llnl.util.symlink by default).
+ Files are linked by method `link` (llnl.util.symlink by default).
"""
self._root = root
self.layout = layout
- self.projections = kwargs.get('projections', {})
+ self.projections = kwargs.get("projections", {})
self.ignore_conflicts = kwargs.get("ignore_conflicts", False)
self.verbose = kwargs.get("verbose", False)
@@ -170,116 +164,116 @@ class FilesystemView(object):
def add_specs(self, *specs, **kwargs):
"""
- Add given specs to view.
+ Add given specs to view.
- The supplied specs might be standalone packages or extensions of
- other packages.
+ The supplied specs might be standalone packages or extensions of
+ other packages.
- Should accept `with_dependencies` as keyword argument (default
- True) to indicate wether or not dependencies should be activated as
- well.
+ Should accept `with_dependencies` as keyword argument (default
+    True) to indicate whether or not dependencies should be activated as
+ well.
- Should except an `exclude` keyword argument containing a list of
- regexps that filter out matching spec names.
+    Should accept an `exclude` keyword argument containing a list of
+ regexps that filter out matching spec names.
- This method should make use of `activate_{extension,standalone}`.
+ This method should make use of `activate_{extension,standalone}`.
"""
raise NotImplementedError
def add_extension(self, spec):
"""
- Add (link) an extension in this view. Does not add dependencies.
+ Add (link) an extension in this view. Does not add dependencies.
"""
raise NotImplementedError
def add_standalone(self, spec):
"""
- Add (link) a standalone package into this view.
+ Add (link) a standalone package into this view.
"""
raise NotImplementedError
def check_added(self, spec):
"""
- Check if the given concrete spec is active in this view.
+ Check if the given concrete spec is active in this view.
"""
raise NotImplementedError
def remove_specs(self, *specs, **kwargs):
"""
- Removes given specs from view.
+ Removes given specs from view.
- The supplied spec might be a standalone package or an extension of
- another package.
+ The supplied spec might be a standalone package or an extension of
+ another package.
- Should accept `with_dependencies` as keyword argument (default
- True) to indicate wether or not dependencies should be deactivated
- as well.
+ Should accept `with_dependencies` as keyword argument (default
+    True) to indicate whether or not dependencies should be deactivated
+ as well.
- Should accept `with_dependents` as keyword argument (default True)
- to indicate wether or not dependents on the deactivated specs
- should be removed as well.
+ Should accept `with_dependents` as keyword argument (default True)
+    to indicate whether or not dependents on the deactivated specs
+ should be removed as well.
- Should except an `exclude` keyword argument containing a list of
- regexps that filter out matching spec names.
+    Should accept an `exclude` keyword argument containing a list of
+ regexps that filter out matching spec names.
- This method should make use of `deactivate_{extension,standalone}`.
+ This method should make use of `deactivate_{extension,standalone}`.
"""
raise NotImplementedError
def remove_extension(self, spec):
"""
- Remove (unlink) an extension from this view.
+ Remove (unlink) an extension from this view.
"""
raise NotImplementedError
def remove_standalone(self, spec):
"""
- Remove (unlink) a standalone package from this view.
+ Remove (unlink) a standalone package from this view.
"""
raise NotImplementedError
def get_projection_for_spec(self, spec):
"""
- Get the projection in this view for a spec.
+ Get the projection in this view for a spec.
"""
raise NotImplementedError
def get_all_specs(self):
"""
- Get all specs currently active in this view.
+ Get all specs currently active in this view.
"""
raise NotImplementedError
def get_spec(self, spec):
"""
- Return the actual spec linked in this view (i.e. do not look it up
- in the database by name).
+ Return the actual spec linked in this view (i.e. do not look it up
+ in the database by name).
- `spec` can be a name or a spec from which the name is extracted.
+ `spec` can be a name or a spec from which the name is extracted.
- As there can only be a single version active for any spec the name
- is enough to identify the spec in the view.
+ As there can only be a single version active for any spec the name
+ is enough to identify the spec in the view.
- If no spec is present, returns None.
+ If no spec is present, returns None.
"""
raise NotImplementedError
def print_status(self, *specs, **kwargs):
"""
- Print a short summary about the given specs, detailing whether..
- * ..they are active in the view.
- * ..they are active but the activated version differs.
- * ..they are not activte in the view.
+ Print a short summary about the given specs, detailing whether..
+ * ..they are active in the view.
+ * ..they are active but the activated version differs.
+    * ..they are not active in the view.
- Takes `with_dependencies` keyword argument so that the status of
- dependencies is printed as well.
+ Takes `with_dependencies` keyword argument so that the status of
+ dependencies is printed as well.
"""
raise NotImplementedError
class YamlFilesystemView(FilesystemView):
"""
- Filesystem view to work with a yaml based directory layout.
+ Filesystem view to work with a yaml based directory layout.
"""
def __init__(self, root, layout, **kwargs):
@@ -298,8 +292,8 @@ class YamlFilesystemView(FilesystemView):
# Ensure projections are the same from each source
# Read projections file from view
if self.projections != self.read_projections():
- msg = 'View at %s has projections file' % self._root
- msg += ' which does not match projections passed manually.'
+ msg = "View at %s has projections file" % self._root
+ msg += " which does not match projections passed manually."
raise ConflictingProjectionsError(msg)
self.extensions_layout = YamlViewExtensionsLayout(self, layout)
@@ -309,16 +303,15 @@ class YamlFilesystemView(FilesystemView):
def write_projections(self):
if self.projections:
mkdirp(os.path.dirname(self.projections_path))
- with open(self.projections_path, 'w') as f:
- f.write(s_yaml.dump_config({'projections': self.projections}))
+ with open(self.projections_path, "w") as f:
+ f.write(s_yaml.dump_config({"projections": self.projections}))
def read_projections(self):
if os.path.exists(self.projections_path):
- with open(self.projections_path, 'r') as f:
+ with open(self.projections_path, "r") as f:
projections_data = s_yaml.load(f)
- spack.config.validate(projections_data,
- spack.schema.projections.schema)
- return projections_data['projections']
+ spack.config.validate(projections_data, spack.schema.projections.schema)
+ return projections_data["projections"]
else:
return {}
@@ -349,18 +342,15 @@ class YamlFilesystemView(FilesystemView):
def add_extension(self, spec):
if not spec.package.is_extension:
- tty.error(self._croot + 'Package %s is not an extension.'
- % spec.name)
+ tty.error(self._croot + "Package %s is not an extension." % spec.name)
return False
if spec.external:
- tty.warn(self._croot + 'Skipping external package: %s'
- % colorize_spec(spec))
+ tty.warn(self._croot + "Skipping external package: %s" % colorize_spec(spec))
return True
if not spec.package.is_activated(self):
- spec.package.do_activate(
- self, verbose=self.verbose, with_dependencies=False)
+ spec.package.do_activate(self, verbose=self.verbose, with_dependencies=False)
# make sure the meta folder is linked as well (this is not done by the
        # extension-activation mechanism)
@@ -371,31 +361,28 @@ class YamlFilesystemView(FilesystemView):
def add_standalone(self, spec):
if spec.package.is_extension:
- tty.error(self._croot + 'Package %s is an extension.'
- % spec.name)
+ tty.error(self._croot + "Package %s is an extension." % spec.name)
return False
if spec.external:
- tty.warn(self._croot + 'Skipping external package: %s'
- % colorize_spec(spec))
+ tty.warn(self._croot + "Skipping external package: %s" % colorize_spec(spec))
return True
if self.check_added(spec):
- tty.warn(self._croot + 'Skipping already linked package: %s'
- % colorize_spec(spec))
+ tty.warn(self._croot + "Skipping already linked package: %s" % colorize_spec(spec))
return True
if spec.package.extendable:
# Check for globally activated extensions in the extendee that
# we're looking at.
- activated = [p.spec for p in
- spack.store.db.activated_extensions_for(spec)]
+ activated = [p.spec for p in spack.store.db.activated_extensions_for(spec)]
if activated:
- tty.error("Globally activated extensions cannot be used in "
- "conjunction with filesystem views. "
- "Please deactivate the following specs: ")
- spack.cmd.display_specs(activated, flags=True, variants=True,
- long=False)
+ tty.error(
+ "Globally activated extensions cannot be used in "
+ "conjunction with filesystem views. "
+ "Please deactivate the following specs: "
+ )
+ spack.cmd.display_specs(activated, flags=True, variants=True, long=False)
return False
self.merge(spec)
@@ -403,7 +390,7 @@ class YamlFilesystemView(FilesystemView):
self.link_meta_folder(spec)
if self.verbose:
- tty.info(self._croot + 'Linked package: %s' % colorize_spec(spec))
+ tty.info(self._croot + "Linked package: %s" % colorize_spec(spec))
return True
def merge(self, spec, ignore=None):
@@ -414,8 +401,7 @@ class YamlFilesystemView(FilesystemView):
tree = LinkTree(view_source)
ignore = ignore or (lambda f: False)
- ignore_file = match_predicate(
- self.layout.hidden_file_regexes, ignore)
+ ignore_file = match_predicate(self.layout.hidden_file_regexes, ignore)
# check for dir conflicts
conflicts = tree.find_dir_conflicts(view_dst, ignore_file)
@@ -440,8 +426,7 @@ class YamlFilesystemView(FilesystemView):
tree = LinkTree(view_source)
ignore = ignore or (lambda f: False)
- ignore_file = match_predicate(
- self.layout.hidden_file_regexes, ignore)
+ ignore_file = match_predicate(self.layout.hidden_file_regexes, ignore)
merge_map = tree.get_file_map(view_dst, ignore_file)
pkg.remove_files_from_view(self, merge_map)
@@ -458,10 +443,11 @@ class YamlFilesystemView(FilesystemView):
# check if this spec owns a file of that name (through the
# manifest in the metadata dir, which we have in the view).
- manifest_file = os.path.join(self.get_path_meta_folder(spec),
- spack.store.layout.manifest_file_name)
+ manifest_file = os.path.join(
+ self.get_path_meta_folder(spec), spack.store.layout.manifest_file_name
+ )
try:
- with open(manifest_file, 'r') as f:
+ with open(manifest_file, "r") as f:
manifest = s_json.load(f)
except (OSError, IOError):
# if we can't load it, assume it doesn't know about the file.
@@ -514,22 +500,25 @@ class YamlFilesystemView(FilesystemView):
if with_dependents:
# remove all packages depending on the ones to remove
if len(dependents) > 0:
- tty.warn(self._croot +
- "The following dependents will be removed: %s"
- % ", ".join((s.name for s in dependents)))
+ tty.warn(
+ self._croot
+ + "The following dependents will be removed: %s"
+ % ", ".join((s.name for s in dependents))
+ )
to_deactivate.update(dependents)
elif len(dependents) > 0:
- tty.warn(self._croot +
- "The following packages will be unusable: %s"
- % ", ".join((s.name for s in dependents)))
+ tty.warn(
+ self._croot
+ + "The following packages will be unusable: %s"
+ % ", ".join((s.name for s in dependents))
+ )
# Determine the order that packages should be removed from the view;
# dependents come before their dependencies.
to_deactivate_sorted = list()
depmap = dict()
for spec in to_deactivate:
- depmap[spec] = set(d for d in spec.traverse(root=False)
- if d in to_deactivate)
+ depmap[spec] = set(d for d in spec.traverse(root=False) if d in to_deactivate)
while depmap:
for spec in [s for s, d in depmap.items() if not d]:
@@ -553,40 +542,37 @@ class YamlFilesystemView(FilesystemView):
def remove_extension(self, spec, with_dependents=True):
"""
- Remove (unlink) an extension from this view.
+ Remove (unlink) an extension from this view.
"""
if not self.check_added(spec):
- tty.warn(self._croot +
- 'Skipping package not linked in view: %s' % spec.name)
+ tty.warn(self._croot + "Skipping package not linked in view: %s" % spec.name)
return
if spec.package.is_activated(self):
spec.package.do_deactivate(
- self,
- verbose=self.verbose,
- remove_dependents=with_dependents)
+ self, verbose=self.verbose, remove_dependents=with_dependents
+ )
self.unlink_meta_folder(spec)
def remove_standalone(self, spec):
"""
- Remove (unlink) a standalone package from this view.
+ Remove (unlink) a standalone package from this view.
"""
if not self.check_added(spec):
- tty.warn(self._croot +
- 'Skipping package not linked in view: %s' % spec.name)
+ tty.warn(self._croot + "Skipping package not linked in view: %s" % spec.name)
return
self.unmerge(spec)
self.unlink_meta_folder(spec)
if self.verbose:
- tty.info(self._croot + 'Removed package: %s' % colorize_spec(spec))
+ tty.info(self._croot + "Removed package: %s" % colorize_spec(spec))
def get_projection_for_spec(self, spec):
"""
- Return the projection for a spec in this view.
+ Return the projection for a spec in this view.
- Relies on the ordering of projections to avoid ambiguity.
+ Relies on the ordering of projections to avoid ambiguity.
"""
spec = spack.spec.Spec(spec)
# Extensions are placed by their extendee, not by their own spec
@@ -603,15 +589,13 @@ class YamlFilesystemView(FilesystemView):
md_dirs = []
for root, dirs, files in os.walk(self._root):
if spack.store.layout.metadata_dir in dirs:
- md_dirs.append(os.path.join(root,
- spack.store.layout.metadata_dir))
+ md_dirs.append(os.path.join(root, spack.store.layout.metadata_dir))
specs = []
for md_dir in md_dirs:
if os.path.exists(md_dir):
for name_dir in os.listdir(md_dir):
- filename = os.path.join(md_dir, name_dir,
- spack.store.layout.spec_file_name)
+ filename = os.path.join(md_dir, name_dir, spack.store.layout.spec_file_name)
spec = get_spec_from_file(filename)
if spec:
specs.append(spec)
@@ -619,23 +603,23 @@ class YamlFilesystemView(FilesystemView):
def get_conflicts(self, *specs):
"""
- Return list of tuples (<spec>, <spec in view>) where the spec
- active in the view differs from the one to be activated.
+ Return list of tuples (<spec>, <spec in view>) where the spec
+ active in the view differs from the one to be activated.
"""
in_view = map(self.get_spec, specs)
- return [(s, v) for s, v in zip(specs, in_view)
- if v is not None and s != v]
+ return [(s, v) for s, v in zip(specs, in_view) if v is not None and s != v]
def get_path_meta_folder(self, spec):
"Get path to meta folder for either spec or spec name."
- return os.path.join(self.get_projection_for_spec(spec),
- spack.store.layout.metadata_dir,
- getattr(spec, "name", spec))
+ return os.path.join(
+ self.get_projection_for_spec(spec),
+ spack.store.layout.metadata_dir,
+ getattr(spec, "name", spec),
+ )
def get_spec(self, spec):
dotspack = self.get_path_meta_folder(spec)
- filename = os.path.join(dotspack,
- spack.store.layout.spec_file_name)
+ filename = os.path.join(dotspack, spack.store.layout.spec_file_name)
return get_spec_from_file(filename)
@@ -651,11 +635,13 @@ class YamlFilesystemView(FilesystemView):
"Singular print function for spec conflicts."
cprint = getattr(tty, level)
color = sys.stdout.isatty()
- linked = tty.color.colorize(" (@gLinked@.)", color=color)
+ linked = tty.color.colorize(" (@gLinked@.)", color=color)
specified = tty.color.colorize("(@rSpecified@.)", color=color)
- cprint(self._croot + "Package conflict detected:\n"
- "%s %s\n" % (linked, colorize_spec(spec_active)) +
- "%s %s" % (specified, colorize_spec(spec_specified)))
+ cprint(
+ self._croot + "Package conflict detected:\n"
+ "%s %s\n" % (linked, colorize_spec(spec_active))
+ + "%s %s" % (specified, colorize_spec(spec_specified))
+ )
def print_status(self, *specs, **kwargs):
if kwargs.get("with_dependencies", False):
@@ -666,8 +652,7 @@ class YamlFilesystemView(FilesystemView):
for s, v in zip(specs, in_view):
if not v:
- tty.error(self._croot +
- 'Package not linked: %s' % s.name)
+ tty.error(self._croot + "Package not linked: %s" % s.name)
elif s != v:
self.print_conflict(v, s, level="warn")
@@ -677,24 +662,26 @@ class YamlFilesystemView(FilesystemView):
tty.msg("Packages linked in %s:" % self._croot[:-1])
# Make a dict with specs keyed by architecture and compiler.
- index = index_by(specs, ('architecture', 'compiler'))
+ index = index_by(specs, ("architecture", "compiler"))
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0:
print()
- header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
- architecture,
- spack.spec.compiler_color,
- compiler)
- tty.hline(colorize(header), char='-')
+ header = "%s{%s} / %s{%s}" % (
+ spack.spec.architecture_color,
+ architecture,
+ spack.spec.compiler_color,
+ compiler,
+ )
+ tty.hline(colorize(header), char="-")
specs = index[(architecture, compiler)]
specs.sort()
- format_string = '{name}{@version}'
- format_string += '{%compiler}{compiler_flags}{variants}'
+ format_string = "{name}{@version}"
+ format_string += "{%compiler}{compiler_flags}{variants}"
abbreviated = [s.cformat(format_string) for s in specs]
# Print one spec per line along with prefix path
@@ -703,13 +690,10 @@ class YamlFilesystemView(FilesystemView):
format = " %%-%ds%%s" % width
for abbrv, s in zip(abbreviated, specs):
- prefix = ''
+ prefix = ""
if self.verbose:
- prefix = colorize('@K{%s}' % s.dag_hash(7))
- print(
- prefix + (format % (abbrv,
- self.get_projection_for_spec(s)))
- )
+ prefix = colorize("@K{%s}" % s.dag_hash(7))
+ print(prefix + (format % (abbrv, self.get_projection_for_spec(s))))
else:
tty.warn(self._croot + "No packages found.")
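# [editor's note] A minimal, self-contained sketch of the width-escaping trick used in
# the hunk above ("%%-%ds%%s" % width): the doubled %% survives the first substitution,
# so a second substitution can fill in the actual values. Names and widths are made up.
specs_and_paths = [("zlib@1.2.12", "/opt/view/zlib"), ("cmake@3.23.1", "/opt/view/cmake")]
width = max(len(name) for name, _ in specs_and_paths) + 2
fmt = " %%-%ds%%s" % width          # first pass: becomes e.g. " %-14s%s"
for name, path in specs_and_paths:
    print(fmt % (name, path))       # second pass: left-justified name, then its prefix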
@@ -730,7 +714,7 @@ class YamlFilesystemView(FilesystemView):
def _check_no_ext_conflicts(self, spec):
"""
- Check that there is no extension conflict for specs.
+ Check that there is no extension conflict for specs.
"""
extendee = spec.package.extendee_spec
try:
@@ -738,8 +722,7 @@ class YamlFilesystemView(FilesystemView):
except ExtensionAlreadyInstalledError:
# we print the warning here because later on the order in which
# packages get activated is not clear (set-sorting)
- tty.warn(self._croot +
- 'Skipping already activated package: %s' % spec.name)
+ tty.warn(self._croot + "Skipping already activated package: %s" % spec.name)
class SimpleFilesystemView(FilesystemView):
@@ -758,7 +741,7 @@ class SimpleFilesystemView(FilesystemView):
# Drop externals
for s in specs:
if s.external:
- tty.warn('Skipping external package: ' + s.short_spec)
+ tty.warn("Skipping external package: " + s.short_spec)
specs = [s for s in specs if not s.external]
if kwargs.get("exclude", None):
@@ -790,9 +773,9 @@ class SimpleFilesystemView(FilesystemView):
else:
raise MergeConflictSummary(visitor.file_conflicts)
- tty.debug("Creating {0} dirs and {1} links".format(
- len(visitor.directories),
- len(visitor.files)))
+ tty.debug(
+ "Creating {0} dirs and {1} links".format(len(visitor.directories), len(visitor.files))
+ )
# Make the directory structure
for dst in visitor.directories:
@@ -801,8 +784,7 @@ class SimpleFilesystemView(FilesystemView):
# Then group the files to be linked by spec...
# For compatibility, we have to create a merge_map dict mapping
# full_src => full_dst
- files_per_spec = itertools.groupby(
- visitor.files.items(), key=lambda item: item[1][0])
+ files_per_spec = itertools.groupby(visitor.files.items(), key=lambda item: item[1][0])
for (spec, (src_root, rel_paths)) in zip(specs, files_per_spec):
merge_map = dict()
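# [editor's note] itertools.groupby, as used above, only merges *consecutive* items that
# share a key, so the grouping relies on visitor.files already being ordered per spec.
# A tiny illustration with made-up data (Python 3.7+ dicts preserve insertion order):
import itertools

files = {"bin/a": (0, "bin/a"), "bin/b": (0, "bin/b"), "share/c": (1, "share/c")}
for src_root, group in itertools.groupby(files.items(), key=lambda item: item[1][0]):
    print(src_root, [dst for dst, _ in group])
# -> 0 ['bin/a', 'bin/b']
#    1 ['share/c']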
@@ -819,13 +801,12 @@ class SimpleFilesystemView(FilesystemView):
metadata_visitor = SourceMergeVisitor()
for spec in specs:
- src_prefix = os.path.join(
- spec.package.view_source(),
- spack.store.layout.metadata_dir)
+ src_prefix = os.path.join(spec.package.view_source(), spack.store.layout.metadata_dir)
proj = os.path.join(
self.get_relative_projection_for_spec(spec),
spack.store.layout.metadata_dir,
- spec.name)
+ spec.name,
+ )
metadata_visitor.set_projection(proj)
visit_directory_tree(src_prefix, metadata_visitor)
@@ -844,8 +825,7 @@ class SimpleFilesystemView(FilesystemView):
os.mkdir(os.path.join(self._root, dst))
for dst_relpath, (src_root, src_relpath) in metadata_visitor.files.items():
- self.link(os.path.join(src_root, src_relpath),
- os.path.join(self._root, dst_relpath))
+ self.link(os.path.join(src_root, src_relpath), os.path.join(self._root, dst_relpath))
def get_relative_projection_for_spec(self, spec):
# Extensions are placed by their extendee, not by their own spec
@@ -853,13 +833,13 @@ class SimpleFilesystemView(FilesystemView):
spec = spec.package.extendee_spec
p = spack.projections.get_projection(self.projections, spec)
- return spec.format(p) if p else ''
+ return spec.format(p) if p else ""
def get_projection_for_spec(self, spec):
"""
- Return the projection for a spec in this view.
+ Return the projection for a spec in this view.
- Relies on the ordering of projections to avoid ambiguity.
+ Relies on the ordering of projections to avoid ambiguity.
"""
spec = spack.spec.Spec(spec)
# Extensions are placed by their extendee, not by their own spec
@@ -898,10 +878,10 @@ def colorize_spec(spec):
return spec.short_spec
-def find_dependents(all_specs, providers, deptype='run'):
+def find_dependents(all_specs, providers, deptype="run"):
"""
- Return a set containing all those specs from all_specs that depend on
- providers at the given dependency type.
+ Return a set containing all those specs from all_specs that depend on
+ providers at the given dependency type.
"""
dependents = set()
for s in all_specs:
@@ -920,6 +900,7 @@ def filter_exclude(specs, exclude):
if e.match(spec.name):
return False
return True
+
return filter(keep, specs)
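# [editor's note] A standalone sketch of the exclude-filter idea above, assuming
# `exclude` is a list of regular-expression strings matched against package names.
import re

def filter_exclude_names(names, exclude):
    exclude = [re.compile(e) for e in exclude]

    def keep(name):
        return not any(e.match(name) for e in exclude)

    return [n for n in names if keep(n)]

print(filter_exclude_names(["zlib", "py-numpy", "py-scipy"], [r"^py-"]))
# -> ['zlib']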
diff --git a/lib/spack/spack/gcs_handler.py b/lib/spack/spack/gcs_handler.py
index de35511cef..13121be603 100644
--- a/lib/spack/spack/gcs_handler.py
+++ b/lib/spack/spack/gcs_handler.py
@@ -10,16 +10,14 @@ import spack.util.web as web_util
def gcs_open(req, *args, **kwargs):
- """Open a reader stream to a blob object on GCS
- """
+ """Open a reader stream to a blob object on GCS"""
import spack.util.gcs as gcs_util
url = url_util.parse(req.get_full_url())
gcsblob = gcs_util.GCSBlob(url)
if not gcsblob.exists():
- raise web_util.SpackWebError('GCS blob {0} does not exist'.format(
- gcsblob.blob_path))
+ raise web_util.SpackWebError("GCS blob {0} does not exist".format(gcsblob.blob_path))
stream = gcsblob.get_blob_byte_stream()
headers = gcsblob.get_blob_headers()
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 2c0a07f17c..6c302544c4 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -50,14 +50,14 @@ import llnl.util.tty.color
import spack.dependency
-__all__ = ['graph_ascii', 'AsciiGraph', 'graph_dot']
+__all__ = ["graph_ascii", "AsciiGraph", "graph_dot"]
def node_label(spec):
- return spec.format('{name}{@version}{/hash:7}')
+ return spec.format("{name}{@version}{/hash:7}")
-def topological_sort(spec, deptype='all'):
+def topological_sort(spec, deptype="all"):
"""Return a list of dependency specs in topological sorting order.
The spec argument is not modified in by the function.
@@ -78,33 +78,30 @@ def topological_sort(spec, deptype='all'):
def dependencies(specs):
"""Return all the dependencies (including transitive) for a spec."""
- return list(set(itertools.chain.from_iterable(
- s.dependencies(deptype=deptype) for s in specs
- )))
+ return list(
+ set(itertools.chain.from_iterable(s.dependencies(deptype=deptype) for s in specs))
+ )
def dependents(specs):
"""Return all the dependents (including those of transitive dependencies)
for a spec.
"""
- candidates = list(set(itertools.chain.from_iterable(
- s.dependents(deptype=deptype) for s in specs
- )))
+ candidates = list(
+ set(itertools.chain.from_iterable(s.dependents(deptype=deptype) for s in specs))
+ )
return [x for x in candidates if x.name in nodes]
topological_order, children = [], {}
# Map a spec encoded as (id, name) to a list of its transitive dependencies
for spec in itertools.chain.from_iterable(nodes.values()):
- children[(id(spec), spec.name)] = [
- x for x in dependencies([spec]) if x.name in nodes
- ]
+ children[(id(spec), spec.name)] = [x for x in dependencies([spec]) if x.name in nodes]
# To return a result that is topologically ordered we need to add nodes
# only after their dependencies. The first nodes we can add are leaf nodes,
# i.e. nodes that have no dependencies.
ready = [
- spec for spec in itertools.chain.from_iterable(nodes.values())
- if not dependencies([spec])
+ spec for spec in itertools.chain.from_iterable(nodes.values()) if not dependencies([spec])
]
heapq.heapify(ready)
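# [editor's note] A simplified, name-keyed version of the heap-based topological sort
# sketched above (Kahn's algorithm). This is not the Spack implementation, which works
# on Spec objects and per-deptype edges; it only illustrates the "pop ready leaves,
# then release their dependents" loop.
import heapq

def topo_order(deps):
    """deps maps each node name to the set of node names it depends on."""
    remaining = {n: set(d) for n, d in deps.items()}
    ready = sorted(n for n, d in remaining.items() if not d)
    heapq.heapify(ready)
    order = []
    while ready:
        node = heapq.heappop(ready)
        order.append(node)
        for other, d in remaining.items():
            if node in d:
                d.discard(node)
                if not d:
                    heapq.heappush(ready, other)
    return order

print(topo_order({"mpileaks": {"mpich", "callpath"}, "callpath": {"mpich"}, "mpich": set()}))
# -> ['mpich', 'callpath', 'mpileaks']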
@@ -138,34 +135,33 @@ def find(seq, predicate):
# Names of different graph line states. We record previous line
# states so that we can easily determine what to do when connecting.
-states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
+states = ("node", "collapse", "merge-right", "expand-right", "back-edge")
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
class AsciiGraph(object):
-
def __init__(self):
# These can be set after initialization or after a call to
# graph() to change behavior.
- self.node_character = 'o'
+ self.node_character = "o"
self.debug = False
self.indent = 0
self.deptype = spack.dependency.all_deptypes
# These are colors in the order they'll be used for edges.
# See llnl.util.tty.color for details on color characters.
- self.colors = 'rgbmcyRGBMCY'
+ self.colors = "rgbmcyRGBMCY"
# Internal vars are used in the graph() function and are
# properly initialized there.
- self._name_to_color = None # Node name to color
- self._out = None # Output stream
- self._frontier = None # frontier
- self._prev_state = None # State of previous line
- self._prev_index = None # Index of expansion point of prev line
+ self._name_to_color = None # Node name to color
+ self._out = None # Output stream
+ self._frontier = None # frontier
+ self._prev_state = None # State of previous line
+ self._prev_index = None # Index of expansion point of prev line
def _indent(self):
- self._out.write(self.indent * ' ')
+ self._out.write(self.indent * " ")
def _write_edge(self, string, index, sub=0):
"""Write a colored edge to the output stream."""
@@ -210,8 +206,7 @@ class AsciiGraph(object):
collapse = True
if self._prev_state == EXPAND_RIGHT:
# Special case where previous line expanded and i is off by 1.
- self._back_edge_line([], j, i + 1, True,
- label + "-1.5 " + str((i + 1, j)))
+ self._back_edge_line([], j, i + 1, True, label + "-1.5 " + str((i + 1, j)))
collapse = False
else:
@@ -221,12 +216,10 @@ class AsciiGraph(object):
if i - j > 1:
# We need two lines to connect if distance > 1
- self._back_edge_line([], j, i, True,
- label + "-1 " + str((i, j)))
+ self._back_edge_line([], j, i, True, label + "-1 " + str((i, j)))
collapse = False
- self._back_edge_line([j], -1, -1, collapse,
- label + "-2 " + str((i, j)))
+ self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i, j)))
return True
elif deps:
@@ -241,9 +234,8 @@ class AsciiGraph(object):
if self.debug:
self._out.write(" " * 20)
- self._out.write("%-20s" % (
- str(self._prev_state) if self._prev_state else ''))
- self._out.write("%-20s" % (str(label) if label else ''))
+ self._out.write("%-20s" % (str(self._prev_state) if self._prev_state else ""))
+ self._out.write("%-20s" % (str(label) if label else ""))
self._out.write("%s" % self._frontier)
def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
@@ -287,6 +279,7 @@ class AsciiGraph(object):
label -- optional debug label to print after the line.
"""
+
def advance(to_pos, edges):
"""Write edges up to <to_pos>."""
for i in range(self._pos, to_pos):
@@ -299,28 +292,28 @@ class AsciiGraph(object):
self._indent()
for p in prev_ends:
- advance(p, lambda: [("| ", self._pos)])
- advance(p + 1, lambda: [("|/", self._pos)])
+ advance(p, lambda: [("| ", self._pos)])
+ advance(p + 1, lambda: [("|/", self._pos)])
if end >= 0:
- advance(end + 1, lambda: [("| ", self._pos)])
- advance(start - 1, lambda: [("|", self._pos), ("_", end)])
+ advance(end + 1, lambda: [("| ", self._pos)])
+ advance(start - 1, lambda: [("|", self._pos), ("_", end)])
else:
advance(start - 1, lambda: [("| ", self._pos)])
if start >= 0:
- advance(start, lambda: [("|", self._pos), ("/", end)])
+ advance(start, lambda: [("|", self._pos), ("/", end)])
if collapse:
- advance(flen, lambda: [(" /", self._pos)])
+ advance(flen, lambda: [(" /", self._pos)])
else:
- advance(flen, lambda: [("| ", self._pos)])
+ advance(flen, lambda: [("| ", self._pos)])
self._set_state(BACK_EDGE, end, label)
self._out.write("\n")
def _node_label(self, node):
- return node.format('{name}@@{version}{/hash:7}')
+ return node.format("{name}@@{version}{/hash:7}")
def _node_line(self, index, node):
"""Writes a line with a node at index."""
@@ -437,8 +430,7 @@ class AsciiGraph(object):
self._frontier[i].remove(d)
if i - b > 1:
collapse_l1 = any(not e for e in self._frontier)
- self._back_edge_line(
- prev_ends, b, i, collapse_l1, 'left-1')
+ self._back_edge_line(prev_ends, b, i, collapse_l1, "left-1")
del prev_ends[:]
prev_ends.append(b)
@@ -450,15 +442,16 @@ class AsciiGraph(object):
collapse_l2 = False
if pop:
self._frontier.pop(i)
- self._back_edge_line(
- prev_ends, -1, -1, collapse_l2, 'left-2')
+ self._back_edge_line(prev_ends, -1, -1, collapse_l2, "left-2")
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
- if (i + 1 < len(self._frontier) and
- len(self._frontier[i + 1]) == 1 and
- self._frontier[i + 1][0] in self._frontier[i]):
+ if (
+ i + 1 < len(self._frontier)
+ and len(self._frontier[i + 1]) == 1
+ and self._frontier[i + 1][0] in self._frontier[i]
+ ):
# We need to connect to the element to the right.
# Keep lines straight by connecting directly and
# avoiding unnecessary expand/contract.
@@ -493,9 +486,7 @@ class AsciiGraph(object):
# Replace node with its dependencies
self._frontier.pop(i)
- edges = sorted(
- node.edges_to_dependencies(deptype=self.deptype), reverse=True
- )
+ edges = sorted(node.edges_to_dependencies(deptype=self.deptype), reverse=True)
if edges:
deps = [e.spec.dag_hash() for e in edges]
self._connect_deps(i, deps, "new-deps") # anywhere.
@@ -504,8 +495,7 @@ class AsciiGraph(object):
self._collapse_line(i)
-def graph_ascii(spec, node='o', out=None, debug=False,
- indent=0, color=None, deptype='all'):
+def graph_ascii(spec, node="o", out=None, debug=False, indent=0, color=None, deptype="all"):
graph = AsciiGraph()
graph.debug = debug
graph.indent = indent
@@ -516,7 +506,7 @@ def graph_ascii(spec, node='o', out=None, debug=False,
graph.write(spec, color=color, out=out)
-def graph_dot(specs, deptype='all', static=False, out=None):
+def graph_dot(specs, deptype="all", static=False, out=None):
"""Generate a graph in dot format of all provided specs.
Print out a dot formatted graph of all the dependencies between
@@ -536,9 +526,7 @@ def graph_dot(specs, deptype='all', static=False, out=None):
def static_graph(spec, deptype):
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
- possible = pkg_cls.possible_dependencies(
- expand_virtuals=True, deptype=deptype
- )
+ possible = pkg_cls.possible_dependencies(expand_virtuals=True, deptype=deptype)
nodes = set() # elements are (node name, node label)
edges = set() # elements are (src key, dest key)
@@ -567,38 +555,38 @@ def graph_dot(specs, deptype='all', static=False, out=None):
nodes.update(n)
edges.update(e)
- out.write('digraph G {\n')
+ out.write("digraph G {\n")
out.write(' labelloc = "b"\n')
out.write(' rankdir = "TB"\n')
out.write(' ranksep = "1"\n')
- out.write(' edge[\n')
- out.write(' penwidth=4')
- out.write(' ]\n')
- out.write(' node[\n')
- out.write(' fontname=Monaco,\n')
- out.write(' penwidth=4,\n')
- out.write(' fontsize=24,\n')
- out.write(' margin=.2,\n')
- out.write(' shape=box,\n')
- out.write(' fillcolor=lightblue,\n')
+ out.write(" edge[\n")
+ out.write(" penwidth=4")
+ out.write(" ]\n")
+ out.write(" node[\n")
+ out.write(" fontname=Monaco,\n")
+ out.write(" penwidth=4,\n")
+ out.write(" fontsize=24,\n")
+ out.write(" margin=.2,\n")
+ out.write(" shape=box,\n")
+ out.write(" fillcolor=lightblue,\n")
out.write(' style="rounded,filled"')
- out.write(' ]\n')
+ out.write(" ]\n")
# write nodes
- out.write('\n')
+ out.write("\n")
for key, label in nodes:
out.write(' "%s" [label="%s"]\n' % (key, label))
# write edges
- out.write('\n')
+ out.write("\n")
for src, dest in edges:
out.write(' "%s" -> "%s"\n' % (src, dest))
# ensure that roots are all at the top of the plot
dests = set([d for _, d in edges])
roots = ['"%s"' % k for k, _ in nodes if k not in dests]
- out.write('\n')
- out.write(' { rank=min; %s; }' % "; ".join(roots))
+ out.write("\n")
+ out.write(" { rank=min; %s; }" % "; ".join(roots))
- out.write('\n')
- out.write('}\n')
+ out.write("\n")
+ out.write("}\n")
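# [editor's note] A minimal sketch of the DOT text the writer above emits, using
# io.StringIO in place of the real output stream; node keys and labels are made up.
import io

out = io.StringIO()
out.write("digraph G {\n")
out.write('  "hash1" [label="mpileaks"]\n')
out.write('  "hash2" [label="mpich"]\n')
out.write('  "hash1" -> "hash2"\n')
out.write('  { rank=min; "hash1"; }\n')
out.write("}\n")
print(out.getvalue())  # render with e.g. `dot -Tpdf graph.dot -o graph.pdf`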
diff --git a/lib/spack/spack/hash_types.py b/lib/spack/spack/hash_types.py
index d9065148f2..d903515035 100644
--- a/lib/spack/spack/hash_types.py
+++ b/lib/spack/spack/hash_types.py
@@ -31,7 +31,7 @@ class SpecHashDescriptor(object):
@property
def attr(self):
"""Private attribute stored on spec"""
- return '_' + self.name
+ return "_" + self.name
def __call__(self, spec):
"""Run this hash on the provided spec."""
@@ -39,15 +39,12 @@ class SpecHashDescriptor(object):
#: Spack's deployment hash. Includes all inputs that can affect how a package is built.
-dag_hash = SpecHashDescriptor(
- deptype=('build', 'link', 'run'), package_hash=True, name='hash')
+dag_hash = SpecHashDescriptor(deptype=("build", "link", "run"), package_hash=True, name="hash")
#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
- deptype=('build', 'link', 'run', 'test'),
- package_hash=False,
- name='process_hash'
+ deptype=("build", "link", "run", "test"), package_hash=False, name="process_hash"
)
@@ -59,16 +56,18 @@ def _content_hash_override(spec):
#: Package hash used as part of dag hash
package_hash = SpecHashDescriptor(
- deptype=(), package_hash=True, name='package_hash',
- override=_content_hash_override)
+ deptype=(), package_hash=True, name="package_hash", override=_content_hash_override
+)
# Deprecated hash types, no longer used, but needed to understand old serialized
# spec formats
full_hash = SpecHashDescriptor(
- deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
+ deptype=("build", "link", "run"), package_hash=True, name="full_hash"
+)
build_hash = SpecHashDescriptor(
- deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
+ deptype=("build", "link", "run"), package_hash=False, name="build_hash"
+)
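# [editor's note] A stripped-down illustration of the descriptor pattern above. The real
# SpecHashDescriptor hashes a spec's node representation filtered by `deptype`; this toy
# version just hashes a plain string to show how `name` and the cached `attr` relate.
import hashlib

class HashDescriptor(object):
    def __init__(self, deptype, package_hash, name):
        self.deptype = deptype
        self.package_hash = package_hash
        self.name = name

    @property
    def attr(self):
        # private attribute the computed hash would be cached under, e.g. "_hash"
        return "_" + self.name

    def __call__(self, text):
        return hashlib.sha256(text.encode("utf-8")).hexdigest()

demo = HashDescriptor(deptype=("build", "link", "run"), package_hash=True, name="hash")
print(demo.attr, demo("zlib@1.2.12"))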
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index de3cf2fb42..699464c913 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -44,16 +44,14 @@ class _HookRunner(object):
def _populate_hooks(cls):
# Lazily populate the list of hooks
cls._hooks = []
- relative_names = list(llnl.util.lang.list_modules(
- spack.paths.hooks_path
- ))
+ relative_names = list(llnl.util.lang.list_modules(spack.paths.hooks_path))
# We want this hook to be the last registered
- relative_names.sort(key=lambda x: x == 'write_install_manifest')
- assert relative_names[-1] == 'write_install_manifest'
+ relative_names.sort(key=lambda x: x == "write_install_manifest")
+ assert relative_names[-1] == "write_install_manifest"
for name in relative_names:
- module_name = __name__ + '.' + name
+ module_name = __name__ + "." + name
# When importing a module from a package, __import__('A.B', ...)
# returns package A when 'fromlist' is empty. If fromlist is not
# empty it returns the submodule B instead
@@ -71,25 +69,25 @@ class _HookRunner(object):
for _, module in self.hooks:
if hasattr(module, self.hook_name):
hook = getattr(module, self.hook_name)
- if hasattr(hook, '__call__'):
+ if hasattr(hook, "__call__"):
hook(*args, **kwargs)
# pre/post install and run by the install subprocess
-pre_install = _HookRunner('pre_install')
-post_install = _HookRunner('post_install')
+pre_install = _HookRunner("pre_install")
+post_install = _HookRunner("post_install")
# These hooks are run within an install subprocess
-pre_uninstall = _HookRunner('pre_uninstall')
-post_uninstall = _HookRunner('post_uninstall')
-on_phase_success = _HookRunner('on_phase_success')
-on_phase_error = _HookRunner('on_phase_error')
+pre_uninstall = _HookRunner("pre_uninstall")
+post_uninstall = _HookRunner("post_uninstall")
+on_phase_success = _HookRunner("on_phase_success")
+on_phase_error = _HookRunner("on_phase_error")
# These are hooks in installer.py, before starting install subprocess
-on_install_start = _HookRunner('on_install_start')
-on_install_success = _HookRunner('on_install_success')
-on_install_failure = _HookRunner('on_install_failure')
-on_install_cancel = _HookRunner('on_install_cancel')
+on_install_start = _HookRunner("on_install_start")
+on_install_success = _HookRunner("on_install_success")
+on_install_failure = _HookRunner("on_install_failure")
+on_install_cancel = _HookRunner("on_install_cancel")
# Environment hooks
-post_env_write = _HookRunner('post_env_write')
+post_env_write = _HookRunner("post_env_write")
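# [editor's note] A small sketch of the lazy hook-dispatch idea above, using importlib
# and a hard-coded module list instead of scanning spack.paths.hooks_path; the module
# names are stand-ins, so the call below is simply a no-op.
import importlib

class HookRunner(object):
    _modules = None

    def __init__(self, hook_name):
        self.hook_name = hook_name

    @classmethod
    def _populate(cls):
        if cls._modules is None:
            names = ["json", "os"]  # stand-ins for the discovered hook modules
            cls._modules = [importlib.import_module(n) for n in names]

    def __call__(self, *args, **kwargs):
        self._populate()
        for module in self._modules:
            hook = getattr(module, self.hook_name, None)
            if callable(hook):
                hook(*args, **kwargs)

post_install = HookRunner("post_install")
post_install()  # nothing defines a post_install hook here, so nothing runs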
diff --git a/lib/spack/spack/hooks/licensing.py b/lib/spack/spack/hooks/licensing.py
index 2312811afe..40c399e110 100644
--- a/lib/spack/spack/hooks/licensing.py
+++ b/lib/spack/spack/hooks/licensing.py
@@ -40,24 +40,26 @@ def set_up_license(pkg):
write_license_file(pkg, license_path)
# Open up file in user's favorite $EDITOR for editing
editor_exe = None
- if 'VISUAL' in os.environ:
- editor_exe = Executable(os.environ['VISUAL'])
+ if "VISUAL" in os.environ:
+ editor_exe = Executable(os.environ["VISUAL"])
# gvim runs in the background by default so we force it to run
# in the foreground to make sure the license file is updated
# before we try to install
- if 'gvim' in os.environ['VISUAL']:
- editor_exe.add_default_arg('-f')
- elif 'EDITOR' in os.environ:
- editor_exe = Executable(os.environ['EDITOR'])
+ if "gvim" in os.environ["VISUAL"]:
+ editor_exe.add_default_arg("-f")
+ elif "EDITOR" in os.environ:
+ editor_exe = Executable(os.environ["EDITOR"])
else:
- editor_exe = which('vim', 'vi', 'emacs', 'nano')
+ editor_exe = which("vim", "vi", "emacs", "nano")
if editor_exe is None:
raise EnvironmentError(
- 'No text editor found! Please set the VISUAL and/or EDITOR'
- ' environment variable(s) to your preferred text editor.')
+ "No text editor found! Please set the VISUAL and/or EDITOR"
+ " environment variable(s) to your preferred text editor."
+ )
def editor_wrapper(exe, args):
editor_exe(license_path)
+

editor(license_path, _exec_func=editor_wrapper)
else:
# Use already existing license file
@@ -65,15 +67,18 @@ def set_up_license(pkg):
# If not a file, what about an environment variable?
elif pkg.license_vars:
- tty.warn("A license is required to use %s. Please set %s to the "
- "full pathname to the license file, or port@host if you"
- " store your license keys on a dedicated license server" %
- (pkg.name, ' or '.join(pkg.license_vars)))
+ tty.warn(
+ "A license is required to use %s. Please set %s to the "
+ "full pathname to the license file, or port@host if you"
+ " store your license keys on a dedicated license server"
+ % (pkg.name, " or ".join(pkg.license_vars))
+ )
# If not a file or variable, suggest a website for further info
elif pkg.license_url:
- tty.warn("A license is required to use %s. See %s for details" %
- (pkg.name, pkg.license_url))
+ tty.warn(
+ "A license is required to use %s. See %s for details" % (pkg.name, pkg.license_url)
+ )
# If all else fails, you're on your own
else:
@@ -110,7 +115,9 @@ def write_license_file(pkg, license_path):
file UNCHANGED. The system may be configured if:
- A license file is installed in a default location.
-""".format(pkg.name)
+""".format(
+ pkg.name
+ )
if envvars:
txt += """\
@@ -118,7 +125,9 @@ def write_license_file(pkg, license_path):
a module file:
{0}
-""".format(envvars)
+""".format(
+ envvars
+ )
txt += """\
* Otherwise, depending on the license you have, enter AT THE BEGINNING of
@@ -131,14 +140,18 @@ def write_license_file(pkg, license_path):
this Spack-global file (relative to the installation prefix).
{0}
-""".format(linktargets)
+""".format(
+ linktargets
+ )
if url:
txt += """\
* For further information on licensing, see:
{0}
-""".format(url)
+""".format(
+ url
+ )
txt += """\
Recap:
@@ -150,7 +163,7 @@ def write_license_file(pkg, license_path):
os.makedirs(os.path.dirname(license_path))
# Output
- with open(license_path, 'w') as f:
+ with open(license_path, "w") as f:
for line in txt.splitlines():
f.write("{0}{1}\n".format(pkg.license_comment, line))
f.close()
@@ -181,5 +194,4 @@ def symlink_license(pkg):
if os.path.exists(target):
symlink(target, link_name)
- tty.msg("Added local symlink %s to global license file" %
- link_name)
+ tty.msg("Added local symlink %s to global license file" % link_name)
diff --git a/lib/spack/spack/hooks/module_file_generation.py b/lib/spack/spack/hooks/module_file_generation.py
index aa2d18d9ae..fa21443a82 100644
--- a/lib/spack/spack/hooks/module_file_generation.py
+++ b/lib/spack/spack/hooks/module_file_generation.py
@@ -12,18 +12,18 @@ import spack.modules.common
def _for_each_enabled(spec, method_name):
"""Calls a method for each enabled module"""
- set_names = set(spack.config.get('modules', {}).keys())
+ set_names = set(spack.config.get("modules", {}).keys())
# If we have old-style modules enabled, we put those in the default set
- old_default_enabled = spack.config.get('modules:enable')
+ old_default_enabled = spack.config.get("modules:enable")
if old_default_enabled:
- set_names.add('default')
+ set_names.add("default")
for name in set_names:
- enabled = spack.config.get('modules:%s:enable' % name)
- if name == 'default':
+ enabled = spack.config.get("modules:%s:enable" % name)
+ if name == "default":
# combine enabled modules from default and old format
- enabled = spack.config.merge_yaml(old_default_enabled, enabled)
+ enabled = spack.config.merge_yaml(old_default_enabled, enabled)
if not enabled:
- tty.debug('NO MODULE WRITTEN: list of enabled module files is empty')
+ tty.debug("NO MODULE WRITTEN: list of enabled module files is empty")
continue
for type in enabled:
@@ -31,26 +31,27 @@ def _for_each_enabled(spec, method_name):
try:
getattr(generator, method_name)()
except RuntimeError as e:
- msg = 'cannot perform the requested {0} operation on module files'
- msg += ' [{1}]'
+ msg = "cannot perform the requested {0} operation on module files"
+ msg += " [{1}]"
tty.warn(msg.format(method_name, str(e)))
def post_install(spec):
import spack.environment as ev # break import cycle
+
if ev.active_environment():
# If the installed through an environment, we skip post_install
# module generation and generate the modules on env_write so Spack
# can manage interactions between env views and modules
return
- _for_each_enabled(spec, 'write')
+ _for_each_enabled(spec, "write")
def post_uninstall(spec):
- _for_each_enabled(spec, 'remove')
+ _for_each_enabled(spec, "remove")
def post_env_write(env):
for spec in env.new_installs:
- _for_each_enabled(spec, 'write')
+ _for_each_enabled(spec, "write")
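# [editor's note] A plain-dict sketch of how the enabled module sets above are combined;
# the config layout and values are illustrative, and the real code goes through
# spack.config and merge_yaml rather than simple list concatenation.
config = {"modules": {"default": {"enable": ["tcl"]}, "site": {"enable": ["lmod"]}}}
old_default_enabled = ["tcl", "dotkit"]   # hypothetical legacy 'modules:enable' entry

set_names = set(config["modules"].keys())
if old_default_enabled:
    set_names.add("default")

for name in sorted(set_names):
    enabled = config["modules"].get(name, {}).get("enable", [])
    if name == "default":
        enabled = list(dict.fromkeys(old_default_enabled + enabled))  # merge and dedupe
    print(name, "->", enabled or "no module files written")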
diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py
index b25ca796a1..8731ef0c07 100644
--- a/lib/spack/spack/hooks/sbang.py
+++ b/lib/spack/spack/hooks/sbang.py
@@ -23,21 +23,21 @@ import spack.store
#: OS-imposed character limit for shebang line: 127 for Linux; 511 for Mac.
#: Different Linux distributions have different limits, but 127 is the
#: smallest among all modern versions.
-if sys.platform == 'darwin':
+if sys.platform == "darwin":
system_shebang_limit = 511
else:
system_shebang_limit = 127
#: Groupdb does not exist on Windows, prevent imports
#: on supported systems
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
if not is_windows:
import grp
#: Spack itself also limits the shebang line to at most 4KB, which should be plenty.
spack_shebang_limit = 4096
-interpreter_regex = re.compile(b'#![ \t]*?([^ \t\0\n]+)')
+interpreter_regex = re.compile(b"#![ \t]*?([^ \t\0\n]+)")
def sbang_install_path():
@@ -46,8 +46,10 @@ def sbang_install_path():
install_path = os.path.join(sbang_root, "bin", "sbang")
path_length = len(install_path)
if path_length > system_shebang_limit:
- msg = ('Install tree root is too long. Spack cannot patch shebang lines'
- ' when script path length ({0}) exceeds limit ({1}).\n {2}')
+ msg = (
+ "Install tree root is too long. Spack cannot patch shebang lines"
+ " when script path length ({0}) exceeds limit ({1}).\n {2}"
+ )
msg = msg.format(path_length, system_shebang_limit, install_path)
raise SbangPathError(msg)
return install_path
@@ -62,7 +64,7 @@ def sbang_shebang_line():
This should be the only place in Spack that knows about what
interpreter we use for ``sbang``.
"""
- return '#!/bin/sh %s' % sbang_install_path()
+ return "#!/bin/sh %s" % sbang_install_path()
def get_interpreter(binary_string):
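# [editor's note] A toy check mirroring the length guard above; the path is a hard-coded
# example rather than the configured install tree root.
import sys

system_shebang_limit = 511 if sys.platform == "darwin" else 127
install_path = "/opt/spack/bin/sbang"

if len(install_path) > system_shebang_limit:
    raise ValueError("install tree root too long for a shebang line")
print("#!/bin/sh %s" % install_path)  # the replacement shebang scripts will receive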
@@ -79,10 +81,10 @@ def filter_shebang(path):
file must occur before ``spack_shebang_limit`` bytes. If not, the file is not
patched.
"""
- with open(path, 'rb') as original:
+ with open(path, "rb") as original:
# If there is no shebang, we shouldn't replace anything.
old_shebang_line = original.read(2)
- if old_shebang_line != b'#!':
+ if old_shebang_line != b"#!":
return False
# Stop reading after b'\n'. Note that old_shebang_line includes the first b'\n'.
@@ -97,14 +99,11 @@ def filter_shebang(path):
# only the arguments are truncated, but note that for PHP we need the full line
# since we have to append `?>` to it. Since our shebang limit is already very
# generous, it's unlikely to happen, and it should be fine to ignore.
- if (
- len(old_shebang_line) == spack_shebang_limit and
- old_shebang_line[-1] != b'\n'
- ):
+ if len(old_shebang_line) == spack_shebang_limit and old_shebang_line[-1] != b"\n":
return False
# This line will be prepended to file
- new_sbang_line = (sbang_shebang_line() + '\n').encode('utf-8')
+ new_sbang_line = (sbang_shebang_line() + "\n").encode("utf-8")
# Skip files that are already using sbang.
if old_shebang_line == new_sbang_line:
@@ -124,7 +123,7 @@ def filter_shebang(path):
os.chmod(path, saved_mode | stat.S_IWUSR)
# No need to delete since we'll move it and overwrite the original.
- patched = tempfile.NamedTemporaryFile('wb', delete=False)
+ patched = tempfile.NamedTemporaryFile("wb", delete=False)
patched.write(new_sbang_line)
# Note that in Python this does not go out of bounds even if interpreter is a
@@ -133,15 +132,15 @@ def filter_shebang(path):
# been a \0 byte between all characters of lua, node, php; meaning that it would
# lead to truncation of the interpreter. So we don't have to worry about weird
# encodings here, and just looking at bytes is justified.
- if interpreter[-4:] == b'/lua' or interpreter[-7:] == b'/luajit':
+ if interpreter[-4:] == b"/lua" or interpreter[-7:] == b"/luajit":
# Use --! instead of #! on second line for lua.
- patched.write(b'--!' + old_shebang_line[2:])
- elif interpreter[-5:] == b'/node':
+ patched.write(b"--!" + old_shebang_line[2:])
+ elif interpreter[-5:] == b"/node":
# Use //! instead of #! on second line for node.js.
- patched.write(b'//!' + old_shebang_line[2:])
- elif interpreter[-4:] == b'/php':
+ patched.write(b"//!" + old_shebang_line[2:])
+ elif interpreter[-4:] == b"/php":
# Use <?php #!... ?> instead of #!... on second line for php.
- patched.write(b'<?php ' + old_shebang_line + b' ?>')
+ patched.write(b"<?php " + old_shebang_line + b" ?>")
else:
patched.write(old_shebang_line)
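# [editor's note] A text-only sketch of the rewrite performed above for a script whose
# original shebang is too long; paths are made up. The original line is kept on line
# two so the sbang shim can re-launch the real interpreter, and interpreters that do
# not treat '#' as a comment get a language-appropriate marker instead.
too_long = "#!/very/long/prefix/bin/python\n"           # assume this exceeds the limit
sbang_line = "#!/bin/sh /opt/spack/bin/sbang\n"

patched = sbang_line + too_long                         # generic case
patched_lua = sbang_line + "--!" + too_long[2:]         # lua comment marker
patched_node = sbang_line + "//!" + too_long[2:]        # node.js comment marker
print(patched, patched_lua, patched_node, sep="")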
@@ -172,8 +171,7 @@ def filter_shebangs_in_directory(directory, filenames=None):
except (IOError, OSError):
continue
- if (stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or
- not st.st_mode & is_exe):
+ if stat.S_ISLNK(st.st_mode) or stat.S_ISDIR(st.st_mode) or not st.st_mode & is_exe:
continue
# test the file for a long shebang, and filter
@@ -190,8 +188,7 @@ def install_sbang():
"""
# copy in a new version of sbang if it differs from what's in spack
sbang_path = sbang_install_path()
- if os.path.exists(sbang_path) and filecmp.cmp(
- spack.paths.sbang_script, sbang_path):
+ if os.path.exists(sbang_path) and filecmp.cmp(spack.paths.sbang_script, sbang_path):
return
# make $install_tree/bin
@@ -200,22 +197,16 @@ def install_sbang():
# get permissions for bin dir from configuration files
group_name = spack.package_prefs.get_package_group(spack.spec.Spec("all"))
- config_mode = spack.package_prefs.get_package_dir_permissions(
- spack.spec.Spec("all")
- )
+ config_mode = spack.package_prefs.get_package_dir_permissions(spack.spec.Spec("all"))
if group_name:
- os.chmod(sbang_bin_dir, config_mode) # Use package directory permissions
+ os.chmod(sbang_bin_dir, config_mode) # Use package directory permissions
else:
fs.set_install_permissions(sbang_bin_dir)
# set group on sbang_bin_dir if not already set (only if set in configuration)
if group_name and grp.getgrgid(os.stat(sbang_bin_dir).st_gid).gr_name != group_name:
- os.chown(
- sbang_bin_dir,
- os.stat(sbang_bin_dir).st_uid,
- grp.getgrnam(group_name).gr_gid
- )
+ os.chown(sbang_bin_dir, os.stat(sbang_bin_dir).st_uid, grp.getgrnam(group_name).gr_gid)
# copy over the fresh copy of `sbang`
sbang_tmp_path = os.path.join(
@@ -227,11 +218,7 @@ def install_sbang():
# set permissions on `sbang` (including group if set in configuration)
os.chmod(sbang_tmp_path, config_mode)
if group_name:
- os.chown(
- sbang_tmp_path,
- os.stat(sbang_tmp_path).st_uid,
- grp.getgrnam(group_name).gr_gid
- )
+ os.chown(sbang_tmp_path, os.stat(sbang_tmp_path).st_uid, grp.getgrnam(group_name).gr_gid)
# Finally, move the new `sbang` into place atomically
os.rename(sbang_tmp_path, sbang_path)
@@ -243,7 +230,7 @@ def post_install(spec):
shebang limit.
"""
if spec.external:
- tty.debug('SKIP: shebang filtering [external package]')
+ tty.debug("SKIP: shebang filtering [external package]")
return
install_sbang()
diff --git a/lib/spack/spack/install_test.py b/lib/spack/spack/install_test.py
index 71561d28a6..861db1a556 100644
--- a/lib/spack/spack/install_test.py
+++ b/lib/spack/spack/install_test.py
@@ -19,8 +19,8 @@ import spack.util.prefix
import spack.util.spack_json as sjson
from spack.spec import Spec
-test_suite_filename = 'test_suite.lock'
-results_filename = 'results.txt'
+test_suite_filename = "test_suite.lock"
+results_filename = "results.txt"
def get_escaped_text_output(filename):
@@ -32,18 +32,18 @@ def get_escaped_text_output(filename):
Returns:
list: escaped text lines read from the file
"""
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
# Ensure special characters are escaped as needed
expected = f.read()
# Split the lines to make it easier to debug failures when there is
# a lot of output
- return [re.escape(ln) for ln in expected.split('\n')]
+ return [re.escape(ln) for ln in expected.split("\n")]
def get_test_stage_dir():
return spack.util.path.canonicalize_path(
- spack.config.get('config:test_stage', spack.paths.default_test_path)
+ spack.config.get("config:test_stage", spack.paths.default_test_path)
)
@@ -54,8 +54,7 @@ def get_all_test_suites():
def valid_stage(d):
dirpath = os.path.join(stage_root, d)
- return (os.path.isdir(dirpath) and
- test_suite_filename in os.listdir(dirpath))
+ return os.path.isdir(dirpath) and test_suite_filename in os.listdir(dirpath)
candidates = [
os.path.join(stage_root, d, test_suite_filename)
@@ -70,7 +69,7 @@ def get_all_test_suites():
def get_named_test_suites(name):
"""Return a list of the names of any test suites with that name."""
if not name:
- raise TestSuiteNameError('Test suite name is required.')
+ raise TestSuiteNameError("Test suite name is required.")
test_suites = get_all_test_suites()
return [ts for ts in test_suites if ts.name == name]
@@ -79,9 +78,7 @@ def get_named_test_suites(name):
def get_test_suite(name):
names = get_named_test_suites(name)
if len(names) > 1:
- raise TestSuiteNameError(
- 'Too many suites named "{0}". May shadow hash.'.format(name)
- )
+ raise TestSuiteNameError('Too many suites named "{0}". May shadow hash.'.format(name))
if not names:
return None
@@ -90,18 +87,23 @@ def get_test_suite(name):
def write_test_suite_file(suite):
"""Write the test suite to its lock file."""
- with open(suite.stage.join(test_suite_filename), 'w') as f:
+ with open(suite.stage.join(test_suite_filename), "w") as f:
sjson.dump(suite.to_dict(), stream=f)
def write_test_summary(num_failed, num_skipped, num_untested, num_specs):
- failed = "{0} failed, ".format(num_failed) if num_failed else ''
- skipped = "{0} skipped, ".format(num_skipped) if num_skipped else ''
- no_tests = "{0} no-tests, ".format(num_untested) if num_untested else ''
+ failed = "{0} failed, ".format(num_failed) if num_failed else ""
+ skipped = "{0} skipped, ".format(num_skipped) if num_skipped else ""
+ no_tests = "{0} no-tests, ".format(num_untested) if num_untested else ""
num_passed = num_specs - num_failed - num_untested - num_skipped
- print("{:=^80}".format(" {0}{1}{2}{3} passed of {4} specs "
- .format(failed, no_tests, skipped, num_passed, num_specs)))
+ print(
+ "{:=^80}".format(
+ " {0}{1}{2}{3} passed of {4} specs ".format(
+ failed, no_tests, skipped, num_passed, num_specs
+ )
+ )
+ )
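# [editor's note] The "{:=^80}" format above centers the summary inside an 80-column
# banner padded with '='; the counts below are made up.
print("{:=^80}".format(" 1 failed, 3 passed of 4 specs "))
# -> '====...==== 1 failed, 3 passed of 4 specs ====...===='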
class TestSuite(object):
@@ -125,28 +127,29 @@ class TestSuite(object):
def content_hash(self):
if not self._hash:
json_text = sjson.dump(self.to_dict())
- sha = hashlib.sha1(json_text.encode('utf-8'))
+ sha = hashlib.sha1(json_text.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower()
if sys.version_info[0] >= 3:
- b32_hash = b32_hash.decode('utf-8')
+ b32_hash = b32_hash.decode("utf-8")
self._hash = b32_hash
return self._hash
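# [editor's note] A self-contained version of the content-hash recipe above (sha1 of the
# suite's JSON, base32-encoded and lower-cased); the JSON payload is a stand-in.
import base64
import hashlib

json_text = '{"specs": []}'
sha = hashlib.sha1(json_text.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower().decode("utf-8")
print(b32_hash)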
def __call__(self, *args, **kwargs):
self.write_reproducibility_data()
- remove_directory = kwargs.get('remove_directory', True)
- dirty = kwargs.get('dirty', False)
- fail_first = kwargs.get('fail_first', False)
- externals = kwargs.get('externals', False)
+ remove_directory = kwargs.get("remove_directory", True)
+ dirty = kwargs.get("dirty", False)
+ fail_first = kwargs.get("fail_first", False)
+ externals = kwargs.get("externals", False)
skipped, untested = 0, 0
for spec in self.specs:
try:
if spec.package.test_suite:
raise TestSuiteSpecError(
- "Package {0} cannot be run in two test suites at once"
- .format(spec.package.name)
+ "Package {0} cannot be run in two test suites at once".format(
+ spec.package.name
+ )
)
# Set up the test suite to know which test is running
@@ -171,14 +174,14 @@ class TestSuite(object):
# functions were called
tested = os.path.exists(self.tested_file_for_spec(spec))
if tested:
- status = 'PASSED'
+ status = "PASSED"
else:
self.ensure_stage()
if spec.external and not externals:
- status = 'SKIPPED'
+ status = "SKIPPED"
skipped += 1
else:
- status = 'NO-TESTS'
+ status = "NO-TESTS"
untested += 1
self.write_test_result(spec, status)
@@ -187,11 +190,10 @@ class TestSuite(object):
if isinstance(exc, (SyntaxError, TestSuiteSpecError)):
# Create the test log file and report the error.
self.ensure_stage()
- msg = 'Testing package {0}\n{1}'\
- .format(self.test_pkg_id(spec), str(exc))
+ msg = "Testing package {0}\n{1}".format(self.test_pkg_id(spec), str(exc))
_add_msg_to_file(self.log_file_for_spec(spec), msg)
- self.write_test_result(spec, 'FAILED')
+ self.write_test_result(spec, "FAILED")
if fail_first:
break
finally:
@@ -210,8 +212,7 @@ class TestSuite(object):
@property
def stage(self):
- return spack.util.prefix.Prefix(
- os.path.join(get_test_stage_dir(), self.content_hash))
+ return spack.util.prefix.Prefix(os.path.join(get_test_stage_dir(), self.content_hash))
@property
def results_file(self):
@@ -227,11 +228,11 @@ class TestSuite(object):
Returns:
(str): the install test package identifier
"""
- return spec.format('{name}-{version}-{hash:7}')
+ return spec.format("{name}-{version}-{hash:7}")
@classmethod
def test_log_name(cls, spec):
- return '%s-test-out.txt' % cls.test_pkg_id(spec)
+ return "%s-test-out.txt" % cls.test_pkg_id(spec)
def log_file_for_spec(self, spec):
return self.stage.join(self.test_log_name(spec))
@@ -241,7 +242,7 @@ class TestSuite(object):
@classmethod
def tested_file_name(cls, spec):
- return '%s-tested.txt' % cls.test_pkg_id(spec)
+ return "%s-tested.txt" % cls.test_pkg_id(spec)
def tested_file_for_spec(self, spec):
return self.stage.join(self.tested_file_name(spec))
@@ -249,9 +250,7 @@ class TestSuite(object):
@property
def current_test_cache_dir(self):
if not (self.current_test_spec and self.current_base_spec):
- raise TestSuiteSpecError(
- "Unknown test cache directory: no specs being tested"
- )
+ raise TestSuiteSpecError("Unknown test cache directory: no specs being tested")
test_spec = self.current_test_spec
base_spec = self.current_base_spec
@@ -260,9 +259,7 @@ class TestSuite(object):
@property
def current_test_data_dir(self):
if not (self.current_test_spec and self.current_base_spec):
- raise TestSuiteSpecError(
- "Unknown test data directory: no specs being tested"
- )
+ raise TestSuiteSpecError("Unknown test data directory: no specs being tested")
test_spec = self.current_test_spec
base_spec = self.current_base_spec
@@ -294,21 +291,21 @@ class TestSuite(object):
def to_dict(self):
specs = [s.to_dict() for s in self.specs]
- d = {'specs': specs}
+ d = {"specs": specs}
if self.alias:
- d['alias'] = self.alias
+ d["alias"] = self.alias
return d
@staticmethod
def from_dict(d):
- specs = [Spec.from_dict(spec_dict) for spec_dict in d['specs']]
- alias = d.get('alias', None)
+ specs = [Spec.from_dict(spec_dict) for spec_dict in d["specs"]]
+ alias = d.get("alias", None)
return TestSuite(specs, alias)
@staticmethod
def from_file(filename):
try:
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
data = sjson.load(f)
test_suite = TestSuite.from_dict(data)
content_hash = os.path.basename(os.path.dirname(filename))
@@ -328,24 +325,26 @@ def _add_msg_to_file(filename, msg):
filename (str): path to the file
msg (str): message to be appended to the file
"""
- with open(filename, 'a+') as f:
- f.write('{0}\n'.format(msg))
+ with open(filename, "a+") as f:
+ f.write("{0}\n".format(msg))
class TestFailure(spack.error.SpackError):
"""Raised when package tests have failed for an installation."""
+
def __init__(self, failures):
# Failures are all exceptions
msg = "%d tests failed.\n" % len(failures)
for failure, message in failures:
- msg += '\n\n%s\n' % str(failure)
- msg += '\n%s\n' % message
+ msg += "\n\n%s\n" % str(failure)
+ msg += "\n%s\n" % message
super(TestFailure, self).__init__(msg)
class TestSuiteFailure(spack.error.SpackError):
"""Raised when one or more tests in a suite have failed."""
+
def __init__(self, num_failures):
msg = "%d test(s) in the suite failed.\n" % num_failures
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index d4acc1dd16..e327fccc65 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -64,24 +64,24 @@ from spack.util.timer import Timer
_counter = itertools.count(0)
#: Build status indicating task has been added.
-STATUS_ADDED = 'queued'
+STATUS_ADDED = "queued"
#: Build status indicating the spec failed to install
-STATUS_FAILED = 'failed'
+STATUS_FAILED = "failed"
#: Build status indicating the spec is being installed (possibly by another
#: process)
-STATUS_INSTALLING = 'installing'
+STATUS_INSTALLING = "installing"
#: Build status indicating the spec was successfully installed
-STATUS_INSTALLED = 'installed'
+STATUS_INSTALLED = "installed"
#: Build status indicating the task has been popped from the queue
-STATUS_DEQUEUED = 'dequeued'
+STATUS_DEQUEUED = "dequeued"
#: Build status indicating task has been removed (to maintain priority
#: queue invariants).
-STATUS_REMOVED = 'removed'
+STATUS_REMOVED = "removed"
class InstallAction(object):
@@ -135,13 +135,15 @@ def _handle_external_and_upstream(pkg, explicit):
# consists in module file generation and registration in the DB.
if pkg.spec.external:
_process_external_package(pkg, explicit)
- _print_installed_pkg('{0} (external {1})'
- .format(pkg.prefix, package_id(pkg)))
+ _print_installed_pkg("{0} (external {1})".format(pkg.prefix, package_id(pkg)))
return True
if pkg.spec.installed_upstream:
- tty.verbose('{0} is installed in an upstream Spack instance at {1}'
- .format(package_id(pkg), pkg.spec.prefix))
+ tty.verbose(
+ "{0} is installed in an upstream Spack instance at {1}".format(
+ package_id(pkg), pkg.spec.prefix
+ )
+ )
_print_installed_pkg(pkg.prefix)
# This will result in skipping all post-install hooks. In the case
@@ -153,32 +155,31 @@ def _handle_external_and_upstream(pkg, explicit):
def _do_fake_install(pkg):
- """Make a fake install directory with fake executables, headers, and libraries.
- """
+ """Make a fake install directory with fake executables, headers, and libraries."""
command = pkg.name
header = pkg.name
library = pkg.name
# Avoid double 'lib' for packages whose names already start with lib
- if not pkg.name.startswith('lib'):
- library = 'lib' + library
+ if not pkg.name.startswith("lib"):
+ library = "lib" + library
- dso_suffix = '.dylib' if sys.platform == 'darwin' else '.so'
+ dso_suffix = ".dylib" if sys.platform == "darwin" else ".so"
# Install fake command
fs.mkdirp(pkg.prefix.bin)
fs.touch(os.path.join(pkg.prefix.bin, command))
- if sys.platform != 'win32':
- chmod = which('chmod')
- chmod('+x', os.path.join(pkg.prefix.bin, command))
+ if sys.platform != "win32":
+ chmod = which("chmod")
+ chmod("+x", os.path.join(pkg.prefix.bin, command))
# Install fake header file
fs.mkdirp(pkg.prefix.include)
- fs.touch(os.path.join(pkg.prefix.include, header + '.h'))
+ fs.touch(os.path.join(pkg.prefix.include, header + ".h"))
# Install fake shared and static libraries
fs.mkdirp(pkg.prefix.lib)
- for suffix in [dso_suffix, '.a']:
+ for suffix in [dso_suffix, ".a"]:
fs.touch(os.path.join(pkg.prefix.lib, library + suffix))
# Install fake man page
@@ -208,9 +209,8 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
package is the bootstrap compiler (``True``) or one of its dependencies
(``False``). The list will be empty if there are no compilers.
"""
- tty.debug('Bootstrapping {0} compiler'.format(compiler))
- compilers = spack.compilers.compilers_for_spec(
- compiler, arch_spec=architecture)
+ tty.debug("Bootstrapping {0} compiler".format(compiler))
+ compilers = spack.compilers.compilers_for_spec(compiler, arch_spec=architecture)
if compilers:
return []
@@ -218,20 +218,16 @@ def _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs):
# Set the architecture for the compiler package in a way that allows the
# concretizer to back off if needed for the older bootstrapping compiler
- dep.constrain('platform=%s' % str(architecture.platform))
- dep.constrain('os=%s' % str(architecture.os))
- dep.constrain('target=%s:' %
- architecture.target.microarchitecture.family.name)
+ dep.constrain("platform=%s" % str(architecture.platform))
+ dep.constrain("os=%s" % str(architecture.os))
+ dep.constrain("target=%s:" % architecture.target.microarchitecture.family.name)
# concrete CompilerSpec has less info than concrete Spec
# concretize as Spec to add that information
dep.concretize()
# mark compiler as depended-on by the packages that use it
for pkg in pkgs:
- dep._dependents.add(
- spack.spec.DependencySpec(pkg.spec, dep, ('build',))
- )
- packages = [(s.package, False) for
- s in dep.traverse(order='post', root=False)]
+ dep._dependents.add(spack.spec.DependencySpec(pkg.spec, dep, ("build",)))
+ packages = [(s.package, False) for s in dep.traverse(order="post", root=False)]
packages.append((dep.package, True))
return packages
@@ -257,7 +253,7 @@ def _hms(seconds):
parts.append("%dm" % m)
if s:
parts.append("%.2fs" % s)
- return ' '.join(parts)
+ return " ".join(parts)
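# [editor's note] A standalone copy of the hours/minutes/seconds formatting idea above,
# shown end to end.
def hms(seconds):
    m, s = divmod(seconds, 60)
    h, m = divmod(m, 60)
    parts = []
    if h:
        parts.append("%dh" % h)
    if m:
        parts.append("%dm" % m)
    if s:
        parts.append("%.2fs" % s)
    return " ".join(parts)

print(hms(3723.5))  # -> '1h 2m 3.50s'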
def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
@@ -276,18 +272,17 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
bool: ``True`` if the package was extract from binary cache,
``False`` otherwise
"""
- installed_from_cache = _try_install_from_binary_cache(
- pkg, explicit, unsigned=unsigned)
+ installed_from_cache = _try_install_from_binary_cache(pkg, explicit, unsigned=unsigned)
pkg_id = package_id(pkg)
if not installed_from_cache:
- pre = 'No binary for {0} found'.format(pkg_id)
+ pre = "No binary for {0} found".format(pkg_id)
if cache_only:
- tty.die('{0} when cache-only specified'.format(pre))
+ tty.die("{0} when cache-only specified".format(pre))
- tty.msg('{0}: installing from source'.format(pre))
+ tty.msg("{0}: installing from source".format(pre))
return False
- tty.debug('Successfully extracted {0} from binary cache'.format(pkg_id))
+ tty.debug("Successfully extracted {0} from binary cache".format(pkg_id))
_print_installed_pkg(pkg.spec.prefix)
spack.hooks.post_install(pkg.spec)
return True
@@ -300,7 +295,7 @@ def _print_installed_pkg(message):
Args:
message (str): message to be output
"""
- print(colorize('@*g{[+]} ') + message)
+ print(colorize("@*g{[+]} ") + message)
def _process_external_package(pkg, explicit):
@@ -313,25 +308,21 @@ def _process_external_package(pkg, explicit):
``False`` if it was pulled in as a dependency of an explicit
package.
"""
- assert pkg.spec.external, \
- 'Expected to post-install/register an external package.'
+ assert pkg.spec.external, "Expected to post-install/register an external package."
- pre = '{s.name}@{s.version} :'.format(s=pkg.spec)
+ pre = "{s.name}@{s.version} :".format(s=pkg.spec)
spec = pkg.spec
if spec.external_modules:
- tty.msg('{0} has external module in {1}'
- .format(pre, spec.external_modules))
- tty.debug('{0} is actually installed in {1}'
- .format(pre, spec.external_path))
+ tty.msg("{0} has external module in {1}".format(pre, spec.external_modules))
+ tty.debug("{0} is actually installed in {1}".format(pre, spec.external_path))
else:
- tty.debug('{0} externally installed in {1}'
- .format(pre, spec.external_path))
+ tty.debug("{0} externally installed in {1}".format(pre, spec.external_path))
try:
# Check if the package was already registered in the DB.
# If this is the case, then only make explicit if required.
- tty.debug('{0} already registered in DB'.format(pre))
+ tty.debug("{0} already registered in DB".format(pre))
record = spack.store.db.get_record(spec)
if explicit and not record.explicit:
spack.store.db.update_explicit(spec, explicit)
@@ -340,16 +331,15 @@ def _process_external_package(pkg, explicit):
# If not, register it and generate the module file.
# For external packages we just need to run
# post-install hooks to generate module files.
- tty.debug('{0} generating module file'.format(pre))
+ tty.debug("{0} generating module file".format(pre))
spack.hooks.post_install(spec)
# Add to the DB
- tty.debug('{0} registering into DB'.format(pre))
+ tty.debug("{0} registering into DB".format(pre))
spack.store.db.add(spec, None, explicit=explicit)
-def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
- mirrors_for_spec=None):
+def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned, mirrors_for_spec=None):
"""
Process the binary cache tarball.
@@ -367,21 +357,21 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
else ``False``
"""
download_result = binary_distribution.download_tarball(
- binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec)
+ binary_spec, unsigned, mirrors_for_spec=mirrors_for_spec
+ )
# see #10063 : install from source if tarball doesn't exist
if download_result is None:
- tty.msg('{0} exists in binary cache but with different hash'
- .format(pkg.name))
+ tty.msg("{0} exists in binary cache but with different hash".format(pkg.name))
return False
pkg_id = package_id(pkg)
- tty.msg('Extracting {0} from binary cache'.format(pkg_id))
+ tty.msg("Extracting {0} from binary cache".format(pkg_id))
# don't print long padded paths while extracting/relocating binaries
with spack.util.path.filter_padding():
- binary_distribution.extract_tarball(binary_spec, download_result,
- allow_root=False, unsigned=unsigned,
- force=False)
+ binary_distribution.extract_tarball(
+ binary_spec, download_result, allow_root=False, unsigned=unsigned, force=False
+ )
pkg.installed_from_binary_cache = True
spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
@@ -399,14 +389,15 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
otherwise, ``False``
"""
pkg_id = package_id(pkg)
- tty.debug('Searching for binary cache of {0}'.format(pkg_id))
+ tty.debug("Searching for binary cache of {0}".format(pkg_id))
matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
if not matches:
return False
- return _process_binary_cache_tarball(pkg, pkg.spec, explicit, unsigned,
- mirrors_for_spec=matches)
+ return _process_binary_cache_tarball(
+ pkg, pkg.spec, explicit, unsigned, mirrors_for_spec=matches
+ )
def clear_failures():
@@ -429,9 +420,9 @@ def combine_phase_logs(phase_log_files, log_path):
log_path (str): the path to combine them to
"""
- with open(log_path, 'w') as log_file:
+ with open(log_path, "w") as log_file:
for phase_log_file in phase_log_files:
- with open(phase_log_file, 'r') as phase_log:
+ with open(phase_log_file, "r") as phase_log:
log_file.write(phase_log.read())
@@ -473,14 +464,11 @@ def dump_packages(spec, path):
# Create a source repo and get the pkg directory out of it.
try:
source_repo = spack.repo.Repo(source_repo_root)
- source_pkg_dir = source_repo.dirname_for_package_name(
- node.name)
+ source_pkg_dir = source_repo.dirname_for_package_name(node.name)
except spack.repo.RepoError as err:
- tty.debug('Failed to create source repo for {0}: {1}'
- .format(node.name, str(err)))
+ tty.debug("Failed to create source repo for {0}: {1}".format(node.name, str(err)))
source_pkg_dir = None
- tty.warn("Warning: Couldn't copy in provenance for {0}"
- .format(node.name))
+ tty.warn("Warning: Couldn't copy in provenance for {0}".format(node.name))
# Create a destination repository
dest_repo_root = os.path.join(path, node.namespace)
@@ -520,8 +508,8 @@ def install_msg(name, pid):
Return:
str: Colorized installing message
"""
- pre = '{0}: '.format(pid) if tty.show_pid() else ''
- return pre + colorize('@*{Installing} @*g{%s}' % name)
+ pre = "{0}: ".format(pid) if tty.show_pid() else ""
+ return pre + colorize("@*{Installing} @*g{%s}" % name)
def log(pkg):
@@ -564,15 +552,14 @@ def log(pkg):
# Finally, archive files that are specific to each package
with fs.working_dir(pkg.stage.path):
errors = six.StringIO()
- target_dir = os.path.join(
- spack.store.layout.metadata_path(pkg.spec), 'archived-files')
+ target_dir = os.path.join(spack.store.layout.metadata_path(pkg.spec), "archived-files")
for glob_expr in pkg.archive_files:
# Check that we are trying to copy things that are
# in the stage tree (not arbitrary files)
abs_expr = os.path.realpath(glob_expr)
if os.path.realpath(pkg.stage.path) not in abs_expr:
- errors.write('[OUTSIDE SOURCE PATH]: {0}\n'.format(glob_expr))
+ errors.write("[OUTSIDE SOURCE PATH]: {0}\n".format(glob_expr))
continue
# Now that we are sure that the path is within the correct
# folder, make it relative and check for matches
@@ -592,15 +579,14 @@ def log(pkg):
# Here try to be conservative, and avoid discarding
# the whole install procedure because of copying a
# single file failed
- errors.write('[FAILED TO ARCHIVE]: {0}'.format(f))
+ errors.write("[FAILED TO ARCHIVE]: {0}".format(f))
if errors.getvalue():
- error_file = os.path.join(target_dir, 'errors.txt')
+ error_file = os.path.join(target_dir, "errors.txt")
fs.mkdirp(target_dir)
- with open(error_file, 'w') as err:
+ with open(error_file, "w") as err:
err.write(errors.getvalue())
- tty.warn('Errors occurred when archiving files.\n\t'
- 'See: {0}'.format(error_file))
+ tty.warn("Errors occurred when archiving files.\n\t" "See: {0}".format(error_file))
dump_packages(pkg.spec, packages_dir)
@@ -619,8 +605,9 @@ def package_id(pkg):
derived
"""
if not pkg.spec.concrete:
- raise ValueError("Cannot provide a unique, readable id when "
- "the spec is not concretized.")
+ raise ValueError(
+ "Cannot provide a unique, readable id when " "the spec is not concretized."
+ )
return "{0}-{1}-{2}".format(pkg.name, pkg.version, pkg.spec.dag_hash())
@@ -640,14 +627,14 @@ class TermTitle(object):
self.pkg_ids.add(pkg_id)
def set(self, text):
- if not spack.config.get('config:terminal_title', False):
+ if not spack.config.get("config:terminal_title", False):
return
if not sys.stdout.isatty():
return
- status = '{0} [{1}/{2}]'.format(text, self.pkg_num, self.pkg_count)
- sys.stdout.write('\033]0;Spack: {0}\007'.format(status))
+ status = "{0} [{1}/{2}]".format(text, self.pkg_num, self.pkg_count)
+ sys.stdout.write("\033]0;Spack: {0}\007".format(status))
sys.stdout.flush()
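# [editor's note] "\033]0;...\007" is the xterm OSC sequence that sets the terminal
# title, as used above; a minimal demonstration (only meaningful on a tty that honors it).
import sys

if sys.stdout.isatty():
    sys.stdout.write("\033]0;Spack: installing zlib [1/4]\007")
    sys.stdout.flush()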
@@ -656,6 +643,7 @@ class TermStatusLine(object):
This class is used in distributed builds to inform the user that other packages are
being installed by another process.
"""
+
def __init__(self, enabled):
self.enabled = enabled
self.pkg_set = set()
@@ -670,7 +658,7 @@ class TermStatusLine(object):
self.pkg_set.add(pkg_id)
self.pkg_list.append(pkg_id)
- tty.msg(colorize('@*{Waiting for} @*g{%s}' % pkg_id))
+ tty.msg(colorize("@*{Waiting for} @*g{%s}" % pkg_id))
sys.stdout.flush()
def clear(self):
@@ -690,22 +678,22 @@ class TermStatusLine(object):
# Move the cursor to the beginning of the first "Waiting for" message and clear
# everything after it.
- sys.stdout.write('\x1b[%sF\x1b[J' % lines)
+ sys.stdout.write("\x1b[%sF\x1b[J" % lines)
sys.stdout.flush()
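# [editor's note] "\x1b[<n>F" moves the cursor to the start of the line <n> rows up and
# "\x1b[J" clears from there to the end of the screen, which is how the "Waiting for"
# lines above get erased. A tiny demonstration with made-up package names:
import sys
import time

for pkg in ("zlib", "cmake"):
    sys.stdout.write("Waiting for %s\n" % pkg)
sys.stdout.flush()
time.sleep(1)
sys.stdout.write("\x1b[2F\x1b[J")  # jump up two lines and wipe both messages
sys.stdout.flush()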
class PackageInstaller(object):
- '''
+ """
Class for managing the install process for a Spack instance based on a
bottom-up DAG approach.
This installer can coordinate concurrent batch and interactive, local
and distributed (on a shared file system) builds for the same Spack
instance.
- '''
+ """
def __init__(self, installs=[]):
- """ Initialize the installer.
+ """Initialize the installer.
Args:
installs (list): list of tuples, where each
@@ -715,8 +703,7 @@ class PackageInstaller(object):
PackageInstaller: instance
"""
# List of build requests
- self.build_requests = [BuildRequest(pkg, install_args)
- for pkg, install_args in installs]
+ self.build_requests = [BuildRequest(pkg, install_args) for pkg, install_args in installs]
# Priority queue of build tasks
self.build_pq = []
@@ -745,23 +732,20 @@ class PackageInstaller(object):
def __repr__(self):
"""Returns a formal representation of the package installer."""
- rep = '{0}('.format(self.__class__.__name__)
+ rep = "{0}(".format(self.__class__.__name__)
for attr, value in self.__dict__.items():
- rep += '{0}={1}, '.format(attr, value.__repr__())
- return '{0})'.format(rep.strip(', '))
+ rep += "{0}={1}, ".format(attr, value.__repr__())
+ return "{0})".format(rep.strip(", "))
def __str__(self):
"""Returns a printable version of the package installer."""
- requests = '#requests={0}'.format(len(self.build_requests))
- tasks = '#tasks={0}'.format(len(self.build_tasks))
- failed = 'failed ({0}) = {1}'.format(len(self.failed), self.failed)
- installed = 'installed ({0}) = {1}'.format(
- len(self.installed), self.installed)
- return '{0}: {1}; {2}; {3}; {4}'.format(
- self.pid, requests, tasks, installed, failed)
-
- def _add_bootstrap_compilers(
- self, compiler, architecture, pkgs, request, all_deps):
+ requests = "#requests={0}".format(len(self.build_requests))
+ tasks = "#tasks={0}".format(len(self.build_tasks))
+ failed = "failed ({0}) = {1}".format(len(self.failed), self.failed)
+ installed = "installed ({0}) = {1}".format(len(self.installed), self.installed)
+ return "{0}: {1}; {2}; {3}; {4}".format(self.pid, requests, tasks, installed, failed)
+
+ def _add_bootstrap_compilers(self, compiler, architecture, pkgs, request, all_deps):
"""
Add bootstrap compilers and dependencies to the build queue.
@@ -774,8 +758,7 @@ class PackageInstaller(object):
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
"""
- packages = _packages_needed_to_bootstrap_compiler(
- compiler, architecture, pkgs)
+ packages = _packages_needed_to_bootstrap_compiler(compiler, architecture, pkgs)
for (comp_pkg, is_compiler) in packages:
if package_id(comp_pkg) not in self.build_tasks:
self._add_init_task(comp_pkg, request, is_compiler, all_deps)
@@ -793,8 +776,7 @@ class PackageInstaller(object):
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
"""
- task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED,
- self.installed)
+ task = BuildTask(pkg, request, is_compiler, 0, 0, STATUS_ADDED, self.installed)
for dep_id in task.dependencies:
all_deps[dep_id].add(package_id(pkg))
@@ -828,7 +810,7 @@ class PackageInstaller(object):
Args:
request (BuildRequest): the associated install request
"""
- err = 'Cannot proceed with {0}: {1}'
+ err = "Cannot proceed with {0}: {1}"
for dep in request.traverse_dependencies():
dep_pkg = dep.package
dep_id = package_id(dep_pkg)
@@ -836,16 +818,15 @@ class PackageInstaller(object):
# Check for failure since a prefix lock is not required
if spack.store.db.prefix_failed(dep):
action = "'spack install' the dependency"
- msg = '{0} is marked as an install failure: {1}' \
- .format(dep_id, action)
+ msg = "{0} is marked as an install failure: {1}".format(dep_id, action)
raise InstallError(err.format(request.pkg_id, msg))
# Attempt to get a read lock to ensure another process does not
# uninstall the dependency while the requested spec is being
# installed
- ltype, lock = self._ensure_locked('read', dep_pkg)
+ ltype, lock = self._ensure_locked("read", dep_pkg)
if lock is None:
- msg = '{0} is write locked by another process'.format(dep_id)
+ msg = "{0} is write locked by another process".format(dep_id)
raise InstallError(err.format(request.pkg_id, msg))
# Flag external and upstream packages as being installed
@@ -857,10 +838,10 @@ class PackageInstaller(object):
# and flag as such if appropriate
rec, installed_in_db = self._check_db(dep)
if installed_in_db and (
- dep.dag_hash() not in request.overwrite or
- rec.installation_time > request.overwrite_time):
- tty.debug('Flagging {0} as installed per the database'
- .format(dep_id))
+ dep.dag_hash() not in request.overwrite
+ or rec.installation_time > request.overwrite_time
+ ):
+ tty.debug("Flagging {0} as installed per the database".format(dep_id))
self._flag_installed(dep_pkg)
else:
lock.release_read()
@@ -878,9 +859,9 @@ class PackageInstaller(object):
being checked
"""
install_args = task.request.install_args
- keep_prefix = install_args.get('keep_prefix')
- keep_stage = install_args.get('keep_stage')
- restage = install_args.get('restage')
+ keep_prefix = install_args.get("keep_prefix")
+ keep_stage = install_args.get("keep_stage")
+ restage = install_args.get("restage")
# Make sure the package is ready to be locally installed.
self._ensure_install_ready(task.pkg)
@@ -900,7 +881,8 @@ class PackageInstaller(object):
raise InstallError(
"Install prefix collision for {0}".format(task.pkg_id),
long_msg="Prefix directory {0} already used by another "
- "installed spec.".format(task.pkg.spec.prefix))
+ "installed spec.".format(task.pkg.spec.prefix),
+ )
# Make sure the installation directory is in the desired state
# for uninstalled specs.
@@ -908,15 +890,15 @@ class PackageInstaller(object):
if not keep_prefix:
task.pkg.remove_prefix()
else:
- tty.debug('{0} is partially installed'.format(task.pkg_id))
+ tty.debug("{0} is partially installed".format(task.pkg_id))
# Destroy the stage for a locally installed, non-DIYStage, package
if restage and task.pkg.stage.managed_by_spack:
task.pkg.stage.destroy()
if installed_in_db and (
- rec.spec.dag_hash() not in task.request.overwrite or
- rec.installation_time > task.request.overwrite_time
+ rec.spec.dag_hash() not in task.request.overwrite
+ or rec.installation_time > task.request.overwrite_time
):
self._update_installed(task)
@@ -955,7 +937,7 @@ class PackageInstaller(object):
lock = self.failed.get(pkg_id, None)
if lock is not None:
err = "{0} exception when removing failure tracking for {1}: {2}"
- msg = 'Removing failure mark on {0}'
+ msg = "Removing failure mark on {0}"
try:
tty.verbose(msg.format(pkg_id))
lock.release_write()
@@ -973,7 +955,7 @@ class PackageInstaller(object):
# Ensure we have a read lock to prevent others from uninstalling the
# spec during our installation.
- self._ensure_locked('read', pkg)
+ self._ensure_locked("read", pkg)
def _ensure_install_ready(self, pkg):
"""
@@ -988,15 +970,15 @@ class PackageInstaller(object):
# External packages cannot be installed locally.
if pkg.spec.external:
- raise ExternalPackageError('{0} {1}'.format(pre, 'is external'))
+ raise ExternalPackageError("{0} {1}".format(pre, "is external"))
# Upstream packages cannot be installed locally.
if pkg.spec.installed_upstream:
- raise UpstreamPackageError('{0} {1}'.format(pre, 'is upstream'))
+ raise UpstreamPackageError("{0} {1}".format(pre, "is upstream"))
# The package must have a prefix lock at this stage.
if pkg_id not in self.locks:
- raise InstallLockError('{0} {1}'.format(pre, 'not locked'))
+ raise InstallLockError("{0} {1}".format(pre, "not locked"))
def _ensure_locked(self, lock_type, pkg):
"""
@@ -1020,20 +1002,21 @@ class PackageInstaller(object):
(lock_type, lock) tuple where lock will be None if it could not
be obtained
"""
- assert lock_type in ['read', 'write'], \
- '"{0}" is not a supported package management lock type' \
- .format(lock_type)
+ assert lock_type in [
+ "read",
+ "write",
+ ], '"{0}" is not a supported package management lock type'.format(lock_type)
pkg_id = package_id(pkg)
ltype, lock = self.locks.get(pkg_id, (lock_type, None))
if lock and ltype == lock_type:
return ltype, lock
- desc = '{0} lock'.format(lock_type)
- msg = '{0} a {1} on {2} with timeout {3}'
- err = 'Failed to {0} a {1} for {2} due to {3}: {4}'
+ desc = "{0} lock".format(lock_type)
+ msg = "{0} a {1} on {2} with timeout {3}"
+ err = "Failed to {0} a {1} for {2} due to {3}: {4}"
- if lock_type == 'read':
+ if lock_type == "read":
# Wait until the other process finishes if there are no more
# build tasks with priority 0 (i.e., with no uninstalled
# dependencies).
@@ -1044,43 +1027,43 @@ class PackageInstaller(object):
try:
if lock is None:
- tty.debug(msg.format('Acquiring', desc, pkg_id, timeout))
- op = 'acquire'
+ tty.debug(msg.format("Acquiring", desc, pkg_id, timeout))
+ op = "acquire"
lock = spack.store.db.prefix_lock(pkg.spec, timeout)
if timeout != lock.default_timeout:
- tty.warn('Expected prefix lock timeout {0}, not {1}'
- .format(timeout, lock.default_timeout))
- if lock_type == 'read':
+ tty.warn(
+ "Expected prefix lock timeout {0}, not {1}".format(
+ timeout, lock.default_timeout
+ )
+ )
+ if lock_type == "read":
lock.acquire_read()
else:
lock.acquire_write()
- elif lock_type == 'read': # write -> read
+ elif lock_type == "read": # write -> read
# Only get here if the current lock is a write lock, which
# must be downgraded to be a read lock
# Retain the original lock timeout, which is in the lock's
# default_timeout setting.
- tty.debug(msg.format('Downgrading to', desc, pkg_id,
- lock.default_timeout))
- op = 'downgrade to'
+ tty.debug(msg.format("Downgrading to", desc, pkg_id, lock.default_timeout))
+ op = "downgrade to"
lock.downgrade_write_to_read()
else: # read -> write
# Only get here if the current lock is a read lock, which
# must be upgraded to be a write lock
- tty.debug(msg.format('Upgrading to', desc, pkg_id, timeout))
- op = 'upgrade to'
+ tty.debug(msg.format("Upgrading to", desc, pkg_id, timeout))
+ op = "upgrade to"
lock.upgrade_read_to_write(timeout)
- tty.debug('{0} is now {1} locked'.format(pkg_id, lock_type))
+ tty.debug("{0} is now {1} locked".format(pkg_id, lock_type))
except (lk.LockDowngradeError, lk.LockTimeoutError) as exc:
- tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__,
- str(exc)))
+ tty.debug(err.format(op, desc, pkg_id, exc.__class__.__name__, str(exc)))
return (lock_type, None)
except (Exception, KeyboardInterrupt, SystemExit) as exc:
- tty.error(err.format(op, desc, pkg_id, exc.__class__.__name__,
- str(exc)))
+ tty.error(err.format(op, desc, pkg_id, exc.__class__.__name__, str(exc)))
self._cleanup_all_tasks()
raise
@@ -1098,15 +1081,14 @@ class PackageInstaller(object):
all_deps (defaultdict(set)): dictionary of all dependencies and
associated dependents
"""
- tty.debug('Initializing the build queue for {0}'
- .format(request.pkg.name))
+ tty.debug("Initializing the build queue for {0}".format(request.pkg.name))
# Ensure we are not attempting to perform an installation when the
# user didn't want to go that far for the requested package.
try:
_check_last_phase(request.pkg)
except BadInstallPhase as err:
- tty.warn('Installation request refused: {0}'.format(str(err)))
+ tty.warn("Installation request refused: {0}".format(str(err)))
return
# Skip out early if the spec is not being installed locally (i.e., if
@@ -1114,16 +1096,15 @@ class PackageInstaller(object):
#
# External and upstream packages need to get flagged as installed to
# ensure proper status tracking for environment build.
- explicit = request.install_args.get('explicit', True)
+ explicit = request.install_args.get("explicit", True)
not_local = _handle_external_and_upstream(request.pkg, explicit)
if not_local:
self._flag_installed(request.pkg)
return
- install_compilers = spack.config.get(
- 'config:install_missing_compilers', False)
+ install_compilers = spack.config.get("config:install_missing_compilers", False)
- install_deps = request.install_args.get('install_deps')
+ install_deps = request.install_args.get("install_deps")
# Bootstrap compilers first
if install_deps and install_compilers:
packages_per_compiler = {}
@@ -1153,8 +1134,7 @@ class PackageInstaller(object):
for compiler, archs in packages_per_compiler.items():
for arch, packages in archs.items():
- self._add_bootstrap_compilers(
- compiler, arch, packages, request, all_deps)
+ self._add_bootstrap_compilers(compiler, arch, packages, request, all_deps)
if install_deps:
for dep in request.traverse_dependencies():
@@ -1169,7 +1149,7 @@ class PackageInstaller(object):
# of the spec.
spack.store.db.clear_failure(dep, force=False)
- install_package = request.install_args.get('install_package')
+ install_package = request.install_args.get("install_package")
if install_package and request.pkg_id not in self.build_tasks:
# Be sure to clear any previous failure
@@ -1184,7 +1164,7 @@ class PackageInstaller(object):
self._add_init_task(request.pkg, request, False, all_deps)
# Ensure if one request is to fail fast then all requests will.
- fail_fast = request.install_args.get('fail_fast')
+ fail_fast = request.install_args.get("fail_fast")
self.fail_fast = self.fail_fast or fail_fast
def _install_task(self, task):
@@ -1196,11 +1176,11 @@ class PackageInstaller(object):
task (BuildTask): the installation build task for a package"""
install_args = task.request.install_args
- cache_only = install_args.get('cache_only')
+ cache_only = install_args.get("cache_only")
explicit = task.explicit
- tests = install_args.get('tests')
- unsigned = install_args.get('unsigned')
- use_cache = install_args.get('use_cache')
+ tests = install_args.get("tests")
+ unsigned = install_args.get("unsigned")
+ use_cache = install_args.get("use_cache")
pkg, pkg_id = task.pkg, task.pkg_id
@@ -1209,15 +1189,15 @@ class PackageInstaller(object):
task.status = STATUS_INSTALLING
# Use the binary cache if requested
- if use_cache and \
- _install_from_cache(pkg, cache_only, explicit, unsigned):
+ if use_cache and _install_from_cache(pkg, cache_only, explicit, unsigned):
self._update_installed(task)
if task.compiler:
spack.compilers.add_compilers_to_config(
- spack.compilers.find_compilers([pkg.spec.prefix]))
+ spack.compilers.find_compilers([pkg.spec.prefix])
+ )
return
- pkg.run_tests = (tests is True or tests and pkg.name in tests)
+ pkg.run_tests = tests is True or tests and pkg.name in tests
# hook that allows tests to inspect the Package before installation
# see unit_test_check() docs.
@@ -1229,27 +1209,26 @@ class PackageInstaller(object):
# Create a child process to do the actual installation.
# Preserve verbosity settings across installs.
- spack.package_base.PackageBase._verbose = (
- spack.build_environment.start_build_process(
- pkg, build_process, install_args)
+ spack.package_base.PackageBase._verbose = spack.build_environment.start_build_process(
+ pkg, build_process, install_args
)
# Note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
- spack.store.db.add(pkg.spec, spack.store.layout,
- explicit=explicit)
+ spack.store.db.add(pkg.spec, spack.store.layout, explicit=explicit)
# If a compiler, ensure it is added to the configuration
if task.compiler:
spack.compilers.add_compilers_to_config(
- spack.compilers.find_compilers([pkg.spec.prefix]))
+ spack.compilers.find_compilers([pkg.spec.prefix])
+ )
except spack.build_environment.StopPhase as e:
# A StopPhase exception means that do_install was asked to
# stop early from clients, and is not an error at this point
spack.hooks.on_install_failure(task.request.pkg.spec)
- pid = '{0}: '.format(self.pid) if tty.show_pid() else ''
- tty.debug('{0}{1}'.format(pid, str(e)))
- tty.debug('Package stage directory: {0}' .format(pkg.stage.source_path))
+ pid = "{0}: ".format(self.pid) if tty.show_pid() else ""
+ tty.debug("{0}{1}".format(pid, str(e)))
+ tty.debug("Package stage directory: {0}".format(pkg.stage.source_path))
def _next_is_pri0(self):
"""
@@ -1288,21 +1267,21 @@ class PackageInstaller(object):
task (BuildTask): the installation build task for a package
"""
msg = "{0} a build task for {1} with status '{2}'"
- skip = 'Skipping requeue of task for {0}: {1}'
+ skip = "Skipping requeue of task for {0}: {1}"
# Ensure we do not (re-)queue installed or failed packages whose status
# may have been determined by a separate process.
if task.pkg_id in self.installed:
- tty.debug(skip.format(task.pkg_id, 'installed'))
+ tty.debug(skip.format(task.pkg_id, "installed"))
return
if task.pkg_id in self.failed:
- tty.debug(skip.format(task.pkg_id, 'failed'))
+ tty.debug(skip.format(task.pkg_id, "failed"))
return
# Remove any associated build task since its sequence will change
self._remove_task(task.pkg_id)
- desc = 'Queueing' if task.attempts == 0 else 'Requeueing'
+ desc = "Queueing" if task.attempts == 0 else "Requeueing"
tty.debug(msg.format(desc, task.pkg_id, task.status))
# Now add the new task to the queue with a new sequence number to
@@ -1321,18 +1300,17 @@ class PackageInstaller(object):
"""
if pkg_id in self.locks:
err = "{0} exception when releasing {1} lock for {2}: {3}"
- msg = 'Releasing {0} lock on {1}'
+ msg = "Releasing {0} lock on {1}"
ltype, lock = self.locks[pkg_id]
if lock is not None:
try:
tty.debug(msg.format(ltype, pkg_id))
- if ltype == 'read':
+ if ltype == "read":
lock.release_read()
else:
lock.release_write()
except Exception as exc:
- tty.warn(err.format(exc.__class__.__name__, ltype,
- pkg_id, str(exc)))
+ tty.warn(err.format(exc.__class__.__name__, ltype, pkg_id, str(exc)))
def _remove_task(self, pkg_id):
"""
@@ -1345,8 +1323,7 @@ class PackageInstaller(object):
pkg_id (str): identifier for the package to be removed
"""
if pkg_id in self.build_tasks:
- tty.debug('Removing build task for {0} from list'
- .format(pkg_id))
+ tty.debug("Removing build task for {0} from list".format(pkg_id))
task = self.build_tasks.pop(pkg_id)
task.status = STATUS_REMOVED
return task
@@ -1361,8 +1338,11 @@ class PackageInstaller(object):
task (BuildTask): the installation build task for a package
"""
if task.status not in [STATUS_INSTALLED, STATUS_INSTALLING]:
- tty.debug('{0} {1}'.format(install_msg(task.pkg_id, self.pid),
- 'in progress by another process'))
+ tty.debug(
+ "{0} {1}".format(
+ install_msg(task.pkg_id, self.pid), "in progress by another process"
+ )
+ )
new_task = task.next_attempt(self.installed)
new_task.status = STATUS_INSTALLING
@@ -1377,7 +1357,7 @@ class PackageInstaller(object):
pkg (spack.package_base.Package): the package to be built and installed
"""
if not os.path.exists(pkg.spec.prefix):
- tty.debug('Creating the installation directory {0}'.format(pkg.spec.prefix))
+ tty.debug("Creating the installation directory {0}".format(pkg.spec.prefix))
spack.store.layout.create_install_directory(pkg.spec)
else:
# Set the proper group for the prefix
@@ -1411,8 +1391,8 @@ class PackageInstaller(object):
exc (Exception): optional exception if associated with the failure
"""
pkg_id = task.pkg_id
- err = '' if exc is None else ': {0}'.format(str(exc))
- tty.debug('Flagging {0} as failed{1}'.format(pkg_id, err))
+ err = "" if exc is None else ": {0}".format(str(exc))
+ tty.debug("Flagging {0} as failed{1}".format(pkg_id, err))
if mark:
self.failed[pkg_id] = spack.store.db.mark_failed(task.pkg.spec)
else:
@@ -1421,16 +1401,14 @@ class PackageInstaller(object):
for dep_id in task.dependents:
if dep_id in self.build_tasks:
- tty.warn('Skipping build of {0} since {1} failed'
- .format(dep_id, pkg_id))
+ tty.warn("Skipping build of {0} since {1} failed".format(dep_id, pkg_id))
# Ensure the dependent's uninstalled dependents are
# up-to-date and their build tasks removed.
dep_task = self.build_tasks[dep_id]
self._update_failed(dep_task, mark)
self._remove_task(dep_id)
else:
- tty.debug('No build task for {0} to skip since {1} failed'
- .format(dep_id, pkg_id))
+ tty.debug("No build task for {0} to skip since {1} failed".format(dep_id, pkg_id))
def _update_installed(self, task):
"""
@@ -1460,35 +1438,35 @@ class PackageInstaller(object):
# Already determined the package has been installed
return
- tty.debug('Flagging {0} as installed'.format(pkg_id))
+ tty.debug("Flagging {0} as installed".format(pkg_id))
self.installed.add(pkg_id)
# Update affected dependents
dependent_ids = dependent_ids or get_dependent_ids(pkg.spec)
for dep_id in set(dependent_ids):
- tty.debug('Removing {0} from {1}\'s uninstalled dependencies.'
- .format(pkg_id, dep_id))
+ tty.debug("Removing {0} from {1}'s uninstalled dependencies.".format(pkg_id, dep_id))
if dep_id in self.build_tasks:
# Ensure the dependent's uninstalled dependencies are
# up-to-date. This will require requeueing the task.
dep_task = self.build_tasks[dep_id]
self._push_task(dep_task.next_attempt(self.installed))
else:
- tty.debug('{0} has no build task to update for {1}\'s success'
- .format(dep_id, pkg_id))
+ tty.debug(
+ "{0} has no build task to update for {1}'s success".format(dep_id, pkg_id)
+ )
def _init_queue(self):
"""Initialize the build queue from the list of build requests."""
all_dependencies = defaultdict(set)
- tty.debug('Initializing the build queue from the build requests')
+ tty.debug("Initializing the build queue from the build requests")
for request in self.build_requests:
self._add_tasks(request, all_dependencies)
# Add any missing dependents to ensure proper uninstalled dependency
# tracking when installing multiple specs
- tty.debug('Ensure all dependencies know all dependents across specs')
+ tty.debug("Ensure all dependencies know all dependents across specs")
for dep_id in all_dependencies:
if dep_id in self.build_tasks:
dependents = all_dependencies[dep_id]
@@ -1540,7 +1518,7 @@ class PackageInstaller(object):
pkg (spack.package_base.Package): the package to be built and installed"""
self._init_queue()
- fail_fast_err = 'Terminating after first install failure'
+ fail_fast_err = "Terminating after first install failure"
single_explicit_spec = len(self.build_requests) == 1
failed_explicits = []
@@ -1557,12 +1535,12 @@ class PackageInstaller(object):
spack.hooks.on_install_start(task.request.pkg.spec)
install_args = task.request.install_args
- keep_prefix = install_args.get('keep_prefix')
+ keep_prefix = install_args.get("keep_prefix")
pkg, pkg_id, spec = task.pkg, task.pkg_id, task.pkg.spec
term_title.next_pkg(pkg)
- term_title.set('Processing {0}'.format(pkg.name))
- tty.debug('Processing {0}: task={1}'.format(pkg_id, task))
+ term_title.set("Processing {0}".format(pkg.name))
+ tty.debug("Processing {0}: task={1}".format(pkg_id, task))
# Ensure that the current spec has NO uninstalled dependencies,
# which is assumed to be reflected directly in its priority.
#
@@ -1573,22 +1551,26 @@ class PackageInstaller(object):
# dependencies of this task.
if task.priority != 0:
term_status.clear()
- tty.error('Detected uninstalled dependencies for {0}: {1}'
- .format(pkg_id, task.uninstalled_deps))
- left = [dep_id for dep_id in task.uninstalled_deps if
- dep_id not in self.installed]
+ tty.error(
+ "Detected uninstalled dependencies for {0}: {1}".format(
+ pkg_id, task.uninstalled_deps
+ )
+ )
+ left = [dep_id for dep_id in task.uninstalled_deps if dep_id not in self.installed]
if not left:
- tty.warn('{0} does NOT actually have any uninstalled deps'
- ' left'.format(pkg_id))
- dep_str = 'dependencies' if task.priority > 1 else 'dependency'
+ tty.warn(
+ "{0} does NOT actually have any uninstalled deps" " left".format(pkg_id)
+ )
+ dep_str = "dependencies" if task.priority > 1 else "dependency"
# Hook to indicate task failure, but without an exception
spack.hooks.on_install_failure(task.request.pkg.spec)
raise InstallError(
- 'Cannot proceed with {0}: {1} uninstalled {2}: {3}'
- .format(pkg_id, task.priority, dep_str,
- ','.join(task.uninstalled_deps)))
+ "Cannot proceed with {0}: {1} uninstalled {2}: {3}".format(
+ pkg_id, task.priority, dep_str, ",".join(task.uninstalled_deps)
+ )
+ )
# Skip the installation if the spec is not being installed locally
# (i.e., if external or upstream) BUT flag it as installed since
@@ -1603,7 +1585,7 @@ class PackageInstaller(object):
# assume using a separate (failed) prefix lock file.
if pkg_id in self.failed or spack.store.db.prefix_failed(spec):
term_status.clear()
- tty.warn('{0} failed to install'.format(pkg_id))
+ tty.warn("{0} failed to install".format(pkg_id))
self._update_failed(task)
# Mark that the package failed
@@ -1620,14 +1602,14 @@ class PackageInstaller(object):
# another process is likely (un)installing the spec or has
# determined the spec has already been installed (though the
# other process may be hung).
- term_title.set('Acquiring lock for {0}'.format(pkg.name))
+ term_title.set("Acquiring lock for {0}".format(pkg.name))
term_status.add(pkg_id)
- ltype, lock = self._ensure_locked('write', pkg)
+ ltype, lock = self._ensure_locked("write", pkg)
if lock is None:
# Attempt to get a read lock instead. If this fails then
# another process has a write lock so must be (un)installing
# the spec (or that process is hung).
- ltype, lock = self._ensure_locked('read', pkg)
+ ltype, lock = self._ensure_locked("read", pkg)
# Requeue the spec if we cannot get at least a read lock so we
# can check the status presumably established by another process
@@ -1644,7 +1626,7 @@ class PackageInstaller(object):
task.request.overwrite_time = time.time()
# Determine state of installation artifacts and adjust accordingly.
- term_title.set('Preparing {0}'.format(pkg.name))
+ term_title.set("Preparing {0}".format(pkg.name))
self._prepare_for_install(task)
# Flag an already installed package
@@ -1652,7 +1634,7 @@ class PackageInstaller(object):
# Downgrade to a read lock to preclude other processes from
# uninstalling the package until we're done installing its
# dependents.
- ltype, lock = self._ensure_locked('read', pkg)
+ ltype, lock = self._ensure_locked("read", pkg)
if lock is not None:
self._update_installed(task)
_print_installed_pkg(pkg.prefix)
@@ -1660,7 +1642,8 @@ class PackageInstaller(object):
# It's an already installed compiler, add it to the config
if task.compiler:
spack.compilers.add_compilers_to_config(
- spack.compilers.find_compilers([pkg.spec.prefix]))
+ spack.compilers.find_compilers([pkg.spec.prefix])
+ )
else:
# At this point we've failed to get a write or a read
@@ -1683,14 +1666,14 @@ class PackageInstaller(object):
# Requeue the task so we can check the status presumably
# established by the other process -- failed, installed, or
# uninstalled -- on the next pass.
- if ltype == 'read':
+ if ltype == "read":
lock.release_read()
self._requeue_task(task)
continue
# Proceed with the installation since we have an exclusive write
# lock on the package.
- term_title.set('Installing {0}'.format(pkg.name))
+ term_title.set("Installing {0}".format(pkg.name))
try:
action = self._install_action(task)
@@ -1702,17 +1685,15 @@ class PackageInstaller(object):
self._update_installed(task)
# If we installed then we should keep the prefix
- stop_before_phase = getattr(pkg, 'stop_before_phase', None)
- last_phase = getattr(pkg, 'last_phase', None)
- keep_prefix = keep_prefix or \
- (stop_before_phase is None and last_phase is None)
+ stop_before_phase = getattr(pkg, "stop_before_phase", None)
+ last_phase = getattr(pkg, "last_phase", None)
+ keep_prefix = keep_prefix or (stop_before_phase is None and last_phase is None)
except KeyboardInterrupt as exc:
# The build has been terminated with a Ctrl-C so terminate
# regardless of the number of remaining specs.
- err = 'Failed to install {0} due to {1}: {2}'
- tty.error(err.format(pkg.name, exc.__class__.__name__,
- str(exc)))
+ err = "Failed to install {0} due to {1}: {2}"
+ tty.error(err.format(pkg.name, exc.__class__.__name__, str(exc)))
spack.hooks.on_install_cancel(task.request.pkg.spec)
raise
@@ -1722,19 +1703,19 @@ class PackageInstaller(object):
# Best effort installs suppress the exception and mark the
# package as a failure.
- if (not isinstance(exc, spack.error.SpackError) or
- not exc.printed):
+ if not isinstance(exc, spack.error.SpackError) or not exc.printed:
exc.printed = True
# SpackErrors can be printed by the build process or at
# lower levels -- skip printing if already printed.
# TODO: sort out this and SpackError.print_context()
- tty.error('Failed to install {0} due to {1}: {2}'
- .format(pkg.name, exc.__class__.__name__,
- str(exc)))
+ tty.error(
+ "Failed to install {0} due to {1}: {2}".format(
+ pkg.name, exc.__class__.__name__, str(exc)
+ )
+ )
# Terminate if requested to do so on the first failure.
if self.fail_fast:
- raise InstallError('{0}: {1}'
- .format(fail_fast_err, str(exc)))
+ raise InstallError("{0}: {1}".format(fail_fast_err, str(exc)))
# Terminate at this point if the single explicit spec has
# failed to install.
@@ -1765,18 +1746,21 @@ class PackageInstaller(object):
# Ensure we properly report if one or more explicit specs failed
# or were not installed when they should have been.
- missing = [request.pkg_id for request in self.build_requests if
- request.install_args.get('install_package') and
- request.pkg_id not in self.installed]
+ missing = [
+ request.pkg_id
+ for request in self.build_requests
+ if request.install_args.get("install_package") and request.pkg_id not in self.installed
+ ]
if failed_explicits or missing:
for pkg_id, err in failed_explicits:
- tty.error('{0}: {1}'.format(pkg_id, err))
+ tty.error("{0}: {1}".format(pkg_id, err))
for pkg_id in missing:
- tty.error('{0}: Package was not installed'.format(pkg_id))
+ tty.error("{0}: Package was not installed".format(pkg_id))
- raise InstallError('Installation request failed. Refer to '
- 'reported errors for failing package(s).')
+ raise InstallError(
+ "Installation request failed. Refer to " "reported errors for failing package(s)."
+ )
class BuildProcessInstaller(object):
@@ -1796,26 +1780,25 @@ class BuildProcessInstaller(object):
self.pkg = pkg
# whether to do a fake install
- self.fake = install_args.get('fake', False)
+ self.fake = install_args.get("fake", False)
# whether to install source code with the package
- self.install_source = install_args.get('install_source', False)
+ self.install_source = install_args.get("install_source", False)
# whether to keep the build stage after installation
- self.keep_stage = install_args.get('keep_stage', False)
+ self.keep_stage = install_args.get("keep_stage", False)
# whether to skip the patch phase
- self.skip_patch = install_args.get('skip_patch', False)
+ self.skip_patch = install_args.get("skip_patch", False)
# whether to enable echoing of build output initially or not
- self.verbose = install_args.get('verbose', False)
+ self.verbose = install_args.get("verbose", False)
# env before starting installation
- self.unmodified_env = install_args.get('unmodified_env', {})
+ self.unmodified_env = install_args.get("unmodified_env", {})
# env modifications by Spack
- self.env_mods = install_args.get(
- 'env_modifications', EnvironmentModifications())
+ self.env_mods = install_args.get("env_modifications", EnvironmentModifications())
# timer for build phases
self.timer = Timer()
@@ -1826,8 +1809,8 @@ class BuildProcessInstaller(object):
self.filter_fn = spack.util.path.padding_filter if filter_padding else None
# info/debug information
- pid = '{0}: '.format(os.getpid()) if tty.show_pid() else ''
- self.pre = '{0}{1}:'.format(pid, pkg.name)
+ pid = "{0}: ".format(os.getpid()) if tty.show_pid() else ""
+ self.pre = "{0}{1}:".format(pid, pkg.name)
self.pkg_id = package_id(pkg)
def run(self):
@@ -1840,11 +1823,7 @@ class BuildProcessInstaller(object):
self.pkg.do_stage()
tty.debug(
- '{0} Building {1} [{2}]' .format(
- self.pre,
- self.pkg_id,
- self.pkg.build_system_class
- )
+ "{0} Building {1} [{2}]".format(self.pre, self.pkg_id, self.pkg.build_system_class)
)
# get verbosity from do_install() parameter or saved value
@@ -1868,17 +1847,19 @@ class BuildProcessInstaller(object):
# Stop the timer and save results
self.timer.stop()
- with open(self.pkg.times_log_path, 'w') as timelog:
+ with open(self.pkg.times_log_path, "w") as timelog:
self.timer.write_json(timelog)
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self.pkg.spec)
build_time = self.timer.total - self.pkg._fetch_time
- tty.msg('{0} Successfully installed {1}'.format(self.pre, self.pkg_id),
- 'Fetch: {0}. Build: {1}. Total: {2}.'
- .format(_hms(self.pkg._fetch_time), _hms(build_time),
- _hms(self.timer.total)))
+ tty.msg(
+ "{0} Successfully installed {1}".format(self.pre, self.pkg_id),
+ "Fetch: {0}. Build: {1}. Total: {2}.".format(
+ _hms(self.pkg._fetch_time), _hms(build_time), _hms(self.timer.total)
+ ),
+ )
_print_installed_pkg(self.pkg.prefix)
# Send final status that install is successful
@@ -1893,8 +1874,8 @@ class BuildProcessInstaller(object):
if not os.path.isdir(pkg.stage.source_path):
return
- src_target = os.path.join(pkg.spec.prefix, 'share', pkg.name, 'src')
- tty.debug('{0} Copying source to {1}' .format(self.pre, src_target))
+ src_target = os.path.join(pkg.spec.prefix, "share", pkg.name, "src")
+ tty.debug("{0} Copying source to {1}".format(self.pre, src_target))
fs.install_tree(pkg.stage.source_path, src_target)
@@ -1908,20 +1889,16 @@ class BuildProcessInstaller(object):
# Save just the changes to the environment. This file can be
# safely installed, since it does not contain secret variables.
- with open(pkg.env_mods_path, 'w') as env_mods_file:
- mods = self.env_mods.shell_modifications(
- explicit=True,
- env=self.unmodified_env
- )
+ with open(pkg.env_mods_path, "w") as env_mods_file:
+ mods = self.env_mods.shell_modifications(explicit=True, env=self.unmodified_env)
env_mods_file.write(mods)
- for attr in ('configure_args', 'cmake_args'):
+ for attr in ("configure_args", "cmake_args"):
try:
configure_args = getattr(pkg, attr)()
- configure_args = ' '.join(configure_args)
+ configure_args = " ".join(configure_args)
- with open(pkg.configure_args_path, 'w') as \
- args_file:
+ with open(pkg.configure_args_path, "w") as args_file:
args_file.write(configure_args)
break
@@ -1933,14 +1910,13 @@ class BuildProcessInstaller(object):
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path, and provide the phase for logging
- for i, (phase_name, phase_attr) in enumerate(zip(
- pkg.phases, pkg._InstallPhase_phases)):
+ for i, (phase_name, phase_attr) in enumerate(
+ zip(pkg.phases, pkg._InstallPhase_phases)
+ ):
# Keep a log file for each phase
log_dir = os.path.dirname(pkg.log_path)
- log_file = "spack-build-%02d-%s-out.txt" % (
- i + 1, phase_name.lower()
- )
+ log_file = "spack-build-%02d-%s-out.txt" % (i + 1, phase_name.lower())
log_file = os.path.join(log_dir, log_file)
try:
@@ -1951,19 +1927,14 @@ class BuildProcessInstaller(object):
self.echo,
True,
env=self.unmodified_env,
- filter_fn=self.filter_fn
+ filter_fn=self.filter_fn,
)
with log_contextmanager as logger:
with logger.force_echo():
inner_debug_level = tty.debug_level()
tty.set_debug(debug_level)
- tty.msg(
- "{0} Executing phase: '{1}'" .format(
- self.pre,
- phase_name
- )
- )
+ tty.msg("{0} Executing phase: '{1}'".format(self.pre, phase_name))
tty.set_debug(inner_debug_level)
# Redirect stdout and stderr to daemon pipe
@@ -2031,11 +2002,14 @@ class OverwriteInstall(object):
self.installer._install_task(self.task)
except fs.CouldNotRestoreDirectoryBackup as e:
self.database.remove(self.task.pkg.spec)
- tty.error('Recovery of install dir of {0} failed due to '
- '{1}: {2}. The spec is now uninstalled.'.format(
- self.task.pkg.name,
- e.outer_exception.__class__.__name__,
- str(e.outer_exception)))
+ tty.error(
+ "Recovery of install dir of {0} failed due to "
+ "{1}: {2}. The spec is now uninstalled.".format(
+ self.task.pkg.name,
+ e.outer_exception.__class__.__name__,
+ str(e.outer_exception),
+ )
+ )
# Unwrap the actual installation exception.
raise e.inner_exception
@@ -2044,8 +2018,7 @@ class OverwriteInstall(object):
class BuildTask(object):
"""Class for representing the build task for a package."""
- def __init__(self, pkg, request, compiler, start, attempts, status,
- installed):
+ def __init__(self, pkg, request, compiler, start, attempts, status, installed):
"""
Instantiate a build task for a package.
@@ -2068,8 +2041,7 @@ class BuildTask(object):
self.pkg = pkg
if not self.pkg.spec.concrete:
- raise ValueError("{0} must have a concrete spec"
- .format(self.pkg.name))
+ raise ValueError("{0} must have a concrete spec".format(self.pkg.name))
# The "unique" identifier for the task's package
self.pkg_id = package_id(self.pkg)
@@ -2099,10 +2071,9 @@ class BuildTask(object):
# to support tracking of parallel, multi-spec, environment installs.
self.dependents = set(get_dependent_ids(self.pkg.spec))
- tty.debug(
- 'Pkg id {0} has the following dependents:'.format(self.pkg_id))
+ tty.debug("Pkg id {0} has the following dependents:".format(self.pkg_id))
for dep_id in self.dependents:
- tty.debug('- {0}'.format(dep_id))
+ tty.debug("- {0}".format(dep_id))
# Set of dependencies
#
@@ -2110,9 +2081,11 @@ class BuildTask(object):
# if we use traverse for transitive dependencies, then we must remove
# transitive dependents on failure.
deptypes = self.request.get_deptypes(self.pkg)
- self.dependencies = set(package_id(d.package) for d in
- self.pkg.spec.dependencies(deptype=deptypes)
- if package_id(d.package) != self.pkg_id)
+ self.dependencies = set(
+ package_id(d.package)
+ for d in self.pkg.spec.dependencies(deptype=deptypes)
+ if package_id(d.package) != self.pkg_id
+ )
# Handle bootstrapped compiler
#
@@ -2120,14 +2093,12 @@ class BuildTask(object):
# a dependency of the build task. Here we add it to self.dependencies
compiler_spec = self.pkg.spec.compiler
arch_spec = self.pkg.spec.architecture
- if not spack.compilers.compilers_for_spec(compiler_spec,
- arch_spec=arch_spec):
+ if not spack.compilers.compilers_for_spec(compiler_spec, arch_spec=arch_spec):
# The compiler is in the queue, identify it as dependency
dep = spack.compilers.pkg_spec_for_compiler(compiler_spec)
- dep.constrain('platform=%s' % str(arch_spec.platform))
- dep.constrain('os=%s' % str(arch_spec.os))
- dep.constrain('target=%s:' %
- arch_spec.target.microarchitecture.family.name)
+ dep.constrain("platform=%s" % str(arch_spec.platform))
+ dep.constrain("os=%s" % str(arch_spec.os))
+ dep.constrain("target=%s:" % arch_spec.target.microarchitecture.family.name)
dep.concretize()
dep_id = package_id(dep.package)
self.dependencies.add(dep_id)
@@ -2135,8 +2106,9 @@ class BuildTask(object):
# List of uninstalled dependencies, which is used to establish
# the priority of the build task.
#
- self.uninstalled_deps = set(pkg_id for pkg_id in self.dependencies if
- pkg_id not in installed)
+ self.uninstalled_deps = set(
+ pkg_id for pkg_id in self.dependencies if pkg_id not in installed
+ )
# Ensure key sequence-related properties are updated accordingly.
self.attempts = 0
@@ -2162,16 +2134,17 @@ class BuildTask(object):
def __repr__(self):
"""Returns a formal representation of the build task."""
- rep = '{0}('.format(self.__class__.__name__)
+ rep = "{0}(".format(self.__class__.__name__)
for attr, value in self.__dict__.items():
- rep += '{0}={1}, '.format(attr, value.__repr__())
- return '{0})'.format(rep.strip(', '))
+ rep += "{0}={1}, ".format(attr, value.__repr__())
+ return "{0})".format(rep.strip(", "))
def __str__(self):
"""Returns a printable version of the build task."""
- dependencies = '#dependencies={0}'.format(len(self.dependencies))
- return ('priority={0}, status={1}, start={2}, {3}'
- .format(self.priority, self.status, self.start, dependencies))
+ dependencies = "#dependencies={0}".format(len(self.dependencies))
+ return "priority={0}, status={1}, start={2}, {3}".format(
+ self.priority, self.status, self.start, dependencies
+ )
def _update(self):
"""Update properties associated with a new instance of a task."""
@@ -2191,8 +2164,7 @@ class BuildTask(object):
pkg_id (str): package identifier of the dependent package
"""
if pkg_id != self.pkg_id and pkg_id not in self.dependents:
- tty.debug('Adding {0} as a dependent of {1}'
- .format(pkg_id, self.pkg_id))
+ tty.debug("Adding {0} as a dependent of {1}".format(pkg_id, self.pkg_id))
self.dependents.add(pkg_id)
def flag_installed(self, installed):
@@ -2206,14 +2178,16 @@ class BuildTask(object):
now_installed = self.uninstalled_deps & set(installed)
for pkg_id in now_installed:
self.uninstalled_deps.remove(pkg_id)
- tty.debug('{0}: Removed {1} from uninstalled deps list: {2}'
- .format(self.pkg_id, pkg_id, self.uninstalled_deps))
+ tty.debug(
+ "{0}: Removed {1} from uninstalled deps list: {2}".format(
+ self.pkg_id, pkg_id, self.uninstalled_deps
+ )
+ )
@property
def explicit(self):
"""The package was explicitly requested by the user."""
- return self.pkg == self.request.pkg and \
- self.request.install_args.get('explicit', True)
+ return self.pkg == self.request.pkg and self.request.install_args.get("explicit", True)
@property
def key(self):
@@ -2251,14 +2225,13 @@ class BuildRequest(object):
self.pkg = pkg
if not self.pkg.spec.concrete:
- raise ValueError("{0} must have a concrete spec"
- .format(self.pkg.name))
+ raise ValueError("{0} must have a concrete spec".format(self.pkg.name))
# Cache the package phase options with the explicit package,
# popping the options to ensure installation of associated
# dependencies is NOT affected by these options.
- self.pkg.stop_before_phase = install_args.pop('stop_before', None)
- self.pkg.last_phase = install_args.pop('stop_at', None)
+ self.pkg.stop_before_phase = install_args.pop("stop_before", None)
+ self.pkg.last_phase = install_args.pop("stop_at", None)
# Cache the package id for convenience
self.pkg_id = package_id(pkg)
@@ -2269,47 +2242,50 @@ class BuildRequest(object):
self._add_default_args()
# Cache overwrite information
- self.overwrite = set(self.install_args.get('overwrite', []))
+ self.overwrite = set(self.install_args.get("overwrite", []))
self.overwrite_time = time.time()
# Save off dependency package ids for quick checks since traversals
# are not able to return full dependents for all packages across
# environment specs.
deptypes = self.get_deptypes(self.pkg)
- self.dependencies = set(package_id(d.package) for d in
- self.pkg.spec.dependencies(deptype=deptypes)
- if package_id(d.package) != self.pkg_id)
+ self.dependencies = set(
+ package_id(d.package)
+ for d in self.pkg.spec.dependencies(deptype=deptypes)
+ if package_id(d.package) != self.pkg_id
+ )
def __repr__(self):
"""Returns a formal representation of the build request."""
- rep = '{0}('.format(self.__class__.__name__)
+ rep = "{0}(".format(self.__class__.__name__)
for attr, value in self.__dict__.items():
- rep += '{0}={1}, '.format(attr, value.__repr__())
- return '{0})'.format(rep.strip(', '))
+ rep += "{0}={1}, ".format(attr, value.__repr__())
+ return "{0})".format(rep.strip(", "))
def __str__(self):
"""Returns a printable version of the build request."""
- return 'package={0}, install_args={1}' \
- .format(self.pkg.name, self.install_args)
+ return "package={0}, install_args={1}".format(self.pkg.name, self.install_args)
def _add_default_args(self):
"""Ensure standard install options are set to at least the default."""
- for arg, default in [('cache_only', False),
- ('context', 'build'), # installs *always* build
- ('dirty', False),
- ('fail_fast', False),
- ('fake', False),
- ('install_deps', True),
- ('install_package', True),
- ('install_source', False),
- ('keep_prefix', False),
- ('keep_stage', False),
- ('restage', False),
- ('skip_patch', False),
- ('tests', False),
- ('unsigned', False),
- ('use_cache', True),
- ('verbose', False), ]:
+ for arg, default in [
+ ("cache_only", False),
+ ("context", "build"), # installs *always* build
+ ("dirty", False),
+ ("fail_fast", False),
+ ("fake", False),
+ ("install_deps", True),
+ ("install_package", True),
+ ("install_source", False),
+ ("keep_prefix", False),
+ ("keep_stage", False),
+ ("restage", False),
+ ("skip_patch", False),
+ ("tests", False),
+ ("unsigned", False),
+ ("use_cache", True),
+ ("verbose", False),
+ ]:
_ = self.install_args.setdefault(arg, default)
def get_deptypes(self, pkg):
@@ -2322,17 +2298,17 @@ class BuildRequest(object):
Returns:
tuple: required dependency type(s) for the package
"""
- deptypes = ['link', 'run']
- include_build_deps = self.install_args.get('include_build_deps')
- if not self.install_args.get('cache_only') or include_build_deps:
- deptypes.append('build')
+ deptypes = ["link", "run"]
+ include_build_deps = self.install_args.get("include_build_deps")
+ if not self.install_args.get("cache_only") or include_build_deps:
+ deptypes.append("build")
if self.run_tests(pkg):
- deptypes.append('test')
+ deptypes.append("test")
return tuple(sorted(deptypes))
def has_dependency(self, dep_id):
"""Returns ``True`` if the package id represents a known dependency
- of the requested package, ``False`` otherwise."""
+ of the requested package, ``False`` otherwise."""
return dep_id in self.dependencies
def run_tests(self, pkg):
@@ -2345,7 +2321,7 @@ class BuildRequest(object):
Returns:
bool: ``True`` if they should be run; ``False`` otherwise
"""
- tests = self.install_args.get('tests', False)
+ tests = self.install_args.get("tests", False)
return tests is True or (tests and pkg.name in tests)
@property
@@ -2363,11 +2339,10 @@ class BuildRequest(object):
get_spec = lambda s: s.spec
deptypes = self.get_deptypes(self.pkg)
- tty.debug('Processing dependencies for {0}: {1}'
- .format(self.pkg_id, deptypes))
+ tty.debug("Processing dependencies for {0}: {1}".format(self.pkg_id, deptypes))
for dspec in self.spec.traverse_edges(
- deptype=deptypes, order='post', root=False,
- direction='children'):
+ deptype=deptypes, order="post", root=False, direction="children"
+ ):
yield get_spec(dspec)
@@ -2383,8 +2358,8 @@ class BadInstallPhase(InstallError):
def __init__(self, pkg_name, phase):
super(BadInstallPhase, self).__init__(
- '\'{0}\' is not a valid phase for package {1}'
- .format(phase, pkg_name))
+ "'{0}' is not a valid phase for package {1}".format(phase, pkg_name)
+ )
class ExternalPackageError(InstallError):
@@ -2397,4 +2372,4 @@ class InstallLockError(InstallError):
class UpstreamPackageError(InstallError):
"""Raised during install when something goes wrong with an upstream
- package."""
+ package."""
diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py
index b66c67c801..f69b1e0088 100644
--- a/lib/spack/spack/main.py
+++ b/lib/spack/spack/main.py
@@ -53,54 +53,58 @@ from spack.error import SpackError
stat_names = pstats.Stats.sort_arg_dict_default
#: top-level aliases for Spack commands
-aliases = {
- 'rm': 'remove'
-}
+aliases = {"rm": "remove"}
#: help levels in order of detail (i.e., number of commands shown)
-levels = ['short', 'long']
+levels = ["short", "long"]
#: intro text for help at different levels
intro_by_level = {
- 'short': 'These are common spack commands:',
- 'long': 'Complete list of spack commands:',
+ "short": "These are common spack commands:",
+ "long": "Complete list of spack commands:",
}
#: control top-level spack options shown in basic vs. advanced help
-options_by_level = {
- 'short': ['h', 'k', 'V', 'color'],
- 'long': 'all'
-}
+options_by_level = {"short": ["h", "k", "V", "color"], "long": "all"}
#: Longer text for each section, to show in help
section_descriptions = {
- 'admin': 'administration',
- 'basic': 'query packages',
- 'build': 'build packages',
- 'config': 'configuration',
- 'developer': 'developer',
- 'environment': 'environment',
- 'extensions': 'extensions',
- 'help': 'more help',
- 'packaging': 'create packages',
- 'system': 'system',
+ "admin": "administration",
+ "basic": "query packages",
+ "build": "build packages",
+ "config": "configuration",
+ "developer": "developer",
+ "environment": "environment",
+ "extensions": "extensions",
+ "help": "more help",
+ "packaging": "create packages",
+ "system": "system",
}
#: preferential command order for some sections (e.g., build pipeline is
#: in execution order, not alphabetical)
section_order = {
- 'basic': ['list', 'info', 'find'],
- 'build': ['fetch', 'stage', 'patch', 'configure', 'build', 'restage',
- 'install', 'uninstall', 'clean'],
- 'packaging': ['create', 'edit']
+ "basic": ["list", "info", "find"],
+ "build": [
+ "fetch",
+ "stage",
+ "patch",
+ "configure",
+ "build",
+ "restage",
+ "install",
+ "uninstall",
+ "clean",
+ ],
+ "packaging": ["create", "edit"],
}
#: Properties that commands are required to set.
-required_command_properties = ['level', 'section', 'description']
+required_command_properties = ["level", "section", "description"]
#: Recorded directory where spack command was originally invoked
spack_working_dir = None
-spack_ld_library_path = os.environ.get('LD_LIBRARY_PATH', '')
+spack_ld_library_path = os.environ.get("LD_LIBRARY_PATH", "")
def set_working_dir():
@@ -132,8 +136,15 @@ def get_version():
git = exe.which("git")
if not git:
return version
- rev = git('-C', spack.paths.prefix, 'rev-parse', 'HEAD',
- output=str, error=os.devnull, fail_on_error=False)
+ rev = git(
+ "-C",
+ spack.paths.prefix,
+ "rev-parse",
+ "HEAD",
+ output=str,
+ error=os.devnull,
+ fail_on_error=False,
+ )
if git.returncode != 0:
return version
match = re.match(r"[a-f\d]{7,}$", rev)
@@ -153,8 +164,7 @@ def index_commands():
for p in required_command_properties:
prop = getattr(cmd_module, p, None)
if not prop:
- tty.die("Command doesn't define a property '%s': %s"
- % (p, command))
+ tty.die("Command doesn't define a property '%s': %s" % (p, command))
# add commands to lists for their level and higher levels
for level in reversed(levels):
@@ -170,22 +180,21 @@ def index_commands():
class SpackHelpFormatter(argparse.RawTextHelpFormatter):
def _format_actions_usage(self, actions, groups):
"""Formatter with more concise usage strings."""
- usage = super(
- SpackHelpFormatter, self)._format_actions_usage(actions, groups)
+ usage = super(SpackHelpFormatter, self)._format_actions_usage(actions, groups)
# Eliminate any occurrence of two or more consecutive spaces
- usage = re.sub(r'[ ]{2,}', ' ', usage)
+ usage = re.sub(r"[ ]{2,}", " ", usage)
# compress single-character flags that are not mutually exclusive
# at the beginning of the usage string
- chars = ''.join(re.findall(r'\[-(.)\]', usage))
- usage = re.sub(r'\[-.\] ?', '', usage)
+ chars = "".join(re.findall(r"\[-(.)\]", usage))
+ usage = re.sub(r"\[-.\] ?", "", usage)
if chars:
- usage = '[-%s] %s' % (chars, usage)
+ usage = "[-%s] %s" % (chars, usage)
return usage.strip()
def add_arguments(self, actions):
- actions = sorted(actions, key=operator.attrgetter('option_strings'))
+ actions = sorted(actions, key=operator.attrgetter("option_strings"))
super(SpackHelpFormatter, self).add_arguments(actions)
@@ -207,7 +216,7 @@ class SpackArgumentParser(argparse.ArgumentParser):
# Create a list of subcommand actions. Argparse internals are nasty!
# Note: you can only call _get_subactions() once. Even nastier!
- if not hasattr(self, 'actions'):
+ if not hasattr(self, "actions"):
self.actions = self._subparsers._actions[-1]._get_subactions()
# make a set of commands not yet added.
@@ -224,8 +233,7 @@ class SpackArgumentParser(argparse.ArgumentParser):
cmd_set = set(c for c in commands)
# make a dict of commands of interest
- cmds = dict((a.dest, a) for a in self.actions
- if a.dest in cmd_set)
+ cmds = dict((a.dest, a) for a in self.actions if a.dest in cmd_set)
# add commands to a group in order, and add the group
group = argparse._ArgumentGroup(self, title=title)
@@ -237,9 +245,10 @@ class SpackArgumentParser(argparse.ArgumentParser):
# select only the options for the particular level we're showing.
show_options = options_by_level[level]
- if show_options != 'all':
- opts = dict((opt.option_strings[0].strip('-'), opt)
- for opt in self._optionals._group_actions)
+ if show_options != "all":
+ opts = dict(
+ (opt.option_strings[0].strip("-"), opt) for opt in self._optionals._group_actions
+ )
new_actions = [opts[letter] for letter in show_options]
self._optionals._group_actions = new_actions
@@ -247,8 +256,7 @@ class SpackArgumentParser(argparse.ArgumentParser):
# custom, more concise usage for top level
help_options = self._optionals._group_actions
help_options = help_options + [self._positionals._group_actions[-1]]
- formatter.add_usage(
- self.usage, help_options, self._mutually_exclusive_groups)
+ formatter.add_usage(self.usage, help_options, self._mutually_exclusive_groups)
# description
formatter.add_text(self.description)
@@ -261,8 +269,8 @@ class SpackArgumentParser(argparse.ArgumentParser):
sections = index[level]
for section in sorted(sections):
- if section == 'help':
- continue # Cover help in the epilog.
+ if section == "help":
+ continue # Cover help in the epilog.
group_description = section_descriptions.get(section, section)
@@ -271,12 +279,10 @@ class SpackArgumentParser(argparse.ArgumentParser):
# add commands whose order we care about first.
if section in section_order:
- commands.extend(cmd for cmd in section_order[section]
- if cmd in to_display)
+ commands.extend(cmd for cmd in section_order[section] if cmd in to_display)
# add rest in alphabetical order.
- commands.extend(cmd for cmd in sorted(sections[section])
- if cmd not in commands)
+ commands.extend(cmd for cmd in sorted(sections[section]) if cmd not in commands)
# add the group to the parser
add_subcommand_group(group_description, commands)
@@ -285,25 +291,29 @@ class SpackArgumentParser(argparse.ArgumentParser):
add_group(self._optionals)
# epilog
- formatter.add_text("""\
+ formatter.add_text(
+ """\
{help}:
spack help --all list all commands and options
spack help <command> help on a specific command
spack help --spec help on the package specification syntax
spack docs open https://spack.rtfd.io/ in a browser
-""".format(help=section_descriptions['help']))
+""".format(
+ help=section_descriptions["help"]
+ )
+ )
# determine help from format above
return formatter.format_help()
def add_subparsers(self, **kwargs):
"""Ensure that sensible defaults are propagated to subparsers"""
- kwargs.setdefault('metavar', 'SUBCOMMAND')
+ kwargs.setdefault("metavar", "SUBCOMMAND")
# From Python 3.7 we can require a subparser, earlier versions
# of argparse will error because required=True is unknown
if sys.version_info[:2] > (3, 6):
- kwargs.setdefault('required', True)
+ kwargs.setdefault("required", True)
sp = super(SpackArgumentParser, self).add_subparsers(**kwargs)
# This monkey patching is needed for Python 3.5 and 3.6, which support
@@ -314,20 +324,20 @@ class SpackArgumentParser(argparse.ArgumentParser):
old_add_parser = sp.add_parser
def add_parser(name, **kwargs):
- kwargs.setdefault('formatter_class', SpackHelpFormatter)
+ kwargs.setdefault("formatter_class", SpackHelpFormatter)
return old_add_parser(name, **kwargs)
+
sp.add_parser = add_parser
return sp
def add_command(self, cmd_name):
"""Add one subcommand to this parser."""
# lazily initialize any subparsers
- if not hasattr(self, 'subparsers'):
+ if not hasattr(self, "subparsers"):
# remove the dummy "command" argument.
- if self._actions[-1].dest == 'command':
+ if self._actions[-1].dest == "command":
self._remove_action(self._actions[-1])
- self.subparsers = self.add_subparsers(metavar='COMMAND',
- dest="command")
+ self.subparsers = self.add_subparsers(metavar="COMMAND", dest="command")
# each command module implements a parser() function, to which we
# pass its subparser for setup.
@@ -337,15 +347,15 @@ class SpackArgumentParser(argparse.ArgumentParser):
alias_list = [k for k, v in aliases.items() if v == cmd_name]
subparser = self.subparsers.add_parser(
- cmd_name, aliases=alias_list,
- help=module.description, description=module.description)
+ cmd_name, aliases=alias_list, help=module.description, description=module.description
+ )
module.setup_parser(subparser)
# return the callable function for the command
return spack.cmd.get_command(cmd_name)
- def format_help(self, level='short'):
- if self.prog == 'spack':
+ def format_help(self, level="short"):
+ if self.prog == "spack":
# use format_help_sections for the main spack parser, but not
# for subparsers
return self.format_help_sections(level)
@@ -356,107 +366,172 @@ class SpackArgumentParser(argparse.ArgumentParser):
def _check_value(self, action, value):
# converted value must be one of the choices (if specified)
if action.choices is not None and value not in action.choices:
- cols = llnl.util.tty.colify.colified(
- sorted(action.choices), indent=4, tty=True
- )
- msg = 'invalid choice: %r choose from:\n%s' % (value, cols)
+ cols = llnl.util.tty.colify.colified(sorted(action.choices), indent=4, tty=True)
+ msg = "invalid choice: %r choose from:\n%s" % (value, cols)
raise argparse.ArgumentError(action, msg)
def make_argument_parser(**kwargs):
"""Create an basic argument parser without any subcommands added."""
parser = SpackArgumentParser(
- formatter_class=SpackHelpFormatter, add_help=False,
+ formatter_class=SpackHelpFormatter,
+ add_help=False,
description=(
"A flexible package manager that supports multiple versions,\n"
- "configurations, platforms, and compilers."),
- **kwargs)
+ "configurations, platforms, and compilers."
+ ),
+ **kwargs
+ )
# stat names in groups of 7, for nice wrapping.
stat_lines = list(zip(*(iter(stat_names),) * 7))
parser.add_argument(
- '-h', '--help',
- dest='help', action='store_const', const='short', default=None,
- help="show this help message and exit")
- parser.add_argument(
- '-H', '--all-help',
- dest='help', action='store_const', const='long', default=None,
- help="show help for all commands (same as spack help --all)")
- parser.add_argument(
- '--color', action='store',
- default=os.environ.get('SPACK_COLOR', 'auto'),
- choices=('always', 'never', 'auto'),
- help="when to colorize output (default: auto)")
+ "-h",
+ "--help",
+ dest="help",
+ action="store_const",
+ const="short",
+ default=None,
+ help="show this help message and exit",
+ )
parser.add_argument(
- '-c', '--config', default=None, action="append", dest="config_vars",
- help="add one or more custom, one off config settings.")
+ "-H",
+ "--all-help",
+ dest="help",
+ action="store_const",
+ const="long",
+ default=None,
+ help="show help for all commands (same as spack help --all)",
+ )
parser.add_argument(
- '-C', '--config-scope', dest='config_scopes', action='append',
- metavar='DIR', help="add a custom configuration scope")
+ "--color",
+ action="store",
+ default=os.environ.get("SPACK_COLOR", "auto"),
+ choices=("always", "never", "auto"),
+ help="when to colorize output (default: auto)",
+ )
parser.add_argument(
- '-d', '--debug', action='count', default=0,
- help="write out debug messages "
- "(more d's for more verbosity: -d, -dd, -ddd, etc.)")
+ "-c",
+ "--config",
+ default=None,
+ action="append",
+ dest="config_vars",
+ help="add one or more custom, one off config settings.",
+ )
parser.add_argument(
- '--timestamp', action='store_true',
- help="Add a timestamp to tty output")
+ "-C",
+ "--config-scope",
+ dest="config_scopes",
+ action="append",
+ metavar="DIR",
+ help="add a custom configuration scope",
+ )
parser.add_argument(
- '--pdb', action='store_true',
- help="run spack under the pdb debugger")
+ "-d",
+ "--debug",
+ action="count",
+ default=0,
+ help="write out debug messages " "(more d's for more verbosity: -d, -dd, -ddd, etc.)",
+ )
+ parser.add_argument("--timestamp", action="store_true", help="Add a timestamp to tty output")
+ parser.add_argument("--pdb", action="store_true", help="run spack under the pdb debugger")
env_group = parser.add_mutually_exclusive_group()
env_group.add_argument(
- '-e', '--env', dest='env', metavar='ENV', action='store',
- help="run with a specific environment (see spack env)")
+ "-e",
+ "--env",
+ dest="env",
+ metavar="ENV",
+ action="store",
+ help="run with a specific environment (see spack env)",
+ )
env_group.add_argument(
- '-D', '--env-dir', dest='env_dir', metavar='DIR', action='store',
- help="run with an environment directory (ignore named environments)")
+ "-D",
+ "--env-dir",
+ dest="env_dir",
+ metavar="DIR",
+ action="store",
+ help="run with an environment directory (ignore named environments)",
+ )
env_group.add_argument(
- '-E', '--no-env', dest='no_env', action='store_true',
- help="run without any environments activated (see spack env)")
+ "-E",
+ "--no-env",
+ dest="no_env",
+ action="store_true",
+ help="run without any environments activated (see spack env)",
+ )
parser.add_argument(
- '--use-env-repo', action='store_true',
- help="when running in an environment, use its package repository")
+ "--use-env-repo",
+ action="store_true",
+ help="when running in an environment, use its package repository",
+ )
parser.add_argument(
- '-k', '--insecure', action='store_true',
- help="do not check ssl certificates when downloading")
+ "-k",
+ "--insecure",
+ action="store_true",
+ help="do not check ssl certificates when downloading",
+ )
parser.add_argument(
- '-l', '--enable-locks', action='store_true', dest='locks',
- default=None, help="use filesystem locking (default)")
+ "-l",
+ "--enable-locks",
+ action="store_true",
+ dest="locks",
+ default=None,
+ help="use filesystem locking (default)",
+ )
parser.add_argument(
- '-L', '--disable-locks', action='store_false', dest='locks',
- help="do not use filesystem locking (unsafe)")
+ "-L",
+ "--disable-locks",
+ action="store_false",
+ dest="locks",
+ help="do not use filesystem locking (unsafe)",
+ )
parser.add_argument(
- '-m', '--mock', action='store_true',
- help="use mock packages instead of real ones")
+ "-m", "--mock", action="store_true", help="use mock packages instead of real ones"
+ )
parser.add_argument(
- '-b', '--bootstrap', action='store_true',
- help="use bootstrap configuration (bootstrap store, config, externals)")
+ "-b",
+ "--bootstrap",
+ action="store_true",
+ help="use bootstrap configuration (bootstrap store, config, externals)",
+ )
parser.add_argument(
- '-p', '--profile', action='store_true', dest='spack_profile',
- help="profile execution using cProfile")
+ "-p",
+ "--profile",
+ action="store_true",
+ dest="spack_profile",
+ help="profile execution using cProfile",
+ )
parser.add_argument(
- '--sorted-profile', default=None, metavar="STAT",
- help="profile and sort by one or more of:\n[%s]" %
- ',\n '.join([', '.join(line) for line in stat_lines]))
+ "--sorted-profile",
+ default=None,
+ metavar="STAT",
+ help="profile and sort by one or more of:\n[%s]"
+ % ",\n ".join([", ".join(line) for line in stat_lines]),
+ )
parser.add_argument(
- '--lines', default=20, action='store',
- help="lines of profile output or 'all' (default: 20)")
+ "--lines",
+ default=20,
+ action="store",
+ help="lines of profile output or 'all' (default: 20)",
+ )
parser.add_argument(
- '-v', '--verbose', action='store_true',
- help="print additional output during builds")
+ "-v", "--verbose", action="store_true", help="print additional output during builds"
+ )
parser.add_argument(
- '--stacktrace', action='store_true',
- default='SPACK_STACKTRACE' in os.environ,
- help="add stacktraces to all printed statements")
+ "--stacktrace",
+ action="store_true",
+ default="SPACK_STACKTRACE" in os.environ,
+ help="add stacktraces to all printed statements",
+ )
parser.add_argument(
- '-V', '--version', action='store_true',
- help='show version number and exit')
+ "-V", "--version", action="store_true", help="show version number and exit"
+ )
parser.add_argument(
- '--print-shell-vars', action='store',
- help="print info needed by setup-env.[c]sh")
+ "--print-shell-vars", action="store", help="print info needed by setup-env.[c]sh"
+ )
return parser
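A rough usage sketch of the factory above (assuming spack.main is importable from a Spack checkout; the flag values are purely illustrative): the parser is later paired with a trailing REMAINDER argument, as _main does below, so global options are parsed eagerly while the subcommand and its own arguments are deferred.

    import argparse

    from spack.main import make_argument_parser

    parser = make_argument_parser()
    parser.add_argument("command", nargs=argparse.REMAINDER)
    # "-d" is counted, "--color" is validated against (always, never, auto),
    # and everything from the first positional onward lands in args.command.
    args, unknown = parser.parse_known_args(["-d", "--color", "never", "find", "zlib"])
    print(args.debug, args.color, args.command)  # -> 1 never ['find', 'zlib']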
@@ -482,7 +557,7 @@ def setup_main_options(args):
if args.debug:
spack.error.debug = args.debug
spack.util.debug.register_interrupt_handler()
- spack.config.set('config:debug', True, scope='command_line')
+ spack.config.set("config:debug", True, scope="command_line")
spack.util.environment.tracing_enabled = True
if args.timestamp:
@@ -492,7 +567,7 @@ def setup_main_options(args):
if args.locks is not None:
if args.locks is False:
spack.util.lock.check_lock_safety(spack.paths.prefix)
- spack.config.set('config:locks', args.locks, scope='command_line')
+ spack.config.set("config:locks", args.locks, scope="command_line")
if args.mock:
spack.repo.path = spack.repo.RepoPath(spack.paths.mock_packages_path)
@@ -500,10 +575,10 @@ def setup_main_options(args):
# If the user asked for it, don't check ssl certs.
if args.insecure:
tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
- spack.config.set('config:verify_ssl', False, scope='command_line')
+ spack.config.set("config:verify_ssl", False, scope="command_line")
# Use the spack config command to handle parsing the config strings
- for config_var in (args.config_vars or []):
+ for config_var in args.config_vars or []:
spack.config.add(fullpath=config_var, scope="command_line")
# when to use color (takes always, auto, or never)
@@ -518,9 +593,9 @@ def allows_unknown_args(command):
args in.
"""
info = dict(inspect.getmembers(command))
- varnames = info['__code__'].co_varnames
- argcount = info['__code__'].co_argcount
- return (argcount == 3 and varnames[2] == 'unknown_args')
+ varnames = info["__code__"].co_varnames
+ argcount = info["__code__"].co_argcount
+ return argcount == 3 and varnames[2] == "unknown_args"
def _invoke_command(command, parser, args, unknown_args):
@@ -529,7 +604,7 @@ def _invoke_command(command, parser, args, unknown_args):
return_val = command(parser, args, unknown_args)
else:
if unknown_args:
- tty.die('unrecognized arguments: %s' % ' '.join(unknown_args))
+ tty.die("unrecognized arguments: %s" % " ".join(unknown_args))
return_val = command(parser, args)
     # Allow commands to return an error code if they want
@@ -547,6 +622,7 @@ class SpackCommand(object):
Use this to invoke Spack commands directly from Python and check
their output.
"""
+
def __init__(self, command_name):
"""Create a new SpackCommand that invokes ``command_name`` when called.
@@ -579,18 +655,16 @@ class SpackCommand(object):
self.returncode = None
self.error = None
- prepend = kwargs['global_args'] if 'global_args' in kwargs else []
+ prepend = kwargs["global_args"] if "global_args" in kwargs else []
- args, unknown = self.parser.parse_known_args(
- prepend + [self.command_name] + list(argv))
+ args, unknown = self.parser.parse_known_args(prepend + [self.command_name] + list(argv))
- fail_on_error = kwargs.get('fail_on_error', True)
+ fail_on_error = kwargs.get("fail_on_error", True)
out = StringIO()
try:
with log_output(out):
- self.returncode = _invoke_command(
- self.command, self.parser, args, unknown)
+ self.returncode = _invoke_command(self.command, self.parser, args, unknown)
except SystemExit as e:
self.returncode = e.code
@@ -605,18 +679,18 @@ class SpackCommand(object):
if fail_on_error and self.returncode not in (None, 0):
self._log_command_output(out)
raise SpackCommandError(
- "Command exited with code %d: %s(%s)" % (
- self.returncode, self.command_name,
- ', '.join("'%s'" % a for a in argv)))
+ "Command exited with code %d: %s(%s)"
+ % (self.returncode, self.command_name, ", ".join("'%s'" % a for a in argv))
+ )
return out.getvalue()
def _log_command_output(self, out):
if tty.is_verbose():
- fmt = self.command_name + ': {0}'
- for ln in out.getvalue().split('\n'):
+ fmt = self.command_name + ": {0}"
+ for ln in out.getvalue().split("\n"):
if len(ln) > 0:
- tty.verbose(fmt.format(ln.replace('==> ', '')))
+ tty.verbose(fmt.format(ln.replace("==> ", "")))
def _profile_wrapper(command, parser, args, unknown_args):
@@ -625,14 +699,14 @@ def _profile_wrapper(command, parser, args, unknown_args):
try:
nlines = int(args.lines)
except ValueError:
- if args.lines != 'all':
- tty.die('Invalid number for --lines: %s' % args.lines)
+ if args.lines != "all":
+ tty.die("Invalid number for --lines: %s" % args.lines)
nlines = -1
# allow comma-separated list of fields
- sortby = ['time']
+ sortby = ["time"]
if args.sorted_profile:
- sortby = args.sorted_profile.split(',')
+ sortby = args.sorted_profile.split(",")
for stat in sortby:
if stat not in stat_names:
tty.die("Invalid sort field: %s" % stat)
@@ -658,7 +732,7 @@ def _compatible_sys_types():
with the current host.
"""
host_platform = spack.platforms.host()
- host_os = str(host_platform.operating_system('default_os'))
+ host_os = str(host_platform.operating_system("default_os"))
host_target = archspec.cpu.host()
compatible_targets = [host_target] + host_target.ancestors
@@ -679,35 +753,30 @@ def print_setup_info(*info):
This is in ``main.py`` to make it fast; the setup scripts need to
invoke spack in login scripts, and it needs to be quick.
"""
- shell = 'csh' if 'csh' in info else 'sh'
+ shell = "csh" if "csh" in info else "sh"
def shell_set(var, value):
- if shell == 'sh':
+ if shell == "sh":
print("%s='%s'" % (var, value))
- elif shell == 'csh':
+ elif shell == "csh":
print("set %s = '%s'" % (var, value))
else:
- tty.die('shell must be sh or csh')
+ tty.die("shell must be sh or csh")
# print sys type
- shell_set('_sp_sys_type', str(spack.spec.ArchSpec.default_arch()))
- shell_set('_sp_compatible_sys_types', ':'.join(_compatible_sys_types()))
+ shell_set("_sp_sys_type", str(spack.spec.ArchSpec.default_arch()))
+ shell_set("_sp_compatible_sys_types", ":".join(_compatible_sys_types()))
# print roots for all module systems
- module_to_roots = {
- 'tcl': list(),
- 'lmod': list()
- }
+ module_to_roots = {"tcl": list(), "lmod": list()}
for name in module_to_roots.keys():
- path = spack.modules.common.root_path(name, 'default')
+ path = spack.modules.common.root_path(name, "default")
module_to_roots[name].append(path)
- other_spack_instances = spack.config.get(
- 'upstreams') or {}
+ other_spack_instances = spack.config.get("upstreams") or {}
for install_properties in other_spack_instances.values():
- upstream_module_roots = install_properties.get('modules', {})
+ upstream_module_roots = install_properties.get("modules", {})
upstream_module_roots = dict(
- (k, v) for k, v in upstream_module_roots.items()
- if k in module_to_roots
+ (k, v) for k, v in upstream_module_roots.items() if k in module_to_roots
)
for module_type, root in upstream_module_roots.items():
module_to_roots[module_type].append(root)
@@ -716,19 +785,19 @@ def print_setup_info(*info):
# Environment setup prepends paths, so the order is reversed here to
# preserve the intended priority: the modules of the local Spack
# instance are the highest-precedence.
- roots_val = ':'.join(reversed(paths))
- shell_set('_sp_%s_roots' % name, roots_val)
+ roots_val = ":".join(reversed(paths))
+ shell_set("_sp_%s_roots" % name, roots_val)
# print environment module system if available. This can be expensive
# on clusters, so skip it if not needed.
- if 'modules' in info:
+ if "modules" in info:
generic_arch = archspec.cpu.host().family
- module_spec = 'environment-modules target={0}'.format(generic_arch)
+ module_spec = "environment-modules target={0}".format(generic_arch)
specs = spack.store.db.query(module_spec)
if specs:
- shell_set('_sp_module_prefix', specs[-1].prefix)
+ shell_set("_sp_module_prefix", specs[-1].prefix)
else:
- shell_set('_sp_module_prefix', 'not_installed')
+ shell_set("_sp_module_prefix", "not_installed")
def _main(argv=None):
@@ -760,7 +829,7 @@ def _main(argv=None):
# avoid loading all the modules from spack.cmd when we don't need
# them, which reduces startup latency.
parser = make_argument_parser()
- parser.add_argument('command', nargs=argparse.REMAINDER)
+ parser.add_argument("command", nargs=argparse.REMAINDER)
args, unknown = parser.parse_known_args(argv)
# Recover stored LD_LIBRARY_PATH variables from spack shell function
@@ -769,11 +838,9 @@ def _main(argv=None):
# Spack clears these variables before building and installing packages,
# but needs to know the prior state for commands like `spack load` and
     # `spack env activate` that modify the user environment.
- recovered_vars = (
- 'LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'DYLD_FALLBACK_LIBRARY_PATH'
- )
+ recovered_vars = ("LD_LIBRARY_PATH", "DYLD_LIBRARY_PATH", "DYLD_FALLBACK_LIBRARY_PATH")
for var in recovered_vars:
- stored_var_name = 'SPACK_%s' % var
+ stored_var_name = "SPACK_%s" % var
if stored_var_name in os.environ:
os.environ[var] = os.environ[stored_var_name]
@@ -823,7 +890,7 @@ def _main(argv=None):
# Things that require configuration should go below here
# ------------------------------------------------------------------------
if args.print_shell_vars:
- print_setup_info(*args.print_shell_vars.split(','))
+ print_setup_info(*args.print_shell_vars.split(","))
return 0
# At this point we've considered all the options to spack itself, so we
@@ -842,6 +909,7 @@ def _main(argv=None):
bootstrap_context = llnl.util.lang.nullcontext()
if args.bootstrap:
import spack.bootstrap as bootstrap # avoid circular imports
+
bootstrap_context = bootstrap.ensure_bootstrap_configuration()
with bootstrap_context:
@@ -869,8 +937,8 @@ def finish_parse_and_run(parser, cmd_name, env_format_error):
_profile_wrapper(command, parser, args, unknown)
elif args.pdb:
import pdb
- pdb.runctx('_invoke_command(command, parser, args, unknown)',
- globals(), locals())
+
+ pdb.runctx("_invoke_command(command, parser, args, unknown)", globals(), locals())
return 0
else:
return _invoke_command(command, parser, args, unknown)
@@ -897,9 +965,9 @@ def main(argv=None):
e.die() # gracefully die on any SpackErrors
except KeyboardInterrupt:
- if spack.config.get('config:debug'):
+ if spack.config.get("config:debug"):
raise
- sys.stderr.write('\n')
+ sys.stderr.write("\n")
tty.error("Keyboard interrupt.")
if sys.version_info >= (3, 5):
return signal.SIGINT.value
@@ -907,12 +975,12 @@ def main(argv=None):
return signal.SIGINT
except SystemExit as e:
- if spack.config.get('config:debug'):
+ if spack.config.get("config:debug"):
traceback.print_exc()
return e.code
except Exception as e:
- if spack.config.get('config:debug'):
+ if spack.config.get("config:debug"):
raise
tty.error(e)
return 3
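Before moving on to mirror.py, a hedged sketch of how the SpackCommand wrapper reformatted above is typically driven from Python tests and scripts (assumes a working Spack checkout on sys.path; "find" and its flag are only examples):

    from spack.main import SpackCommand, SpackCommandError

    find = SpackCommand("find")
    try:
        # Captured output comes back as a string; the exit code (or None) is
        # stored on the wrapper. fail_on_error=True, the default, raises
        # SpackCommandError on a non-zero exit instead of returning.
        output = find("--format", "{name}")
        print(find.returncode, output)
    except SpackCommandError as err:
        print("spack find failed:", err)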
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 6a8f15e58f..1957ef223d 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -59,15 +59,13 @@ class Mirror(object):
to them. These two URLs are usually the same.
"""
- def __init__(self, fetch_url, push_url=None,
- name=None):
+ def __init__(self, fetch_url, push_url=None, name=None):
self._fetch_url = fetch_url
self._push_url = push_url
self._name = name
def __eq__(self, other):
- return (self._fetch_url == other._fetch_url and
- self._push_url == other._push_url)
+ return self._fetch_url == other._fetch_url and self._push_url == other._push_url
def to_json(self, stream=None):
return sjson.dump(self.to_dict(), stream)
@@ -99,55 +97,52 @@ class Mirror(object):
def to_dict(self):
if self._push_url is None:
- return syaml_dict([
- ('fetch', self._fetch_url),
- ('push', self._fetch_url)])
+ return syaml_dict([("fetch", self._fetch_url), ("push", self._fetch_url)])
else:
- return syaml_dict([
- ('fetch', self._fetch_url),
- ('push', self._push_url)])
+ return syaml_dict([("fetch", self._fetch_url), ("push", self._push_url)])
@staticmethod
def from_dict(d, name=None):
if isinstance(d, six.string_types):
return Mirror(d, name=name)
else:
- return Mirror(d['fetch'], d['push'], name=name)
+ return Mirror(d["fetch"], d["push"], name=name)
def display(self, max_len=0):
if self._push_url is None:
_display_mirror_entry(max_len, self._name, self.fetch_url)
else:
- _display_mirror_entry(
- max_len, self._name, self.fetch_url, "fetch")
- _display_mirror_entry(
- max_len, self._name, self.push_url, "push")
+ _display_mirror_entry(max_len, self._name, self.fetch_url, "fetch")
+ _display_mirror_entry(max_len, self._name, self.push_url, "push")
def __str__(self):
name = self._name
if name is None:
- name = ''
+ name = ""
else:
name = ' "%s"' % name
if self._push_url is None:
return "[Mirror%s (%s)]" % (name, self._fetch_url)
- return "[Mirror%s (fetch: %s, push: %s)]" % (
- name, self._fetch_url, self._push_url)
+ return "[Mirror%s (fetch: %s, push: %s)]" % (name, self._fetch_url, self._push_url)
def __repr__(self):
- return ''.join((
- 'Mirror(',
- ', '.join(
- '%s=%s' % (k, repr(v))
- for k, v in (
- ('fetch_url', self._fetch_url),
- ('push_url', self._push_url),
- ('name', self._name))
- if k == 'fetch_url' or v),
- ')'
- ))
+ return "".join(
+ (
+ "Mirror(",
+ ", ".join(
+ "%s=%s" % (k, repr(v))
+ for k, v in (
+ ("fetch_url", self._fetch_url),
+ ("push_url", self._push_url),
+ ("name", self._name),
+ )
+ if k == "fetch_url" or v
+ ),
+ ")",
+ )
+ )
@property
def name(self):
@@ -156,8 +151,8 @@ class Mirror(object):
def get_profile(self, url_type):
if isinstance(self._fetch_url, dict):
if url_type == "push":
- return self._push_url.get('profile', None)
- return self._fetch_url.get('profile', None)
+ return self._push_url.get("profile", None)
+ return self._fetch_url.get("profile", None)
else:
return None
@@ -170,8 +165,8 @@ class Mirror(object):
def get_access_pair(self, url_type):
if isinstance(self._fetch_url, dict):
if url_type == "push":
- return self._push_url.get('access_pair', None)
- return self._fetch_url.get('access_pair', None)
+ return self._push_url.get("access_pair", None)
+ return self._fetch_url.get("access_pair", None)
else:
return None
@@ -184,8 +179,8 @@ class Mirror(object):
def get_endpoint_url(self, url_type):
if isinstance(self._fetch_url, dict):
if url_type == "push":
- return self._push_url.get('endpoint_url', None)
- return self._fetch_url.get('endpoint_url', None)
+ return self._push_url.get("endpoint_url", None)
+ return self._fetch_url.get("endpoint_url", None)
else:
return None
@@ -198,8 +193,8 @@ class Mirror(object):
def get_access_token(self, url_type):
if isinstance(self._fetch_url, dict):
if url_type == "push":
- return self._push_url.get('access_token', None)
- return self._fetch_url.get('access_token', None)
+ return self._push_url.get("access_token", None)
+ return self._fetch_url.get("access_token", None)
else:
return None
@@ -211,8 +206,7 @@ class Mirror(object):
@property
def fetch_url(self):
- return self._fetch_url if _is_string(self._fetch_url) \
- else self._fetch_url["url"]
+ return self._fetch_url if _is_string(self._fetch_url) else self._fetch_url["url"]
@fetch_url.setter
def fetch_url(self, url):
@@ -222,10 +216,8 @@ class Mirror(object):
@property
def push_url(self):
if self._push_url is None:
- return self._fetch_url if _is_string(self._fetch_url) \
- else self._fetch_url["url"]
- return self._push_url if _is_string(self._push_url) \
- else self._push_url["url"]
+ return self._fetch_url if _is_string(self._fetch_url) else self._fetch_url["url"]
+ return self._push_url if _is_string(self._push_url) else self._push_url["url"]
@push_url.setter
def push_url(self, url):
@@ -244,8 +236,11 @@ class MirrorCollection(Mapping):
self._mirrors = collections.OrderedDict(
(name, Mirror.from_dict(mirror, name))
for name, mirror in (
- mirrors.items() if mirrors is not None else
- spack.config.get('mirrors', scope=scope).items()))
+ mirrors.items()
+ if mirrors is not None
+ else spack.config.get("mirrors", scope=scope).items()
+ )
+ )
def __eq__(self, other):
return self._mirrors == other._mirrors
@@ -280,12 +275,12 @@ class MirrorCollection(Mapping):
)
def to_dict(self, recursive=False):
- return syaml_dict(sorted(
- (
- (k, (v.to_dict() if recursive else v))
- for (k, v) in self._mirrors.items()
- ), key=operator.itemgetter(0)
- ))
+ return syaml_dict(
+ sorted(
+ ((k, (v.to_dict() if recursive else v)) for (k, v) in self._mirrors.items()),
+ key=operator.itemgetter(0),
+ )
+ )
@staticmethod
def from_dict(d):
@@ -329,7 +324,7 @@ def _determine_extension(fetcher):
if ext:
# Remove any leading dots
- ext = ext.lstrip('.')
+ ext = ext.lstrip(".")
else:
msg = """\
Unable to parse extension from {0}.
@@ -351,7 +346,7 @@ Spack not to expand it with the following syntax:
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
- ext = 'tar.gz'
+ ext = "tar.gz"
return ext
@@ -369,6 +364,7 @@ class MirrorReference(object):
this includes names generated by previous naming schemes that are no-longer
reported by ``storage_path`` or ``cosmetic_path``.
"""
+
def __init__(self, cosmetic_path, global_path=None):
self.global_path = global_path
self.cosmetic_path = cosmetic_path
@@ -393,7 +389,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
if spec:
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
versions = pkg_cls.versions.get(spec.version, {})
- ext = versions.get('extension', None)
+ ext = versions.get("extension", None)
# If the spec does not explicitly specify an extension (the default case),
# then try to determine it automatically. An extension can only be
# specified for the primary source of the package (e.g. the source code
@@ -406,7 +402,7 @@ def mirror_archive_paths(fetcher, per_package_ref, spec=None):
global_ref = fetcher.mirror_id()
if global_ref:
- global_ref = os.path.join('_source-cache', global_ref)
+ global_ref = os.path.join("_source-cache", global_ref)
if global_ref and ext:
global_ref += ".%s" % ext
@@ -511,24 +507,21 @@ def create(path, specs, skip_unstable_versions=False):
parsed = url_util.parse(path)
mirror_root = url_util.local_file_path(parsed)
if not mirror_root:
- raise spack.error.SpackError(
- 'MirrorCaches only work with file:// URLs')
+ raise spack.error.SpackError("MirrorCaches only work with file:// URLs")
# automatically spec-ify anything in the specs array.
- specs = [
- s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s)
- for s in specs]
+ specs = [s if isinstance(s, spack.spec.Spec) else spack.spec.Spec(s) for s in specs]
# Get the absolute path of the root before we start jumping around.
if not os.path.isdir(mirror_root):
try:
mkdirp(mirror_root)
except OSError as e:
- raise MirrorError(
- "Cannot create directory '%s':" % mirror_root, str(e))
+ raise MirrorError("Cannot create directory '%s':" % mirror_root, str(e))
mirror_cache = spack.caches.MirrorCache(
- mirror_root, skip_unstable_versions=skip_unstable_versions)
+ mirror_root, skip_unstable_versions=skip_unstable_versions
+ )
mirror_stats = MirrorStats()
# Iterate through packages and download all safe tarballs for each
@@ -541,7 +534,7 @@ def create(path, specs, skip_unstable_versions=False):
def add(name, url, scope, args={}):
"""Add a named mirror in the given scope"""
- mirrors = spack.config.get('mirrors', scope=scope)
+ mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml_dict()
@@ -553,21 +546,23 @@ def add(name, url, scope, args={}):
key_values = ["s3_access_key_id", "s3_access_token", "s3_profile"]
# On creation, assume connection data is set for both
if any(value for value in key_values if value in args):
- url_dict = {"url": url,
- "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
- "access_token": args.s3_access_token,
- "profile": args.s3_profile,
- "endpoint_url": args.s3_endpoint_url}
+ url_dict = {
+ "url": url,
+ "access_pair": (args.s3_access_key_id, args.s3_access_key_secret),
+ "access_token": args.s3_access_token,
+ "profile": args.s3_profile,
+ "endpoint_url": args.s3_endpoint_url,
+ }
mirror_data = {"fetch": url_dict, "push": url_dict}
items.insert(0, (name, mirror_data))
mirrors = syaml_dict(items)
- spack.config.set('mirrors', mirrors, scope=scope)
+ spack.config.set("mirrors", mirrors, scope=scope)
def remove(name, scope):
"""Remove the named mirror in the given scope"""
- mirrors = spack.config.get('mirrors', scope=scope)
+ mirrors = spack.config.get("mirrors", scope=scope)
if not mirrors:
mirrors = syaml_dict()
@@ -575,15 +570,15 @@ def remove(name, scope):
tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name)
- spack.config.set('mirrors', mirrors, scope=scope)
+ spack.config.set("mirrors", mirrors, scope=scope)
debug_msg_url = "url %s"
debug_msg = ["Removed mirror %s with"]
values = [name]
try:
- fetch_value = old_value['fetch']
- push_value = old_value['push']
+ fetch_value = old_value["fetch"]
+ push_value = old_value["push"]
debug_msg.extend(("fetch", debug_msg_url, "and push", debug_msg_url))
values.extend((fetch_value, push_value))
@@ -641,9 +636,7 @@ def _add_single_spec(spec, mirror, mirror_stats):
if not spec.concrete:
spec = spec.concretized()
- tty.msg("Adding package {pkg} to mirror".format(
- pkg=spec.format("{name}{@version}")
- ))
+ tty.msg("Adding package {pkg} to mirror".format(pkg=spec.format("{name}{@version}")))
num_retries = 3
while num_retries > 0:
try:
@@ -661,12 +654,13 @@ def _add_single_spec(spec, mirror, mirror_stats):
num_retries -= 1
if exception:
- if spack.config.get('config:debug'):
+ if spack.config.get("config:debug"):
traceback.print_exception(file=sys.stderr, *exc_tuple)
else:
tty.warn(
- "Error while fetching %s" % spec.cformat('{name}{@version}'),
- getattr(exception, 'message', exception))
+ "Error while fetching %s" % spec.cformat("{name}{@version}"),
+ getattr(exception, "message", exception),
+ )
mirror_stats.error()
@@ -674,10 +668,10 @@ def push_url_from_directory(output_directory):
"""Given a directory in the local filesystem, return the URL on
which to push binary packages.
"""
- scheme = url_util.parse(output_directory, scheme='<missing>').scheme
- if scheme != '<missing>':
- raise ValueError('expected a local path, but got a URL instead')
- mirror_url = 'file://' + output_directory
+ scheme = url_util.parse(output_directory, scheme="<missing>").scheme
+ if scheme != "<missing>":
+ raise ValueError("expected a local path, but got a URL instead")
+ mirror_url = "file://" + output_directory
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
return url_util.format(mirror.push_url)
@@ -692,8 +686,8 @@ def push_url_from_mirror_name(mirror_name):
def push_url_from_mirror_url(mirror_url):
"""Given a mirror URL, return the URL on which to push binary packages."""
- scheme = url_util.parse(mirror_url, scheme='<missing>').scheme
- if scheme == '<missing>':
+ scheme = url_util.parse(mirror_url, scheme="<missing>").scheme
+ if scheme == "<missing>":
raise ValueError('"{0}" is not a valid URL'.format(mirror_url))
mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
return url_util.format(mirror.push_url)
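The Mirror class above round-trips cleanly through its dict form; a short hedged sketch (the URLs and name are made up, and a Spack checkout is assumed to be importable):

    import spack.mirror

    m = spack.mirror.Mirror(
        "https://example.com/mirror", "s3://example-bucket/mirror", name="demo"
    )
    d = m.to_dict()                  # ordered mapping with "fetch" and "push" keys
    m2 = spack.mirror.Mirror.from_dict(d, name="demo")
    assert m == m2                   # equality compares only the fetch and push URLs
    print(m2.fetch_url, m2.push_url)
    print(m2)                        # [Mirror "demo" (fetch: ..., push: ...)]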
diff --git a/lib/spack/spack/mixins.py b/lib/spack/spack/mixins.py
index b247c28d30..ace3681e52 100644
--- a/lib/spack/spack/mixins.py
+++ b/lib/spack/spack/mixins.py
@@ -19,8 +19,8 @@ else:
import llnl.util.filesystem
__all__ = [
- 'filter_compiler_wrappers',
- 'PackageMixinsMeta',
+ "filter_compiler_wrappers",
+ "PackageMixinsMeta",
]
@@ -67,16 +67,12 @@ class PackageMixinsMeta(type):
attr_dict.update(PackageMixinsMeta._methods_to_be_added)
PackageMixinsMeta._methods_to_be_added.clear()
- attr_fmt = '_InstallPhase_{0}'
+ attr_fmt = "_InstallPhase_{0}"
# Copy the phases that needs it to the most derived classes
# in order not to interfere with other packages in the hierarchy
- phases_to_be_copied = list(
- PackageMixinsMeta._add_method_before.keys()
- )
- phases_to_be_copied += list(
- PackageMixinsMeta._add_method_after.keys()
- )
+ phases_to_be_copied = list(PackageMixinsMeta._add_method_before.keys())
+ phases_to_be_copied += list(PackageMixinsMeta._add_method_after.keys())
for phase in phases_to_be_copied:
@@ -158,24 +154,20 @@ def filter_compiler_wrappers(*files, **kwargs):
``find`` (see its documentation for more information on the
behavior)
"""
- after = kwargs.get('after', 'install')
- relative_root = kwargs.get('relative_root', None)
+ after = kwargs.get("after", "install")
+ relative_root = kwargs.get("relative_root", None)
filter_kwargs = {
- 'ignore_absent': kwargs.get('ignore_absent', True),
- 'backup': kwargs.get('backup', False),
- 'string': True
+ "ignore_absent": kwargs.get("ignore_absent", True),
+ "backup": kwargs.get("backup", False),
+ "string": True,
}
- find_kwargs = {
- 'recursive': kwargs.get('recursive', False)
- }
+ find_kwargs = {"recursive": kwargs.get("recursive", False)}
def _filter_compiler_wrappers_impl(self):
# Compute the absolute path of the search root
- root = os.path.join(
- self.prefix, relative_root
- ) if relative_root else self.prefix
+ root = os.path.join(self.prefix, relative_root) if relative_root else self.prefix
# Compute the absolute path of the files to be filtered and
# remove links from the list.
@@ -185,10 +177,10 @@ def filter_compiler_wrappers(*files, **kwargs):
x = llnl.util.filesystem.FileFilter(*abs_files)
compiler_vars = [
- ('CC', self.compiler.cc),
- ('CXX', self.compiler.cxx),
- ('F77', self.compiler.f77),
- ('FC', self.compiler.fc)
+ ("CC", self.compiler.cc),
+ ("CXX", self.compiler.cxx),
+ ("F77", self.compiler.f77),
+ ("FC", self.compiler.fc),
]
# Some paths to the compiler wrappers might be substrings of the others.
@@ -213,19 +205,15 @@ def filter_compiler_wrappers(*files, **kwargs):
for wrapper_path in (wrapper, os.path.realpath(wrapper)):
replacements.append((wrapper_path, -idx, compiler_path))
- for wrapper_path, _, compiler_path in sorted(replacements,
- reverse=True):
+ for wrapper_path, _, compiler_path in sorted(replacements, reverse=True):
x.filter(wrapper_path, compiler_path, **filter_kwargs)
# Remove this linking flag if present (it turns RPATH into RUNPATH)
- x.filter('{0}--enable-new-dtags'.format(self.compiler.linker_arg), '',
- **filter_kwargs)
+ x.filter("{0}--enable-new-dtags".format(self.compiler.linker_arg), "", **filter_kwargs)
# NAG compiler is usually mixed with GCC, which has a different
# prefix for linker arguments.
- if self.compiler.name == 'nag':
- x.filter('-Wl,--enable-new-dtags', '', **filter_kwargs)
+ if self.compiler.name == "nag":
+ x.filter("-Wl,--enable-new-dtags", "", **filter_kwargs)
- PackageMixinsMeta.register_method_after(
- _filter_compiler_wrappers_impl, after
- )
+ PackageMixinsMeta.register_method_after(_filter_compiler_wrappers_impl, after)
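The register_method_after call above is the core of filter_compiler_wrappers: a callback is queued, and the package metaclass later arranges for it to run after the named phase. A simplified, standalone sketch of that pattern (not Spack's actual PackageMixinsMeta, just its shape):

    class MixinsMeta(type):
        _after = {}  # phase name -> callbacks to run after that phase

        @staticmethod
        def register_method_after(fn, phase):
            MixinsMeta._after.setdefault(phase, []).append(fn)

        def __new__(mcs, name, bases, attrs):
            cls = super().__new__(mcs, name, bases, attrs)
            for phase, callbacks in list(MixinsMeta._after.items()):
                original = getattr(cls, phase, None)
                if original is None:
                    continue

                def wrapped(self, *args, _orig=original, _cbs=tuple(callbacks), **kwargs):
                    result = _orig(self, *args, **kwargs)
                    for cb in _cbs:
                        cb(self)
                    return result

                setattr(cls, phase, wrapped)
            MixinsMeta._after.clear()
            return cls

    def announce(self):
        print("post-install hook ran for", type(self).__name__)

    MixinsMeta.register_method_after(announce, "install")

    class DemoPackage(metaclass=MixinsMeta):
        def install(self):
            print("installing")

    DemoPackage().install()  # prints "installing", then the hook line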
diff --git a/lib/spack/spack/modules/__init__.py b/lib/spack/spack/modules/__init__.py
index 76f63827d5..0d541f46e2 100644
--- a/lib/spack/spack/modules/__init__.py
+++ b/lib/spack/spack/modules/__init__.py
@@ -13,13 +13,6 @@ from .common import disable_modules
from .lmod import LmodModulefileWriter
from .tcl import TclModulefileWriter
-__all__ = [
- 'TclModulefileWriter',
- 'LmodModulefileWriter',
- 'disable_modules'
-]
+__all__ = ["TclModulefileWriter", "LmodModulefileWriter", "disable_modules"]
-module_types = {
- 'tcl': TclModulefileWriter,
- 'lmod': LmodModulefileWriter
-}
+module_types = {"tcl": TclModulefileWriter, "lmod": LmodModulefileWriter}
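The module_types mapping above is the dispatch point for picking a writer by name; a minimal hedged sketch (assumes a Spack checkout on sys.path; the spec and module-set name mentioned in the comment are placeholders):

    import spack.modules

    writer_cls = spack.modules.module_types["tcl"]  # -> TclModulefileWriter
    print(writer_cls.__name__)
    # writer_cls(some_concrete_spec, "default").write() would then emit a Tcl
    # module file for that spec; both arguments here are placeholders.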
diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py
index 4969c34167..b9f2dce099 100644
--- a/lib/spack/spack/modules/common.py
+++ b/lib/spack/spack/modules/common.py
@@ -84,22 +84,22 @@ def get_deprecated(dictionary, name, old_name, default):
#: config section for this file
def configuration(module_set_name):
- config_path = 'modules:%s' % module_set_name
+ config_path = "modules:%s" % module_set_name
return spack.config.get(config_path, {})
#: Valid tokens for naming scheme and env variable names
_valid_tokens = (
- 'name',
- 'version',
- 'compiler',
- 'compiler.name',
- 'compiler.version',
- 'architecture',
+ "name",
+ "version",
+ "compiler",
+ "compiler.name",
+ "compiler.version",
+ "architecture",
# tokens from old-style format strings
- 'package',
- 'compilername',
- 'compilerver',
+ "package",
+ "compilername",
+ "compilerver",
)
@@ -115,12 +115,11 @@ def _check_tokens_are_valid(format_string, message):
tokens are found
"""
- named_tokens = re.findall(r'{(\w*)}', format_string)
- invalid_tokens = [x for x in named_tokens
- if x.lower() not in _valid_tokens]
+ named_tokens = re.findall(r"{(\w*)}", format_string)
+ invalid_tokens = [x for x in named_tokens if x.lower() not in _valid_tokens]
if invalid_tokens:
msg = message
- msg += ' [{0}]. '.format(', '.join(invalid_tokens))
+ msg += " [{0}]. ".format(", ".join(invalid_tokens))
msg += 'Did you check your "modules.yaml" configuration?'
raise RuntimeError(msg)
@@ -142,7 +141,7 @@ def update_dictionary_extending_lists(target, update):
target[key] = update[key]
-def dependencies(spec, request='all'):
+def dependencies(spec, request="all"):
"""Returns the list of dependent specs for a given spec, according to the
request passed as parameter.
@@ -157,16 +156,16 @@ def dependencies(spec, request='all'):
the direct dependencies if request is 'direct', or the entire DAG
if request is 'all'.
"""
- if request not in ('none', 'direct', 'all'):
+ if request not in ("none", "direct", "all"):
message = "Wrong value for argument 'request' : "
message += "should be one of ('none', 'direct', 'all')"
raise tty.error(message + " [current value is '%s']" % request)
- if request == 'none':
+ if request == "none":
return []
- if request == 'direct':
- return spec.dependencies(deptype=('link', 'run'))
+ if request == "direct":
+ return spec.dependencies(deptype=("link", "run"))
# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits
@@ -175,11 +174,9 @@ def dependencies(spec, request='all'):
seen = set()
seen_add = seen.add
deps = sorted(
- spec.traverse(order='post',
- cover='nodes',
- deptype=('link', 'run'),
- root=False),
- reverse=True)
+ spec.traverse(order="post", cover="nodes", deptype=("link", "run"), root=False),
+ reverse=True,
+ )
return [d for d in deps if not (d in seen or seen_add(d))]
@@ -205,37 +202,36 @@ def merge_config_rules(configuration, spec):
# The keyword 'all' is always evaluated first, all the others are
# evaluated in order of appearance in the module file
- spec_configuration = module_specific_configuration.pop('all', {})
+ spec_configuration = module_specific_configuration.pop("all", {})
for constraint, action in module_specific_configuration.items():
if spec.satisfies(constraint, strict=True):
- if hasattr(constraint, 'override') and constraint.override:
+ if hasattr(constraint, "override") and constraint.override:
spec_configuration = {}
update_dictionary_extending_lists(spec_configuration, action)
# Transform keywords for dependencies or prerequisites into a list of spec
# Which modulefiles we want to autoload
- autoload_strategy = spec_configuration.get('autoload', 'direct')
- spec_configuration['autoload'] = dependencies(spec, autoload_strategy)
+ autoload_strategy = spec_configuration.get("autoload", "direct")
+ spec_configuration["autoload"] = dependencies(spec, autoload_strategy)
# Which instead we want to mark as prerequisites
- prerequisite_strategy = spec_configuration.get('prerequisites', 'none')
- spec_configuration['prerequisites'] = dependencies(
- spec, prerequisite_strategy)
+ prerequisite_strategy = spec_configuration.get("prerequisites", "none")
+ spec_configuration["prerequisites"] = dependencies(spec, prerequisite_strategy)
# Attach options that are spec-independent to the spec-specific
# configuration
# Hash length in module files
- hash_length = module_specific_configuration.get('hash_length', 7)
- spec_configuration['hash_length'] = hash_length
+ hash_length = module_specific_configuration.get("hash_length", 7)
+ spec_configuration["hash_length"] = hash_length
- verbose = module_specific_configuration.get('verbose', False)
- spec_configuration['verbose'] = verbose
+ verbose = module_specific_configuration.get("verbose", False)
+ spec_configuration["verbose"] = verbose
# module defaults per-package
- defaults = module_specific_configuration.get('defaults', [])
- spec_configuration['defaults'] = defaults
+ defaults = module_specific_configuration.get("defaults", [])
+ spec_configuration["defaults"] = defaults
return spec_configuration
@@ -251,11 +247,11 @@ def root_path(name, module_set_name):
root folder for module file installation
"""
defaults = {
- 'lmod': '$spack/share/spack/lmod',
- 'tcl': '$spack/share/spack/modules',
+ "lmod": "$spack/share/spack/lmod",
+ "tcl": "$spack/share/spack/modules",
}
# Root folders where the various module files should be written
- roots = spack.config.get('modules:%s:roots' % module_set_name, {})
+ roots = spack.config.get("modules:%s:roots" % module_set_name, {})
# Merge config values into the defaults so we prefer configured values
roots = spack.config.merge_yaml(defaults, roots)
@@ -265,23 +261,20 @@ def root_path(name, module_set_name):
def generate_module_index(root, modules, overwrite=False):
- index_path = os.path.join(root, 'module-index.yaml')
+ index_path = os.path.join(root, "module-index.yaml")
if overwrite or not os.path.exists(index_path):
entries = syaml.syaml_dict()
else:
with open(index_path) as index_file:
yaml_content = syaml.load(index_file)
- entries = yaml_content['module_index']
+ entries = yaml_content["module_index"]
for m in modules:
- entry = {
- 'path': m.layout.filename,
- 'use_name': m.layout.use_name
- }
+ entry = {"path": m.layout.filename, "use_name": m.layout.use_name}
entries[m.spec.dag_hash()] = entry
- index = {'module_index': entries}
+ index = {"module_index": entries}
llnl.util.filesystem.mkdirp(root)
- with open(index_path, 'w') as index_file:
+ with open(index_path, "w") as index_file:
syaml.dump(index, default_flow_style=False, stream=index_file)
@@ -291,45 +284,42 @@ def _generate_upstream_module_index():
return UpstreamModuleIndex(spack.store.db, module_indices)
-upstream_module_index = llnl.util.lang.Singleton(
- _generate_upstream_module_index)
+upstream_module_index = llnl.util.lang.Singleton(_generate_upstream_module_index)
-ModuleIndexEntry = collections.namedtuple(
- 'ModuleIndexEntry', ['path', 'use_name'])
+ModuleIndexEntry = collections.namedtuple("ModuleIndexEntry", ["path", "use_name"])
def read_module_index(root):
- index_path = os.path.join(root, 'module-index.yaml')
+ index_path = os.path.join(root, "module-index.yaml")
if not os.path.exists(index_path):
return {}
- with open(index_path, 'r') as index_file:
+ with open(index_path, "r") as index_file:
return _read_module_index(index_file)
def _read_module_index(str_or_file):
"""Read in the mapping of spec hash to module location/name. For a given
- Spack installation there is assumed to be (at most) one such mapping
- per module type."""
+ Spack installation there is assumed to be (at most) one such mapping
+ per module type."""
yaml_content = syaml.load(str_or_file)
index = {}
- yaml_index = yaml_content['module_index']
+ yaml_index = yaml_content["module_index"]
for dag_hash, module_properties in yaml_index.items():
index[dag_hash] = ModuleIndexEntry(
- module_properties['path'],
- module_properties['use_name'])
+ module_properties["path"], module_properties["use_name"]
+ )
return index
def read_module_indices():
- other_spack_instances = spack.config.get(
- 'upstreams') or {}
+ other_spack_instances = spack.config.get("upstreams") or {}
module_indices = []
for install_properties in other_spack_instances.values():
module_type_to_index = {}
- module_type_to_root = install_properties.get('modules', {})
+ module_type_to_root = install_properties.get("modules", {})
for module_type, root in module_type_to_root.items():
module_type_to_index[module_type] = read_module_index(root)
module_indices.append(module_type_to_index)
@@ -339,8 +329,9 @@ def read_module_indices():
class UpstreamModuleIndex(object):
"""This is responsible for taking the individual module indices of all
- upstream Spack installations and locating the module for a given spec
- based on which upstream install it is located in."""
+ upstream Spack installations and locating the module for a given spec
+ based on which upstream install it is located in."""
+
def __init__(self, local_db, module_indices):
self.local_db = local_db
self.upstream_dbs = local_db.upstream_dbs
@@ -351,30 +342,25 @@ class UpstreamModuleIndex(object):
if db_for_spec in self.upstream_dbs:
db_index = self.upstream_dbs.index(db_for_spec)
elif db_for_spec:
- raise spack.error.SpackError(
- "Unexpected: {0} is installed locally".format(spec))
+ raise spack.error.SpackError("Unexpected: {0} is installed locally".format(spec))
else:
- raise spack.error.SpackError(
- "Unexpected: no install DB found for {0}".format(spec))
+ raise spack.error.SpackError("Unexpected: no install DB found for {0}".format(spec))
module_index = self.module_indices[db_index]
module_type_index = module_index.get(module_type, {})
if not module_type_index:
tty.debug(
"No {0} modules associated with the Spack instance where"
- " {1} is installed".format(module_type, spec))
+ " {1} is installed".format(module_type, spec)
+ )
return None
if spec.dag_hash() in module_type_index:
return module_type_index[spec.dag_hash()]
else:
- tty.debug(
- "No module is available for upstream package {0}".format(spec))
+ tty.debug("No module is available for upstream package {0}".format(spec))
return None
-def get_module(
- module_type, spec, get_full_path,
- module_set_name='default', required=True
-):
+def get_module(module_type, spec, get_full_path, module_set_name="default", required=True):
"""Retrieve the module file for a given spec and module type.
Retrieve the module file for the given spec if it is available. If the
@@ -402,8 +388,7 @@ def get_module(
except spack.repo.UnknownPackageError:
upstream, record = spack.store.db.query_by_spec_hash(spec.dag_hash())
if upstream:
- module = (spack.modules.common.upstream_module_index
- .upstream_module(spec, module_type))
+ module = spack.modules.common.upstream_module_index.upstream_module(spec, module_type)
if not module:
return None
@@ -420,8 +405,7 @@ def get_module(
)
raise ModuleNotFoundError(err_msg)
elif required:
- tty.debug("The module configuration has excluded {0}: "
- "omitting it".format(spec))
+ tty.debug("The module configuration has excluded {0}: " "omitting it".format(spec))
else:
return None
@@ -436,8 +420,7 @@ class BaseConfiguration(object):
querying easier. It needs to be sub-classed for specific module types.
"""
- default_projections = {
- 'all': '{name}-{version}-{compiler.name}-{compiler.version}'}
+ default_projections = {"all": "{name}-{version}-{compiler.name}-{compiler.version}"}
def __init__(self, spec, module_set_name):
# Module where type(self) is defined
@@ -447,23 +430,22 @@ class BaseConfiguration(object):
self.name = module_set_name
# Dictionary of configuration options that should be applied
# to the spec
- self.conf = merge_config_rules(
- self.module.configuration(self.name), self.spec)
+ self.conf = merge_config_rules(self.module.configuration(self.name), self.spec)
@property
def projections(self):
"""Projection from specs to module names"""
         # backwards compatibility for naming_scheme key
conf = self.module.configuration(self.name)
- if 'naming_scheme' in conf:
- default = {'all': conf['naming_scheme']}
+ if "naming_scheme" in conf:
+ default = {"all": conf["naming_scheme"]}
else:
default = self.default_projections
- projections = conf.get('projections', default)
+ projections = conf.get("projections", default)
# Ensure the named tokens we are expanding are allowed, see
# issue #2884 for reference
- msg = 'some tokens cannot be part of the module naming scheme'
+ msg = "some tokens cannot be part of the module naming scheme"
for projection in projections.values():
_check_tokens_are_valid(projection, message=msg)
@@ -474,19 +456,19 @@ class BaseConfiguration(object):
"""Returns the name of the template to use for the module file
or None if not specified in the configuration.
"""
- return self.conf.get('template', None)
+ return self.conf.get("template", None)
@property
def defaults(self):
"""Returns the specs configured as defaults or []."""
- return self.conf.get('defaults', [])
+ return self.conf.get("defaults", [])
@property
def env(self):
"""List of environment modifications that should be done in the
module.
"""
- return spack.schema.environment.parse(self.conf.get('environment', {}))
+ return spack.schema.environment.parse(self.conf.get("environment", {}))
@property
def suffixes(self):
@@ -494,7 +476,7 @@ class BaseConfiguration(object):
file name.
"""
suffixes = []
- for constraint, suffix in self.conf.get('suffixes', {}).items():
+ for constraint, suffix in self.conf.get("suffixes", {}).items():
if constraint in self.spec:
suffixes.append(suffix)
suffixes = list(dedupe(suffixes))
@@ -505,7 +487,7 @@ class BaseConfiguration(object):
@property
def hash(self):
"""Hash tag for the module or None"""
- hash_length = self.conf.get('hash_length', 7)
+ hash_length = self.conf.get("hash_length", 7)
if hash_length != 0:
return self.spec.dag_hash(length=hash_length)
return None
@@ -530,24 +512,22 @@ class BaseConfiguration(object):
# Should I exclude the module because it's implicit?
# DEPRECATED: remove 'blacklist_implicits' in v0.20
- exclude_implicits = get_deprecated(
- conf, "exclude_implicits", "blacklist_implicits", None
- )
+ exclude_implicits = get_deprecated(conf, "exclude_implicits", "blacklist_implicits", None)
installed_implicitly = not spec._installed_explicitly()
excluded_as_implicit = exclude_implicits and installed_implicitly
def debug_info(line_header, match_list):
if match_list:
- msg = '\t{0} : {1}'.format(line_header, spec.cshort_spec)
+ msg = "\t{0} : {1}".format(line_header, spec.cshort_spec)
tty.debug(msg)
for rule in match_list:
- tty.debug('\t\tmatches rule: {0}'.format(rule))
+ tty.debug("\t\tmatches rule: {0}".format(rule))
- debug_info('INCLUDE', include_matches)
- debug_info('EXCLUDE', exclude_matches)
+ debug_info("INCLUDE", include_matches)
+ debug_info("EXCLUDE", exclude_matches)
if excluded_as_implicit:
- msg = '\tEXCLUDED_AS_IMPLICIT : {0}'.format(spec.cshort_spec)
+ msg = "\tEXCLUDED_AS_IMPLICIT : {0}".format(spec.cshort_spec)
tty.debug(msg)
is_excluded = exclude_matches or excluded_as_implicit
@@ -558,32 +538,30 @@ class BaseConfiguration(object):
@property
def context(self):
- return self.conf.get('context', {})
+ return self.conf.get("context", {})
@property
def specs_to_load(self):
"""List of specs that should be loaded in the module file."""
- return self._create_list_for('autoload')
+ return self._create_list_for("autoload")
@property
def literals_to_load(self):
"""List of literal modules to be loaded."""
- return self.conf.get('load', [])
+ return self.conf.get("load", [])
@property
def specs_to_prereq(self):
"""List of specs that should be prerequisite of the module file."""
- return self._create_list_for('prerequisites')
+ return self._create_list_for("prerequisites")
@property
def exclude_env_vars(self):
"""List of variables that should be left unmodified."""
- filter = self.conf.get('filter', {})
+ filter = self.conf.get("filter", {})
# DEPRECATED: remove in v0.20
- return get_deprecated(
- filter, "exclude_env_vars", "environment_blacklist", {}
- )
+ return get_deprecated(filter, "exclude_env_vars", "environment_blacklist", {})
def _create_list_for(self, what):
include = []
@@ -598,7 +576,7 @@ class BaseConfiguration(object):
"""Returns True if the module file needs to be verbose, False
otherwise
"""
- return self.conf.get('verbose')
+ return self.conf.get("verbose")
class BaseFileLayout(object):
@@ -619,7 +597,7 @@ class BaseFileLayout(object):
def dirname(self):
"""Root folder for module files of this type."""
- module_system = str(self.conf.module.__name__).split('.')[-1]
+ module_system = str(self.conf.module.__name__).split(".")[-1]
return root_path(module_system, self.conf.name)
@property
@@ -630,15 +608,15 @@ class BaseFileLayout(object):
"""
projection = proj.get_projection(self.conf.projections, self.spec)
if not projection:
- projection = self.conf.default_projections['all']
+ projection = self.conf.default_projections["all"]
name = self.spec.format(projection)
# Not everybody is working on linux...
- parts = name.split('/')
+ parts = name.split("/")
name = os.path.join(*parts)
# Add optional suffixes based on constraints
path_elements = [name] + self.conf.suffixes
- return '-'.join(path_elements)
+ return "-".join(path_elements)
@property
def filename(self):
@@ -646,10 +624,9 @@ class BaseFileLayout(object):
# Just the name of the file
filename = self.use_name
if self.extension:
- filename = '{0}.{1}'.format(self.use_name, self.extension)
+ filename = "{0}.{1}".format(self.use_name, self.extension)
# Architecture sub-folder
- arch_folder_conf = spack.config.get(
- 'modules:%s:arch_folder' % self.conf.name, True)
+ arch_folder_conf = spack.config.get("modules:%s:arch_folder" % self.conf.name, True)
if arch_folder_conf:
# include an arch specific folder between root and filename
arch_folder = str(self.spec.architecture)
@@ -681,16 +658,16 @@ class BaseContext(tengine.Context):
@tengine.context_property
def category(self):
- return getattr(self.spec, 'category', 'spack')
+ return getattr(self.spec, "category", "spack")
@tengine.context_property
def short_description(self):
# If we have a valid docstring return the first paragraph.
docstring = type(self.spec.package).__doc__
if docstring:
- value = docstring.split('\n\n')[0]
+ value = docstring.split("\n\n")[0]
# Transform tabs and friends into spaces
- value = re.sub(r'\s+', ' ', value)
+ value = re.sub(r"\s+", " ", value)
# Turn double quotes into single quotes (double quotes are needed
# to start and end strings)
value = re.sub(r'"', "'", value)
@@ -702,7 +679,7 @@ class BaseContext(tengine.Context):
def long_description(self):
# long description is the docstring with reduced whitespace.
if self.spec.package.__doc__:
- return re.sub(r'\s+', ' ', self.spec.package.__doc__)
+ return re.sub(r"\s+", " ", self.spec.package.__doc__)
return None
@tengine.context_property
@@ -711,11 +688,11 @@ class BaseContext(tengine.Context):
# If the spec is external Spack doesn't know its configure options
if self.spec.external:
- msg = 'unknown, software installed outside of Spack'
+ msg = "unknown, software installed outside of Spack"
return msg
if os.path.exists(pkg.install_configure_args_path):
- with open(pkg.install_configure_args_path, 'r') as args_file:
+ with open(pkg.install_configure_args_path, "r") as args_file:
return args_file.read()
# Returning a false-like value makes the default templates skip
@@ -727,13 +704,15 @@ class BaseContext(tengine.Context):
"""List of environment modifications to be processed."""
# Modifications guessed by inspecting the spec prefix
prefix_inspections = syaml.syaml_dict()
- spack.config.merge_yaml(prefix_inspections, spack.config.get(
- 'modules:prefix_inspections', {}))
- spack.config.merge_yaml(prefix_inspections, spack.config.get(
- 'modules:%s:prefix_inspections' % self.conf.name, {}))
+ spack.config.merge_yaml(
+ prefix_inspections, spack.config.get("modules:prefix_inspections", {})
+ )
+ spack.config.merge_yaml(
+ prefix_inspections,
+ spack.config.get("modules:%s:prefix_inspections" % self.conf.name, {}),
+ )
- use_view = spack.config.get(
- 'modules:%s:use_view' % self.conf.name, False)
+ use_view = spack.config.get("modules:%s:use_view" % self.conf.name, False)
spec = self.spec.copy() # defensive copy before setting prefix
if use_view:
@@ -751,18 +730,12 @@ class BaseContext(tengine.Context):
spec.prefix = view.get_projection_for_spec(spec)
env = spack.util.environment.inspect_path(
- spec.prefix,
- prefix_inspections,
- exclude=spack.util.environment.is_system_path
+ spec.prefix, prefix_inspections, exclude=spack.util.environment.is_system_path
)
# Let the extendee/dependency modify their extensions/dependencies
# before asking for package-specific modifications
- env.extend(
- spack.build_environment.modifications_from_dependencies(
- spec, context='run'
- )
- )
+ env.extend(spack.build_environment.modifications_from_dependencies(spec, context="run"))
# Package specific modifications
spack.build_environment.set_module_variables_for_package(spec.package)
spec.package.setup_run_environment(env)
@@ -784,7 +757,7 @@ class BaseContext(tengine.Context):
for x in env:
# Ensure all the tokens are valid in this context
- msg = 'some tokens cannot be expanded in an environment variable name'
+ msg = "some tokens cannot be expanded in an environment variable name"
_check_tokens_are_valid(x.name, message=msg)
# Transform them
x.name = spec.format(x.name, transform=transform)
@@ -793,7 +766,7 @@ class BaseContext(tengine.Context):
x.value = spec.format(x.value)
except AttributeError:
pass
- x.name = str(x.name).replace('-', '_')
+ x.name = str(x.name).replace("-", "_")
return [(type(x).__name__, x) for x in env if x.name not in exclude]
@@ -801,7 +774,7 @@ class BaseContext(tengine.Context):
def autoload(self):
"""List of modules that needs to be loaded automatically."""
# From 'autoload' configuration option
- specs = self._create_module_list_of('specs_to_load')
+ specs = self._create_module_list_of("specs_to_load")
# From 'load' configuration option
literals = self.conf.literals_to_load
return specs + literals
@@ -809,8 +782,7 @@ class BaseContext(tengine.Context):
def _create_module_list_of(self, what):
m = self.conf.module
name = self.conf.name
- return [m.make_layout(x, name).use_name
- for x in getattr(self.conf, what)]
+ return [m.make_layout(x, name).use_name for x in getattr(self.conf, what)]
@tengine.context_property
def verbose(self):
@@ -837,8 +809,8 @@ class BaseModuleFileWriter(object):
try:
self.default_template
except AttributeError:
- msg = '\'{0}\' object has no attribute \'default_template\'\n'
- msg += 'Did you forget to define it in the class?'
+ msg = "'{0}' object has no attribute 'default_template'\n"
+ msg += "Did you forget to define it in the class?"
name = type(self).__name__
raise DefaultTemplateNotDefined(msg.format(name))
@@ -848,12 +820,12 @@ class BaseModuleFileWriter(object):
# 1. template specified in "modules.yaml"
# 2. template specified in a package directly
# 3. default template (must be defined, check in __init__)
- module_system_name = str(self.module.__name__).split('.')[-1]
- package_attribute = '{0}_template'.format(module_system_name)
+ module_system_name = str(self.module.__name__).split(".")[-1]
+ package_attribute = "{0}_template".format(module_system_name)
choices = [
self.conf.template,
getattr(self.spec.package, package_attribute, None),
- self.default_template # This is always defined at this point
+ self.default_template, # This is always defined at this point
]
# Filter out false-ish values
choices = list(filter(lambda x: bool(x), choices))
@@ -870,19 +842,19 @@ class BaseModuleFileWriter(object):
"""
# Return immediately if the module is excluded
if self.conf.excluded:
- msg = '\tNOT WRITING: {0} [EXCLUDED]'
+ msg = "\tNOT WRITING: {0} [EXCLUDED]"
tty.debug(msg.format(self.spec.cshort_spec))
return
# Print a warning in case I am accidentally overwriting
# a module file that is already there (name clash)
if not overwrite and os.path.exists(self.layout.filename):
- message = 'Module file {0.filename} exists and will not be overwritten'
+ message = "Module file {0.filename} exists and will not be overwritten"
tty.warn(message.format(self.layout))
return
# If we are here it means it's ok to write the module file
- msg = '\tWRITE: {0} [{1}]'
+ msg = "\tWRITE: {0} [{1}]"
tty.debug(msg.format(self.spec.cshort_spec, self.layout.filename))
# If the directory where the module should reside does not exist
@@ -894,13 +866,14 @@ class BaseModuleFileWriter(object):
# Get the template for the module
template_name = self._get_template()
import jinja2
+
try:
env = tengine.make_environment()
template = env.get_template(template_name)
except jinja2.TemplateNotFound:
# If the template was not found raise an exception with a little
# more information
- msg = 'template \'{0}\' was not found for \'{1}\''
+ msg = "template '{0}' was not found for '{1}'"
name = type(self).__name__
msg = msg.format(template_name, name)
raise ModulesTemplateNotFoundError(msg)
@@ -913,8 +886,8 @@ class BaseModuleFileWriter(object):
context = self.context.to_dict()
# Attribute from package
- module_name = str(self.module.__name__).split('.')[-1]
- attr_name = '{0}_context'.format(module_name)
+ module_name = str(self.module.__name__).split(".")[-1]
+ attr_name = "{0}_context".format(module_name)
pkg_update = getattr(self.spec.package, attr_name, {})
context.update(pkg_update)
@@ -925,7 +898,7 @@ class BaseModuleFileWriter(object):
# Render the template
text = template.render(context)
# Write it to file
- with open(self.layout.filename, 'w') as f:
+ with open(self.layout.filename, "w") as f:
f.write(text)
# Set the file permissions of the module to match that of the package
@@ -940,10 +913,8 @@ class BaseModuleFileWriter(object):
# This spec matches a default, it needs to be symlinked to default
# Symlink to a tmp location first and move, so that existing
# symlinks do not cause an error.
- default_path = os.path.join(os.path.dirname(self.layout.filename),
- 'default')
- default_tmp = os.path.join(os.path.dirname(self.layout.filename),
- '.tmp_spack_default')
+ default_path = os.path.join(os.path.dirname(self.layout.filename), "default")
+ default_tmp = os.path.join(os.path.dirname(self.layout.filename), ".tmp_spack_default")
os.symlink(self.layout.filename, default_tmp)
os.rename(default_tmp, default_path)
@@ -964,14 +935,8 @@ class BaseModuleFileWriter(object):
@contextlib.contextmanager
def disable_modules():
"""Disable the generation of modulefiles within the context manager."""
- data = {
- 'modules:': {
- 'default': {
- 'enable': []
- }
- }
- }
- disable_scope = spack.config.InternalConfigScope('disable_modules', data=data)
+ data = {"modules:": {"default": {"enable": []}}}
+ disable_scope = spack.config.InternalConfigScope("disable_modules", data=data)
with spack.config.override(disable_scope):
yield
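
The disable_modules() hunk above collapses a nested override into a one-line dict and hands it to spack.config.override. A minimal standalone sketch of the same pattern, a context manager that pushes a temporary configuration scope and always pops it on exit, is shown below; the scope stack and helper functions are invented for the illustration and are not Spack's configuration API.

    import contextlib

    # Invented configuration stack for the sketch; the most recently pushed
    # scope wins. This is not Spack's InternalConfigScope machinery.
    _scopes = [{"modules": {"default": {"enable": ["tcl", "lmod"]}}}]

    def get_config(key, default=None):
        # Walk scopes from the newest down to the base one.
        for scope in reversed(_scopes):
            if key in scope:
                return scope[key]
        return default

    @contextlib.contextmanager
    def override(scope):
        # Push a temporary scope, hand control back, then always pop it.
        _scopes.append(scope)
        try:
            yield
        finally:
            _scopes.pop()

    @contextlib.contextmanager
    def disable_modules():
        # Mirrors the hunk above: an empty 'enable' list disables generation.
        with override({"modules": {"default": {"enable": []}}}):
            yield

    with disable_modules():
        assert get_config("modules")["default"]["enable"] == []
    assert get_config("modules")["default"]["enable"] == ["tcl", "lmod"]
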
diff --git a/lib/spack/spack/modules/lmod.py b/lib/spack/spack/modules/lmod.py
index a64c05e880..54add9563d 100644
--- a/lib/spack/spack/modules/lmod.py
+++ b/lib/spack/spack/modules/lmod.py
@@ -23,11 +23,11 @@ from .common import BaseConfiguration, BaseContext, BaseFileLayout, BaseModuleFi
#: lmod specific part of the configuration
def configuration(module_set_name):
- config_path = 'modules:%s:lmod' % module_set_name
+ config_path = "modules:%s:lmod" % module_set_name
config = spack.config.get(config_path, {})
- if not config and module_set_name == 'default':
+ if not config and module_set_name == "default":
# return old format for backward compatibility
- return spack.config.get('modules:lmod', {})
+ return spack.config.get("modules:lmod", {})
return config
@@ -41,12 +41,11 @@ def make_configuration(spec, module_set_name):
try:
return configuration_registry[key]
except KeyError:
- return configuration_registry.setdefault(
- key, LmodConfiguration(spec, module_set_name))
+ return configuration_registry.setdefault(key, LmodConfiguration(spec, module_set_name))
def make_layout(spec, module_set_name):
- """Returns the layout information for spec """
+ """Returns the layout information for spec"""
conf = make_configuration(spec, module_set_name)
return LmodFileLayout(conf)
@@ -70,15 +69,16 @@ def guess_core_compilers(name, store=False):
core_compilers = []
for compiler_config in spack.compilers.all_compilers_config():
try:
- compiler = compiler_config['compiler']
+ compiler = compiler_config["compiler"]
# A compiler is considered to be a core compiler if any of the
# C, C++ or Fortran compilers reside in a system directory
is_system_compiler = any(
os.path.dirname(x) in spack.util.environment.system_dirs
- for x in compiler['paths'].values() if x is not None
+ for x in compiler["paths"].values()
+ if x is not None
)
if is_system_compiler:
- core_compilers.append(str(compiler['spec']))
+ core_compilers.append(str(compiler["spec"]))
except (KeyError, TypeError, AttributeError):
continue
@@ -87,23 +87,21 @@ def guess_core_compilers(name, store=False):
# in the default modify scope (i.e. within the directory hierarchy
# of Spack itself)
modules_cfg = spack.config.get(
- 'modules:' + name, {}, scope=spack.config.default_modify_scope()
- )
- modules_cfg.setdefault('lmod', {})['core_compilers'] = core_compilers
- spack.config.set(
- 'modules:' + name, modules_cfg,
- scope=spack.config.default_modify_scope()
+ "modules:" + name, {}, scope=spack.config.default_modify_scope()
)
+ modules_cfg.setdefault("lmod", {})["core_compilers"] = core_compilers
+ spack.config.set("modules:" + name, modules_cfg, scope=spack.config.default_modify_scope())
return core_compilers or None
class LmodConfiguration(BaseConfiguration):
"""Configuration class for lmod module files."""
+
# Note: Posixpath is used here as well as below as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
- default_projections = {'all': posixpath.join('{name}', '{version}')}
+ default_projections = {"all": posixpath.join("{name}", "{version}")}
@property
def core_compilers(self):
@@ -114,9 +112,9 @@ class LmodConfiguration(BaseConfiguration):
specified in the configuration file or the sequence
is empty
"""
- value = configuration(self.name).get(
- 'core_compilers'
- ) or guess_core_compilers(self.name, store=True)
+ value = configuration(self.name).get("core_compilers") or guess_core_compilers(
+ self.name, store=True
+ )
if not value:
msg = 'the key "core_compilers" must be set in modules.yaml'
@@ -126,28 +124,26 @@ class LmodConfiguration(BaseConfiguration):
@property
def core_specs(self):
"""Returns the list of "Core" specs"""
- return configuration(self.name).get('core_specs', [])
+ return configuration(self.name).get("core_specs", [])
@property
def hierarchy_tokens(self):
"""Returns the list of tokens that are part of the modulefile
hierarchy. 'compiler' is always present.
"""
- tokens = configuration(self.name).get('hierarchy', [])
+ tokens = configuration(self.name).get("hierarchy", [])
# Check if all the tokens in the hierarchy are virtual specs.
# If not warn the user and raise an error.
- not_virtual = [t for t in tokens
- if t != 'compiler' and
- not spack.repo.path.is_virtual(t)]
+ not_virtual = [t for t in tokens if t != "compiler" and not spack.repo.path.is_virtual(t)]
if not_virtual:
msg = "Non-virtual specs in 'hierarchy' list for lmod: {0}\n"
msg += "Please check the 'modules.yaml' configuration files"
- msg = msg.format(', '.join(not_virtual))
+ msg = msg.format(", ".join(not_virtual))
raise NonVirtualInHierarchyError(msg)
# Append 'compiler' which is always implied
- tokens.append('compiler')
+ tokens.append("compiler")
# Deduplicate tokens in case duplicates have been coded
tokens = list(lang.dedupe(tokens))
@@ -161,13 +157,12 @@ class LmodConfiguration(BaseConfiguration):
requirements.
"""
# If it's a core_spec, lie and say it requires a core compiler
- if any(self.spec.satisfies(core_spec)
- for core_spec in self.core_specs):
- return {'compiler': self.core_compilers[0]}
+ if any(self.spec.satisfies(core_spec) for core_spec in self.core_specs):
+ return {"compiler": self.core_compilers[0]}
# Keep track of the requirements that this package has in terms
# of virtual packages that participate in the hierarchical structure
- requirements = {'compiler': self.spec.compiler}
+ requirements = {"compiler": self.spec.compiler}
# For each virtual dependency in the hierarchy
for x in self.hierarchy_tokens:
# If I depend on it
@@ -187,23 +182,23 @@ class LmodConfiguration(BaseConfiguration):
# If it is in the list of supported compilers family -> compiler
if self.spec.name in spack.compilers.supported_compilers():
- provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+ provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
# Special case for llvm
- if self.spec.name == 'llvm':
- provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
- provides['compiler'].name = 'clang'
+ if self.spec.name == "llvm":
+ provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
+ provides["compiler"].name = "clang"
# Special case for llvm-amdgpu
- if self.spec.name == 'llvm-amdgpu':
- provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
- provides['compiler'].name = 'rocmcc'
+ if self.spec.name == "llvm-amdgpu":
+ provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
+ provides["compiler"].name = "rocmcc"
# Special case for oneapi
- if self.spec.name == 'intel-oneapi-compilers':
- provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
- provides['compiler'].name = 'oneapi'
+ if self.spec.name == "intel-oneapi-compilers":
+ provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
+ provides["compiler"].name = "oneapi"
# Special case for oneapi classic
- if self.spec.name == 'intel-oneapi-compilers-classic':
- provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
- provides['compiler'].name = 'intel'
+ if self.spec.name == "intel-oneapi-compilers-classic":
+ provides["compiler"] = spack.spec.CompilerSpec(str(self.spec))
+ provides["compiler"].name = "intel"
# All the other tokens in the hierarchy must be virtual dependencies
for x in self.hierarchy_tokens:
@@ -233,21 +228,18 @@ class LmodFileLayout(BaseFileLayout):
"""File layout for lmod module files."""
#: file extension of lua module files
- extension = 'lua'
+ extension = "lua"
@property
def arch_dirname(self):
"""Returns the root folder for THIS architecture"""
# Architecture sub-folder
- arch_folder_conf = spack.config.get(
- 'modules:%s:arch_folder' % self.conf.name, True)
+ arch_folder_conf = spack.config.get("modules:%s:arch_folder" % self.conf.name, True)
if arch_folder_conf:
# include an arch specific folder between root and filename
- arch_folder = '-'.join([
- str(self.spec.platform),
- str(self.spec.os),
- str(self.spec.target.family)
- ])
+ arch_folder = "-".join(
+ [str(self.spec.platform), str(self.spec.os), str(self.spec.target.family)]
+ )
return os.path.join(self.dirname(), arch_folder)
return self.dirname()
@@ -269,7 +261,7 @@ class LmodFileLayout(BaseFileLayout):
fullname = os.path.join(
self.arch_dirname, # root for lmod files on this architecture
hierarchy_name, # relative path
- '.'.join([self.use_name, self.extension]) # file name
+ ".".join([self.use_name, self.extension]), # file name
)
return fullname
@@ -284,16 +276,16 @@ class LmodFileLayout(BaseFileLayout):
str: part of the path associated with the service
"""
# General format for the path part
- path_part_fmt = os.path.join('{token.name}', '{token.version}')
+ path_part_fmt = os.path.join("{token.name}", "{token.version}")
# If we are dealing with a core compiler, return 'Core'
core_compilers = self.conf.core_compilers
- if name == 'compiler' and str(value) in core_compilers:
- return 'Core'
+ if name == "compiler" and str(value) in core_compilers:
+ return "Core"
# CompilerSpec does not have a hash, as we are not allowed to
# use different flavors of the same compiler
- if name == 'compiler':
+ if name == "compiler":
return path_part_fmt.format(token=value)
# In case the hierarchy token refers to a virtual provider
@@ -301,7 +293,7 @@ class LmodFileLayout(BaseFileLayout):
# among flavors of the same library (e.g. openblas~openmp vs.
# openblas+openmp)
path = path_part_fmt.format(token=value)
- path = '-'.join([path, value.dag_hash(length=7)])
+ path = "-".join([path, value.dag_hash(length=7)])
return path
@property
@@ -314,8 +306,7 @@ class LmodFileLayout(BaseFileLayout):
# List of services that are part of the hierarchy
hierarchy = self.conf.hierarchy_tokens
# Tokenize each part that is both in the hierarchy and available
- parts = [self.token_to_path(x, available[x])
- for x in hierarchy if x in available]
+ parts = [self.token_to_path(x, available[x]) for x in hierarchy if x in available]
return parts
@property
@@ -336,8 +327,8 @@ class LmodFileLayout(BaseFileLayout):
# A compiler is always required. To avoid duplication pop the
# 'compiler' item from required if we also **provide** one
- if 'compiler' in provides_key:
- requires_key.remove('compiler')
+ if "compiler" in provides_key:
+ requires_key.remove("compiler")
# Compute the unique combinations of the services we provide
combinations = []
@@ -404,7 +395,7 @@ class LmodContext(BaseContext):
# In general we have conditional modifications if we have modifications
# and we are not providing **only** a compiler
provides = self.conf.provides
- provide_compiler_only = 'compiler' in provides and len(provides) == 1
+ provide_compiler_only = "compiler" in provides and len(provides) == 1
has_modifications = self.has_modulepath_modifications
return has_modifications and not provide_compiler_only
@@ -417,7 +408,7 @@ class LmodContext(BaseContext):
def version_part(self):
"""Version of this provider."""
s = self.spec
- return '-'.join([str(s.version), s.dag_hash(length=7)])
+ return "-".join([str(s.version), s.dag_hash(length=7)])
@tengine.context_property
def provides(self):
@@ -445,15 +436,15 @@ class LmodContext(BaseContext):
conditional_paths = layout.unlocked_paths
conditional_paths.pop(None)
for services_needed, list_of_path_parts in conditional_paths.items():
- condition = ' and '.join([x + '_name' for x in services_needed])
+ condition = " and ".join([x + "_name" for x in services_needed])
for parts in list_of_path_parts:
def manipulate_path(token):
if token in self.conf.hierarchy_tokens:
- return '{0}_name, {0}_version'.format(token)
+ return "{0}_name, {0}_version".format(token)
return '"' + token + '"'
- path = ', '.join([manipulate_path(x) for x in parts])
+ path = ", ".join([manipulate_path(x) for x in parts])
value.append((condition, path))
return value
@@ -461,7 +452,8 @@ class LmodContext(BaseContext):
class LmodModulefileWriter(BaseModuleFileWriter):
"""Writer class for lmod module files."""
- default_template = posixpath.join('modules', 'modulefile.lua')
+
+ default_template = posixpath.join("modules", "modulefile.lua")
class CoreCompilersNotFoundError(spack.error.SpackError, KeyError):
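
Several hunks above rework how hierarchy tokens are turned into path components (token_to_path): core compilers collapse to 'Core', compilers carry no hash, and virtual providers keep a short DAG hash. A rough standalone sketch of that mapping follows; the Token tuple and the sample core-compiler set are assumptions made for the example, not Spack objects.

    import os
    from collections import namedtuple

    # Hypothetical stand-in for a spec-like token; real tokens are Spec objects.
    Token = namedtuple("Token", ["name", "version", "hash7"])

    CORE_COMPILERS = {"gcc@4.8.5"}  # assumed for the example

    def token_to_path(name, token):
        # Core compilers collapse into a single 'Core' directory.
        if name == "compiler" and "{0}@{1}".format(token.name, token.version) in CORE_COMPILERS:
            return "Core"
        # Compilers carry no hash: only one flavor per compiler spec is allowed.
        if name == "compiler":
            return os.path.join(token.name, token.version)
        # Virtual providers keep a short hash to distinguish flavors.
        return os.path.join(token.name, "{0}-{1}".format(token.version, token.hash7))

    print(token_to_path("compiler", Token("gcc", "4.8.5", None)))      # Core
    print(token_to_path("compiler", Token("gcc", "12.2.0", None)))     # gcc/12.2.0
    print(token_to_path("mpi", Token("openmpi", "4.1.4", "abcdefg")))  # openmpi/4.1.4-abcdefg
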
diff --git a/lib/spack/spack/modules/tcl.py b/lib/spack/spack/modules/tcl.py
index 6762616337..f49f6209ef 100644
--- a/lib/spack/spack/modules/tcl.py
+++ b/lib/spack/spack/modules/tcl.py
@@ -21,11 +21,11 @@ from .common import BaseConfiguration, BaseContext, BaseFileLayout, BaseModuleFi
#: TCL specific part of the configuration
def configuration(module_set_name):
- config_path = 'modules:%s:tcl' % module_set_name
+ config_path = "modules:%s:tcl" % module_set_name
config = spack.config.get(config_path, {})
- if not config and module_set_name == 'default':
+ if not config and module_set_name == "default":
# return old format for backward compatibility
- return spack.config.get('modules:tcl', {})
+ return spack.config.get("modules:tcl", {})
return config
@@ -39,12 +39,11 @@ def make_configuration(spec, module_set_name):
try:
return configuration_registry[key]
except KeyError:
- return configuration_registry.setdefault(
- key, TclConfiguration(spec, module_set_name))
+ return configuration_registry.setdefault(key, TclConfiguration(spec, module_set_name))
def make_layout(spec, module_set_name):
- """Returns the layout information for spec """
+ """Returns the layout information for spec"""
conf = make_configuration(spec, module_set_name)
return TclFileLayout(conf)
@@ -61,7 +60,7 @@ class TclConfiguration(BaseConfiguration):
@property
def conflicts(self):
"""Conflicts for this module file"""
- return self.conf.get('conflict', [])
+ return self.conf.get("conflict", [])
class TclFileLayout(BaseFileLayout):
@@ -74,7 +73,7 @@ class TclContext(BaseContext):
@tengine.context_property
def prerequisites(self):
"""List of modules that needs to be loaded automatically."""
- return self._create_module_list_of('specs_to_prereq')
+ return self._create_module_list_of("specs_to_prereq")
@tengine.context_property
def conflicts(self):
@@ -84,20 +83,16 @@ class TclContext(BaseContext):
f = string.Formatter()
for item in self.conf.conflicts:
if len([x for x in f.parse(item)]) > 1:
- for naming_dir, conflict_dir in zip(
- projection.split('/'), item.split('/')
- ):
+ for naming_dir, conflict_dir in zip(projection.split("/"), item.split("/")):
if naming_dir != conflict_dir:
- message = 'conflict scheme does not match naming '
- message += 'scheme [{spec}]\n\n'
+ message = "conflict scheme does not match naming "
+ message += "scheme [{spec}]\n\n"
message += 'naming scheme : "{nformat}"\n'
message += 'conflict scheme : "{cformat}"\n\n'
- message += '** You may want to check your '
- message += '`modules.yaml` configuration file **\n'
- tty.error(message.format(spec=self.spec,
- nformat=projection,
- cformat=item))
- raise SystemExit('Module generation aborted.')
+ message += "** You may want to check your "
+ message += "`modules.yaml` configuration file **\n"
+ tty.error(message.format(spec=self.spec, nformat=projection, cformat=item))
+ raise SystemExit("Module generation aborted.")
item = self.spec.format(item)
fmts.append(item)
# Substitute spec tokens if present
@@ -106,7 +101,8 @@ class TclContext(BaseContext):
class TclModulefileWriter(BaseModuleFileWriter):
"""Writer class for tcl module files."""
+
# Note: Posixpath is used here as opposed to
# os.path.join due to spack.spec.Spec.format
# requiring forward slash path separators at this stage
- default_template = posixpath.join('modules', 'modulefile.tcl')
+ default_template = posixpath.join("modules", "modulefile.tcl")
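
The TclContext.conflicts hunk above checks, segment by segment, that a multi-token conflict scheme follows the naming scheme before formatting it. A small sketch of that check, using string.Formatter the same way, is below; the sample projection and conflict strings are made up for the illustration.

    import string

    projection = "{name}/{version}"  # assumed naming scheme for the example

    def conflict_matches_naming(conflict, projection):
        # Conflicts with a single format field are always acceptable.
        if len([f for f in string.Formatter().parse(conflict)]) <= 1:
            return True
        # Otherwise every path segment must line up with the naming scheme.
        return all(
            naming_dir == conflict_dir
            for naming_dir, conflict_dir in zip(projection.split("/"), conflict.split("/"))
        )

    print(conflict_matches_naming("{name}/{version}", projection))  # True
    print(conflict_matches_naming("{name}", projection))            # True
    print(conflict_matches_naming("{name}-{version}", projection))  # False
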
diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py
index 437f4d8ab6..2c6a56db1a 100644
--- a/lib/spack/spack/multimethod.py
+++ b/lib/spack/spack/multimethod.py
@@ -54,33 +54,33 @@ class MultiMethodMeta(type):
class SpecMultiMethod(object):
"""This implements a multi-method for Spack specs. Packages are
- instantiated with a particular spec, and you may want to
- execute different versions of methods based on what the spec
- looks like. For example, you might want to call a different
- version of install() for one platform than you call on another.
+ instantiated with a particular spec, and you may want to
+ execute different versions of methods based on what the spec
+ looks like. For example, you might want to call a different
+ version of install() for one platform than you call on another.
- The SpecMultiMethod class implements a callable object that
- handles method dispatch. When it is called, it looks through
- registered methods and their associated specs, and it tries
- to find one that matches the package's spec. If it finds one
- (and only one), it will call that method.
+ The SpecMultiMethod class implements a callable object that
+ handles method dispatch. When it is called, it looks through
+ registered methods and their associated specs, and it tries
+ to find one that matches the package's spec. If it finds one
+ (and only one), it will call that method.
- This is intended for use with decorators (see below). The
- decorator (see docs below) creates SpecMultiMethods and
- registers method versions with them.
+ This is intended for use with decorators (see below). The
+ decorator (see docs below) creates SpecMultiMethods and
+ registers method versions with them.
- To register a method, you can do something like this:
- mm = SpecMultiMethod()
- mm.register("^chaos_5_x86_64_ib", some_method)
+ To register a method, you can do something like this:
+ mm = SpecMultiMethod()
+ mm.register("^chaos_5_x86_64_ib", some_method)
- The object registered needs to be a Spec or some string that
- will parse to be a valid spec.
+ The object registered needs to be a Spec or some string that
+ will parse to be a valid spec.
- When the mm is actually called, it selects a version of the
- method to call based on the sys_type of the object it is
- called on.
+ When the mm is actually called, it selects a version of the
+ method to call based on the sys_type of the object it is
+ called on.
- See the docs for decorators below for more details.
+ See the docs for decorators below for more details.
"""
def __init__(self, default=None):
@@ -93,10 +93,10 @@ class SpecMultiMethod(object):
"""Register a version of a method for a particular spec."""
self.method_list.append((spec, method))
- if not hasattr(self, '__name__'):
+ if not hasattr(self, "__name__"):
functools.update_wrapper(self, method)
else:
- assert(self.__name__ == method.__name__)
+ assert self.__name__ == method.__name__
def __get__(self, obj, objtype):
"""This makes __call__ support instance methods."""
@@ -108,14 +108,12 @@ class SpecMultiMethod(object):
# Call functools.wraps manually to get all the attributes
# we need to be disguised as the wrapped_method
- func = functools.wraps(wrapped_method)(
- functools.partial(self.__call__, obj)
- )
+ func = functools.wraps(wrapped_method)(functools.partial(self.__call__, obj))
return func
def _get_method_by_spec(self, spec):
"""Find the method of this SpecMultiMethod object that satisfies the
- given spec, if one exists
+ given spec, if one exists
"""
for condition, method in self.method_list:
if spec.satisfies(condition):
@@ -124,8 +122,8 @@ class SpecMultiMethod(object):
def __call__(self, package_self, *args, **kwargs):
"""Find the first method with a spec that matches the
- package's spec. If none is found, call the default
- or if there is none, then raise a NoSuchMethodError.
+ package's spec. If none is found, call the default
+ or if there is none, then raise a NoSuchMethodError.
"""
spec_method = self._get_method_by_spec(package_self.spec)
if spec_method:
@@ -140,17 +138,14 @@ class SpecMultiMethod(object):
superself = cls.__dict__.get(self.__name__, None)
if isinstance(superself, SpecMultiMethod):
# Check parent multimethod for method for spec.
- superself_method = superself._get_method_by_spec(
- package_self.spec
- )
+ superself_method = superself._get_method_by_spec(package_self.spec)
if superself_method:
return superself_method(package_self, *args, **kwargs)
elif superself:
return superself(package_self, *args, **kwargs)
raise NoSuchMethodError(
- type(package_self), self.__name__, package_self.spec,
- [m[0] for m in self.method_list]
+ type(package_self), self.__name__, package_self.spec, [m[0] for m in self.method_list]
)
@@ -285,5 +280,5 @@ class NoSuchMethodError(spack.error.SpackError):
def __init__(self, cls, method_name, spec, possible_specs):
super(NoSuchMethodError, self).__init__(
"Package %s does not support %s called with %s. Options are: %s"
- % (cls.__name__, method_name, spec,
- ", ".join(str(s) for s in possible_specs)))
+ % (cls.__name__, method_name, spec, ", ".join(str(s) for s in possible_specs))
+ )
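
The SpecMultiMethod docstring above describes dispatching a call to the first registered implementation whose spec matches the package. A stripped-down illustration of that dispatch idea, using plain predicates instead of specs, is shown below; it is not Spack's implementation.

    import functools

    class PredicateMultiMethod(object):
        """First registered predicate that matches wins; otherwise use the default."""

        def __init__(self, default=None):
            self.default = default
            self.method_list = []

        def register(self, predicate, method):
            self.method_list.append((predicate, method))
            if not hasattr(self, "__name__"):
                functools.update_wrapper(self, method)

        def __call__(self, obj, *args, **kwargs):
            for predicate, method in self.method_list:
                if predicate(obj):
                    return method(obj, *args, **kwargs)
            if self.default:
                return self.default(obj, *args, **kwargs)
            raise NotImplementedError("no matching method and no default")

    def install_linux(pkg):
        return "linux-specific install"

    def install_default(pkg):
        return "generic install"

    install = PredicateMultiMethod(default=install_default)
    install.register(lambda pkg: pkg["platform"] == "linux", install_linux)

    print(install({"platform": "linux"}))   # linux-specific install
    print(install({"platform": "darwin"}))  # generic install
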
diff --git a/lib/spack/spack/operating_systems/__init__.py b/lib/spack/spack/operating_systems/__init__.py
index 5a029ae5a8..88083a034f 100644
--- a/lib/spack/spack/operating_systems/__init__.py
+++ b/lib/spack/spack/operating_systems/__init__.py
@@ -9,14 +9,7 @@ from .linux_distro import LinuxDistro
from .mac_os import MacOs
from .windows_os import WindowsOs
-__all__ = [
- 'OperatingSystem',
- 'LinuxDistro',
- 'MacOs',
- 'CrayFrontend',
- 'CrayBackend',
- 'WindowsOs'
-]
+__all__ = ["OperatingSystem", "LinuxDistro", "MacOs", "CrayFrontend", "CrayBackend", "WindowsOs"]
#: List of all the Operating Systems known to Spack
operating_systems = [LinuxDistro, MacOs, CrayFrontend, CrayBackend, WindowsOs]
diff --git a/lib/spack/spack/operating_systems/_operating_system.py b/lib/spack/spack/operating_systems/_operating_system.py
index 5c58b0f331..432dfb1042 100644
--- a/lib/spack/spack/operating_systems/_operating_system.py
+++ b/lib/spack/spack/operating_systems/_operating_system.py
@@ -27,9 +27,10 @@ class OperatingSystem(object):
For platforms such as linux and darwin, the operating system is autodetected.
"""
+
def __init__(self, name, version):
- self.name = name.replace('-', '_')
- self.version = str(version).replace('-', '_')
+ self.name = name.replace("-", "_")
+ self.version = str(version).replace("-", "_")
def __str__(self):
return "%s%s" % (self.name, self.version)
@@ -42,7 +43,4 @@ class OperatingSystem(object):
yield self.version
def to_dict(self):
- return syaml.syaml_dict([
- ('name', self.name),
- ('version', self.version)
- ])
+ return syaml.syaml_dict([("name", self.name), ("version", self.version)])
diff --git a/lib/spack/spack/operating_systems/cray_backend.py b/lib/spack/spack/operating_systems/cray_backend.py
index 0a4e392496..0076bc7df5 100644
--- a/lib/spack/spack/operating_systems/cray_backend.py
+++ b/lib/spack/spack/operating_systems/cray_backend.py
@@ -16,8 +16,8 @@ from .linux_distro import LinuxDistro
#: Possible locations of the Cray CLE release file,
#: which we look at to get the CNL OS version.
-_cle_release_file = '/etc/opt/cray/release/cle-release'
-_clerelease_file = '/etc/opt/cray/release/clerelease'
+_cle_release_file = "/etc/opt/cray/release/cle-release"
+_clerelease_file = "/etc/opt/cray/release/clerelease"
def read_cle_release_file():
@@ -47,7 +47,7 @@ def read_cle_release_file():
for line in release_file:
# use partition instead of split() to ensure we only split on
# the first '=' in the line.
- key, _, value = line.partition('=')
+ key, _, value = line.partition("=")
result[key] = value.strip()
return result
@@ -78,7 +78,7 @@ class CrayBackend(LinuxDistro):
"""
def __init__(self):
- name = 'cnl'
+ name = "cnl"
version = self._detect_crayos_version()
if version:
# If we found a CrayOS version, we do not want the information
@@ -98,10 +98,10 @@ class CrayBackend(LinuxDistro):
def _detect_crayos_version(cls):
if os.path.isfile(_cle_release_file):
release_attrs = read_cle_release_file()
- if 'RELEASE' not in release_attrs:
+ if "RELEASE" not in release_attrs:
# This Cray system uses a base OS not CLE/CNL
return None
- v = spack.version.Version(release_attrs['RELEASE'])
+ v = spack.version.Version(release_attrs["RELEASE"])
return v[0]
elif os.path.isfile(_clerelease_file):
v = read_clerelease_file()
@@ -130,44 +130,43 @@ class CrayBackend(LinuxDistro):
continue
if cmp_cls.PrgEnv_compiler is None:
- tty.die('Must supply PrgEnv_compiler with PrgEnv')
+ tty.die("Must supply PrgEnv_compiler with PrgEnv")
compiler_id = spack.compilers.CompilerID(self, compiler_name, None)
detect_version_args = spack.compilers.DetectVersionArgs(
- id=compiler_id, variation=(None, None),
- language='cc', path='cc'
+ id=compiler_id, variation=(None, None), language="cc", path="cc"
)
command_arguments.append(detect_version_args)
return command_arguments
def detect_version(self, detect_version_args):
import spack.compilers
+
modulecmd = self.modulecmd
compiler_name = detect_version_args.id.compiler_name
compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
- output = modulecmd('avail', compiler_cls.PrgEnv_compiler)
- version_regex = r'({0})/([\d\.]+[\d]-?[\w]*)'.format(
- compiler_cls.PrgEnv_compiler
- )
+ output = modulecmd("avail", compiler_cls.PrgEnv_compiler)
+ version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
matches = re.findall(version_regex, output)
- version = tuple(version for _, version in matches
- if 'classic' not in version)
+ version = tuple(version for _, version in matches if "classic" not in version)
compiler_id = detect_version_args.id
- value = detect_version_args._replace(
- id=compiler_id._replace(version=version)
- )
+ value = detect_version_args._replace(id=compiler_id._replace(version=version))
return value, None
def make_compilers(self, compiler_id, paths):
import spack.spec
+
name = compiler_id.compiler_name
cmp_cls = spack.compilers.class_for_compiler_name(name)
compilers = []
for v in compiler_id.version:
comp = cmp_cls(
- spack.spec.CompilerSpec(name + '@' + v),
- self, "any",
- ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v])
+ spack.spec.CompilerSpec(name + "@" + v),
+ self,
+ "any",
+ ["cc", "CC", "ftn"],
+ [cmp_cls.PrgEnv, name + "/" + v],
+ )
compilers.append(comp)
return compilers
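
detect_version above collapses the PrgEnv version regex onto one line; it scrapes compiler versions from 'module avail' output and drops 'classic' entries. A quick sketch applying the same regex to made-up output (real module listings vary by site):

    import re

    # Invented 'module avail' style output for the example.
    output = """
    PrgEnv-gnu/6.0.10
    gcc/11.2.0  gcc/12.1.0  gcc/8.1.0-classic
    """

    prgenv_compiler = "gcc"  # assumed PrgEnv_compiler value
    version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(prgenv_compiler)
    matches = re.findall(version_regex, output)

    # Keep only non-'classic' versions, as in the hunk above.
    versions = tuple(version for _, version in matches if "classic" not in version)
    print(versions)  # ('11.2.0', '12.1.0')
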
diff --git a/lib/spack/spack/operating_systems/cray_frontend.py b/lib/spack/spack/operating_systems/cray_frontend.py
index 02419fd032..1b1d769149 100644
--- a/lib/spack/spack/operating_systems/cray_frontend.py
+++ b/lib/spack/spack/operating_systems/cray_frontend.py
@@ -24,14 +24,14 @@ def unload_programming_environment():
# We rely on the fact that the PrgEnv-* modules set the PE_ENV
# environment variable.
- if 'PE_ENV' in os.environ:
+ if "PE_ENV" in os.environ:
# Copy environment variables to restore them after the compiler
# detection. We expect that the only thing PrgEnv-* modules do is
# the environment variables modifications.
env_bu = os.environ.copy()
# Get the name of the module from the environment variable.
- prg_env = 'PrgEnv-' + os.environ['PE_ENV'].lower()
+ prg_env = "PrgEnv-" + os.environ["PE_ENV"].lower()
# Unload the PrgEnv-* module. By doing this we intentionally
# provoke errors when the Cray's compiler wrappers are executed
@@ -41,7 +41,7 @@ def unload_programming_environment():
# into the PATH environment variable (i.e. the following modules:
# 'intel', 'cce', 'gcc', etc.) will also be unloaded since they are
# specified as prerequisites in the PrgEnv-* modulefiles.
- module('unload', prg_env)
+ module("unload", prg_env)
yield
@@ -67,39 +67,38 @@ class CrayFrontend(LinuxDistro):
import spack.compilers
with unload_programming_environment():
- search_paths = get_path('PATH')
+ search_paths = get_path("PATH")
- extract_path_re = re.compile(r'prepend-path[\s]*PATH[\s]*([/\w\.:-]*)')
+ extract_path_re = re.compile(r"prepend-path[\s]*PATH[\s]*([/\w\.:-]*)")
for compiler_cls in spack.compilers.all_compiler_types():
# Check if the compiler class is supported on Cray
- prg_env = getattr(compiler_cls, 'PrgEnv', None)
- compiler_module = getattr(compiler_cls, 'PrgEnv_compiler', None)
+ prg_env = getattr(compiler_cls, "PrgEnv", None)
+ compiler_module = getattr(compiler_cls, "PrgEnv_compiler", None)
if not (prg_env and compiler_module):
continue
# It is supported, check which versions are available
- output = module('avail', compiler_cls.PrgEnv_compiler)
- version_regex = r'({0})/([\d\.]+[\d]-?[\w]*)'.format(
- compiler_cls.PrgEnv_compiler
- )
+ output = module("avail", compiler_cls.PrgEnv_compiler)
+ version_regex = r"({0})/([\d\.]+[\d]-?[\w]*)".format(compiler_cls.PrgEnv_compiler)
matches = re.findall(version_regex, output)
- versions = tuple(version for _, version in matches
- if 'classic' not in version)
+ versions = tuple(version for _, version in matches if "classic" not in version)
# Now inspect the modules and add to paths
msg = "[CRAY FE] Detected FE compiler [name={0}, versions={1}]"
tty.debug(msg.format(compiler_module, versions))
for v in versions:
try:
- current_module = compiler_module + '/' + v
- out = module('show', current_module)
+ current_module = compiler_module + "/" + v
+ out = module("show", current_module)
match = extract_path_re.search(out)
- search_paths += match.group(1).split(':')
+ search_paths += match.group(1).split(":")
except Exception as e:
- msg = ("[CRAY FE] An unexpected error occurred while "
- "detecting FE compiler [compiler={0}, "
- " version={1}, error={2}]")
+ msg = (
+ "[CRAY FE] An unexpected error occurred while "
+ "detecting FE compiler [compiler={0}, "
+ " version={1}, error={2}]"
+ )
tty.debug(msg.format(compiler_cls.name, v, str(e)))
search_paths = list(llnl.util.lang.dedupe(search_paths))
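
The CrayFrontend hunk above pulls compiler search paths out of 'module show' output with a prepend-path regex. A small sketch of that extraction against invented output:

    import re

    # Invented 'module show' style output; real modulefiles differ.
    out = """
    prepend-path    PATH    /opt/cray/pe/gcc/12.1.0/bin
    prepend-path    MANPATH /opt/cray/pe/gcc/12.1.0/share/man
    """

    extract_path_re = re.compile(r"prepend-path[\s]*PATH[\s]*([/\w\.:-]*)")
    match = extract_path_re.search(out)
    search_paths = match.group(1).split(":") if match else []
    print(search_paths)  # ['/opt/cray/pe/gcc/12.1.0/bin']
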
diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py
index 70ea6a22b5..403d3a0d1c 100644
--- a/lib/spack/spack/operating_systems/linux_distro.py
+++ b/lib/spack/spack/operating_systems/linux_distro.py
@@ -13,55 +13,56 @@ from ._operating_system import OperatingSystem
def kernel_version():
"""Return the kernel version as a Version object.
- Note that the kernel version is distinct from OS and/or
- distribution versions. For instance:
- >>> external.distro.id()
- 'centos'
- >>> external.distro.version()
- '7'
- >>> platform.release()
- '5.10.84+'
+ Note that the kernel version is distinct from OS and/or
+ distribution versions. For instance:
+ >>> external.distro.id()
+ 'centos'
+ >>> external.distro.version()
+ '7'
+ >>> platform.release()
+ '5.10.84+'
"""
# Strip '+' characters just in case we're running a
# version built from git/etc
- clean_version = re.sub(r'\+', r'', py_platform.release())
+ clean_version = re.sub(r"\+", r"", py_platform.release())
return Version(clean_version)
class LinuxDistro(OperatingSystem):
- """ This class will represent the autodetected operating system
- for a Linux System. Since there are many different flavors of
- Linux, this class will attempt to encompass them all through
- autodetection using the python module platform and the method
- platform.dist()
+ """This class will represent the autodetected operating system
+ for a Linux System. Since there are many different flavors of
+ Linux, this class will attempt to encompass them all through
+ autodetection using the python module platform and the method
+ platform.dist()
"""
def __init__(self):
try:
# This will throw an error if imported on a non-Linux platform.
import external.distro
+
distname, version = external.distro.id(), external.distro.version()
except ImportError:
- distname, version = 'unknown', ''
+ distname, version = "unknown", ""
# Grabs major version from tuple on redhat; on other platforms
# grab the first legal identifier in the version field. On
# debian you get things like 'wheezy/sid'; sid means unstable.
# We just record 'wheezy' and don't get quite so detailed.
- version = re.split(r'[^\w-]', version)
+ version = re.split(r"[^\w-]", version)
- if 'ubuntu' in distname:
- version = '.'.join(version[0:2])
+ if "ubuntu" in distname:
+ version = ".".join(version[0:2])
# openSUSE Tumbleweed is a rolling release which can change
# more than once in a week, so set version to tumbleweed$GLIBVERS
- elif 'opensuse-tumbleweed' in distname or 'opensusetumbleweed' in distname:
- distname = 'opensuse'
+ elif "opensuse-tumbleweed" in distname or "opensusetumbleweed" in distname:
+ distname = "opensuse"
output = check_output(["ldd", "--version"]).decode()
- libcvers = re.findall(r'ldd \(GNU libc\) (.*)', output)
+ libcvers = re.findall(r"ldd \(GNU libc\) (.*)", output)
if len(libcvers) == 1:
- version = 'tumbleweed' + libcvers[0]
+ version = "tumbleweed" + libcvers[0]
else:
- version = 'tumbleweed' + version[0]
+ version = "tumbleweed" + version[0]
else:
version = version[0]
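
The LinuxDistro constructor above normalizes distribution versions: split on non-word characters, keep major.minor on Ubuntu, and keep the first token elsewhere (the Tumbleweed branch shells out to ldd for the glibc version). A compact sketch of the common branches, with the Tumbleweed case deliberately omitted:

    import re

    def normalize(distname, version):
        # Loosely follows the constructor above; Tumbleweed handling omitted.
        parts = re.split(r"[^\w-]", version)
        if "ubuntu" in distname:
            return ".".join(parts[0:2])
        return parts[0]

    print(normalize("ubuntu", "22.04"))       # 22.04
    print(normalize("debian", "wheezy/sid"))  # wheezy
    print(normalize("centos", "7"))           # 7
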
diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py
index 403188262d..a988a18205 100644
--- a/lib/spack/spack/operating_systems/mac_os.py
+++ b/lib/spack/spack/operating_systems/mac_os.py
@@ -43,17 +43,17 @@ def macos_version():
installations report the OS
on which Python was *built* rather than the one on which it is running.
"""
- env_ver = os.environ.get('MACOSX_DEPLOYMENT_TARGET', None)
+ env_ver = os.environ.get("MACOSX_DEPLOYMENT_TARGET", None)
if env_ver:
return Version(env_ver)
try:
- output = Executable('sw_vers')(output=str, fail_on_error=False)
+ output = Executable("sw_vers")(output=str, fail_on_error=False)
except Exception:
# FileNotFoundError, or spack.util.executable.ProcessError
pass
else:
- match = re.search(r'ProductVersion:\s*([0-9.]+)', output)
+ match = re.search(r"ProductVersion:\s*([0-9.]+)", output)
if match:
return Version(match.group(1))
@@ -69,17 +69,17 @@ def macos_cltools_version():
The CLT version might only affect the build if it's selected as the macOS
SDK path.
"""
- pkgutil = Executable('pkgutil')
- output = pkgutil('--pkg-info=com.apple.pkg.cltools_executables',
- output=str, fail_on_error=False)
- match = re.search(r'version:\s*([0-9.]+)', output)
+ pkgutil = Executable("pkgutil")
+ output = pkgutil(
+ "--pkg-info=com.apple.pkg.cltools_executables", output=str, fail_on_error=False
+ )
+ match = re.search(r"version:\s*([0-9.]+)", output)
if match:
return Version(match.group(1))
# No CLTools installed by package manager: try Xcode
- output = pkgutil('--pkg-info=com.apple.pkg.Xcode',
- output=str, fail_on_error=False)
- match = re.search(r'version:\s*([0-9.]+)', output)
+ output = pkgutil("--pkg-info=com.apple.pkg.Xcode", output=str, fail_on_error=False)
+ match = re.search(r"version:\s*([0-9.]+)", output)
if match:
return Version(match.group(1))
@@ -88,10 +88,9 @@ def macos_cltools_version():
@llnl.util.lang.memoized
def macos_sdk_path():
- """Return path to the active macOS SDK.
- """
- xcrun = Executable('xcrun')
- return xcrun('--show-sdk-path', output=str).rstrip()
+ """Return path to the active macOS SDK."""
+ xcrun = Executable("xcrun")
+ return xcrun("--show-sdk-path", output=str).rstrip()
def macos_sdk_version():
@@ -105,8 +104,8 @@ def macos_sdk_version():
The macOS deployment target cannot be greater than the SDK version, but
usually it can be at least a few versions less.
"""
- xcrun = Executable('xcrun')
- return Version(xcrun('--show-sdk-version', output=str).rstrip())
+ xcrun = Executable("xcrun")
+ return Version(xcrun("--show-sdk-version", output=str).rstrip())
class MacOs(OperatingSystem):
@@ -123,32 +122,32 @@ class MacOs(OperatingSystem):
will use a generic "macos" version string until Spack is updated.
"""
mac_releases = {
- '10.0': 'cheetah',
- '10.1': 'puma',
- '10.2': 'jaguar',
- '10.3': 'panther',
- '10.4': 'tiger',
- '10.5': 'leopard',
- '10.6': 'snowleopard',
- '10.7': 'lion',
- '10.8': 'mountainlion',
- '10.9': 'mavericks',
- '10.10': 'yosemite',
- '10.11': 'elcapitan',
- '10.12': 'sierra',
- '10.13': 'highsierra',
- '10.14': 'mojave',
- '10.15': 'catalina',
- '10.16': 'bigsur',
- '11': 'bigsur',
- '12': 'monterey',
+ "10.0": "cheetah",
+ "10.1": "puma",
+ "10.2": "jaguar",
+ "10.3": "panther",
+ "10.4": "tiger",
+ "10.5": "leopard",
+ "10.6": "snowleopard",
+ "10.7": "lion",
+ "10.8": "mountainlion",
+ "10.9": "mavericks",
+ "10.10": "yosemite",
+ "10.11": "elcapitan",
+ "10.12": "sierra",
+ "10.13": "highsierra",
+ "10.14": "mojave",
+ "10.15": "catalina",
+ "10.16": "bigsur",
+ "11": "bigsur",
+ "12": "monterey",
}
version = macos_version()
# Big Sur versions go 11.0, 11.0.1, 11.1 (vs. prior versions that
# only used the minor component)
- part = 1 if version >= Version('11') else 2
+ part = 1 if version >= Version("11") else 2
mac_ver = str(version.up_to(part))
name = mac_releases.get(mac_ver, "macos")
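
The MacOs hunk above maps a numeric macOS version to a release name, using only the major component from Big Sur (11) onward. A simplified sketch with a subset of the table and plain string handling in place of Version.up_to:

    # Subset of the release table above; handling here is simplified on purpose.
    mac_releases = {"10.15": "catalina", "10.16": "bigsur", "11": "bigsur", "12": "monterey"}

    def release_name(version_str):
        parts = version_str.split(".")
        # Big Sur and later need only the major component; older macOS needs two.
        key = parts[0] if int(parts[0]) >= 11 else ".".join(parts[:2])
        return mac_releases.get(key, "macos")

    print(release_name("10.15.7"))  # catalina
    print(release_name("12.6"))     # monterey
    print(release_name("13.1"))     # macos (not in this subset)
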
diff --git a/lib/spack/spack/operating_systems/windows_os.py b/lib/spack/spack/operating_systems/windows_os.py
index 61525df887..47929ba61d 100755
--- a/lib/spack/spack/operating_systems/windows_os.py
+++ b/lib/spack/spack/operating_systems/windows_os.py
@@ -16,8 +16,7 @@ from ._operating_system import OperatingSystem
def windows_version():
- """temporary workaround to return a Windows version as a Version object
- """
+ """temporary workaround to return a Windows version as a Version object"""
return Version(platform.release())
@@ -32,33 +31,39 @@ class WindowsOs(OperatingSystem):
# Find MSVC directories using vswhere
comp_search_paths = []
vs_install_paths = []
- root = os.environ.get('ProgramFiles(x86)') or os.environ.get('ProgramFiles')
+ root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
if root:
try:
extra_args = {}
if sys.version_info[:3] >= (3, 6, 0):
- extra_args = {'encoding': 'mbcs', 'errors': 'strict'}
- paths = subprocess.check_output([ # type: ignore[call-overload] # novermin
- os.path.join(root, "Microsoft Visual Studio",
- "Installer", "vswhere.exe"),
- "-prerelease",
- "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
- "-property", "installationPath",
- "-products", "*",
- ], **extra_args).strip()
+ extra_args = {"encoding": "mbcs", "errors": "strict"}
+ paths = subprocess.check_output(
+ [ # type: ignore[call-overload] # novermin
+ os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
+ "-prerelease",
+ "-requires",
+ "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
+ "-property",
+ "installationPath",
+ "-products",
+ "*",
+ ],
+ **extra_args
+ ).strip()
if (3, 0) <= sys.version_info[:2] <= (3, 5):
paths = paths.decode()
- vs_install_paths = paths.split('\n')
- msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC")
- for path in vs_install_paths]
+ vs_install_paths = paths.split("\n")
+ msvc_paths = [os.path.join(path, "VC", "Tools", "MSVC") for path in vs_install_paths]
for p in msvc_paths:
- comp_search_paths.extend(
- glob.glob(os.path.join(p, '*', 'bin', 'Hostx64', 'x64')))
+ comp_search_paths.extend(glob.glob(os.path.join(p, "*", "bin", "Hostx64", "x64")))
if os.getenv("ONEAPI_ROOT"):
- comp_search_paths.extend(glob.glob(os.path.join(
- str(os.getenv("ONEAPI_ROOT")),
- 'compiler', '*',
- 'windows', 'bin')))
+ comp_search_paths.extend(
+ glob.glob(
+ os.path.join(
+ str(os.getenv("ONEAPI_ROOT")), "compiler", "*", "windows", "bin"
+ )
+ )
+ )
except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
pass
if comp_search_paths:
@@ -66,9 +71,9 @@ class WindowsOs(OperatingSystem):
def __init__(self):
plat_ver = platform.release()
- if Version(plat_ver) < Version('10'):
+ if Version(plat_ver) < Version("10"):
raise SpackError("Spack is not supported on Windows versions older than 10")
- super(WindowsOs, self).__init__('windows{}'.format(plat_ver), plat_ver)
+ super(WindowsOs, self).__init__("windows{}".format(plat_ver), plat_ver)
def __str__(self):
return self.name
diff --git a/lib/spack/spack/package_base.py b/lib/spack/spack/package_base.py
index 7eeb727745..441563eeba 100644
--- a/lib/spack/spack/package_base.py
+++ b/lib/spack/spack/package_base.py
@@ -71,32 +71,29 @@ if sys.version_info[0] >= 3:
Optional[Iterable[str]],
Optional[Iterable[str]],
]
- FLAG_HANDLER_TYPE = Callable[
- [str, Iterable[str]],
- FLAG_HANDLER_RETURN_TYPE
- ]
+ FLAG_HANDLER_TYPE = Callable[[str, Iterable[str]], FLAG_HANDLER_RETURN_TYPE]
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
# Filename for the Spack build/install log.
-_spack_build_logfile = 'spack-build-out.txt'
+_spack_build_logfile = "spack-build-out.txt"
# Filename for the Spack build/install environment file.
-_spack_build_envfile = 'spack-build-env.txt'
+_spack_build_envfile = "spack-build-env.txt"
# Filename for the Spack build/install environment modifications file.
-_spack_build_envmodsfile = 'spack-build-env-mods.txt'
+_spack_build_envmodsfile = "spack-build-env-mods.txt"
# Filename for the Spack install phase-time test log.
-_spack_install_test_log = 'install-time-test-log.txt'
+_spack_install_test_log = "install-time-test-log.txt"
# Filename of json with total build and phase times (seconds)
-_spack_times_log = 'install_times.json'
+_spack_times_log = "install_times.json"
# Filename for the Spack configure args file.
-_spack_configure_argsfile = 'spack-configure-args.txt'
+_spack_configure_argsfile = "spack-configure-args.txt"
def preferred_version(pkg):
@@ -109,9 +106,7 @@ def preferred_version(pkg):
# Here we sort first on the fact that a version is marked
# as preferred in the package, then on the fact that the
# version is not develop, then lexicographically
- key_fn = lambda v: (pkg.versions[v].get('preferred', False),
- not v.isdevelop(),
- v)
+ key_fn = lambda v: (pkg.versions[v].get("preferred", False), not v.isdevelop(), v)
return sorted(pkg.versions, key=key_fn).pop()
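
preferred_version above sorts candidate versions by a three-part key, the 'preferred' flag, then not-develop, then the version itself, and takes the last element. A toy illustration with string versions; real Spack compares Version objects, so the lexicographic ordering here is only for the example.

    # Hypothetical version table; real packages carry Version objects, not strings.
    versions = {
        "2.1.0": {},
        "3.0.0-develop": {},
        "1.9.2": {"preferred": True},
    }

    def is_develop(v):
        return "develop" in v

    # Preferred first, then non-develop, then lexicographic; take the last entry.
    key_fn = lambda v: (versions[v].get("preferred", False), not is_develop(v), v)
    print(sorted(versions, key=key_fn).pop())  # 1.9.2
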
@@ -155,21 +150,24 @@ class InstallPhase(object):
callback(instance)
# Check instance attributes at the end of a phase
self._on_phase_exit(instance)
+
return phase_wrapper
def _on_phase_start(self, instance):
# If a phase has a matching stop_before_phase attribute,
# stop the installation process raising a StopPhase
- if getattr(instance, 'stop_before_phase', None) == self.name:
+ if getattr(instance, "stop_before_phase", None) == self.name:
from spack.build_environment import StopPhase
- raise StopPhase('Stopping before \'{0}\' phase'.format(self.name))
+
+ raise StopPhase("Stopping before '{0}' phase".format(self.name))
def _on_phase_exit(self, instance):
# If a phase has a matching last_phase attribute,
# stop the installation process raising a StopPhase
- if getattr(instance, 'last_phase', None) == self.name:
+ if getattr(instance, "last_phase", None) == self.name:
from spack.build_environment import StopPhase
- raise StopPhase('Stopping at \'{0}\' phase'.format(self.name))
+
+ raise StopPhase("Stopping at '{0}' phase".format(self.name))
def copy(self):
try:
@@ -192,8 +190,9 @@ class DetectablePackageMeta(object):
"""Check if a package is detectable and add default implementations
for the detection function.
"""
+
def __init__(cls, name, bases, attr_dict):
- if hasattr(cls, 'executables') and hasattr(cls, 'libraries'):
+ if hasattr(cls, "executables") and hasattr(cls, "libraries"):
msg = "a package can have either an 'executables' or 'libraries' attribute"
msg += " [package '{0.name}' defines both]"
raise spack.error.SpackError(msg.format(cls))
@@ -207,19 +206,21 @@ class DetectablePackageMeta(object):
# If a package has the executables or libraries attribute then it's
# assumed to be detectable
- if hasattr(cls, 'executables') or hasattr(cls, 'libraries'):
+ if hasattr(cls, "executables") or hasattr(cls, "libraries"):
+
@classmethod
def platform_executables(cls):
def to_windows_exe(exe):
- if exe.endswith('$'):
- exe = exe.replace('$', '%s$' % spack.util.path.win_exe_ext())
+ if exe.endswith("$"):
+ exe = exe.replace("$", "%s$" % spack.util.path.win_exe_ext())
else:
exe += spack.util.path.win_exe_ext()
return exe
+
plat_exe = []
- if hasattr(cls, 'executables'):
+ if hasattr(cls, "executables"):
for exe in cls.executables:
- if sys.platform == 'win32':
+ if sys.platform == "win32":
exe = to_windows_exe(exe)
plat_exe.append(exe)
return plat_exe
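
platform_executables above rewrites executable regexes for Windows by splicing an .exe suffix in front of a trailing '$' anchor. A small sketch of that rewrite; the escaped extension string is an assumption standing in for spack.util.path.win_exe_ext():

    import re

    def to_windows_exe(exe, ext=r"\.exe"):
        # Regexes anchored with '$' get the extension spliced in before the
        # anchor; plain names simply get it appended (mirrors the hunk above).
        if exe.endswith("$"):
            return exe.replace("$", "%s$" % ext)
        return exe + ext

    print(to_windows_exe(r"cmake$"))  # cmake\.exe$
    print(to_windows_exe("python"))   # python\.exe
    print(re.match(to_windows_exe(r"cmake$"), "cmake.exe") is not None)  # True
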
@@ -240,8 +241,7 @@ class DetectablePackageMeta(object):
objs_by_version = collections.defaultdict(list)
# The default filter function is the identity function for the
# list of executables
- filter_fn = getattr(cls, 'filter_detected_exes',
- lambda x, exes: exes)
+ filter_fn = getattr(cls, "filter_detected_exes", lambda x, exes: exes)
objs_in_prefix = filter_fn(prefix, objs_in_prefix)
for obj in objs_in_prefix:
try:
@@ -249,8 +249,9 @@ class DetectablePackageMeta(object):
if version_str:
objs_by_version[version_str].append(obj)
except Exception as e:
- msg = ('An error occurred when trying to detect '
- 'the version of "{0}" [{1}]')
+ msg = (
+ "An error occurred when trying to detect " 'the version of "{0}" [{1}]'
+ )
tty.debug(msg.format(obj, str(e)))
specs = []
@@ -264,57 +265,57 @@ class DetectablePackageMeta(object):
if isinstance(variant, six.string_types):
variant = (variant, {})
variant_str, extra_attributes = variant
- spec_str = '{0}@{1} {2}'.format(
- cls.name, version_str, variant_str
- )
+ spec_str = "{0}@{1} {2}".format(cls.name, version_str, variant_str)
# Pop a few reserved keys from extra attributes, since
# they have a different semantics
- external_path = extra_attributes.pop('prefix', None)
- external_modules = extra_attributes.pop(
- 'modules', None
- )
+ external_path = extra_attributes.pop("prefix", None)
+ external_modules = extra_attributes.pop("modules", None)
try:
spec = spack.spec.Spec(
spec_str,
external_path=external_path,
- external_modules=external_modules
+ external_modules=external_modules,
)
except Exception as e:
msg = 'Parsing failed [spec_str="{0}", error={1}]'
tty.debug(msg.format(spec_str, str(e)))
else:
- specs.append(spack.spec.Spec.from_detection(
- spec, extra_attributes=extra_attributes
- ))
+ specs.append(
+ spack.spec.Spec.from_detection(
+ spec, extra_attributes=extra_attributes
+ )
+ )
return sorted(specs)
@classmethod
def determine_variants(cls, objs, version_str):
- return ''
+ return ""
# Register the class as a detectable package
detectable_packages[cls.namespace].append(cls.name)
# Attach function implementations to the detectable class
default = False
- if not hasattr(cls, 'determine_spec_details'):
+ if not hasattr(cls, "determine_spec_details"):
default = True
cls.determine_spec_details = determine_spec_details
- if default and not hasattr(cls, 'determine_version'):
- msg = ('the package "{0}" in the "{1}" repo needs to define'
- ' the "determine_version" method to be detectable')
+ if default and not hasattr(cls, "determine_version"):
+ msg = (
+ 'the package "{0}" in the "{1}" repo needs to define'
+ ' the "determine_version" method to be detectable'
+ )
NotImplementedError(msg.format(cls.name, cls.namespace))
- if default and not hasattr(cls, 'determine_variants'):
+ if default and not hasattr(cls, "determine_variants"):
cls.determine_variants = determine_variants
# This function should not be overridden by subclasses,
# as it is not designed for bespoke pkg detection but rather
# on a per-platform basis
- if 'platform_executables' in cls.__dict__.keys():
+ if "platform_executables" in cls.__dict__.keys():
raise PackageError("Packages should not override platform_executables")
cls.platform_executables = platform_executables
@@ -325,12 +326,13 @@ class PackageMeta(
DetectablePackageMeta,
spack.directives.DirectiveMeta,
spack.mixins.PackageMixinsMeta,
- spack.multimethod.MultiMethodMeta
+ spack.multimethod.MultiMethodMeta,
):
"""
Package metaclass for supporting directives (e.g., depends_on) and phases
"""
- phase_fmt = '_InstallPhase_{0}'
+
+ phase_fmt = "_InstallPhase_{0}"
# These are accessed only through getattr, by name
_InstallPhase_run_before = {} # type: Dict[str, List[Callable]]
@@ -345,13 +347,13 @@ class PackageMeta(
InstallPhase attributes in the class that will be initialized in
__init__.
"""
- if 'phases' in attr_dict:
+ if "phases" in attr_dict:
# Turn the strings in 'phases' into InstallPhase instances
# and add them as private attributes
- _InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict['phases']]
- for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict['phases']):
+ _InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict["phases"]]
+ for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict["phases"]):
attr_dict[phase_name] = InstallPhase(callback_name)
- attr_dict['_InstallPhase_phases'] = _InstallPhase_phases
+ attr_dict["_InstallPhase_phases"] = _InstallPhase_phases
def _flush_callbacks(check_name):
# Name of the attribute I am going to check it exists
@@ -377,12 +379,12 @@ class PackageMeta(
# Clear the attribute for the next class
setattr(cls, check_attr, {})
- _flush_callbacks('run_before')
- _flush_callbacks('run_after')
+ _flush_callbacks("run_before")
+ _flush_callbacks("run_after")
# Reset names for packages that inherit from another
# package with a different name
- attr_dict['_name'] = None
+ attr_dict["_name"] = None
return super(PackageMeta, cls).__new__(cls, name, bases, attr_dict)
@@ -396,17 +398,18 @@ class PackageMeta(
checks.append(func)
setattr(PackageMeta, attr_name, check_list)
return func
+
return _decorator
def run_before(*phases):
"""Registers a method of a package to be run before a given phase"""
- return PackageMeta.register_callback('run_before', *phases)
+ return PackageMeta.register_callback("run_before", *phases)
def run_after(*phases):
"""Registers a method of a package to be run after a given phase"""
- return PackageMeta.register_callback('run_after', *phases)
+ return PackageMeta.register_callback("run_after", *phases)
def on_package_attributes(**attr_dict):
@@ -419,20 +422,19 @@ def on_package_attributes(**attr_dict):
attr_dict (dict): dictionary mapping attribute names to their
required values
"""
- def _execute_under_condition(func):
+ def _execute_under_condition(func):
@functools.wraps(func)
def _wrapper(instance, *args, **kwargs):
# If all the attributes have the value we require, then execute
- has_all_attributes = all(
- [hasattr(instance, key) for key in attr_dict]
- )
+ has_all_attributes = all([hasattr(instance, key) for key in attr_dict])
if has_all_attributes:
has_the_right_values = all(
[getattr(instance, key) == value for key, value in attr_dict.items()]
)
if has_the_right_values:
func(instance, *args, **kwargs)
+
return _wrapper
return _execute_under_condition
@@ -443,6 +445,7 @@ class PackageViewMixin(object):
package to views. Packages can customize how they are added to views by
overriding these functions.
"""
+
def view_source(self):
"""The source root directory that will be added to the view: files are
added such that their path relative to the view destination matches
@@ -506,8 +509,7 @@ def test_log_pathname(test_stage, spec):
Returns:
(str): the pathname of the test log file
"""
- return os.path.join(test_stage,
- 'test-{0}-out.txt'.format(TestSuite.test_pkg_id(spec)))
+ return os.path.join(test_stage, "test-{0}-out.txt".format(TestSuite.test_pkg_id(spec)))
class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
@@ -592,6 +594,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
specific build systems.
"""
+
#
# These are default values for instance variables.
#
@@ -664,7 +667,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
#: String. Contains the symbol used by the license manager to denote
#: a comment. Defaults to ``#``.
- license_comment = '#'
+ license_comment = "#"
#: List of strings. These are files that the software searches for when
#: looking for a license. All file paths must be relative to the
@@ -679,7 +682,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
#: String. A URL pointing to license setup instructions for the software.
#: Defaults to the empty string.
- license_url = ''
+ license_url = ""
#: Verbosity level, preserved across installs.
_verbose = None
@@ -737,7 +740,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
self.installed_from_binary_cache = False
# Ensure that only one of these two attributes are present
- if getattr(self, 'url', None) and getattr(self, 'urls', None):
+ if getattr(self, "url", None) and getattr(self, "urls", None):
msg = "a package can have either a 'url' or a 'urls' attribute"
msg += " [package '{0.name}' defines both]"
raise ValueError(msg.format(self))
@@ -757,8 +760,14 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
@classmethod
def possible_dependencies(
- cls, transitive=True, expand_virtuals=True, deptype='all',
- visited=None, missing=None, virtuals=None):
+ cls,
+ transitive=True,
+ expand_virtuals=True,
+ deptype="all",
+ visited=None,
+ missing=None,
+ virtuals=None,
+ ):
"""Return dict of possible dependencies of this package.
Args:
@@ -842,8 +851,8 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
continue
dep_cls.possible_dependencies(
- transitive, expand_virtuals, deptype, visited, missing,
- virtuals)
+ transitive, expand_virtuals, deptype, visited, missing, virtuals
+ )
return visited
@@ -869,17 +878,17 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
@classproperty
def fullname(cls):
"""Name of this package, including the namespace"""
- return '%s.%s' % (cls.namespace, cls.name)
+ return "%s.%s" % (cls.namespace, cls.name)
@classproperty
def fullnames(cls):
"""Fullnames for this package and any packages from which it inherits."""
fullnames = []
for cls in inspect.getmro(cls):
- namespace = getattr(cls, 'namespace', None)
+ namespace = getattr(cls, "namespace", None)
if namespace:
- fullnames.append('%s.%s' % (namespace, cls.name))
- if namespace == 'builtin':
+ fullnames.append("%s.%s" % (namespace, cls.name))
+ if namespace == "builtin":
# builtin packages cannot inherit from other repos
break
return fullnames
@@ -893,29 +902,31 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
if cls._name is None:
cls._name = cls.module.__name__
- if '.' in cls._name:
- cls._name = cls._name[cls._name.rindex('.') + 1:]
+ if "." in cls._name:
+ cls._name = cls._name[cls._name.rindex(".") + 1 :]
return cls._name
@classproperty
def global_license_dir(cls):
"""Returns the directory where license files for all packages are stored."""
- return spack.util.path.canonicalize_path(spack.config.get('config:license_dir'))
+ return spack.util.path.canonicalize_path(spack.config.get("config:license_dir"))
@property
def global_license_file(self):
"""Returns the path where a global license file for this
- particular package should be stored."""
+ particular package should be stored."""
if not self.license_files:
return
- return os.path.join(self.global_license_dir, self.name,
- os.path.basename(self.license_files[0]))
+ return os.path.join(
+ self.global_license_dir, self.name, os.path.basename(self.license_files[0])
+ )
@property
def version(self):
if not self.spec.versions.concrete:
- raise ValueError("Version requested for a package that"
- " does not have a concrete version.")
+ raise ValueError(
+ "Version requested for a package that" " does not have a concrete version."
+ )
return self.spec.versions[0]
@classmethod
@@ -933,8 +944,8 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
version_urls = collections.OrderedDict()
for v, args in sorted(cls.versions.items()):
- if 'url' in args:
- version_urls[v] = args['url']
+ if "url" in args:
+ version_urls[v] = args["url"]
return version_urls
def nearest_url(self, version):
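As a quick illustration of the mapping built above, with a hypothetical version table (real Spack keys are Version objects that sort semantically, not strings):

    import collections

    versions = {
        "1.2": {"url": "https://example.com/pkg-1.2.tar.gz"},
        "1.0": {"sha256": "..."},  # no explicit URL for this version
        "1.1": {"url": "https://example.com/pkg-1.1.tar.gz"},
    }

    version_urls = collections.OrderedDict()
    for v, args in sorted(versions.items()):
        if "url" in args:
            version_urls[v] = args["url"]

    print(list(version_urls))  # ['1.1', '1.2']: only versions that set a url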
@@ -1012,12 +1023,13 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
nu = spack.url.substitute_version(u, self.url_version(version))
urls.append(nu)
+
# If no specific URL, use the default, class-level URL
- sub_and_add(getattr(self, 'url', None))
- for u in getattr(self, 'urls', []):
+ sub_and_add(getattr(self, "url", None))
+ for u in getattr(self, "urls", []):
sub_and_add(u)
- sub_and_add(getattr(self, 'list_url', None))
+ sub_and_add(getattr(self, "list_url", None))
# if no version-bearing URLs can be found, try them raw
if not urls:
@@ -1030,11 +1042,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
# if there are NO URLs to go by, then we can't do anything
if not default_url:
raise NoURLError(self.__class__)
- urls.append(
- spack.url.substitute_version(
- default_url, self.url_version(version)
- )
- )
+ urls.append(spack.url.substitute_version(default_url, self.url_version(version)))
return urls
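The fallback chain above (per-version URL, then version-substituted class URLs, then the default URL) can be approximated in isolation. In the sketch below a placeholder-based substitution stands in for spack.url.substitute_version(), which actually rewrites the version embedded in an existing URL; the URLs are hypothetical.

    def url_for_version(version, version_urls, default_url):
        """Explicit per-version URL first, else substitute into a template."""
        if version in version_urls:
            return version_urls[version]
        if default_url is None:
            raise ValueError("package has no URL to build from")
        return default_url.format(version=version)  # naive stand-in

    special = {"2.0": "https://example.com/special/pkg-2.0.tar.bz2"}
    template = "https://example.com/pkg-{version}.tar.gz"
    print(url_for_version("2.0", special, template))
    print(url_for_version("1.9", special, template))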
@@ -1059,14 +1067,16 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
def _make_resource_stage(self, root_stage, fetcher, resource):
resource_stage_folder = self._resource_stage(resource)
mirror_paths = spack.mirror.mirror_archive_paths(
- fetcher,
- os.path.join(self.name, "%s-%s" % (resource.name, self.version)))
- stage = ResourceStage(resource.fetcher,
- root=root_stage,
- resource=resource,
- name=resource_stage_folder,
- mirror_paths=mirror_paths,
- path=self.path)
+ fetcher, os.path.join(self.name, "%s-%s" % (resource.name, self.version))
+ )
+ stage = ResourceStage(
+ resource.fetcher,
+ root=root_stage,
+ resource=resource,
+ name=resource_stage_folder,
+ mirror_paths=mirror_paths,
+ path=self.path,
+ )
return stage
def _download_search(self):
@@ -1076,21 +1086,24 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
def _make_root_stage(self, fetcher):
# Construct a mirror path (TODO: get this out of package.py)
mirror_paths = spack.mirror.mirror_archive_paths(
- fetcher,
- os.path.join(self.name, "%s-%s" % (self.name, self.version)),
- self.spec)
+ fetcher, os.path.join(self.name, "%s-%s" % (self.name, self.version)), self.spec
+ )
# Construct a path where the stage should build.
s = self.spec
- stage_name = "{0}{1}-{2}-{3}".format(stage_prefix, s.name, s.version,
- s.dag_hash())
+ stage_name = "{0}{1}-{2}-{3}".format(stage_prefix, s.name, s.version, s.dag_hash())
- stage = Stage(fetcher, mirror_paths=mirror_paths, name=stage_name,
- path=self.path, search_fn=self._download_search)
+ stage = Stage(
+ fetcher,
+ mirror_paths=mirror_paths,
+ name=stage_name,
+ path=self.path,
+ search_fn=self._download_search,
+ )
return stage
def _make_stage(self):
# If it's a dev package (not transitively), use a DIY stage object
- dev_path_var = self.spec.variants.get('dev_path', None)
+ dev_path_var = self.spec.variants.get("dev_path", None)
if dev_path_var:
return spack.stage.DIYStage(dev_path_var.value)
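The stage directory name assembled above concatenates a fixed prefix with the spec's name, version, and DAG hash. A toy rendering with made-up values (the actual prefix and hash come from Spack's stage module and the concrete spec):

    import hashlib

    stage_prefix = "spack-stage-"  # assumed value, for illustration only
    name, version = "zlib", "1.2.12"
    dag_hash = hashlib.sha256(b"fake-spec").hexdigest()[:32]

    stage_name = "{0}{1}-{2}-{3}".format(stage_prefix, name, version, dag_hash)
    print(stage_name)  # spack-stage-zlib-1.2.12-<hash>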
@@ -1105,8 +1118,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
else:
# Construct resource stage
resource = resources[ii - 1] # ii == 0 is root!
- stage = self._make_resource_stage(composite_stage[0], fetcher,
- resource)
+ stage = self._make_resource_stage(composite_stage[0], fetcher, resource)
# Append the item to the composite
composite_stage.append(stage)
@@ -1121,8 +1133,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
on the filesystem.
"""
if not self.spec.versions.concrete:
- raise ValueError(
- "Cannot retrieve stage for package without concrete version.")
+ raise ValueError("Cannot retrieve stage for package without concrete version.")
if self._stage is None:
self._stage = self._make_stage()
return self._stage
@@ -1137,7 +1148,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""Return the build environment file path associated with staging."""
# Backward compatibility: Return the name of an existing log path;
# otherwise, return the current install env path name.
- old_filename = os.path.join(self.stage.path, 'spack-build.env')
+ old_filename = os.path.join(self.stage.path, "spack-build.env")
if os.path.exists(old_filename):
return old_filename
else:
@@ -1163,7 +1174,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
# Backward compatibility: Return the name of an existing log path;
# otherwise, return the current install env path name.
- old_filename = os.path.join(self.metadata_dir, 'build.env')
+ old_filename = os.path.join(self.metadata_dir, "build.env")
if os.path.exists(old_filename):
return old_filename
else:
@@ -1173,7 +1184,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
def log_path(self):
"""Return the build log file path associated with staging."""
# Backward compatibility: Return the name of an existing log path.
- for filename in ['spack-build.out', 'spack-build.txt']:
+ for filename in ["spack-build.out", "spack-build.txt"]:
old_log = os.path.join(self.stage.path, filename)
if os.path.exists(old_log):
return old_log
@@ -1193,7 +1204,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
def install_log_path(self):
"""Return the build log file path on successful installation."""
# Backward compatibility: Return the name of an existing install log.
- for filename in ['build.out', 'build.txt']:
+ for filename in ["build.out", "build.txt"]:
old_log = os.path.join(self.metadata_dir, filename)
if os.path.exists(old_log):
return old_log
@@ -1209,7 +1220,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
@property
def test_install_log_path(self):
"""Return the install phase-time test log file path, if set."""
- return getattr(self, 'test_log_file', None)
+ return getattr(self, "test_log_file", None)
@property
def install_test_install_log_path(self):
@@ -1229,19 +1240,23 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
@property
def install_test_root(self):
"""Return the install test root directory."""
- return os.path.join(self.metadata_dir, 'test')
+ return os.path.join(self.metadata_dir, "test")
@property
def installed(self):
- msg = ('the "PackageBase.installed" property is deprecated and will be '
- 'removed in Spack v0.19, use "Spec.installed" instead')
+ msg = (
+ 'the "PackageBase.installed" property is deprecated and will be '
+ 'removed in Spack v0.19, use "Spec.installed" instead'
+ )
warnings.warn(msg)
return self.spec.installed
@property
def installed_upstream(self):
- msg = ('the "PackageBase.installed_upstream" property is deprecated and will '
- 'be removed in Spack v0.19, use "Spec.installed_upstream" instead')
+ msg = (
+ 'the "PackageBase.installed_upstream" property is deprecated and will '
+ 'be removed in Spack v0.19, use "Spec.installed_upstream" instead'
+ )
warnings.warn(msg)
return self.spec.installed_upstream
@@ -1262,8 +1277,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
@property
def fetcher(self):
if not self.spec.versions.concrete:
- raise ValueError("Cannot retrieve fetcher for"
- " package without concrete version.")
+ raise ValueError("Cannot retrieve fetcher for" " package without concrete version.")
if not self._fetcher:
self._fetcher = self._make_fetcher()
return self._fetcher
@@ -1284,9 +1298,10 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
run dependency in another.
"""
return dict(
- (name, conds) for name, conds in cls.dependencies.items()
- if any(dt in cls.dependencies[name][cond].type
- for cond in conds for dt in deptypes))
+ (name, conds)
+ for name, conds in cls.dependencies.items()
+ if any(dt in cls.dependencies[name][cond].type for cond in conds for dt in deptypes)
+ )
@property
def extendee_spec(self):
@@ -1299,7 +1314,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
deps = []
# If the extendee is in the spec's deps already, return that.
- for dep in self.spec.traverse(deptype=('link', 'run')):
+ for dep in self.spec.traverse(deptype=("link", "run")):
if dep.name in self.extendees:
deps.append(dep)
@@ -1342,7 +1357,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
return bool(self.extendees)
def extends(self, spec):
- '''
+ """
Returns True if this package extends the given spec.
If ``self.spec`` is concrete, this returns whether this package extends
@@ -1350,7 +1365,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
If ``self.spec`` is not concrete, this returns whether this package may
extend the given spec.
- '''
+ """
if spec.name not in self.extendees:
return False
s = self.extendee_spec
@@ -1359,8 +1374,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
def is_activated(self, view):
"""Return True if package is activated."""
if not self.is_extension:
- raise ValueError(
- "is_activated called on package that is not an extension.")
+ raise ValueError("is_activated called on package that is not an extension.")
if self.extendee_spec.installed_upstream:
# If this extends an upstream package, it cannot be activated for
# it. This bypasses construction of the extension map, which can
@@ -1376,7 +1390,8 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
return any(
any(self.spec.satisfies(c) for c in constraints)
- for s, constraints in self.provided.items() if s.name == vpkg_name
+ for s, constraints in self.provided.items()
+ if s.name == vpkg_name
)
@property
@@ -1384,8 +1399,11 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
virtual packages provided by this package with its spec
"""
- return [vspec for vspec, constraints in self.provided.items()
- if any(self.spec.satisfies(c) for c in constraints)]
+ return [
+ vspec
+ for vspec, constraints in self.provided.items()
+ if any(self.spec.satisfies(c) for c in constraints)
+ ]
@property
def prefix(self):
@@ -1403,8 +1421,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
if not self.spec.concrete:
raise ValueError("Can only get a compiler for a concrete package.")
- return spack.compilers.compiler_for_spec(self.spec.compiler,
- self.spec.architecture)
+ return spack.compilers.compiler_for_spec(self.spec.compiler, self.spec.architecture)
def url_version(self, version):
"""
@@ -1433,10 +1450,14 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
Returns:
(str): default manual download instructions
"""
- required = ('Manual download is required for {0}. '
- .format(self.spec.name) if self.manual_download else '')
- return ('{0}Refer to {1} for download instructions.'
- .format(required, self.spec.package.homepage))
+ required = (
+ "Manual download is required for {0}. ".format(self.spec.name)
+ if self.manual_download
+ else ""
+ )
+ return "{0}Refer to {1} for download instructions.".format(
+ required, self.spec.package.homepage
+ )
def do_fetch(self, mirror_only=False):
"""
@@ -1444,55 +1465,61 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
Working directory will be set to the stage directory.
"""
if not self.has_code or self.spec.external:
- tty.debug('No fetch required for {0}'.format(self.name))
+ tty.debug("No fetch required for {0}".format(self.name))
return
- checksum = spack.config.get('config:checksum')
+ checksum = spack.config.get("config:checksum")
fetch = self.stage.managed_by_spack
- if checksum and fetch and (self.version not in self.versions) \
- and (not isinstance(self.version, GitVersion)):
- tty.warn("There is no checksum on file to fetch %s safely." %
- self.spec.cformat('{name}{@version}'))
+ if (
+ checksum
+ and fetch
+ and (self.version not in self.versions)
+ and (not isinstance(self.version, GitVersion))
+ ):
+ tty.warn(
+ "There is no checksum on file to fetch %s safely."
+ % self.spec.cformat("{name}{@version}")
+ )
# Ask the user whether to skip the checksum if we're
# interactive, but just fail if non-interactive.
ck_msg = "Add a checksum or use --no-checksum to skip this check."
ignore_checksum = False
if sys.stdout.isatty():
- ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
- default=False)
+ ignore_checksum = tty.get_yes_or_no(" Fetch anyway?", default=False)
if ignore_checksum:
- tty.debug('Fetching with no checksum. {0}'
- .format(ck_msg))
+ tty.debug("Fetching with no checksum. {0}".format(ck_msg))
if not ignore_checksum:
- raise FetchError("Will not fetch %s" %
- self.spec.format('{name}{@version}'), ck_msg)
+ raise FetchError(
+ "Will not fetch %s" % self.spec.format("{name}{@version}"), ck_msg
+ )
- deprecated = spack.config.get('config:deprecated')
- if not deprecated and self.versions.get(
- self.version, {}).get('deprecated', False):
- tty.warn("{0} is deprecated and may be removed in a future Spack "
- "release.".format(
- self.spec.format('{name}{@version}')))
+ deprecated = spack.config.get("config:deprecated")
+ if not deprecated and self.versions.get(self.version, {}).get("deprecated", False):
+ tty.warn(
+ "{0} is deprecated and may be removed in a future Spack "
+ "release.".format(self.spec.format("{name}{@version}"))
+ )
# Ask the user whether to install deprecated version if we're
# interactive, but just fail if non-interactive.
- dp_msg = ("If you are willing to be a maintainer for this version "
- "of the package, submit a PR to remove `deprecated=False"
- "`, or use `--deprecated` to skip this check.")
+ dp_msg = (
+ "If you are willing to be a maintainer for this version "
+ "of the package, submit a PR to remove `deprecated=False"
+ "`, or use `--deprecated` to skip this check."
+ )
ignore_deprecation = False
if sys.stdout.isatty():
- ignore_deprecation = tty.get_yes_or_no(" Fetch anyway?",
- default=False)
+ ignore_deprecation = tty.get_yes_or_no(" Fetch anyway?", default=False)
if ignore_deprecation:
- tty.debug("Fetching deprecated version. {0}".format(
- dp_msg))
+ tty.debug("Fetching deprecated version. {0}".format(dp_msg))
if not ignore_deprecation:
- raise FetchError("Will not fetch {0}".format(
- self.spec.format('{name}{@version}')), dp_msg)
+ raise FetchError(
+ "Will not fetch {0}".format(self.spec.format("{name}{@version}")), dp_msg
+ )
self.stage.create()
err_msg = None if not self.manual_download else self.download_instr
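Both gates above follow the same pattern: warn, prompt only when the session is interactive, and otherwise refuse the fetch. A compressed standalone version of that pattern, using plain input() instead of tty.get_yes_or_no():

    import sys

    def confirm_risky_fetch(spec_str, reason, hint):
        """Warn, ask to continue when interactive, fail otherwise."""
        print("Warning: {0}: {1}".format(spec_str, reason))
        if sys.stdout.isatty() and sys.stdin.isatty():
            answer = input("  Fetch anyway? [y/N] ").strip().lower()
            if answer in ("y", "yes"):
                print("Fetching anyway. {0}".format(hint))
                return
        raise RuntimeError("Will not fetch {0}. {1}".format(spec_str, hint))

    # confirm_risky_fetch("pkg@1.0", "no checksum on file",
    #                     "Add a checksum or use --no-checksum to skip.")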
@@ -1536,43 +1563,45 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
self.do_stage()
# Package can add its own patch function.
- has_patch_fun = hasattr(self, 'patch') and callable(self.patch)
+ has_patch_fun = hasattr(self, "patch") and callable(self.patch)
# Get the patches from the spec (this is a shortcut for the MV-variant)
patches = self.spec.patches
# If there are no patches, note it.
if not patches and not has_patch_fun:
- tty.msg('No patches needed for {0}'.format(self.name))
+ tty.msg("No patches needed for {0}".format(self.name))
return
# Construct paths to special files in the archive dir used to
# keep track of whether patches were successfully applied.
archive_dir = self.stage.source_path
- good_file = os.path.join(archive_dir, '.spack_patched')
- no_patches_file = os.path.join(archive_dir, '.spack_no_patches')
- bad_file = os.path.join(archive_dir, '.spack_patch_failed')
+ good_file = os.path.join(archive_dir, ".spack_patched")
+ no_patches_file = os.path.join(archive_dir, ".spack_no_patches")
+ bad_file = os.path.join(archive_dir, ".spack_patch_failed")
# If we encounter an archive that failed to patch, restage it
# so that we can apply all the patches again.
if os.path.isfile(bad_file):
if self.stage.managed_by_spack:
- tty.debug('Patching failed last time. Restaging.')
+ tty.debug("Patching failed last time. Restaging.")
self.stage.restage()
else:
# develop specs/ DIYStages may have patch failures but
# should never be restaged
- msg = ('A patch failure was detected in %s.' % self.name +
- ' Build errors may occur due to this.')
+ msg = (
+ "A patch failure was detected in %s." % self.name
+ + " Build errors may occur due to this."
+ )
tty.warn(msg)
return
# If this file exists, then we already applied all the patches.
if os.path.isfile(good_file):
- tty.msg('Already patched {0}'.format(self.name))
+ tty.msg("Already patched {0}".format(self.name))
return
elif os.path.isfile(no_patches_file):
- tty.msg('No patches needed for {0}'.format(self.name))
+ tty.msg("No patches needed for {0}".format(self.name))
return
# Apply all the patches for specs that match this one
@@ -1581,13 +1610,13 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
try:
with fsys.working_dir(self.stage.source_path):
patch.apply(self.stage)
- tty.msg('Applied patch {0}'.format(patch.path_or_url))
+ tty.msg("Applied patch {0}".format(patch.path_or_url))
patched = True
except spack.error.SpackError as e:
tty.debug(e)
# Touch bad file if anything goes wrong.
- tty.msg('Patch %s failed.' % patch.path_or_url)
+ tty.msg("Patch %s failed." % patch.path_or_url)
fsys.touch(bad_file)
raise
@@ -1595,7 +1624,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
try:
with fsys.working_dir(self.stage.source_path):
self.patch()
- tty.msg('Ran patch() for {0}'.format(self.name))
+ tty.msg("Ran patch() for {0}".format(self.name))
patched = True
except spack.multimethod.NoSuchMethodError:
# We are running a multimethod without a default case.
@@ -1605,12 +1634,12 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
# directive, AND the patch function didn't apply, say
# no patches are needed. Otherwise, we already
# printed a message for each patch.
- tty.msg('No patches needed for {0}'.format(self.name))
+ tty.msg("No patches needed for {0}".format(self.name))
except spack.error.SpackError as e:
tty.debug(e)
# Touch bad file if anything goes wrong.
- tty.msg('patch() function failed for {0}'.format(self.name))
+ tty.msg("patch() function failed for {0}".format(self.name))
fsys.touch(bad_file)
raise
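The three marker files referenced above record the outcome of the last patching attempt so a later do_patch() can short-circuit or restage. A standalone classifier over the same filenames:

    import os

    def patch_state(archive_dir):
        """Report a staged source tree's patch status from its marker files."""
        markers = [
            (".spack_patched", "already patched"),
            (".spack_no_patches", "no patches needed"),
            (".spack_patch_failed", "previous patching failed; restage"),
        ]
        for fname, meaning in markers:
            if os.path.isfile(os.path.join(archive_dir, fname)):
                return meaning
        return "not patched yet"

    print(patch_state("/tmp"))  # "not patched yet" unless markers exist there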
@@ -1681,11 +1710,11 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
env = spack.environment.active_environment()
from_local_sources = env and env.is_develop(self.spec)
if self.has_code and not self.spec.external and not from_local_sources:
- message = 'Missing a source id for {s.name}@{s.version}'
+ message = "Missing a source id for {s.name}@{s.version}"
tty.warn(message.format(s=self))
- hash_content.append(''.encode('utf-8'))
+ hash_content.append("".encode("utf-8"))
else:
- hash_content.append(source_id.encode('utf-8'))
+ hash_content.append(source_id.encode("utf-8"))
# patch sha256's
# Only include these if they've been assigned by the concretizer.
@@ -1693,23 +1722,20 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
# we have to call package_hash *before* marking specs concrete
if self.spec._patches_assigned():
hash_content.extend(
- ':'.join((p.sha256, str(p.level))).encode('utf-8')
- for p in self.spec.patches
+ ":".join((p.sha256, str(p.level))).encode("utf-8") for p in self.spec.patches
)
# package.py contents
- hash_content.append(package_hash(self.spec, source=content).encode('utf-8'))
+ hash_content.append(package_hash(self.spec, source=content).encode("utf-8"))
# put it all together and encode as base32
b32_hash = base64.b32encode(
- hashlib.sha256(
- bytes().join(sorted(hash_content))
- ).digest()
+ hashlib.sha256(bytes().join(sorted(hash_content))).digest()
).lower()
# convert from bytes if running python 3
if sys.version_info[0] >= 3:
- b32_hash = b32_hash.decode('utf-8')
+ b32_hash = b32_hash.decode("utf-8")
return b32_hash
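The final encoding step above is a sha256 over the sorted content pieces, rendered as lowercase base32. A self-contained rerun of just that step, with placeholder content:

    import base64
    import hashlib

    hash_content = [b"source-id", b"abc123:1", b"package.py-hash"]  # placeholders

    digest = hashlib.sha256(b"".join(sorted(hash_content))).digest()
    b32_hash = base64.b32encode(digest).lower()
    if not isinstance(b32_hash, str):  # bytes under Python 3
        b32_hash = b32_hash.decode("utf-8")
    print(b32_hash)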
@@ -1726,14 +1752,14 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
make = copy.deepcopy(inspect.getmodule(self).make)
# Use English locale for missing target message comparison
- make.add_default_env('LC_ALL', 'C')
+ make.add_default_env("LC_ALL", "C")
# Check if we have a Makefile
- for makefile in ['GNUmakefile', 'Makefile', 'makefile']:
+ for makefile in ["GNUmakefile", "Makefile", "makefile"]:
if os.path.exists(makefile):
break
else:
- tty.debug('No Makefile found in the build directory')
+ tty.debug("No Makefile found in the build directory")
return False
# Check if 'target' is a valid target.
@@ -1755,17 +1781,16 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
]
kwargs = {
- 'fail_on_error': False,
- 'output': os.devnull,
- 'error': str,
+ "fail_on_error": False,
+ "output": os.devnull,
+ "error": str,
}
- stderr = make('-n', target, **kwargs)
+ stderr = make("-n", target, **kwargs)
for missing_target_msg in missing_target_msgs:
if missing_target_msg.format(target) in stderr:
- tty.debug("Target '{0}' not found in {1}"
- .format(target, makefile))
+ tty.debug("Target '{0}' not found in {1}".format(target, makefile))
return False
return True
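The check above dry-runs make and scans stderr for a missing-target complaint. A rough standalone equivalent with subprocess (the error strings below are common GNU/BSD make wordings, not the exact list this module uses, which is elided from the hunk):

    import os
    import subprocess

    def make_has_target(target, build_dir="."):
        """True if `make -n <target>` does not complain about a missing target."""
        if not any(os.path.exists(os.path.join(build_dir, m))
                   for m in ("GNUmakefile", "Makefile", "makefile")):
            return False
        proc = subprocess.run(["make", "-n", target], cwd=build_dir,
                              capture_output=True, text=True)
        missing = ["No rule to make target", "don't know how to make"]
        return not any(msg in proc.stderr for msg in missing)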
@@ -1792,21 +1817,19 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
ninja = inspect.getmodule(self).ninja
# Check if we have a Ninja build script
- if not os.path.exists('build.ninja'):
- tty.debug('No Ninja build script found in the build directory')
+ if not os.path.exists("build.ninja"):
+ tty.debug("No Ninja build script found in the build directory")
return False
# Get a list of all targets in the Ninja build script
# https://ninja-build.org/manual.html#_extra_tools
- all_targets = ninja('-t', 'targets', 'all', output=str).split('\n')
+ all_targets = ninja("-t", "targets", "all", output=str).split("\n")
# Check if 'target' is a valid target
- matches = [line for line in all_targets
- if line.startswith(target + ':')]
+ matches = [line for line in all_targets if line.startswith(target + ":")]
if not matches:
- tty.debug("Target '{0}' not found in build.ninja"
- .format(target))
+ tty.debug("Target '{0}' not found in build.ninja".format(target))
return False
return True
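Likewise for Ninja, the target list comes from the `-t targets` tool and a target exists when some output line starts with '<target>:'. A standalone check:

    import subprocess

    def ninja_has_target(target, build_dir="."):
        """True if 'target' appears in `ninja -t targets all` output."""
        out = subprocess.run(["ninja", "-t", "targets", "all"], cwd=build_dir,
                             capture_output=True, text=True).stdout
        return any(line.startswith(target + ":") for line in out.splitlines())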
@@ -1847,8 +1870,8 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
return resources
def _resource_stage(self, resource):
- pieces = ['resource', resource.name, self.spec.dag_hash()]
- resource_stage_folder = '-'.join(pieces)
+ pieces = ["resource", resource.name, self.spec.dag_hash()]
+ resource_stage_folder = "-".join(pieces)
return resource_stage_folder
@contextlib.contextmanager
@@ -1897,9 +1920,9 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
# Non-transitive dev specs need to keep the dev stage and be built from
# source every time. Transitive ones just need to be built from source.
- dev_path_var = self.spec.variants.get('dev_path', None)
+ dev_path_var = self.spec.variants.get("dev_path", None)
if dev_path_var:
- kwargs['keep_stage'] = True
+ kwargs["keep_stage"] = True
builder = PackageInstaller([(self, kwargs)])
builder.install()
@@ -1937,14 +1960,13 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
self.tested_file = self.test_suite.tested_file_for_spec(self.spec)
pkg_id = self.test_suite.test_pkg_id(self.spec)
else:
- self.test_log_file = fsys.join_path(
- self.stage.path, _spack_install_test_log)
- pkg_id = self.spec.format('{name}-{version}-{hash:7}')
+ self.test_log_file = fsys.join_path(self.stage.path, _spack_install_test_log)
+ pkg_id = self.spec.format("{name}-{version}-{hash:7}")
fsys.touch(self.test_log_file) # Otherwise log_parse complains
with tty.log.log_output(self.test_log_file, verbose) as logger:
with logger.force_echo():
- tty.msg('Testing package {0}'.format(pkg_id))
+ tty.msg("Testing package {0}".format(pkg_id))
# use debug print levels for log file to record commands
old_debug = tty.is_debug()
@@ -1959,30 +1981,36 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
def do_test(self, dirty=False, externals=False):
if self.test_requires_compiler:
compilers = spack.compilers.compilers_for_spec(
- self.spec.compiler, arch_spec=self.spec.architecture)
+ self.spec.compiler, arch_spec=self.spec.architecture
+ )
if not compilers:
- tty.error('Skipping tests for package %s\n' %
- self.spec.format('{name}-{version}-{hash:7}') +
- 'Package test requires missing compiler %s' %
- self.spec.compiler)
+ tty.error(
+ "Skipping tests for package %s\n"
+ % self.spec.format("{name}-{version}-{hash:7}")
+ + "Package test requires missing compiler %s" % self.spec.compiler
+ )
return
- kwargs = {
- 'dirty': dirty, 'fake': False, 'context': 'test',
- 'externals': externals
- }
+ kwargs = {"dirty": dirty, "fake": False, "context": "test", "externals": externals}
if tty.is_verbose():
- kwargs['verbose'] = True
- spack.build_environment.start_build_process(
- self, test_process, kwargs)
+ kwargs["verbose"] = True
+ spack.build_environment.start_build_process(self, test_process, kwargs)
def test(self):
# Defer tests to virtual and concrete packages
pass
- def run_test(self, exe, options=[], expected=[], status=0,
- installed=False, purpose='', skip_missing=False,
- work_dir=None):
+ def run_test(
+ self,
+ exe,
+ options=[],
+ expected=[],
+ status=0,
+ installed=False,
+ purpose="",
+ skip_missing=False,
+ work_dir=None,
+ ):
"""Run the test and confirm the expected results are obtained
Log any failures and continue, they will be re-raised later
@@ -2001,24 +2029,22 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
in the install prefix bin directory or the provided work_dir
work_dir (str or None): path to the smoke test directory
"""
- wdir = '.' if work_dir is None else work_dir
+ wdir = "." if work_dir is None else work_dir
with fsys.working_dir(wdir, create=True):
try:
runner = which(exe)
if runner is None and skip_missing:
return
- assert runner is not None, \
- "Failed to find executable '{0}'".format(exe)
+ assert runner is not None, "Failed to find executable '{0}'".format(exe)
- self._run_test_helper(
- runner, options, expected, status, installed, purpose)
+ self._run_test_helper(runner, options, expected, status, installed, purpose)
print("PASSED")
return True
except BaseException as e:
# print a summary of the error to the log file
# so that cdash and junit reporters know about it
exc_type, _, tb = sys.exc_info()
- print('FAILED: {0}'.format(e))
+ print("FAILED: {0}".format(e))
import traceback
# remove the current call frame to exclude the extract_stack
@@ -2033,7 +2059,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
for i, entry in enumerate(stack):
filename, lineno, function, text = entry
if spack.repo.is_package_file(filename):
- with open(filename, 'r') as f:
+ with open(filename, "r") as f:
lines = f.readlines()
new_lineno = lineno - 2
text = lines[new_lineno]
@@ -2042,44 +2068,39 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
# Format the stack to print and print it
out = traceback.format_list(stack)
for line in out:
- print(line.rstrip('\n'))
+ print(line.rstrip("\n"))
if exc_type is spack.util.executable.ProcessError:
out = six.StringIO()
spack.build_environment.write_log_summary(
- out, 'test', self.test_log_file, last=1)
+ out, "test", self.test_log_file, last=1
+ )
m = out.getvalue()
else:
# We're below the package context, so get context from
# stack instead of from traceback.
# The traceback is truncated here, so we can't use it to
# traverse the stack.
- m = '\n'.join(
- spack.build_environment.get_package_context(tb)
- )
+ m = "\n".join(spack.build_environment.get_package_context(tb))
exc = e # e is deleted after this block
# If we fail fast, raise another error
- if spack.config.get('config:fail_fast', False):
+ if spack.config.get("config:fail_fast", False):
raise TestFailure([(exc, m)])
else:
self.test_failures.append((exc, m))
return False
- def _run_test_helper(self, runner, options, expected, status, installed,
- purpose):
+ def _run_test_helper(self, runner, options, expected, status, installed, purpose):
status = [status] if isinstance(status, six.integer_types) else status
- expected = [expected] if isinstance(expected, six.string_types) else \
- expected
- options = [options] if isinstance(options, six.string_types) else \
- options
+ expected = [expected] if isinstance(expected, six.string_types) else expected
+ options = [options] if isinstance(options, six.string_types) else options
if purpose:
tty.msg(purpose)
else:
- tty.debug('test: {0}: expect command status in {1}'
- .format(runner.name, status))
+ tty.debug("test: {0}: expect command status in {1}".format(runner.name, status))
if installed:
msg = "Executable '{0}' expected in prefix".format(runner.name)
@@ -2089,18 +2110,17 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
try:
output = runner(*options, output=str.split, error=str.split)
- assert 0 in status, \
- 'Expected {0} execution to fail'.format(runner.name)
+ assert 0 in status, "Expected {0} execution to fail".format(runner.name)
except ProcessError as err:
output = str(err)
- match = re.search(r'exited with status ([0-9]+)', output)
+ match = re.search(r"exited with status ([0-9]+)", output)
if not (match and int(match.group(1)) in status):
raise
for check in expected:
- cmd = ' '.join([runner.name] + options)
+ cmd = " ".join([runner.name] + options)
msg = "Expected '{0}' to match output of `{1}`".format(check, cmd)
- msg += '\n\nOutput: {0}'.format(output)
+ msg += "\n\nOutput: {0}".format(output)
assert re.search(check, output), msg
def unit_test_check(self):
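For context, run_test() with the keyword arguments reformatted above is what a package's own test() method calls during `spack test run`. A hypothetical package fragment (the package name, executable, and expected pattern are invented; the import assumes this module's Package class is in scope the way it would be in a package.py):

    from spack.package import Package  # assumed import, for illustration

    class Mylib(Package):  # hypothetical package
        def test(self):
            # expect `mylib-config --version` to exit 0 and print a version
            self.run_test(
                "mylib-config",
                options=["--version"],
                expected=[r"\d+\.\d+\.\d+"],
                status=0,
                installed=True,
                purpose="check that mylib-config reports a version",
            )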
@@ -2131,16 +2151,16 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
abs_path = os.path.join(self.prefix, path)
if not predicate(abs_path):
raise InstallError(
- "Install failed for %s. No such %s in prefix: %s" %
- (self.name, filetype, path))
+ "Install failed for %s. No such %s in prefix: %s"
+ % (self.name, filetype, path)
+ )
- check_paths(self.sanity_check_is_file, 'file', os.path.isfile)
- check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)
+ check_paths(self.sanity_check_is_file, "file", os.path.isfile)
+ check_paths(self.sanity_check_is_dir, "directory", os.path.isdir)
ignore_file = match_predicate(spack.store.layout.hidden_file_regexes)
if all(map(ignore_file, os.listdir(self.prefix))):
- raise InstallError(
- "Install failed for %s. Nothing was installed!" % self.name)
+ raise InstallError("Install failed for %s. Nothing was installed!" % self.name)
def apply_macos_rpath_fixups(self):
"""On Darwin, make installed libraries more easily relocatable.
@@ -2153,10 +2173,11 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
It should be added as a @run_after to packaging systems (or individual
packages) that do not install relocatable libraries by default.
"""
- if 'platform=darwin' not in self.spec:
+ if "platform=darwin" not in self.spec:
return
from spack.relocate import fixup_macos_rpaths
+
fixup_macos_rpaths(self.spec)
@property
@@ -2337,8 +2358,8 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
def flags_to_build_system_args(self, flags):
# Takes flags as a dict name: list of values
if any(v for v in flags.values()):
- msg = 'The {0} build system'.format(self.__class__.__name__)
- msg += ' cannot take command line arguments for compiler flags'
+ msg = "The {0} build system".format(self.__class__.__name__)
+ msg += " cannot take command line arguments for compiler flags"
raise NotImplementedError(msg)
@staticmethod
@@ -2350,12 +2371,10 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
if specs:
if deprecator:
spack.store.db.deprecate(specs[0], deprecator)
- tty.debug('Deprecating stale DB entry for {0}'
- .format(spec.short_spec))
+ tty.debug("Deprecating stale DB entry for {0}".format(spec.short_spec))
else:
spack.store.db.remove(specs[0])
- tty.debug('Removed stale DB entry for {0}'
- .format(spec.short_spec))
+ tty.debug("Removed stale DB entry for {0}".format(spec.short_spec))
return
else:
raise InstallError(str(spec) + " is not installed.")
@@ -2363,7 +2382,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
if not force:
dependents = spack.store.db.installed_relatives(
spec,
- direction='parents',
+ direction="parents",
transitive=True,
deptype=("link", "run"),
)
@@ -2387,10 +2406,10 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
error_msg = (
"One or more pre_uninstall hooks have failed"
" for {0}, but Spack is continuing with the"
- " uninstall".format(str(spec)))
+ " uninstall".format(str(spec))
+ )
if isinstance(error, spack.error.SpackError):
- error_msg += (
- "\n\nError message: {0}".format(str(error)))
+ error_msg += "\n\nError message: {0}".format(str(error))
tty.warn(error_msg)
# Note that if the uninstall succeeds then we won't be
# seeing this error again and won't have another chance
@@ -2400,7 +2419,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
# Uninstalling in Spack only requires removing the prefix.
if not spec.external:
- msg = 'Deleting package prefix [{0}]'
+ msg = "Deleting package prefix [{0}]"
tty.debug(msg.format(spec.short_spec))
# test if spec is already deprecated, not whether we want to
# deprecate it now
@@ -2408,11 +2427,11 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
spack.store.layout.remove_install_directory(spec, deprecated)
# Delete DB entry
if deprecator:
- msg = 'deprecating DB entry [{0}] in favor of [{1}]'
+ msg = "deprecating DB entry [{0}] in favor of [{1}]"
tty.debug(msg.format(spec.short_spec, deprecator.short_spec))
spack.store.db.deprecate(spec, deprecator)
else:
- msg = 'Deleting DB entry [{0}]'
+ msg = "Deleting DB entry [{0}]"
tty.debug(msg.format(spec.short_spec))
spack.store.db.remove(spec)
@@ -2427,12 +2446,13 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
error_msg = (
"One or more post-uninstallation hooks failed for"
" {0}, but the prefix has been removed (if it is not"
- " external).".format(str(spec)))
+ " external).".format(str(spec))
+ )
tb_msg = traceback.format_exc()
error_msg += "\n\nThe error:\n\n{0}".format(tb_msg)
tty.warn(error_msg)
- tty.msg('Successfully uninstalled {0}'.format(spec.short_spec))
+ tty.msg("Successfully uninstalled {0}".format(spec.short_spec))
def do_uninstall(self, force=False):
"""Uninstall this package by spec."""
@@ -2445,28 +2465,25 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
# Check whether package to deprecate has active extensions
if self.extendable:
- view = spack.filesystem_view.YamlFilesystemView(spec.prefix,
- spack.store.layout)
+ view = spack.filesystem_view.YamlFilesystemView(spec.prefix, spack.store.layout)
active_exts = view.extensions_layout.extension_map(spec).values()
if active_exts:
- short = spec.format('{name}/{hash:7}')
+ short = spec.format("{name}/{hash:7}")
m = "Spec %s has active extensions\n" % short
for active in active_exts:
- m += ' %s\n' % active.format('{name}/{hash:7}')
+ m += " %s\n" % active.format("{name}/{hash:7}")
m += "Deactivate extensions before deprecating %s" % short
tty.die(m)
# Check whether package to deprecate is an active extension
if self.is_extension:
extendee = self.extendee_spec
- view = spack.filesystem_view.YamlFilesystemView(extendee.prefix,
- spack.store.layout)
+ view = spack.filesystem_view.YamlFilesystemView(extendee.prefix, spack.store.layout)
if self.is_activated(view):
- short = spec.format('{name}/{hash:7}')
- short_ext = extendee.format('{name}/{hash:7}')
- msg = "Spec %s is an active extension of %s\n" % (short,
- short_ext)
+ short = spec.format("{name}/{hash:7}")
+ short_ext = extendee.format("{name}/{hash:7}")
+ msg = "Spec %s is an active extension of %s\n" % (short, short_ext)
msg += "Deactivate %s to be able to deprecate it" % short
tty.die(msg)
@@ -2477,14 +2494,12 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
old_deprecator = spack.store.db.deprecator(spec)
if old_deprecator:
# Find this spec's yaml file from its old deprecation
- self_yaml = spack.store.layout.deprecated_file_path(spec,
- old_deprecator)
+ self_yaml = spack.store.layout.deprecated_file_path(spec, old_deprecator)
else:
self_yaml = spack.store.layout.spec_file_path(spec)
# copy spec metadata to "deprecated" dir of deprecator
- depr_yaml = spack.store.layout.deprecated_file_path(spec,
- deprecator)
+ depr_yaml = spack.store.layout.deprecated_file_path(spec, deprecator)
fsys.mkdirp(os.path.dirname(depr_yaml))
shutil.copy2(self_yaml, depr_yaml)
@@ -2509,13 +2524,11 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
extendee_package._check_extendable()
if not self.extendee_spec.installed:
- raise ActivationError(
- "Can only (de)activate extensions for installed packages.")
+ raise ActivationError("Can only (de)activate extensions for installed packages.")
if not self.spec.installed:
raise ActivationError("Extensions must first be installed.")
if self.extendee_spec.name not in self.extendees:
- raise ActivationError("%s does not extend %s!" %
- (self.name, self.extendee.name))
+ raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name))
def do_activate(self, view=None, with_dependencies=True, verbose=True):
"""Called on an extension to invoke the extendee's activate method.
@@ -2524,19 +2537,20 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
activate() directly.
"""
if verbose:
- tty.msg('Activating extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+ tty.msg(
+ "Activating extension {0} for {1}".format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec
+ )
+ )
self._sanity_check_extension()
if not view:
- view = YamlFilesystemView(
- self.extendee_spec.prefix, spack.store.layout)
+ view = YamlFilesystemView(self.extendee_spec.prefix, spack.store.layout)
extensions_layout = view.extensions_layout
try:
- extensions_layout.check_extension_conflict(
- self.extendee_spec, self.spec)
+ extensions_layout.check_extension_conflict(self.extendee_spec, self.spec)
except spack.directory_layout.ExtensionAlreadyInstalledError as e:
# already installed, let caller know
tty.msg(e.message)
@@ -2547,21 +2561,26 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
for spec in self.dependency_activations():
if not spec.package.is_activated(view):
spec.package.do_activate(
- view, with_dependencies=with_dependencies,
- verbose=verbose)
+ view, with_dependencies=with_dependencies, verbose=verbose
+ )
- self.extendee_spec.package.activate(
- self, view, **self.extendee_args)
+ self.extendee_spec.package.activate(self, view, **self.extendee_args)
extensions_layout.add_extension(self.extendee_spec, self.spec)
if verbose:
- tty.debug('Activated extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+ tty.debug(
+ "Activated extension {0} for {1}".format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec
+ )
+ )
def dependency_activations(self):
- return (spec for spec in self.spec.traverse(root=False, deptype='run')
- if spec.package.extends(self.extendee_spec))
+ return (
+ spec
+ for spec in self.spec.traverse(root=False, deptype="run")
+ if spec.package.extends(self.extendee_spec)
+ )
def activate(self, extension, view, **kwargs):
"""
@@ -2574,7 +2593,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
Spack internals (commands, hooks, etc.) should call
do_activate() method so that proper checks are always executed.
"""
- view.merge(extension.spec, ignore=kwargs.get('ignore', None))
+ view.merge(extension.spec, ignore=kwargs.get("ignore", None))
def do_deactivate(self, view=None, **kwargs):
"""Remove this extension package from the specified view. Called
@@ -2584,52 +2603,56 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
package instead of raising an error.
"""
self._sanity_check_extension()
- force = kwargs.get('force', False)
- verbose = kwargs.get('verbose', True)
- remove_dependents = kwargs.get('remove_dependents', False)
+ force = kwargs.get("force", False)
+ verbose = kwargs.get("verbose", True)
+ remove_dependents = kwargs.get("remove_dependents", False)
if verbose:
- tty.msg('Deactivating extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+ tty.msg(
+ "Deactivating extension {0} for {1}".format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec
+ )
+ )
if not view:
- view = YamlFilesystemView(
- self.extendee_spec.prefix, spack.store.layout)
+ view = YamlFilesystemView(self.extendee_spec.prefix, spack.store.layout)
extensions_layout = view.extensions_layout
# Allow a force deactivate to happen. This can unlink
# spurious files if something was corrupted.
if not force:
- extensions_layout.check_activated(
- self.extendee_spec, self.spec)
+ extensions_layout.check_activated(self.extendee_spec, self.spec)
- activated = extensions_layout.extension_map(
- self.extendee_spec)
+ activated = extensions_layout.extension_map(self.extendee_spec)
for name, aspec in activated.items():
if aspec == self.spec:
continue
- for dep in aspec.traverse(deptype='run'):
+ for dep in aspec.traverse(deptype="run"):
if self.spec == dep:
if remove_dependents:
aspec.package.do_deactivate(**kwargs)
else:
- msg = ('Cannot deactivate {0} because {1} is '
- 'activated and depends on it')
- raise ActivationError(msg.format(
- self.spec.cshort_spec, aspec.cshort_spec))
+ msg = (
+ "Cannot deactivate {0} because {1} is "
+ "activated and depends on it"
+ )
+ raise ActivationError(
+ msg.format(self.spec.cshort_spec, aspec.cshort_spec)
+ )
- self.extendee_spec.package.deactivate(
- self, view, **self.extendee_args)
+ self.extendee_spec.package.deactivate(self, view, **self.extendee_args)
# redundant activation check -- makes SURE the spec is not
# still activated even if something was wrong above.
if self.is_activated(view):
- extensions_layout.remove_extension(
- self.extendee_spec, self.spec)
+ extensions_layout.remove_extension(self.extendee_spec, self.spec)
if verbose:
- tty.debug('Deactivated extension {0} for {1}'.format(
- self.spec.cshort_spec, self.extendee_spec.cshort_spec))
+ tty.debug(
+ "Deactivated extension {0} for {1}".format(
+ self.spec.cshort_spec, self.extendee_spec.cshort_spec
+ )
+ )
def deactivate(self, extension, view, **kwargs):
"""
@@ -2640,7 +2663,7 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
should call do_deactivate() method so that proper checks are
always executed.
"""
- view.unmerge(extension.spec, ignore=kwargs.get('ignore', None))
+ view.unmerge(extension.spec, ignore=kwargs.get("ignore", None))
def view(self):
"""Create a view with the prefix of this package as the root.
@@ -2663,12 +2686,12 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
@classmethod
def format_doc(cls, **kwargs):
"""Wrap doc string at 72 characters and format nicely"""
- indent = kwargs.get('indent', 0)
+ indent = kwargs.get("indent", 0)
if not cls.__doc__:
return ""
- doc = re.sub(r'\s+', ' ', cls.__doc__)
+ doc = re.sub(r"\s+", " ", cls.__doc__)
lines = textwrap.wrap(doc, 72)
results = six.StringIO()
for line in lines:
@@ -2685,16 +2708,16 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
list: a list of URLs
"""
urls = []
- if hasattr(self, 'url') and self.url:
+ if hasattr(self, "url") and self.url:
urls.append(self.url)
# fetch from first entry in urls to save time
- if hasattr(self, 'urls') and self.urls:
+ if hasattr(self, "urls") and self.urls:
urls.append(self.urls[0])
for args in self.versions.values():
- if 'url' in args:
- urls.append(args['url'])
+ if "url" in args:
+ urls.append(args["url"])
return urls
def fetch_remote_versions(self, concurrency=128):
@@ -2717,18 +2740,15 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
reference_package=self,
)
except spack.util.web.NoNetworkConnectionError as e:
- tty.die("Package.fetch_versions couldn't connect to:", e.url,
- e.message)
+ tty.die("Package.fetch_versions couldn't connect to:", e.url, e.message)
@property
def rpath(self):
"""Get the rpath this package links with, as a list of paths."""
rpaths = [self.prefix.lib, self.prefix.lib64]
- deps = self.spec.dependencies(deptype='link')
- rpaths.extend(d.prefix.lib for d in deps
- if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in deps
- if os.path.isdir(d.prefix.lib64))
+ deps = self.spec.dependencies(deptype="link")
+ rpaths.extend(d.prefix.lib for d in deps if os.path.isdir(d.prefix.lib))
+ rpaths.extend(d.prefix.lib64 for d in deps if os.path.isdir(d.prefix.lib64))
return rpaths
@property
@@ -2738,31 +2758,28 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
- def _run_test_callbacks(self, method_names, callback_type='install'):
+ def _run_test_callbacks(self, method_names, callback_type="install"):
"""Tries to call all of the listed methods, returning immediately
- if the list is None."""
+ if the list is None."""
if method_names is None:
return
- fail_fast = spack.config.get('config:fail_fast', False)
+ fail_fast = spack.config.get("config:fail_fast", False)
with self._setup_test(verbose=False, externals=False) as logger:
# Report running each of the methods in the build log
- print_test_message(
- logger, 'Running {0}-time tests'.format(callback_type), True)
+ print_test_message(logger, "Running {0}-time tests".format(callback_type), True)
for name in method_names:
try:
fn = getattr(self, name)
- msg = 'RUN-TESTS: {0}-time tests [{1}]' \
- .format(callback_type, name),
+ msg = ("RUN-TESTS: {0}-time tests [{1}]".format(callback_type, name),)
print_test_message(logger, msg, True)
fn()
except AttributeError as e:
- msg = 'RUN-TESTS: method not implemented [{0}]' \
- .format(name),
+ msg = ("RUN-TESTS: method not implemented [{0}]".format(name),)
print_test_message(logger, msg, True)
self.test_failures.append((e, msg))
@@ -2778,14 +2795,14 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
"""Tries to call all the methods that are listed in the attribute
``build_time_test_callbacks`` if ``self.run_tests is True``.
"""
- self._run_test_callbacks(self.build_time_test_callbacks, 'build')
+ self._run_test_callbacks(self.build_time_test_callbacks, "build")
@on_package_attributes(run_tests=True)
def _run_default_install_time_test_callbacks(self):
"""Tries to call all the methods that are listed in the attribute
``install_time_test_callbacks`` if ``self.run_tests is True``.
"""
- self._run_test_callbacks(self.install_time_test_callbacks, 'install')
+ self._run_test_callbacks(self.install_time_test_callbacks, "install")
def has_test_method(pkg):
@@ -2799,11 +2816,10 @@ def has_test_method(pkg):
``False``
"""
if not inspect.isclass(pkg):
- tty.die('{0}: is not a class, it is {1}'.format(pkg, type(pkg)))
+ tty.die("{0}: is not a class, it is {1}".format(pkg, type(pkg)))
- return (
- (issubclass(pkg, PackageBase) and pkg.test != PackageBase.test) or
- (isinstance(pkg, PackageBase) and pkg.test.__func__ != PackageBase.test)
+ return (issubclass(pkg, PackageBase) and pkg.test != PackageBase.test) or (
+ isinstance(pkg, PackageBase) and pkg.test.__func__ != PackageBase.test
)
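The class-versus-instance check above boils down to "was test overridden below PackageBase". A minimal standalone model of the same idea:

    class Base:
        def test(self):
            pass

    class WithTest(Base):
        def test(self):
            print("real test")

    class WithoutTest(Base):
        pass

    def has_test_method(cls):
        """True when 'test' was overridden somewhere below Base."""
        return issubclass(cls, Base) and cls.test is not Base.test

    print(has_test_method(WithTest))     # True
    print(has_test_method(WithoutTest))  # False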
@@ -2816,30 +2832,27 @@ def print_test_message(logger, msg, verbose):
def test_process(pkg, kwargs):
- verbose = kwargs.get('verbose', False)
- externals = kwargs.get('externals', False)
+ verbose = kwargs.get("verbose", False)
+ externals = kwargs.get("externals", False)
with pkg._setup_test(verbose, externals) as logger:
if pkg.spec.external and not externals:
- print_test_message(
- logger, 'Skipped tests for external package', verbose)
+ print_test_message(logger, "Skipped tests for external package", verbose)
return
# run test methods from the package and all virtuals it
# provides; virtuals have to be deduped by name
- v_names = list(set([vspec.name
- for vspec in pkg.virtuals_provided]))
+ v_names = list(set([vspec.name for vspec in pkg.virtuals_provided]))
# hack for compilers that are not dependencies (yet)
# TODO: this all eventually goes away
- c_names = ('gcc', 'intel', 'intel-parallel-studio', 'pgi')
+ c_names = ("gcc", "intel", "intel-parallel-studio", "pgi")
if pkg.name in c_names:
- v_names.extend(['c', 'cxx', 'fortran'])
- if pkg.spec.satisfies('llvm+clang'):
- v_names.extend(['c', 'cxx'])
+ v_names.extend(["c", "cxx", "fortran"])
+ if pkg.spec.satisfies("llvm+clang"):
+ v_names.extend(["c", "cxx"])
- test_specs = [pkg.spec] + [spack.spec.Spec(v_name)
- for v_name in sorted(v_names)]
+ test_specs = [pkg.spec] + [spack.spec.Spec(v_name) for v_name in sorted(v_names)]
ran_actual_test_function = False
try:
@@ -2856,15 +2869,13 @@ def test_process(pkg, kwargs):
if spec.concrete:
cache_source = spec_pkg.install_test_root
cache_dir = pkg.test_suite.current_test_cache_dir
- if (os.path.isdir(cache_source) and
- not os.path.exists(cache_dir)):
+ if os.path.isdir(cache_source) and not os.path.exists(cache_dir):
fsys.install_tree(cache_source, cache_dir)
# copy test data into test data dir
data_source = Prefix(spec_pkg.package_dir).test
data_dir = pkg.test_suite.current_test_data_dir
- if (os.path.isdir(data_source) and
- not os.path.exists(data_dir)):
+ if os.path.isdir(data_source) and not os.path.exists(data_dir):
# We assume data dir is used read-only
# maybe enforce this later
shutil.copytree(data_source, data_dir)
@@ -2879,8 +2890,8 @@ def test_process(pkg, kwargs):
# since they do not contribute to package testing.
source = (inspect.getsource(test_fn)).splitlines()[1:]
lines = (ln.strip() for ln in source)
- statements = [ln for ln in lines if not ln.startswith('#')]
- if len(statements) > 0 and statements[0] == 'pass':
+ statements = [ln for ln in lines if not ln.startswith("#")]
+ if len(statements) > 0 and statements[0] == "pass":
continue
# Run the tests
@@ -2900,7 +2911,7 @@ def test_process(pkg, kwargs):
if ran_actual_test_function:
fsys.touch(pkg.tested_file)
else:
- print_test_message(logger, 'No tests to run', verbose)
+ print_test_message(logger, "No tests to run", verbose)
inject_flags = PackageBase.inject_flags
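One detail worth calling out from the hunk above: a test method whose body is only comments plus a bare `pass` is treated as "nothing to run". The same source inspection can be reproduced in isolation:

    import inspect

    class Demo:
        def test(self):
            # a stub: nothing to run
            pass

    def is_stub(test_fn):
        """True if the body is only comments and a single 'pass'."""
        source = inspect.getsource(test_fn).splitlines()[1:]  # drop the def line
        lines = (ln.strip() for ln in source)
        statements = [ln for ln in lines if ln and not ln.startswith("#")]
        return statements == ["pass"]

    print(is_stub(Demo.test))  # True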
@@ -2910,12 +2921,13 @@ build_system_flags = PackageBase.build_system_flags
class BundlePackage(PackageBase):
"""General purpose bundle, or no-code, package class."""
+
#: There are no phases by default but the property is required to support
#: post-install hooks (e.g., for module generation).
phases = [] # type: List[str]
#: This attribute is used in UI queries that require to know which
#: build-system class we are using
- build_system_class = 'BundlePackage'
+ build_system_class = "BundlePackage"
#: Bundle packages do not have associated source or binary code.
has_code = False
@@ -2925,16 +2937,17 @@ class Package(PackageBase):
"""General purpose class with a single ``install``
phase that needs to be coded by packagers.
"""
+
#: The one and only phase
- phases = ['install']
+ phases = ["install"]
#: This attribute is used in UI queries that require to know which
#: build-system class we are using
- build_system_class = 'Package'
+ build_system_class = "Package"
# This will be used as a registration decorator in user
# packages, if need be
- run_after('install')(PackageBase.sanity_check_prefix)
+ run_after("install")(PackageBase.sanity_check_prefix)
# On macOS, force rpaths for shared library IDs and remove duplicate rpaths
- run_after('install')(PackageBase.apply_macos_rpath_fixups)
+ run_after("install")(PackageBase.apply_macos_rpath_fixups)
def install_dependency_symlinks(pkg, spec, prefix):
@@ -2952,10 +2965,10 @@ def install_dependency_symlinks(pkg, spec, prefix):
def use_cray_compiler_names():
"""Compiler names for builds that rely on cray compiler names."""
- os.environ['CC'] = 'cc'
- os.environ['CXX'] = 'CC'
- os.environ['FC'] = 'ftn'
- os.environ['F77'] = 'ftn'
+ os.environ["CC"] = "cc"
+ os.environ["CXX"] = "CC"
+ os.environ["FC"] = "ftn"
+ os.environ["F77"] = "ftn"
def flatten_dependencies(spec, flat_dir):
@@ -2966,13 +2979,13 @@ def flatten_dependencies(spec, flat_dir):
dep_path = spack.store.layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
- os.mkdir(flat_dir + '/' + name)
+ os.mkdir(flat_dir + "/" + name)
- conflict = dep_files.find_conflict(flat_dir + '/' + name)
+ conflict = dep_files.find_conflict(flat_dir + "/" + name)
if conflict:
raise DependencyConflictError(conflict)
- dep_files.merge(flat_dir + '/' + name)
+ dep_files.merge(flat_dir + "/" + name)
def possible_dependencies(*pkg_or_spec, **kwargs):
@@ -2990,10 +3003,7 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
pos = spack.spec.Spec(pos)
if spack.repo.path.is_virtual(pos.name):
- packages.extend(
- p.package_class
- for p in spack.repo.path.providers_for(pos.name)
- )
+ packages.extend(p.package_class for p in spack.repo.path.providers_for(pos.name))
continue
else:
packages.append(pos.package_class)
@@ -3014,33 +3024,35 @@ class FetchError(spack.error.SpackError):
class PackageStillNeededError(InstallError):
"""Raised when package is still needed by another on uninstall."""
+
def __init__(self, spec, dependents):
- super(PackageStillNeededError, self).__init__("Cannot uninstall %s" %
- spec)
+ super(PackageStillNeededError, self).__init__("Cannot uninstall %s" % spec)
self.spec = spec
self.dependents = dependents
class PackageError(spack.error.SpackError):
"""Raised when something is wrong with a package definition."""
+
def __init__(self, message, long_msg=None):
super(PackageError, self).__init__(message, long_msg)
class PackageVersionError(PackageError):
"""Raised when a version URL cannot automatically be determined."""
+
def __init__(self, version):
super(PackageVersionError, self).__init__(
"Cannot determine a URL automatically for version %s" % version,
- "Please provide a url for this version in the package.py file.")
+ "Please provide a url for this version in the package.py file.",
+ )
class NoURLError(PackageError):
"""Raised when someone tries to build a URL for a package with no URLs."""
def __init__(self, cls):
- super(NoURLError, self).__init__(
- "Package %s has no version with a URL." % cls.__name__)
+ super(NoURLError, self).__init__("Package %s has no version with a URL." % cls.__name__)
class InvalidPackageOpError(PackageError):
@@ -3053,13 +3065,15 @@ class ExtensionError(PackageError):
class ActivationError(ExtensionError):
"""Raised when there are problems activating an extension."""
+
def __init__(self, msg, long_msg=None):
super(ActivationError, self).__init__(msg, long_msg)
class DependencyConflictError(spack.error.SpackError):
"""Raised when the dependencies cannot be flattened as asked for."""
+
def __init__(self, conflict):
super(DependencyConflictError, self).__init__(
- "%s conflicts with another file in the flattened directory." % (
- conflict))
+ "%s conflicts with another file in the flattened directory." % (conflict)
+ )
diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py
index 6d08f609fa..975585ac95 100644
--- a/lib/spack/spack/package_prefs.py
+++ b/lib/spack/spack/package_prefs.py
@@ -12,8 +12,7 @@ from spack.config import ConfigError
from spack.util.path import canonicalize_path
from spack.version import VersionList
-_lesser_spec_types = {'compiler': spack.spec.CompilerSpec,
- 'version': VersionList}
+_lesser_spec_types = {"compiler": spack.spec.CompilerSpec, "version": VersionList}
def _spec_type(component):
@@ -50,6 +49,7 @@ class PackagePrefs(object):
provider_spec_list.sort(key=kf)
"""
+
def __init__(self, pkgname, component, vpkg=None, all=True):
self.pkgname = pkgname
self.component = component
@@ -61,20 +61,21 @@ class PackagePrefs(object):
def __call__(self, spec):
"""Return a key object (an index) that can be used to sort spec.
- Sort is done in package order. We don't cache the result of
- this function as Python's sort functions already ensure that the
- key function is called at most once per sorted element.
+ Sort is done in package order. We don't cache the result of
+ this function as Python's sort functions already ensure that the
+ key function is called at most once per sorted element.
"""
if self._spec_order is None:
self._spec_order = self._specs_for_pkg(
- self.pkgname, self.component, self.vpkg, self.all)
+ self.pkgname, self.component, self.vpkg, self.all
+ )
spec_order = self._spec_order
# integer is the index of the first spec in order that satisfies
# spec, or it's a number larger than any position in the order.
match_index = next(
- (i for i, s in enumerate(spec_order) if spec.satisfies(s)),
- len(spec_order))
+ (i for i, s in enumerate(spec_order) if spec.satisfies(s)), len(spec_order)
+ )
if match_index < len(spec_order) and spec_order[match_index] == spec:
# If this is called with multiple specs that all satisfy the same
# minimum index in spec_order, the one which matches that element
@@ -87,14 +88,14 @@ class PackagePrefs(object):
@classmethod
def order_for_package(cls, pkgname, component, vpkg=None, all=True):
"""Given a package name, sort component (e.g, version, compiler, ...),
- and an optional vpkg, return the list from the packages config.
+ and an optional vpkg, return the list from the packages config.
"""
pkglist = [pkgname]
if all:
- pkglist.append('all')
+ pkglist.append("all")
for pkg in pkglist:
- pkg_entry = spack.config.get('packages').get(pkg)
+ pkg_entry = spack.config.get("packages").get(pkg)
if not pkg_entry:
continue
@@ -108,8 +109,8 @@ class PackagePrefs(object):
if order:
ret = [str(s).strip() for s in order]
- if component == 'target':
- ret = ['target=%s' % tname for tname in ret]
+ if component == "target":
+ ret = ["target=%s" % tname for tname in ret]
return ret
return []
@@ -117,30 +118,28 @@ class PackagePrefs(object):
@classmethod
def _specs_for_pkg(cls, pkgname, component, vpkg=None, all=True):
"""Given a sort order specified by the pkgname/component/second_key,
- return a list of CompilerSpecs, VersionLists, or Specs for
- that sorting list.
+ return a list of CompilerSpecs, VersionLists, or Specs for
+ that sorting list.
"""
- pkglist = cls.order_for_package(
- pkgname, component, vpkg, all)
+ pkglist = cls.order_for_package(pkgname, component, vpkg, all)
spec_type = _spec_type(component)
return [spec_type(s) for s in pkglist]
@classmethod
def has_preferred_providers(cls, pkgname, vpkg):
"""Whether specific package has a preferred vpkg providers."""
- return bool(cls.order_for_package(pkgname, 'providers', vpkg, False))
+ return bool(cls.order_for_package(pkgname, "providers", vpkg, False))
@classmethod
def has_preferred_targets(cls, pkg_name):
"""Whether specific package has a preferred vpkg providers."""
- return bool(cls.order_for_package(pkg_name, 'target'))
+ return bool(cls.order_for_package(pkg_name, "target"))
@classmethod
def preferred_variants(cls, pkg_name):
"""Return a VariantMap of preferred variants/values for a spec."""
- for pkg_cls in (pkg_name, 'all'):
- variants = spack.config.get('packages').get(pkg_cls, {}).get(
- 'variants', '')
+ for pkg_cls in (pkg_name, "all"):
+ variants = spack.config.get("packages").get(pkg_cls, {}).get("variants", "")
if variants:
break
@@ -151,8 +150,9 @@ class PackagePrefs(object):
# Only return variants that are actually supported by the package
pkg_cls = spack.repo.path.get_pkg_class(pkg_name)
spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
- return dict((name, variant) for name, variant in spec.variants.items()
- if name in pkg_cls.variants)
+ return dict(
+ (name, variant) for name, variant in spec.variants.items() if name in pkg_cls.variants
+ )
def spec_externals(spec):
@@ -166,26 +166,25 @@ def spec_externals(spec):
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
return pkg_cls(maybe_abstract_spec)
- allpkgs = spack.config.get('packages')
+ allpkgs = spack.config.get("packages")
names = set([spec.name])
names |= set(vspec.name for vspec in _package(spec).virtuals_provided)
external_specs = []
for name in names:
pkg_config = allpkgs.get(name, {})
- pkg_externals = pkg_config.get('externals', [])
+ pkg_externals = pkg_config.get("externals", [])
for entry in pkg_externals:
- spec_str = entry['spec']
- external_path = entry.get('prefix', None)
+ spec_str = entry["spec"]
+ external_path = entry.get("prefix", None)
if external_path:
external_path = canonicalize_path(external_path)
- external_modules = entry.get('modules', None)
+ external_modules = entry.get("modules", None)
external_spec = spack.spec.Spec.from_detection(
spack.spec.Spec(
- spec_str,
- external_path=external_path,
- external_modules=external_modules
- ), extra_attributes=entry.get('extra_attributes', {})
+ spec_str, external_path=external_path, external_modules=external_modules
+ ),
+ extra_attributes=entry.get("extra_attributes", {}),
)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
@@ -197,19 +196,21 @@ def spec_externals(spec):
def is_spec_buildable(spec):
"""Return true if the spec is configured as buildable"""
- allpkgs = spack.config.get('packages')
- all_buildable = allpkgs.get('all', {}).get('buildable', True)
+ allpkgs = spack.config.get("packages")
+ all_buildable = allpkgs.get("all", {}).get("buildable", True)
def _package(s):
pkg_cls = spack.repo.path.get_pkg_class(s.name)
return pkg_cls(s)
# Get the list of names for which all_buildable is overridden
- reverse = [name for name, entry in allpkgs.items()
- if entry.get('buildable', all_buildable) != all_buildable]
+ reverse = [
+ name
+ for name, entry in allpkgs.items()
+ if entry.get("buildable", all_buildable) != all_buildable
+ ]
# Does this spec override all_buildable
- spec_reversed = (spec.name in reverse or
- any(_package(spec).provides(name) for name in reverse))
+ spec_reversed = spec.name in reverse or any(_package(spec).provides(name) for name in reverse)
return not all_buildable if spec_reversed else all_buildable
@@ -220,7 +221,7 @@ def get_package_dir_permissions(spec):
attribute sticky for the directory. Package-specific settings take
precedent over settings for ``all``"""
perms = get_package_permissions(spec)
- if perms & stat.S_IRWXG and spack.config.get('config:allow_sgid', True):
+ if perms & stat.S_IRWXG and spack.config.get("config:allow_sgid", True):
perms |= stat.S_ISGID
return perms
@@ -231,42 +232,44 @@ def get_package_permissions(spec):
Package-specific settings take precedence over settings for ``all``"""
# Get read permissions level
- for name in (spec.name, 'all'):
+ for name in (spec.name, "all"):
try:
- readable = spack.config.get('packages:%s:permissions:read' % name,
- '')
+ readable = spack.config.get("packages:%s:permissions:read" % name, "")
if readable:
break
except AttributeError:
- readable = 'world'
+ readable = "world"
# Get write permissions level
- for name in (spec.name, 'all'):
+ for name in (spec.name, "all"):
try:
- writable = spack.config.get('packages:%s:permissions:write' % name,
- '')
+ writable = spack.config.get("packages:%s:permissions:write" % name, "")
if writable:
break
except AttributeError:
- writable = 'user'
+ writable = "user"
perms = stat.S_IRWXU
- if readable in ('world', 'group'): # world includes group
+ if readable in ("world", "group"): # world includes group
perms |= stat.S_IRGRP | stat.S_IXGRP
- if readable == 'world':
+ if readable == "world":
perms |= stat.S_IROTH | stat.S_IXOTH
- if writable in ('world', 'group'):
- if readable == 'user':
- raise ConfigError('Writable permissions may not be more' +
- ' permissive than readable permissions.\n' +
- ' Violating package is %s' % spec.name)
+ if writable in ("world", "group"):
+ if readable == "user":
+ raise ConfigError(
+ "Writable permissions may not be more"
+ + " permissive than readable permissions.\n"
+ + " Violating package is %s" % spec.name
+ )
perms |= stat.S_IWGRP
- if writable == 'world':
- if readable != 'world':
- raise ConfigError('Writable permissions may not be more' +
- ' permissive than readable permissions.\n' +
- ' Violating package is %s' % spec.name)
+ if writable == "world":
+ if readable != "world":
+ raise ConfigError(
+ "Writable permissions may not be more"
+ + " permissive than readable permissions.\n"
+ + " Violating package is %s" % spec.name
+ )
perms |= stat.S_IWOTH
return perms
@@ -276,14 +279,13 @@ def get_package_group(spec):
"""Return the unix group associated with the spec.
Package-specific settings take precedence over settings for ``all``"""
- for name in (spec.name, 'all'):
+ for name in (spec.name, "all"):
try:
- group = spack.config.get('packages:%s:permissions:group' % name,
- '')
+ group = spack.config.get("packages:%s:permissions:group" % name, "")
if group:
break
except AttributeError:
- group = ''
+ group = ""
return group
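
The PackagePrefs.__call__ hunk above implements a common pattern: the sort key for a spec is the index of the first entry in a preference-ordered list that the spec satisfies, with unmatched items sinking to the end. A minimal standalone sketch of that pattern, using plain version strings instead of Spack specs (the names and data below are illustrative only, not Spack's API):

def preference_key(order):
    """Build a sort key: index of the first matching preference, or len(order) if none match."""
    def key(item):
        return next(
            (i for i, preferred in enumerate(order) if item.startswith(preferred)),
            len(order),
        )
    return key

# Versions that match a preferred prefix sort first, in preference order;
# everything else keeps its relative position at the end (Python's sort is stable).
order = ["1.2", "1.1"]
versions = ["1.0.5", "1.1.3", "1.2.0", "2.0.1"]
print(sorted(versions, key=preference_key(order)))  # ['1.2.0', '1.1.3', '1.0.5', '2.0.1']
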
diff --git a/lib/spack/spack/package_test.py b/lib/spack/spack/package_test.py
index 1883d3eda1..ba5a604f01 100644
--- a/lib/spack/spack/package_test.py
+++ b/lib/spack/spack/package_test.py
@@ -12,15 +12,14 @@ def compile_c_and_execute(source_file, include_flags, link_flags):
"""Compile C @p source_file with @p include_flags and @p link_flags,
run and return the output.
"""
- cc = which('cc')
+ cc = which("cc")
flags = include_flags
flags.extend([source_file])
- cc('-c', *flags)
+ cc("-c", *flags)
name = os.path.splitext(os.path.basename(source_file))[0]
- cc('-o', "check", "%s.o" % name,
- *link_flags)
+ cc("-o", "check", "%s.o" % name, *link_flags)
- check = Executable('./check')
+ check = Executable("./check")
return check(output=str)
@@ -29,20 +28,19 @@ def compare_output(current_output, blessed_output):
if not (current_output == blessed_output):
print("Produced output does not match expected output.")
print("Expected output:")
- print('-' * 80)
+ print("-" * 80)
print(blessed_output)
- print('-' * 80)
+ print("-" * 80)
print("Produced output:")
- print('-' * 80)
+ print("-" * 80)
print(current_output)
- print('-' * 80)
- raise RuntimeError("Ouput check failed.",
- "See spack_output.log for details")
+ print("-" * 80)
+ raise RuntimeError("Ouput check failed.", "See spack_output.log for details")
def compare_output_file(current_output, blessed_output_file):
"""Same as above, but when the blessed output is given as a file."""
- with open(blessed_output_file, 'r') as f:
+ with open(blessed_output_file, "r") as f:
blessed_output = f.read()
compare_output(current_output, blessed_output)
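
compile_c_and_execute above compiles a C source with the cc found in PATH, links it into ./check, runs it, and returns the output. A rough subprocess equivalent of the same flow (file names in the usage comment are hypothetical; the real helper uses Spack's Executable wrapper):

import os
import subprocess

def compile_c_and_execute(source_file, include_flags, link_flags):
    """Compile source_file with cc, link ./check, run it and return its stdout."""
    subprocess.run(["cc", "-c"] + list(include_flags) + [source_file], check=True)
    name = os.path.splitext(os.path.basename(source_file))[0]
    subprocess.run(["cc", "-o", "check", "%s.o" % name] + list(link_flags), check=True)
    result = subprocess.run(["./check"], stdout=subprocess.PIPE, text=True, check=True)
    return result.stdout

# Example (hypothetical paths/flags):
# compile_c_and_execute("smoke_test.c", ["-I/usr/include"], ["-lm"])
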
diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py
index e06338e0c8..941cb5a93e 100644
--- a/lib/spack/spack/parse.py
+++ b/lib/spack/spack/parse.py
@@ -17,7 +17,7 @@ import spack.util.path as sp
class Token(object):
"""Represents tokens; generated from input by lexer and fed to parse()."""
- def __init__(self, type, value='', start=0, end=0):
+ def __init__(self, type, value="", start=0, end=0):
self.type = type
self.value = value
self.start = start
@@ -39,23 +39,18 @@ class Token(object):
class Lexer(object):
"""Base class for Lexers that keep track of line numbers."""
- def __init__(self, lexicon0, mode_switches_01=[],
- lexicon1=[], mode_switches_10=[]):
+ def __init__(self, lexicon0, mode_switches_01=[], lexicon1=[], mode_switches_10=[]):
self.scanner0 = re.Scanner(lexicon0)
self.mode_switches_01 = mode_switches_01
self.scanner1 = re.Scanner(lexicon1)
self.mode_switches_10 = mode_switches_10
self.mode = 0
- def token(self, type, value=''):
+ def token(self, type, value=""):
if self.mode == 0:
- return Token(type, value,
- self.scanner0.match.start(0),
- self.scanner0.match.end(0))
+ return Token(type, value, self.scanner0.match.start(0), self.scanner0.match.end(0))
else:
- return Token(type, value,
- self.scanner1.match.start(0),
- self.scanner1.match.end(0))
+ return Token(type, value, self.scanner1.match.start(0), self.scanner1.match.end(0))
def lex_word(self, word):
scanner = self.scanner0
@@ -73,8 +68,9 @@ class Lexer(object):
# scan in other mode
self.mode = 1 - self.mode # swap 0/1
remainder_used = 1
- tokens = tokens[:i + 1] + self.lex_word(
- word[word.index(t.value) + len(t.value):])
+ tokens = tokens[: i + 1] + self.lex_word(
+ word[word.index(t.value) + len(t.value) :]
+ )
break
if remainder and not remainder_used:
@@ -94,9 +90,9 @@ class Parser(object):
"""Base class for simple recursive descent parsers."""
def __init__(self, lexer):
- self.tokens = iter([]) # iterators over tokens, handled in order.
+ self.tokens = iter([]) # iterators over tokens, handled in order.
self.token = Token(None) # last accepted token
- self.next = None # next token
+ self.next = None # next token
self.lexer = lexer
self.text = None
@@ -109,8 +105,7 @@ class Parser(object):
def push_tokens(self, iterable):
"""Adds all tokens in some iterable to the token stream."""
- self.tokens = itertools.chain(
- iter(iterable), iter([self.next]), self.tokens)
+ self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens)
self.gettok()
def accept(self, id):
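
The Lexer changes in parse.py above keep two re.Scanner instances and swap between them on mode-switch tokens. A minimal sketch of the underlying re.Scanner usage; the lexicon and token names below are made up for illustration and are not Spack's actual grammar:

import re

# Each lexicon entry is (pattern, action); the action receives the scanner and the
# matched text and returns a token, or None to drop the match (e.g. whitespace).
lexicon = [
    (r"[A-Za-z_][A-Za-z0-9_.-]*", lambda s, t: ("ID", t)),
    (r"@[0-9][0-9.]*",            lambda s, t: ("VERSION", t)),
    (r"\^",                       lambda s, t: ("DEP", t)),
    (r"\s+",                      None),
]

scanner = re.Scanner(lexicon)
tokens, remainder = scanner.scan("zlib@1.2.11 ^cmake")
print(tokens)     # [('ID', 'zlib'), ('VERSION', '@1.2.11'), ('DEP', '^'), ('ID', 'cmake')]
print(remainder)  # '' when the whole input was tokenized
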
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index 47daf9ecc7..8ff56e5d51 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -23,7 +23,7 @@ from spack.util.crypto import Checker, checksum
from spack.util.executable import which, which_string
-def apply_patch(stage, patch_path, level=1, working_dir='.'):
+def apply_patch(stage, patch_path, level=1, working_dir="."):
"""Apply the patch at patch_path to code in the stage.
Args:
@@ -33,11 +33,11 @@ def apply_patch(stage, patch_path, level=1, working_dir='.'):
working_dir (str): relative path *within* the stage to change to
(default '.')
"""
- git_utils_path = os.environ.get('PATH', '')
- if sys.platform == 'win32':
- git = which_string('git', required=True)
- git_root = git.split('\\')[:-2]
- git_root.extend(['usr', 'bin'])
+ git_utils_path = os.environ.get("PATH", "")
+ if sys.platform == "win32":
+ git = which_string("git", required=True)
+ git_root = git.split("\\")[:-2]
+ git_root.extend(["usr", "bin"])
git_utils_path = os.sep.join(git_root)
# TODO: Decouple Spack's patch support on Windows from Git
@@ -48,10 +48,7 @@ def apply_patch(stage, patch_path, level=1, working_dir='.'):
# flag is passed.
patch = which("patch", required=True, path=git_utils_path)
with llnl.util.filesystem.working_dir(stage.source_path):
- patch('-s',
- '-p', str(level),
- '-i', patch_path,
- '-d', working_dir)
+ patch("-s", "-p", str(level), "-i", patch_path, "-d", working_dir)
class Patch(object):
@@ -65,6 +62,7 @@ class Patch(object):
it is the dependent's fullname.
"""
+
def __init__(self, pkg, path_or_url, level, working_dir):
# validate level (must be an integer >= 0)
if not isinstance(level, int) or not level >= 0:
@@ -78,8 +76,7 @@ class Patch(object):
self.working_dir = working_dir
def fetch(self):
- """Fetch the patch in case of a UrlPatch
- """
+ """Fetch the patch in case of a UrlPatch"""
def clean(self):
"""Clean up the patch stage in case of a UrlPatch"""
@@ -90,8 +87,7 @@ class Patch(object):
Arguments:
stage (spack.stage.Stage): stage where source code lives
"""
- assert self.path, (
- "Path for patch not set in apply: %s" % self.path_or_url)
+ assert self.path, "Path for patch not set in apply: %s" % self.path_or_url
if not os.path.isfile(self.path):
raise NoSuchPatchError("No such patch: %s" % self.path)
@@ -105,10 +101,10 @@ class Patch(object):
def to_dict(self):
"""Partial dictionary -- subclases should add to this."""
return {
- 'owner': self.owner,
- 'sha256': self.sha256,
- 'level': self.level,
- 'working_dir': self.working_dir,
+ "owner": self.owner,
+ "sha256": self.sha256,
+ "level": self.level,
+ "working_dir": self.working_dir,
}
def __eq__(self, other):
@@ -129,8 +125,8 @@ class FilePatch(Patch):
working_dir (str): path within the source directory where patch
should be applied
"""
- def __init__(self, pkg, relative_path, level, working_dir,
- ordering_key=None):
+
+ def __init__(self, pkg, relative_path, level, working_dir, ordering_key=None):
self.relative_path = relative_path
# patches may be defined by relative paths to parent classes
@@ -139,7 +135,7 @@ class FilePatch(Patch):
# At different times we call FilePatch on instances and classes
pkg_cls = pkg if inspect.isclass(pkg) else pkg.__class__
for cls in inspect.getmro(pkg_cls):
- if not hasattr(cls, 'module'):
+ if not hasattr(cls, "module"):
# We've gone too far up the MRO
break
@@ -152,8 +148,8 @@ class FilePatch(Patch):
break
if abs_path is None:
- msg = 'FilePatch: Patch file %s for ' % relative_path
- msg += 'package %s.%s does not exist.' % (pkg.namespace, pkg.name)
+ msg = "FilePatch: Patch file %s for " % relative_path
+ msg += "package %s.%s does not exist." % (pkg.namespace, pkg.name)
raise ValueError(msg)
super(FilePatch, self).__init__(pkg, abs_path, level, working_dir)
@@ -169,8 +165,8 @@ class FilePatch(Patch):
def to_dict(self):
return llnl.util.lang.union_dicts(
- super(FilePatch, self).to_dict(),
- {'relative_path': self.relative_path})
+ super(FilePatch, self).to_dict(), {"relative_path": self.relative_path}
+ )
class UrlPatch(Patch):
@@ -183,8 +179,8 @@ class UrlPatch(Patch):
working_dir (str): path within the source directory where patch
should be applied
"""
- def __init__(self, pkg, url, level=1, working_dir='.', ordering_key=None,
- **kwargs):
+
+ def __init__(self, pkg, url, level=1, working_dir=".", ordering_key=None, **kwargs):
super(UrlPatch, self).__init__(pkg, url, level, working_dir)
self.url = url
@@ -192,13 +188,14 @@ class UrlPatch(Patch):
self.ordering_key = ordering_key
- self.archive_sha256 = kwargs.get('archive_sha256')
+ self.archive_sha256 = kwargs.get("archive_sha256")
if allowed_archive(self.url) and not self.archive_sha256:
raise PatchDirectiveError(
"Compressed patches require 'archive_sha256' "
- "and patch 'sha256' attributes: %s" % self.url)
+ "and patch 'sha256' attributes: %s" % self.url
+ )
- self.sha256 = kwargs.get('sha256')
+ self.sha256 = kwargs.get("sha256")
if not self.sha256:
raise PatchDirectiveError("URL patches require a sha256 checksum")
@@ -220,26 +217,24 @@ class UrlPatch(Patch):
files = os.listdir(root)
if not files:
if self.archive_sha256:
- raise NoSuchPatchError(
- "Archive was empty: %s" % self.url)
+ raise NoSuchPatchError("Archive was empty: %s" % self.url)
else:
- raise NoSuchPatchError(
- "Patch failed to download: %s" % self.url)
+ raise NoSuchPatchError("Patch failed to download: %s" % self.url)
self.path = os.path.join(root, files.pop())
if not os.path.isfile(self.path):
- raise NoSuchPatchError(
- "Archive %s contains no patch file!" % self.url)
+ raise NoSuchPatchError("Archive %s contains no patch file!" % self.url)
# for a compressed archive, Need to check the patch sha256 again
# and the patch is in a directory, not in the same place
- if self.archive_sha256 and spack.config.get('config:checksum'):
+ if self.archive_sha256 and spack.config.get("config:checksum"):
checker = Checker(self.sha256)
if not checker.check(self.path):
raise fs.ChecksumError(
"sha256 checksum failed for %s" % self.path,
- "Expected %s but got %s" % (self.sha256, checker.sum))
+ "Expected %s but got %s" % (self.sha256, checker.sum),
+ )
@property
def stage(self):
@@ -251,18 +246,15 @@ class UrlPatch(Patch):
if self.archive_sha256:
fetch_digest = self.archive_sha256
- fetcher = fs.URLFetchStrategy(self.url, fetch_digest,
- expand=bool(self.archive_sha256))
+ fetcher = fs.URLFetchStrategy(self.url, fetch_digest, expand=bool(self.archive_sha256))
# The same package can have multiple patches with the same name but
# with different contents, therefore apply a subset of the hash.
- name = '{0}-{1}'.format(os.path.basename(self.url), fetch_digest[:7])
+ name = "{0}-{1}".format(os.path.basename(self.url), fetch_digest[:7])
- per_package_ref = os.path.join(self.owner.split('.')[-1], name)
+ per_package_ref = os.path.join(self.owner.split(".")[-1], name)
# Reference starting with "spack." is required to avoid cyclic imports
- mirror_ref = spack.mirror.mirror_archive_paths(
- fetcher,
- per_package_ref)
+ mirror_ref = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
self._stage = spack.stage.Stage(fetcher, mirror_paths=mirror_ref)
self._stage.create()
@@ -273,45 +265,45 @@ class UrlPatch(Patch):
def to_dict(self):
data = super(UrlPatch, self).to_dict()
- data['url'] = self.url
+ data["url"] = self.url
if self.archive_sha256:
- data['archive_sha256'] = self.archive_sha256
+ data["archive_sha256"] = self.archive_sha256
return data
def from_dict(dictionary):
"""Create a patch from json dictionary."""
- owner = dictionary.get('owner')
- if 'owner' not in dictionary:
- raise ValueError('Invalid patch dictionary: %s' % dictionary)
+ owner = dictionary.get("owner")
+ if "owner" not in dictionary:
+ raise ValueError("Invalid patch dictionary: %s" % dictionary)
pkg_cls = spack.repo.path.get_pkg_class(owner)
- if 'url' in dictionary:
+ if "url" in dictionary:
return UrlPatch(
pkg_cls,
- dictionary['url'],
- dictionary['level'],
- dictionary['working_dir'],
- sha256=dictionary['sha256'],
- archive_sha256=dictionary.get('archive_sha256'))
-
- elif 'relative_path' in dictionary:
+ dictionary["url"],
+ dictionary["level"],
+ dictionary["working_dir"],
+ sha256=dictionary["sha256"],
+ archive_sha256=dictionary.get("archive_sha256"),
+ )
+
+ elif "relative_path" in dictionary:
patch = FilePatch(
- pkg_cls,
- dictionary['relative_path'],
- dictionary['level'],
- dictionary['working_dir'])
+ pkg_cls, dictionary["relative_path"], dictionary["level"], dictionary["working_dir"]
+ )
# If the patch in the repo changes, we cannot get it back, so we
# just check it and fail here.
# TODO: handle this more gracefully.
- sha256 = dictionary['sha256']
+ sha256 = dictionary["sha256"]
checker = Checker(sha256)
if not checker.check(patch.path):
raise fs.ChecksumError(
"sha256 checksum failed for %s" % patch.path,
- "Expected %s but got %s " % (sha256, checker.sum) +
- "Patch may have changed since concretization.")
+ "Expected %s but got %s " % (sha256, checker.sum)
+ + "Patch may have changed since concretization.",
+ )
return patch
else:
raise ValueError("Invalid patch dictionary: %s" % dictionary)
@@ -336,20 +328,21 @@ class PatchCache(object):
... etc. ...
"""
+
def __init__(self, data=None):
if data is None:
self.index = {}
else:
- if 'patches' not in data:
- raise IndexError('invalid patch index; try `spack clean -m`')
- self.index = data['patches']
+ if "patches" not in data:
+ raise IndexError("invalid patch index; try `spack clean -m`")
+ self.index = data["patches"]
@classmethod
def from_json(cls, stream):
return PatchCache(sjson.load(stream))
def to_json(self, stream):
- sjson.dump({'patches': self.index}, stream)
+ sjson.dump({"patches": self.index}, stream)
def patch_for_package(self, sha256, pkg):
"""Look up a patch in the index and build a patch object for it.
@@ -365,8 +358,8 @@ class PatchCache(object):
sha_index = self.index.get(sha256)
if not sha_index:
raise NoSuchPatchError(
- "Couldn't find patch for package %s with sha256: %s"
- % (pkg.fullname, sha256))
+ "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256)
+ )
# Find patches for this class or any class it inherits from
for fullname in pkg.fullnames:
@@ -375,13 +368,13 @@ class PatchCache(object):
break
else:
raise NoSuchPatchError(
- "Couldn't find patch for package %s with sha256: %s"
- % (pkg.fullname, sha256))
+ "Couldn't find patch for package %s with sha256: %s" % (pkg.fullname, sha256)
+ )
# add the sha256 back (we take it out on write to save space,
# because it's the index key)
patch_dict = dict(patch_dict)
- patch_dict['sha256'] = sha256
+ patch_dict["sha256"] = sha256
return from_dict(patch_dict)
def update_package(self, pkg_fullname):
@@ -390,7 +383,7 @@ class PatchCache(object):
for sha256, package_to_patch in self.index.items():
remove = []
for fullname, patch_dict in package_to_patch.items():
- if patch_dict['owner'] == pkg_fullname:
+ if patch_dict["owner"] == pkg_fullname:
remove.append(fullname)
for fullname in remove:
@@ -424,7 +417,7 @@ class PatchCache(object):
for cond, patch_list in pkg_class.patches.items():
for patch in patch_list:
patch_dict = patch.to_dict()
- patch_dict.pop('sha256') # save some space
+ patch_dict.pop("sha256") # save some space
index[patch.sha256] = {pkg_class.fullname: patch_dict}
# and patches on dependencies
@@ -434,7 +427,7 @@ class PatchCache(object):
for patch in patch_list:
dspec_cls = spack.repo.path.get_pkg_class(dependency.spec.name)
patch_dict = patch.to_dict()
- patch_dict.pop('sha256') # save some space
+ patch_dict.pop("sha256") # save some space
index[patch.sha256] = {dspec_cls.fullname: patch_dict}
return index
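
The reformatted apply_patch above boils down to running the system patch tool with -s, -p, -i and -d from inside the stage's source directory. A rough standalone equivalent using subprocess (the paths in the usage comment are placeholders; the real function also handles the Git-provided patch on Windows and uses Spack's Executable wrapper):

import subprocess

def apply_patch(source_dir, patch_path, level=1, working_dir="."):
    """Apply patch_path to the sources in source_dir, like `patch -s -p <level> -i ... -d ...`."""
    subprocess.run(
        ["patch", "-s", "-p", str(level), "-i", patch_path, "-d", working_dir],
        cwd=source_dir,   # mirrors running inside stage.source_path
        check=True,       # raise if the patch does not apply cleanly
    )

# Example (hypothetical paths):
# apply_patch("/tmp/stage/spack-src", "/tmp/patches/fix-build.patch", level=1)
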
diff --git a/lib/spack/spack/paths.py b/lib/spack/spack/paths.py
index fcf98aac29..fc617342e8 100644
--- a/lib/spack/spack/paths.py
+++ b/lib/spack/spack/paths.py
@@ -29,36 +29,36 @@ spack_script = os.path.join(bin_path, "spack")
sbang_script = os.path.join(bin_path, "sbang")
# spack directory hierarchy
-lib_path = os.path.join(prefix, "lib", "spack")
-external_path = os.path.join(lib_path, "external")
-build_env_path = os.path.join(lib_path, "env")
-module_path = os.path.join(lib_path, "spack")
-command_path = os.path.join(module_path, "cmd")
-analyzers_path = os.path.join(module_path, "analyzers")
-platform_path = os.path.join(module_path, 'platforms')
-compilers_path = os.path.join(module_path, "compilers")
-build_systems_path = os.path.join(module_path, 'build_systems')
-operating_system_path = os.path.join(module_path, 'operating_systems')
-test_path = os.path.join(module_path, "test")
-hooks_path = os.path.join(module_path, "hooks")
-opt_path = os.path.join(prefix, "opt")
-share_path = os.path.join(prefix, "share", "spack")
-etc_path = os.path.join(prefix, "etc", "spack")
+lib_path = os.path.join(prefix, "lib", "spack")
+external_path = os.path.join(lib_path, "external")
+build_env_path = os.path.join(lib_path, "env")
+module_path = os.path.join(lib_path, "spack")
+command_path = os.path.join(module_path, "cmd")
+analyzers_path = os.path.join(module_path, "analyzers")
+platform_path = os.path.join(module_path, "platforms")
+compilers_path = os.path.join(module_path, "compilers")
+build_systems_path = os.path.join(module_path, "build_systems")
+operating_system_path = os.path.join(module_path, "operating_systems")
+test_path = os.path.join(module_path, "test")
+hooks_path = os.path.join(module_path, "hooks")
+opt_path = os.path.join(prefix, "opt")
+share_path = os.path.join(prefix, "share", "spack")
+etc_path = os.path.join(prefix, "etc", "spack")
#
# Things in $spack/etc/spack
#
-default_license_dir = os.path.join(etc_path, "licenses")
+default_license_dir = os.path.join(etc_path, "licenses")
#
# Things in $spack/var/spack
#
-var_path = os.path.join(prefix, "var", "spack")
+var_path = os.path.join(prefix, "var", "spack")
# read-only things in $spack/var/spack
-repos_path = os.path.join(var_path, "repos")
-packages_path = os.path.join(repos_path, "builtin")
-mock_packages_path = os.path.join(repos_path, "builtin.mock")
+repos_path = os.path.join(var_path, "repos")
+packages_path = os.path.join(repos_path, "builtin")
+mock_packages_path = os.path.join(repos_path, "builtin.mock")
#
# Writable things in $spack/var/spack
@@ -69,10 +69,10 @@ mock_packages_path = os.path.join(repos_path, "builtin.mock")
default_fetch_cache_path = os.path.join(var_path, "cache")
# GPG paths.
-gpg_keys_path = os.path.join(var_path, "gpg")
+gpg_keys_path = os.path.join(var_path, "gpg")
mock_gpg_data_path = os.path.join(var_path, "gpg.mock", "data")
mock_gpg_keys_path = os.path.join(var_path, "gpg.mock", "keys")
-gpg_path = os.path.join(opt_path, "spack", "gpg")
+gpg_path = os.path.join(opt_path, "spack", "gpg")
# Below paths are where Spack can write information for the user.
@@ -85,8 +85,7 @@ gpg_path = os.path.join(opt_path, "spack", "gpg")
# setting `SPACK_USER_CACHE_PATH`. Otherwise it defaults to ~/.spack.
#
def _get_user_cache_path():
- return os.path.expanduser(os.getenv('SPACK_USER_CACHE_PATH')
- or "~%s.spack" % os.sep)
+ return os.path.expanduser(os.getenv("SPACK_USER_CACHE_PATH") or "~%s.spack" % os.sep)
user_cache_path = _get_user_cache_path()
@@ -101,13 +100,13 @@ default_test_path = os.path.join(user_cache_path, "test")
default_monitor_path = os.path.join(reports_path, "monitor")
#: git repositories fetched to compare commits to versions
-user_repos_cache_path = os.path.join(user_cache_path, 'git_repos')
+user_repos_cache_path = os.path.join(user_cache_path, "git_repos")
#: bootstrap store for bootstrapping clingo and other tools
-default_user_bootstrap_path = os.path.join(user_cache_path, 'bootstrap')
+default_user_bootstrap_path = os.path.join(user_cache_path, "bootstrap")
#: transient caches for Spack data (virtual cache, patch sha256 lookup, etc.)
-default_misc_cache_path = os.path.join(user_cache_path, 'cache')
+default_misc_cache_path = os.path.join(user_cache_path, "cache")
# Below paths pull configuration from the host environment.
@@ -121,14 +120,14 @@ default_misc_cache_path = os.path.join(user_cache_path, 'cache')
# User configuration and caches in $HOME/.spack
def _get_user_config_path():
- return os.path.expanduser(os.getenv('SPACK_USER_CONFIG_PATH') or
- "~%s.spack" % os.sep)
+ return os.path.expanduser(os.getenv("SPACK_USER_CONFIG_PATH") or "~%s.spack" % os.sep)
# Configuration in /etc/spack on the system
def _get_system_config_path():
- return os.path.expanduser(os.getenv('SPACK_SYSTEM_CONFIG_PATH') or
- os.sep + os.path.join('etc', 'spack'))
+ return os.path.expanduser(
+ os.getenv("SPACK_SYSTEM_CONFIG_PATH") or os.sep + os.path.join("etc", "spack")
+ )
#: User configuration location
diff --git a/lib/spack/spack/platforms/__init__.py b/lib/spack/spack/platforms/__init__.py
index 50acc3ec42..220e831282 100644
--- a/lib/spack/spack/platforms/__init__.py
+++ b/lib/spack/spack/platforms/__init__.py
@@ -13,17 +13,17 @@ from .test import Test
from .windows import Windows
__all__ = [
- 'Platform',
- 'Cray',
- 'Darwin',
- 'Linux',
- 'Test',
- 'Windows',
- 'platforms',
- 'host',
- 'by_name',
- 'reset',
- 'prevent_cray_detection'
+ "Platform",
+ "Cray",
+ "Darwin",
+ "Linux",
+ "Test",
+ "Windows",
+ "platforms",
+ "host",
+ "by_name",
+ "reset",
+ "prevent_cray_detection",
]
#: The "real" platform of the host running Spack. This should not be changed
@@ -40,6 +40,7 @@ class _PickleableCallable(object):
_platform or _all_platforms. Lambda or nested functions are
not pickleable.
"""
+
def __init__(self, return_value):
self.return_value = return_value
diff --git a/lib/spack/spack/platforms/_functions.py b/lib/spack/spack/platforms/_functions.py
index 503be5afcb..4e46927847 100644
--- a/lib/spack/spack/platforms/_functions.py
+++ b/lib/spack/spack/platforms/_functions.py
@@ -64,7 +64,7 @@ def prevent_cray_detection():
"""Context manager that prevents the detection of the Cray platform"""
reset()
try:
- with spack.util.environment.set_env(MODULEPATH=''):
+ with spack.util.environment.set_env(MODULEPATH=""):
yield
finally:
reset()
diff --git a/lib/spack/spack/platforms/_platform.py b/lib/spack/spack/platforms/_platform.py
index b68b7f1299..e59eba38b5 100644
--- a/lib/spack/spack/platforms/_platform.py
+++ b/lib/spack/spack/platforms/_platform.py
@@ -37,21 +37,21 @@ class Platform(object):
"""
# Subclass sets number. Controls detection order
- priority = None # type: int
+ priority = None # type: int
#: binary formats used on this platform; used by relocation logic
- binary_formats = ['elf']
+ binary_formats = ["elf"]
- front_end = None # type: str
- back_end = None # type: str
- default = None # type: str # The default back end target.
+ front_end = None # type: str
+ back_end = None # type: str
+ default = None # type: str # The default back end target.
- front_os = None # type: str
- back_os = None # type: str
- default_os = None # type: str
+ front_os = None # type: str
+ back_os = None # type: str
+ default_os = None # type: str
- reserved_targets = ['default_target', 'frontend', 'fe', 'backend', 'be']
- reserved_oss = ['default_os', 'frontend', 'fe', 'backend', 'be']
+ reserved_targets = ["default_target", "frontend", "fe", "backend", "be"]
+ reserved_oss = ["default_os", "frontend", "fe", "backend", "be"]
def __init__(self, name):
self.targets = {}
@@ -76,11 +76,11 @@ class Platform(object):
"""
# TODO: Check if we can avoid using strings here
name = str(name)
- if name == 'default_target':
+ if name == "default_target":
name = self.default
- elif name == 'frontend' or name == 'fe':
+ elif name == "frontend" or name == "fe":
name = self.front_end
- elif name == 'backend' or name == 'be':
+ elif name == "backend" or name == "be":
name = self.back_end
return self.targets.get(name, None)
@@ -95,11 +95,11 @@ class Platform(object):
self.operating_sys[name] = os_class
def operating_system(self, name):
- if name == 'default_os':
+ if name == "default_os":
name = self.default_os
- if name == 'frontend' or name == "fe":
+ if name == "frontend" or name == "fe":
name = self.front_os
- if name == 'backend' or name == 'be':
+ if name == "backend" or name == "be":
name = self.back_os
return self.operating_sys.get(name, None)
@@ -137,9 +137,11 @@ class Platform(object):
def targets():
for t in sorted(self.targets.values()):
yield t._cmp_iter
+
yield targets
def oses():
for o in sorted(self.operating_sys.values()):
yield o._cmp_iter
+
yield oses
diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py
index 23724dffbe..8f87740e4c 100644
--- a/lib/spack/spack/platforms/cray.py
+++ b/lib/spack/spack/platforms/cray.py
@@ -21,14 +21,14 @@ from spack.util.module_cmd import module
from ._platform import NoPlatformError, Platform
_craype_name_to_target_name = {
- 'x86-cascadelake': 'cascadelake',
- 'x86-naples': 'zen',
- 'x86-rome': 'zen2',
- 'x86-milan': 'zen3',
- 'x86-skylake': 'skylake_avx512',
- 'mic-knl': 'mic_knl',
- 'interlagos': 'bulldozer',
- 'abudhabi': 'piledriver',
+ "x86-cascadelake": "cascadelake",
+ "x86-naples": "zen",
+ "x86-rome": "zen2",
+ "x86-milan": "zen3",
+ "x86-skylake": "skylake_avx512",
+ "mic-knl": "mic_knl",
+ "interlagos": "bulldozer",
+ "abudhabi": "piledriver",
}
@@ -40,7 +40,7 @@ class Cray(Platform):
priority = 10
def __init__(self):
- ''' Create a Cray system platform.
+ """Create a Cray system platform.
Target names should use craype target names but not include the
'craype-' prefix. Uses first viable target from:
@@ -48,16 +48,15 @@ class Cray(Platform):
envars [SPACK_FRONT_END, SPACK_BACK_END]
configuration file "targets.yaml" with keys 'front_end', 'back_end'
scanning /etc/bash/bashrc.local for back_end only
- '''
- super(Cray, self).__init__('cray')
+ """
+ super(Cray, self).__init__("cray")
# Make all craype targets available.
for target in self._avail_targets():
name = _target_name_from_craype_target_name(target)
- self.add_target(name, spack.target.Target(name, 'craype-%s' % target))
+ self.add_target(name, spack.target.Target(name, "craype-%s" % target))
- self.back_end = os.environ.get('SPACK_BACK_END',
- self._default_target_from_env())
+ self.back_end = os.environ.get("SPACK_BACK_END", self._default_target_from_env())
self.default = self.back_end
if self.back_end not in self.targets:
# We didn't find a target module for the backend
@@ -67,9 +66,7 @@ class Cray(Platform):
for name in archspec.cpu.TARGETS:
if name not in self.targets:
self.add_target(name, spack.target.Target(name))
- self.front_end = os.environ.get(
- 'SPACK_FRONT_END', archspec.cpu.host().name
- )
+ self.front_end = os.environ.get("SPACK_FRONT_END", archspec.cpu.host().name)
if self.front_end not in self.targets:
self.add_target(self.front_end, spack.target.Target(self.front_end))
@@ -85,21 +82,21 @@ class Cray(Platform):
self.add_operating_system(self.front_os, front_distro)
def setup_platform_environment(self, pkg, env):
- """ Change the linker to default dynamic to be more
- similar to linux/standard linker behavior
+ """Change the linker to default dynamic to be more
+ similar to linux/standard linker behavior
"""
# Unload these modules to prevent any silent linking or unnecessary
# I/O profiling in the case of darshan.
modules_to_unload = ["cray-mpich", "darshan", "cray-libsci", "altd"]
for mod in modules_to_unload:
- module('unload', mod)
+ module("unload", mod)
- env.set('CRAYPE_LINK_TYPE', 'dynamic')
- cray_wrapper_names = os.path.join(build_env_path, 'cray')
+ env.set("CRAYPE_LINK_TYPE", "dynamic")
+ cray_wrapper_names = os.path.join(build_env_path, "cray")
if os.path.isdir(cray_wrapper_names):
- env.prepend_path('PATH', cray_wrapper_names)
- env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names)
+ env.prepend_path("PATH", cray_wrapper_names)
+ env.prepend_path("SPACK_ENV_PATH", cray_wrapper_names)
# Makes spack installed pkg-config work on Crays
env.append_path("PKG_CONFIG_PATH", "/usr/lib64/pkgconfig")
@@ -109,9 +106,8 @@ class Cray(Platform):
# wrappers to augment LD_LIBRARY_PATH. This is to avoid long load
# times at runtime. This behavior is not always respected on cray
# "cluster" systems, so we reproduce it here.
- if os.environ.get('CRAY_LD_LIBRARY_PATH'):
- env.prepend_path('LD_LIBRARY_PATH',
- os.environ['CRAY_LD_LIBRARY_PATH'])
+ if os.environ.get("CRAY_LD_LIBRARY_PATH"):
+ env.prepend_path("LD_LIBRARY_PATH", os.environ["CRAY_LD_LIBRARY_PATH"])
@classmethod
def detect(cls):
@@ -126,25 +122,29 @@ class Cray(Platform):
and other components of the Cray programming environment are
irrelevant without module support.
"""
- return 'opt/cray' in os.environ.get('MODULEPATH', '')
+ return "opt/cray" in os.environ.get("MODULEPATH", "")
def _default_target_from_env(self):
- '''Set and return the default CrayPE target loaded in a clean login
+ """Set and return the default CrayPE target loaded in a clean login
session.
A bash subshell is launched with a wiped environment and the list of
loaded modules is parsed for the first acceptable CrayPE target.
- '''
+ """
# env -i /bin/bash -lc echo $CRAY_CPU_TARGET 2> /dev/null
- if getattr(self, 'default', None) is None:
- bash = Executable('/bin/bash')
+ if getattr(self, "default", None) is None:
+ bash = Executable("/bin/bash")
output = bash(
- '--norc', '--noprofile', '-lc', 'echo $CRAY_CPU_TARGET',
- env={'TERM': os.environ.get('TERM', '')},
- output=str, error=os.devnull
+ "--norc",
+ "--noprofile",
+ "-lc",
+ "echo $CRAY_CPU_TARGET",
+ env={"TERM": os.environ.get("TERM", "")},
+ output=str,
+ error=os.devnull,
)
- default_from_module = ''.join(output.split()) # rm all whitespace
+ default_from_module = "".join(output.split()) # rm all whitespace
if default_from_module:
tty.debug("Found default module:%s" % default_from_module)
return default_from_module
@@ -152,10 +152,7 @@ class Cray(Platform):
front_end = archspec.cpu.host()
# Look for the frontend architecture or closest ancestor
# available in cray target modules
- avail = [
- _target_name_from_craype_target_name(x)
- for x in self._avail_targets()
- ]
+ avail = [_target_name_from_craype_target_name(x) for x in self._avail_targets()]
for front_end_possibility in [front_end] + front_end.ancestors:
if front_end_possibility.name in avail:
tty.debug("using front-end architecture or available ancestor")
@@ -165,22 +162,21 @@ class Cray(Platform):
return platform.machine()
def _avail_targets(self):
- '''Return a list of available CrayPE CPU targets.'''
+ """Return a list of available CrayPE CPU targets."""
def modules_in_output(output):
"""Returns a list of valid modules parsed from modulecmd output"""
- return [i for i in re.split(r'\s\s+|\n', output)]
+ return [i for i in re.split(r"\s\s+|\n", output)]
def target_names_from_modules(modules):
# Craype- module prefixes that are not valid CPU targets.
targets = []
for mod in modules:
- if 'craype-' in mod:
+ if "craype-" in mod:
name = mod[7:]
name = name.split()[0]
- _n = name.replace('-', '_') # test for mic-knl/mic_knl
- is_target_name = (name in archspec.cpu.TARGETS or
- _n in archspec.cpu.TARGETS)
+ _n = name.replace("-", "_") # test for mic-knl/mic_knl
+ is_target_name = name in archspec.cpu.TARGETS or _n in archspec.cpu.TARGETS
is_cray_target_name = name in _craype_name_to_target_name
if is_target_name or is_cray_target_name:
targets.append(name)
@@ -188,15 +184,15 @@ class Cray(Platform):
return targets
def modules_from_listdir():
- craype_default_path = '/opt/cray/pe/craype/default/modulefiles'
+ craype_default_path = "/opt/cray/pe/craype/default/modulefiles"
if os.path.isdir(craype_default_path):
return os.listdir(craype_default_path)
return []
- if getattr(self, '_craype_targets', None) is None:
+ if getattr(self, "_craype_targets", None) is None:
strategies = [
- lambda: modules_in_output(module('avail', '-t', 'craype-')),
- modules_from_listdir
+ lambda: modules_in_output(module("avail", "-t", "craype-")),
+ modules_from_listdir,
]
for available_craype_modules in strategies:
craype_modules = available_craype_modules()
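
_default_target_from_env above shells out to a clean login bash and reads $CRAY_CPU_TARGET from it. A standalone sketch of the same probe with subprocess, for illustration only (the real code goes through Spack's Executable wrapper and adds extra error handling):

import os
import subprocess

def default_craype_target():
    """Return $CRAY_CPU_TARGET as seen by a clean login shell, or '' if unset."""
    result = subprocess.run(
        ["/bin/bash", "--norc", "--noprofile", "-lc", "echo $CRAY_CPU_TARGET"],
        env={"TERM": os.environ.get("TERM", "")},  # wipe everything except TERM
        stdout=subprocess.PIPE,
        stderr=subprocess.DEVNULL,
        text=True,
    )
    return "".join(result.stdout.split())  # strip all whitespace, as the original does

# On a Cray login node this might print e.g. 'haswell'; elsewhere it prints ''.
# print(default_craype_target())
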
diff --git a/lib/spack/spack/platforms/darwin.py b/lib/spack/spack/platforms/darwin.py
index e0393aab57..75e7860def 100644
--- a/lib/spack/spack/platforms/darwin.py
+++ b/lib/spack/spack/platforms/darwin.py
@@ -15,12 +15,12 @@ from ._platform import Platform
class Darwin(Platform):
- priority = 89
+ priority = 89
- binary_formats = ['macho']
+ binary_formats = ["macho"]
def __init__(self):
- super(Darwin, self).__init__('darwin')
+ super(Darwin, self).__init__("darwin")
for name in archspec.cpu.TARGETS:
self.add_target(name, spack.target.Target(name))
@@ -32,14 +32,14 @@ class Darwin(Platform):
mac_os = MacOs()
self.default_os = str(mac_os)
- self.front_os = str(mac_os)
- self.back_os = str(mac_os)
+ self.front_os = str(mac_os)
+ self.back_os = str(mac_os)
self.add_operating_system(str(mac_os), mac_os)
@classmethod
def detect(cls):
- return 'darwin' in py_platform.system().lower()
+ return "darwin" in py_platform.system().lower()
def setup_platform_environment(self, pkg, env):
"""Specify deployment target based on target OS version.
@@ -69,5 +69,5 @@ class Darwin(Platform):
# but succeeds with `-macosx_version_min 11.0`.
# Most compilers seem to perform this translation automatically,
# but older GCC does not.
- version = str(version) + '.0'
- env.set('MACOSX_DEPLOYMENT_TARGET', str(version))
+ version = str(version) + ".0"
+ env.set("MACOSX_DEPLOYMENT_TARGET", str(version))
diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py
index 82a0b88c34..f21efc7060 100644
--- a/lib/spack/spack/platforms/linux.py
+++ b/lib/spack/spack/platforms/linux.py
@@ -13,10 +13,10 @@ from ._platform import Platform
class Linux(Platform):
- priority = 90
+ priority = 90
def __init__(self):
- super(Linux, self).__init__('linux')
+ super(Linux, self).__init__("linux")
for name in archspec.cpu.TARGETS:
self.add_target(name, spack.target.Target(name))
@@ -34,4 +34,4 @@ class Linux(Platform):
@classmethod
def detect(cls):
- return 'linux' in platform.system().lower()
+ return "linux" in platform.system().lower()
diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py
index 3bc49d3a65..d181f2e926 100644
--- a/lib/spack/spack/platforms/test.py
+++ b/lib/spack/spack/platforms/test.py
@@ -11,28 +11,30 @@ from ._platform import Platform
class Test(Platform):
- priority = 1000000
+ priority = 1000000
- if platform.system().lower() == 'darwin':
- binary_formats = ['macho']
+ if platform.system().lower() == "darwin":
+ binary_formats = ["macho"]
- front_end = 'x86_64'
- back_end = 'core2'
- default = 'core2'
+ front_end = "x86_64"
+ back_end = "core2"
+ default = "core2"
- front_os = 'redhat6'
- back_os = 'debian6'
- default_os = 'debian6'
+ front_os = "redhat6"
+ back_os = "debian6"
+ default_os = "debian6"
def __init__(self):
- super(Test, self).__init__('test')
+ super(Test, self).__init__("test")
self.add_target(self.default, spack.target.Target(self.default))
self.add_target(self.front_end, spack.target.Target(self.front_end))
self.add_operating_system(
- self.default_os, spack.operating_systems.OperatingSystem('debian', 6))
+ self.default_os, spack.operating_systems.OperatingSystem("debian", 6)
+ )
self.add_operating_system(
- self.front_os, spack.operating_systems.OperatingSystem('redhat', 6))
+ self.front_os, spack.operating_systems.OperatingSystem("redhat", 6)
+ )
@classmethod
def detect(cls):
diff --git a/lib/spack/spack/platforms/windows.py b/lib/spack/spack/platforms/windows.py
index 9626b29cc8..0beb67e815 100755
--- a/lib/spack/spack/platforms/windows.py
+++ b/lib/spack/spack/platforms/windows.py
@@ -14,10 +14,10 @@ from ._platform import Platform
class Windows(Platform):
- priority = 101
+ priority = 101
def __init__(self):
- super(Windows, self).__init__('windows')
+ super(Windows, self).__init__("windows")
for name in archspec.cpu.TARGETS:
self.add_target(name, spack.target.Target(name))
@@ -29,12 +29,12 @@ class Windows(Platform):
windows_os = WindowsOs()
self.default_os = str(windows_os)
- self.front_os = str(windows_os)
- self.back_os = str(windows_os)
+ self.front_os = str(windows_os)
+ self.back_os = str(windows_os)
self.add_operating_system(str(windows_os), windows_os)
@classmethod
def detect(cls):
plat = platform.system().lower()
- return 'cygwin' in plat or 'win32' in plat or 'windows' in plat
+ return "cygwin" in plat or "win32" in plat or "windows" in plat
diff --git a/lib/spack/spack/projections.py b/lib/spack/spack/projections.py
index 2a9cf54aa8..295e7ebec1 100644
--- a/lib/spack/spack/projections.py
+++ b/lib/spack/spack/projections.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
def get_projection(projections, spec):
"""
Get the projection for a spec from a projections dict.
@@ -11,6 +12,6 @@ def get_projection(projections, spec):
for spec_like, projection in projections.items():
if spec.satisfies(spec_like, strict=True):
return projection
- elif spec_like == 'all':
+ elif spec_like == "all":
all_projection = projection
return all_projection
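
get_projection above returns the projection of the first key the spec satisfies, keeping the 'all' entry as a fallback. A minimal standalone version of that lookup, with a plain predicate standing in for Spec.satisfies (names and templates are illustrative):

def get_projection(projections, name, satisfies):
    """Return the projection for the first matching key; 'all' is the fallback."""
    all_projection = None
    for key, projection in projections.items():
        if satisfies(name, key):
            return projection
        elif key == "all":
            all_projection = projection
    return all_projection

projections = {
    "python": "{name}-{version}/python",
    "all": "{name}-{version}",
}
# A toy 'satisfies': exact name match stands in for Spec.satisfies(..., strict=True).
print(get_projection(projections, "python", lambda n, k: n == k))  # {name}-{version}/python
print(get_projection(projections, "zlib",   lambda n, k: n == k))  # {name}-{version}
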
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py
index 5cdfff8a57..b1f723c1cc 100644
--- a/lib/spack/spack/provider_index.py
+++ b/lib/spack/spack/provider_index.py
@@ -98,9 +98,7 @@ class _IndexBase(object):
# vpkg constraints on self.
result = {}
for name in common:
- crossed = _cross_provider_maps(
- self.providers[name], other.providers[name]
- )
+ crossed = _cross_provider_maps(self.providers[name], other.providers[name])
if crossed:
result[name] = crossed
@@ -195,8 +193,7 @@ class ProviderIndex(_IndexBase):
# If this package existed in the index before,
# need to take the old versions out, as they're
# now more constrained.
- old = set(
- [s for s in provider_set if s.name == spec.name])
+ old = set([s for s in provider_set if s.name == spec.name])
provider_set.difference_update(old)
# Now add the new version.
@@ -216,10 +213,10 @@ class ProviderIndex(_IndexBase):
stream: stream where to dump
"""
provider_list = self._transform(
- lambda vpkg, pset: [
- vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list)
+ lambda vpkg, pset: [vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list
+ )
- sjson.dump({'provider_index': {'providers': provider_list}}, stream)
+ sjson.dump({"provider_index": {"providers": provider_list}}, stream)
def merge(self, other):
"""Merge another provider index into this one.
@@ -227,7 +224,7 @@ class ProviderIndex(_IndexBase):
Args:
other (ProviderIndex): provider index to be merged
"""
- other = other.copy() # defensive copy.
+ other = other.copy() # defensive copy.
for pkg in other.providers:
if pkg not in self.providers:
@@ -240,8 +237,7 @@ class ProviderIndex(_IndexBase):
spdict[provided_spec] = opdict[provided_spec]
continue
- spdict[provided_spec] = \
- spdict[provided_spec].union(opdict[provided_spec])
+ spdict[provided_spec] = spdict[provided_spec].union(opdict[provided_spec])
def remove_provider(self, pkg_name):
"""Remove a provider from the ProviderIndex."""
@@ -267,8 +263,7 @@ class ProviderIndex(_IndexBase):
def copy(self):
"""Return a deep copy of this index."""
clone = ProviderIndex()
- clone.providers = self._transform(
- lambda vpkg, pset: (vpkg, set((p.copy() for p in pset))))
+ clone.providers = self._transform(lambda vpkg, pset: (vpkg, set((p.copy() for p in pset))))
return clone
@staticmethod
@@ -283,17 +278,18 @@ class ProviderIndex(_IndexBase):
if not isinstance(data, dict):
raise ProviderIndexError("JSON ProviderIndex data was not a dict.")
- if 'provider_index' not in data:
- raise ProviderIndexError(
- "YAML ProviderIndex does not start with 'provider_index'")
+ if "provider_index" not in data:
+ raise ProviderIndexError("YAML ProviderIndex does not start with 'provider_index'")
index = ProviderIndex()
- providers = data['provider_index']['providers']
+ providers = data["provider_index"]["providers"]
index.providers = _transform(
providers,
lambda vpkg, plist: (
spack.spec.Spec.from_node_dict(vpkg),
- set(spack.spec.Spec.from_node_dict(p) for p in plist)))
+ set(spack.spec.Spec.from_node_dict(p) for p in plist),
+ ),
+ )
return index
@@ -310,6 +306,7 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
Returns:
Transformed mapping
"""
+
def mapiter(mappings):
if isinstance(mappings, dict):
return six.iteritems(mappings)
@@ -317,10 +314,9 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
return iter(mappings)
return dict(
- (name, out_mapping_type(
- [transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)]
- ))
- for name, mappings in providers.items())
+ (name, out_mapping_type([transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)]))
+ for name, mappings in providers.items()
+ )
class ProviderIndexError(spack.error.SpackError):
diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py
index 47c447c5e0..3f90f1d252 100644
--- a/lib/spack/spack/relocate.py
+++ b/lib/spack/spack/relocate.py
@@ -21,7 +21,7 @@ import spack.repo
import spack.spec
import spack.util.executable as executable
-is_macos = (str(spack.platforms.real_host()) == 'darwin')
+is_macos = str(spack.platforms.real_host()) == "darwin"
class InstallRootStringError(spack.error.SpackError):
@@ -36,8 +36,8 @@ class InstallRootStringError(spack.error.SpackError):
super(InstallRootStringError, self).__init__(
"\n %s \ncontains string\n %s \n"
"after replacing it in rpaths.\n"
- "Package should not be relocated.\n Use -a to override." %
- (file_path, root_path))
+ "Package should not be relocated.\n Use -a to override." % (file_path, root_path)
+ )
class BinaryStringReplacementError(spack.error.SpackError):
@@ -52,8 +52,8 @@ class BinaryStringReplacementError(spack.error.SpackError):
super(BinaryStringReplacementError, self).__init__(
"Doing a binary string replacement in %s failed.\n"
"The size of the file changed from %s to %s\n"
- "when it should have remanined the same." %
- (file_path, old_len, new_len))
+ "when it should have remanined the same." % (file_path, old_len, new_len)
+ )
class BinaryTextReplaceError(spack.error.SpackError):
@@ -81,7 +81,7 @@ def _patchelf():
if is_macos:
return None
- patchelf = executable.which('patchelf')
+ patchelf = executable.which("patchelf")
if patchelf is None:
with spack.bootstrap.ensure_bootstrap_configuration():
patchelf = spack.bootstrap.ensure_patchelf_in_path_or_raise()
@@ -104,15 +104,15 @@ def _elf_rpaths_for(path):
patchelf_path = path if path.endswith("/bin/patchelf") else _patchelf()
patchelf = executable.Executable(patchelf_path)
- output = ''
+ output = ""
try:
- output = patchelf('--print-rpath', path, output=str, error=str)
- output = output.strip('\n')
+ output = patchelf("--print-rpath", path, output=str, error=str)
+ output = output.strip("\n")
except executable.ProcessError as e:
- msg = 'patchelf --print-rpath {0} produced an error [{1}]'
+ msg = "patchelf --print-rpath {0} produced an error [{1}]"
tty.warn(msg.format(path, str(e)))
- return output.split(':') if output else []
+ return output.split(":") if output else []
def _make_relative(reference_file, path_root, paths):
@@ -139,7 +139,7 @@ def _make_relative(reference_file, path_root, paths):
for path in paths:
if pattern.match(path):
rel = os.path.relpath(path, start=start_directory)
- path = os.path.join('$ORIGIN', rel)
+ path = os.path.join("$ORIGIN", rel)
relative_paths.append(path)
@@ -167,11 +167,11 @@ def _normalize_relative_paths(start_path, relative_paths):
List of normalized paths
"""
normalized_paths = []
- pattern = re.compile(re.escape('$ORIGIN'))
+ pattern = re.compile(re.escape("$ORIGIN"))
start_directory = os.path.dirname(start_path)
for path in relative_paths:
- if path.startswith('$ORIGIN'):
+ if path.startswith("$ORIGIN"):
sub = pattern.sub(start_directory, path)
path = os.path.normpath(sub)
normalized_paths.append(path)
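
_make_relative and _normalize_relative_paths above convert between absolute rpaths under the old install root and $ORIGIN-relative ones. A small self-contained sketch of the forward direction, assuming a simple prefix check instead of the regex match the real function uses (paths are made up):

import os

def make_relative(reference_file, path_root, paths):
    """Rewrite any path under path_root as $ORIGIN/<path relative to the file's directory>."""
    start_directory = os.path.dirname(reference_file)
    relative = []
    for path in paths:
        if path.startswith(path_root):
            rel = os.path.relpath(path, start=start_directory)
            path = os.path.join("$ORIGIN", rel)
        relative.append(path)
    return relative

rpaths = ["/opt/spack/zlib/lib", "/usr/lib64"]
print(make_relative("/opt/spack/foo/bin/foo", "/opt/spack", rpaths))
# ['$ORIGIN/../../zlib/lib', '/usr/lib64']
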
@@ -181,15 +181,14 @@ def _normalize_relative_paths(start_path, relative_paths):
def _placeholder(dirname):
"""String of of @'s with same length of the argument"""
- return '@' * len(dirname)
+ return "@" * len(dirname)
def _decode_macho_data(bytestring):
- return bytestring.rstrip(b'\x00').decode('ascii')
+ return bytestring.rstrip(b"\x00").decode("ascii")
-def macho_make_paths_relative(path_name, old_layout_root,
- rpaths, deps, idpath):
+def macho_make_paths_relative(path_name, old_layout_root, rpaths, deps, idpath):
"""
Return a dictionary mapping the original rpaths to the relativized rpaths.
This dictionary is used to replace paths in mach-o binaries.
@@ -198,18 +197,17 @@ def macho_make_paths_relative(path_name, old_layout_root,
"""
paths_to_paths = dict()
if idpath:
- paths_to_paths[idpath] = os.path.join(
- '@rpath', '%s' % os.path.basename(idpath))
+ paths_to_paths[idpath] = os.path.join("@rpath", "%s" % os.path.basename(idpath))
for rpath in rpaths:
if re.match(old_layout_root, rpath):
rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
- paths_to_paths[rpath] = os.path.join('@loader_path', '%s' % rel)
+ paths_to_paths[rpath] = os.path.join("@loader_path", "%s" % rel)
else:
paths_to_paths[rpath] = rpath
for dep in deps:
if re.match(old_layout_root, dep):
rel = os.path.relpath(dep, start=os.path.dirname(path_name))
- paths_to_paths[dep] = os.path.join('@loader_path', '%s' % rel)
+ paths_to_paths[dep] = os.path.join("@loader_path", "%s" % rel)
else:
paths_to_paths[dep] = dep
return paths_to_paths
@@ -227,26 +225,25 @@ def macho_make_paths_normal(orig_path_name, rpaths, deps, idpath):
rel_to_orig[idpath] = orig_path_name
for rpath in rpaths:
- if re.match('@loader_path', rpath):
- norm = os.path.normpath(re.sub(re.escape('@loader_path'),
- os.path.dirname(orig_path_name),
- rpath))
+ if re.match("@loader_path", rpath):
+ norm = os.path.normpath(
+ re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), rpath)
+ )
rel_to_orig[rpath] = norm
else:
rel_to_orig[rpath] = rpath
for dep in deps:
- if re.match('@loader_path', dep):
- norm = os.path.normpath(re.sub(re.escape('@loader_path'),
- os.path.dirname(orig_path_name),
- dep))
+ if re.match("@loader_path", dep):
+ norm = os.path.normpath(
+ re.sub(re.escape("@loader_path"), os.path.dirname(orig_path_name), dep)
+ )
rel_to_orig[dep] = norm
else:
rel_to_orig[dep] = dep
return rel_to_orig
-def macho_find_paths(orig_rpaths, deps, idpath,
- old_layout_root, prefix_to_prefix):
+def macho_find_paths(orig_rpaths, deps, idpath, old_layout_root, prefix_to_prefix):
"""
Inputs
original rpaths from mach-o binaries
@@ -263,8 +260,7 @@ def macho_find_paths(orig_rpaths, deps, idpath,
if orig_rpath.startswith(old_layout_root):
for old_prefix, new_prefix in prefix_to_prefix.items():
if orig_rpath.startswith(old_prefix):
- new_rpath = re.sub(re.escape(old_prefix),
- new_prefix, orig_rpath)
+ new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
paths_to_paths[orig_rpath] = new_rpath
else:
paths_to_paths[orig_rpath] = orig_rpath
@@ -272,21 +268,18 @@ def macho_find_paths(orig_rpaths, deps, idpath,
if idpath:
for old_prefix, new_prefix in prefix_to_prefix.items():
if idpath.startswith(old_prefix):
- paths_to_paths[idpath] = re.sub(
- re.escape(old_prefix), new_prefix, idpath)
+ paths_to_paths[idpath] = re.sub(re.escape(old_prefix), new_prefix, idpath)
for dep in deps:
for old_prefix, new_prefix in prefix_to_prefix.items():
if dep.startswith(old_prefix):
- paths_to_paths[dep] = re.sub(
- re.escape(old_prefix), new_prefix, dep)
- if dep.startswith('@'):
+ paths_to_paths[dep] = re.sub(re.escape(old_prefix), new_prefix, dep)
+ if dep.startswith("@"):
paths_to_paths[dep] = dep
return paths_to_paths
-def modify_macho_object(cur_path, rpaths, deps, idpath,
- paths_to_paths):
+def modify_macho_object(cur_path, rpaths, deps, idpath, paths_to_paths):
"""
This function is used to make machO buildcaches on macOS by
replacing old paths with new paths using install_name_tool
@@ -298,27 +291,27 @@ def modify_macho_object(cur_path, rpaths, deps, idpath,
dictionary mapping paths in old install layout to new install layout
"""
# avoid error message for libgcc_s
- if 'libgcc_' in cur_path:
+ if "libgcc_" in cur_path:
return
args = []
if idpath:
new_idpath = paths_to_paths.get(idpath, None)
if new_idpath and not idpath == new_idpath:
- args += ['-id', new_idpath]
+ args += ["-id", new_idpath]
for dep in deps:
new_dep = paths_to_paths.get(dep)
if new_dep and dep != new_dep:
- args += ['-change', dep, new_dep]
+ args += ["-change", dep, new_dep]
for orig_rpath in rpaths:
new_rpath = paths_to_paths.get(orig_rpath)
if new_rpath and not orig_rpath == new_rpath:
- args += ['-rpath', orig_rpath, new_rpath]
+ args += ["-rpath", orig_rpath, new_rpath]
if args:
args.append(str(cur_path))
- install_name_tool = executable.Executable('install_name_tool')
+ install_name_tool = executable.Executable("install_name_tool")
install_name_tool(*args)
return
@@ -339,7 +332,7 @@ def modify_object_macholib(cur_path, paths_to_paths):
dll.rewriteLoadCommands(paths_to_paths.get)
try:
- f = open(dll.filename, 'rb+')
+ f = open(dll.filename, "rb+")
for header in dll.headers:
f.seek(0)
dll.write(f)
@@ -353,8 +346,7 @@ def modify_object_macholib(cur_path, paths_to_paths):
def macholib_get_paths(cur_path):
- """Get rpaths, dependent libraries, and library id of mach-o objects.
- """
+ """Get rpaths, dependent libraries, and library id of mach-o objects."""
headers = macholib.MachO.MachO(cur_path).headers
if not headers:
tty.warn("Failed to read Mach-O headers: {0}".format(cur_path))
@@ -364,7 +356,7 @@ def macholib_get_paths(cur_path):
# Reproduce original behavior of only returning the last mach-O
# header section
tty.warn("Encountered fat binary: {0}".format(cur_path))
- if headers[-1].filetype == 'dylib_stub':
+ if headers[-1].filetype == "dylib_stub":
tty.warn("File is a stub, not a full library: {0}".format(cur_path))
commands = headers[-1].commands
@@ -402,7 +394,7 @@ def _set_elf_rpaths(target, rpaths):
to ``patchelf``
"""
# Join the paths using ':' as a separator
- rpaths_str = ':'.join(rpaths)
+ rpaths_str = ":".join(rpaths)
# If we're relocating patchelf itself, make a copy and use it
bak_path = None
@@ -414,10 +406,10 @@ def _set_elf_rpaths(target, rpaths):
try:
# TODO: revisit the use of --force-rpath as it might be conditional
# TODO: if we want to support setting RUNPATH from binary packages
- patchelf_args = ['--force-rpath', '--set-rpath', rpaths_str, target]
+ patchelf_args = ["--force-rpath", "--set-rpath", rpaths_str, target]
output = patchelf(*patchelf_args, output=str, error=str)
except executable.ProcessError as e:
- msg = 'patchelf --force-rpath --set-rpath {0} failed with error {1}'
+ msg = "patchelf --force-rpath --set-rpath {0} failed with error {1}"
tty.warn(msg.format(target, e))
finally:
if bak_path and os.path.exists(bak_path):
@@ -433,8 +425,8 @@ def needs_binary_relocation(m_type, m_subtype):
m_type (str): MIME type of the file
m_subtype (str): MIME subtype of the file
"""
- subtypes = ('x-executable', 'x-sharedlib', 'x-mach-binary', 'x-pie-executable')
- if m_type == 'application':
+ subtypes = ("x-executable", "x-sharedlib", "x-mach-binary", "x-pie-executable")
+ if m_type == "application":
if m_subtype in subtypes:
return True
return False
@@ -448,7 +440,7 @@ def needs_text_relocation(m_type, m_subtype):
m_type (str): MIME type of the file
m_subtype (str): MIME subtype of the file
"""
- return m_type == 'text'
+ return m_type == "text"
def _replace_prefix_text(filename, compiled_prefixes):
@@ -461,7 +453,7 @@ def _replace_prefix_text(filename, compiled_prefixes):
precompiled regex of the old prefixes and the values are the new
        prefixes (utf-8 encoded)
"""
- with open(filename, 'rb+') as f:
+ with open(filename, "rb+") as f:
data = f.read()
f.seek(0)
for orig_prefix_rexp, new_bytes in compiled_prefixes.items():
@@ -484,7 +476,7 @@ def _replace_prefix_bin(filename, byte_prefixes):
        prefixes (utf-8 encoded)
"""
- with open(filename, 'rb+') as f:
+ with open(filename, "rb+") as f:
data = f.read()
f.seek(0)
for orig_bytes, new_bytes in byte_prefixes.items():
@@ -495,24 +487,24 @@ def _replace_prefix_bin(filename, byte_prefixes):
# We only care about this problem if we are about to replace
length_compatible = len(new_bytes) <= len(orig_bytes)
if not length_compatible:
- tty.debug('Binary failing to relocate is %s' % filename)
+ tty.debug("Binary failing to relocate is %s" % filename)
raise BinaryTextReplaceError(orig_bytes, new_bytes)
pad_length = len(orig_bytes) - len(new_bytes)
padding = os.sep * pad_length
- padding = padding.encode('utf-8')
+ padding = padding.encode("utf-8")
data = data.replace(orig_bytes, new_bytes + padding)
# Really needs to be the same length
if not len(data) == original_data_len:
- print('Length of pad:', pad_length, 'should be', len(padding))
- print(new_bytes, 'was to replace', orig_bytes)
- raise BinaryStringReplacementError(
- filename, original_data_len, len(data))
+ print("Length of pad:", pad_length, "should be", len(padding))
+ print(new_bytes, "was to replace", orig_bytes)
+ raise BinaryStringReplacementError(filename, original_data_len, len(data))
f.write(data)
f.truncate()
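
The length-preserving trick above can be seen in isolation in the following sketch (all paths are fabricated): a shorter new prefix is padded with path separators so the patched byte string, and therefore the binary, keeps its original size.

import os

orig_bytes = b"/very/long/old/prefix"          # 21 bytes
new_bytes = b"/new/prefix"                     # 11 bytes
pad = os.sep.encode("utf-8") * (len(orig_bytes) - len(new_bytes))
data = b"\x00prefix=/very/long/old/prefix/lib\x00"
patched = data.replace(orig_bytes, new_bytes + pad)
assert len(patched) == len(data)               # binary size is preserved
print(patched)                                 # '/' padding fills the difference
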
-def relocate_macho_binaries(path_names, old_layout_root, new_layout_root,
- prefix_to_prefix, rel, old_prefix, new_prefix):
+def relocate_macho_binaries(
+ path_names, old_layout_root, new_layout_root, prefix_to_prefix, rel, old_prefix, new_prefix
+):
"""
    Use the macholib python package to get the rpaths, dependent libraries
and library identity for libraries from the MachO object. Modify them
@@ -523,66 +515,55 @@ def relocate_macho_binaries(path_names, old_layout_root, new_layout_root,
for path_name in path_names:
# Corner case where macho object file ended up in the path name list
- if path_name.endswith('.o'):
+ if path_name.endswith(".o"):
continue
if rel:
# get the relativized paths
rpaths, deps, idpath = macholib_get_paths(path_name)
# get the file path name in the original prefix
- orig_path_name = re.sub(re.escape(new_prefix), old_prefix,
- path_name)
+ orig_path_name = re.sub(re.escape(new_prefix), old_prefix, path_name)
# get the mapping of the relativized paths to the original
# normalized paths
- rel_to_orig = macho_make_paths_normal(orig_path_name,
- rpaths, deps,
- idpath)
+ rel_to_orig = macho_make_paths_normal(orig_path_name, rpaths, deps, idpath)
# replace the relativized paths with normalized paths
if is_macos:
- modify_macho_object(path_name, rpaths, deps,
- idpath, rel_to_orig)
+ modify_macho_object(path_name, rpaths, deps, idpath, rel_to_orig)
else:
- modify_object_macholib(path_name,
- rel_to_orig)
+ modify_object_macholib(path_name, rel_to_orig)
# get the normalized paths in the mach-o binary
rpaths, deps, idpath = macholib_get_paths(path_name)
            # get the mapping of paths in the old prefix to paths in the new prefix
- paths_to_paths = macho_find_paths(rpaths, deps, idpath,
- old_layout_root,
- prefix_to_prefix)
+ paths_to_paths = macho_find_paths(
+ rpaths, deps, idpath, old_layout_root, prefix_to_prefix
+ )
# replace the old paths with new paths
if is_macos:
- modify_macho_object(path_name, rpaths, deps,
- idpath, paths_to_paths)
+ modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
else:
- modify_object_macholib(path_name,
- paths_to_paths)
+ modify_object_macholib(path_name, paths_to_paths)
# get the new normalized path in the mach-o binary
rpaths, deps, idpath = macholib_get_paths(path_name)
# get the mapping of paths to relative paths in the new prefix
- paths_to_paths = macho_make_paths_relative(path_name,
- new_layout_root,
- rpaths, deps, idpath)
+ paths_to_paths = macho_make_paths_relative(
+ path_name, new_layout_root, rpaths, deps, idpath
+ )
# replace the new paths with relativized paths in the new prefix
if is_macos:
- modify_macho_object(path_name, rpaths, deps,
- idpath, paths_to_paths)
+ modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
else:
- modify_object_macholib(path_name,
- paths_to_paths)
+ modify_object_macholib(path_name, paths_to_paths)
else:
# get the paths in the old prefix
rpaths, deps, idpath = macholib_get_paths(path_name)
            # get the mapping of paths in the old prefix to the new prefix
- paths_to_paths = macho_find_paths(rpaths, deps, idpath,
- old_layout_root,
- prefix_to_prefix)
+ paths_to_paths = macho_find_paths(
+ rpaths, deps, idpath, old_layout_root, prefix_to_prefix
+ )
# replace the old paths with new paths
if is_macos:
- modify_macho_object(path_name, rpaths, deps,
- idpath, paths_to_paths)
+ modify_macho_object(path_name, rpaths, deps, idpath, paths_to_paths)
else:
- modify_object_macholib(path_name,
- paths_to_paths)
+ modify_object_macholib(path_name, paths_to_paths)
def _transform_rpaths(orig_rpaths, orig_root, new_prefixes):
@@ -612,15 +593,15 @@ def _transform_rpaths(orig_rpaths, orig_root, new_prefixes):
# avoiding duplicates
for old_prefix, new_prefix in new_prefixes.items():
if orig_rpath.startswith(old_prefix):
- new_rpath = re.sub(re.escape(old_prefix), new_prefix,
- orig_rpath)
+ new_rpath = re.sub(re.escape(old_prefix), new_prefix, orig_rpath)
if new_rpath not in new_rpaths:
new_rpaths.append(new_rpath)
return new_rpaths
-def relocate_elf_binaries(binaries, orig_root, new_root,
- new_prefixes, rel, orig_prefix, new_prefix):
+def relocate_elf_binaries(
+ binaries, orig_root, new_root, new_prefixes, rel, orig_prefix, new_prefix
+):
"""Relocate the binaries passed as arguments by changing their RPATHs.
Use patchelf to get the original RPATHs and then replace them with
@@ -646,30 +627,20 @@ def relocate_elf_binaries(binaries, orig_root, new_root,
# TODO: Can we deduce `rel` from the original RPATHs?
if rel:
# Get the file path in the original prefix
- orig_binary = re.sub(
- re.escape(new_prefix), orig_prefix, new_binary
- )
+ orig_binary = re.sub(re.escape(new_prefix), orig_prefix, new_binary)
# Get the normalized RPATHs in the old prefix using the file path
# in the orig prefix
- orig_norm_rpaths = _normalize_relative_paths(
- orig_binary, orig_rpaths
- )
+ orig_norm_rpaths = _normalize_relative_paths(orig_binary, orig_rpaths)
            # Get the normalized RPATHs in the new prefix
- new_norm_rpaths = _transform_rpaths(
- orig_norm_rpaths, orig_root, new_prefixes
- )
+ new_norm_rpaths = _transform_rpaths(orig_norm_rpaths, orig_root, new_prefixes)
# Get the relative RPATHs in the new prefix
- new_rpaths = _make_relative(
- new_binary, new_root, new_norm_rpaths
- )
+ new_rpaths = _make_relative(new_binary, new_root, new_norm_rpaths)
# check to see if relative rpaths are changed before rewriting
if sorted(new_rpaths) != sorted(orig_rpaths):
_set_elf_rpaths(new_binary, new_rpaths)
else:
- new_rpaths = _transform_rpaths(
- orig_rpaths, orig_root, new_prefixes
- )
+ new_rpaths = _transform_rpaths(orig_rpaths, orig_root, new_prefixes)
_set_elf_rpaths(new_binary, new_rpaths)
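
For reference, a hedged sketch of what _set_elf_rpaths effectively runs for a single ELF binary, written with subprocess instead of Spack's Executable wrapper; it assumes a patchelf binary is on PATH, and the target path in the commented call is hypothetical.

import subprocess

def sketch_set_rpaths(target, rpaths):
    # patchelf takes one colon-separated RPATH string
    rpaths_str = ":".join(rpaths)
    subprocess.run(
        ["patchelf", "--force-rpath", "--set-rpath", rpaths_str, target],
        check=True,
    )

# sketch_set_rpaths("/new/prefix/bin/tool", ["/new/prefix/lib", "/new/prefix/lib64"])
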
@@ -688,8 +659,7 @@ def make_link_relative(new_links, orig_links):
symlink(relative_target, new_link)
-def make_macho_binaries_relative(cur_path_names, orig_path_names,
- old_layout_root):
+def make_macho_binaries_relative(cur_path_names, orig_path_names, old_layout_root):
"""
Replace old RPATHs with paths relative to old_dir in binary files
"""
@@ -717,9 +687,7 @@ def make_elf_binaries_relative(new_binaries, orig_binaries, orig_layout_root):
for new_binary, orig_binary in zip(new_binaries, orig_binaries):
orig_rpaths = _elf_rpaths_for(new_binary)
if orig_rpaths:
- new_rpaths = _make_relative(
- orig_binary, orig_layout_root, orig_rpaths
- )
+ new_rpaths = _make_relative(orig_binary, orig_layout_root, orig_rpaths)
_set_elf_rpaths(new_binary, new_rpaths)
@@ -738,8 +706,7 @@ def raise_if_not_relocatable(binaries, allow_root):
raise InstallRootStringError(binary, spack.store.layout.root)
-def relocate_links(links, orig_layout_root,
- orig_install_prefix, new_install_prefix):
+def relocate_links(links, orig_layout_root, orig_install_prefix, new_install_prefix):
"""Relocate links to a new install prefix.
The symbolic links are relative to the original installation prefix.
@@ -762,31 +729,30 @@ def relocate_links(links, orig_layout_root,
# If the link points to a file in the original install prefix,
# compute the corresponding target in the new prefix and relink
if link_target.startswith(orig_install_prefix):
- link_target = re.sub(
- orig_install_prefix, new_install_prefix, link_target
- )
+ link_target = re.sub(orig_install_prefix, new_install_prefix, link_target)
os.unlink(abs_link)
symlink(link_target, abs_link)
# If the link is absolute and has not been relocated then
# warn the user about that
- if (os.path.isabs(link_target) and
- not link_target.startswith(new_install_prefix)):
- msg = ('Link target "{0}" for symbolic link "{1}" is outside'
- ' of the new install prefix {2}')
+ if os.path.isabs(link_target) and not link_target.startswith(new_install_prefix):
+ msg = (
+ 'Link target "{0}" for symbolic link "{1}" is outside'
+ " of the new install prefix {2}"
+ )
tty.warn(msg.format(link_target, abs_link, new_install_prefix))
def relocate_text(files, prefixes, concurrency=32):
"""Relocate text file from the original installation prefix to the
- new prefix.
+ new prefix.
- Relocation also affects the the path in Spack's sbang script.
+    Relocation also affects the path in Spack's sbang script.
- Args:
- files (list): Text files to be relocated
- prefixes (OrderedDict): String prefixes which need to be changed
- concurrency (int): Preferred degree of parallelism
+ Args:
+ files (list): Text files to be relocated
+ prefixes (OrderedDict): String prefixes which need to be changed
+ concurrency (int): Preferred degree of parallelism
"""
# This now needs to be handled by the caller in all cases
@@ -797,10 +763,11 @@ def relocate_text(files, prefixes, concurrency=32):
for orig_prefix, new_prefix in prefixes.items():
if orig_prefix != new_prefix:
- orig_bytes = orig_prefix.encode('utf-8')
+ orig_bytes = orig_prefix.encode("utf-8")
orig_prefix_rexp = re.compile(
- b'(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)' % orig_bytes)
- new_bytes = b'\\1%s\\2' % new_prefix.encode('utf-8')
+ b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % orig_bytes
+ )
+ new_bytes = b"\\1%s\\2" % new_prefix.encode("utf-8")
compiled_prefixes[orig_prefix_rexp] = new_bytes
# Do relocations on text that refers to the install tree
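
A standalone illustration of the prefix-rewriting regex assembled above, with invented prefixes; the lookbehind keeps the match from starting in the middle of a longer path component.

import re

orig_prefix = "/old/tree/gcc-9.4.0"            # invented prefixes
new_prefix = "/new/tree/gcc-9.4.0"
orig_bytes = orig_prefix.encode("utf-8")
rexp = re.compile(b"(?<![\\w\\-_/])([\\w\\-_]*?)%s([\\w\\-_/]*)" % orig_bytes)
replacement = b"\\1%s\\2" % new_prefix.encode("utf-8")

line = b"PREFIX=/old/tree/gcc-9.4.0/bin\n"
print(rexp.sub(replacement, line))             # b'PREFIX=/new/tree/gcc-9.4.0/bin\n'
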
@@ -838,11 +805,11 @@ def relocate_text_bin(binaries, prefixes, concurrency=32):
if isinstance(orig_prefix, bytes):
orig_bytes = orig_prefix
else:
- orig_bytes = orig_prefix.encode('utf-8')
+ orig_bytes = orig_prefix.encode("utf-8")
if isinstance(new_prefix, bytes):
new_bytes = new_prefix
else:
- new_bytes = new_prefix.encode('utf-8')
+ new_bytes = new_prefix.encode("utf-8")
byte_prefixes[orig_bytes] = new_bytes
# Do relocations on text in binaries that refers to the install tree
@@ -875,16 +842,15 @@ def is_relocatable(spec):
ValueError: if the spec is not installed
"""
if not spec.install_status():
- raise ValueError('spec is not installed [{0}]'.format(str(spec)))
+ raise ValueError("spec is not installed [{0}]".format(str(spec)))
if spec.external or spec.virtual:
- tty.warn('external or virtual package %s is not relocatable' %
- spec.name)
+ tty.warn("external or virtual package %s is not relocatable" % spec.name)
return False
# Explore the installation prefix of the spec
for root, dirs, files in os.walk(spec.prefix, topdown=True):
- dirs[:] = [d for d in dirs if d not in ('.spack', 'man')]
+ dirs[:] = [d for d in dirs if d not in (".spack", "man")]
abs_files = [os.path.join(root, f) for f in files]
if not all(file_is_relocatable(f) for f in abs_files if is_binary(f)):
            # If any of the files is not relocatable, the entire
@@ -911,26 +877,26 @@ def file_is_relocatable(filename, paths_to_relocate=None):
paths_to_relocate = paths_to_relocate or default_paths_to_relocate
if not os.path.exists(filename):
- raise ValueError('{0} does not exist'.format(filename))
+ raise ValueError("{0} does not exist".format(filename))
if not os.path.isabs(filename):
- raise ValueError('{0} is not an absolute path'.format(filename))
+ raise ValueError("{0} is not an absolute path".format(filename))
- strings = executable.Executable('strings')
+ strings = executable.Executable("strings")
# Remove the RPATHS from the strings in the executable
set_of_strings = set(strings(filename, output=str).split())
m_type, m_subtype = mime_type(filename)
- if m_type == 'application':
- tty.debug('{0},{1}'.format(m_type, m_subtype))
+ if m_type == "application":
+ tty.debug("{0},{1}".format(m_type, m_subtype))
if not is_macos:
- if m_subtype == 'x-executable' or m_subtype == 'x-sharedlib':
- rpaths = ':'.join(_elf_rpaths_for(filename))
+ if m_subtype == "x-executable" or m_subtype == "x-sharedlib":
+ rpaths = ":".join(_elf_rpaths_for(filename))
set_of_strings.discard(rpaths)
else:
- if m_subtype == 'x-mach-binary':
+ if m_subtype == "x-mach-binary":
rpaths, deps, idpath = macholib_get_paths(filename)
set_of_strings.discard(set(rpaths))
set_of_strings.discard(set(deps))
@@ -959,19 +925,19 @@ def is_binary(filename):
"""
m_type, _ = mime_type(filename)
- msg = '[{0}] -> '.format(filename)
- if m_type == 'application':
- tty.debug(msg + 'BINARY FILE')
+ msg = "[{0}] -> ".format(filename)
+ if m_type == "application":
+ tty.debug(msg + "BINARY FILE")
return True
- tty.debug(msg + 'TEXT FILE')
+ tty.debug(msg + "TEXT FILE")
return False
@llnl.util.lang.memoized
def _get_mime_type():
- file_cmd = executable.which('file')
- for arg in ['-b', '-h', '--mime-type']:
+ file_cmd = executable.which("file")
+ for arg in ["-b", "-h", "--mime-type"]:
file_cmd.add_default_arg(arg)
return file_cmd
@@ -987,8 +953,8 @@ def mime_type(filename):
Tuple containing the MIME type and subtype
"""
output = _get_mime_type()(filename, output=str, error=str).strip()
- tty.debug('==> ' + output)
- type, _, subtype = output.partition('/')
+ tty.debug("==> " + output)
+ type, _, subtype = output.partition("/")
return type, subtype
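
A rough subprocess-based equivalent of the memoized `file` wrapper and mime_type helper above; it assumes a GNU `file` executable is installed, and the sample path in the commented call is only a host-dependent example.

import subprocess

def sketch_mime_type(filename):
    # same flags the wrapper installs: brief output, do not follow symlinks,
    # print the MIME type only
    out = subprocess.run(
        ["file", "-b", "-h", "--mime-type", filename],
        capture_output=True, text=True, check=True,
    ).stdout.strip()
    mtype, _, subtype = out.partition("/")
    return mtype, subtype

# sketch_mime_type("/etc/hosts")  # e.g. ('text', 'plain'), host-dependent
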
@@ -1009,7 +975,7 @@ def fixup_macos_rpath(root, filename):
True if fixups were applied, else False
"""
abspath = os.path.join(root, filename)
- if mime_type(abspath) != ('application', 'x-mach-binary'):
+ if mime_type(abspath) != ("application", "x-mach-binary"):
return False
# Get Mach-O header commands
@@ -1028,22 +994,20 @@ def fixup_macos_rpath(root, filename):
spack_root = spack.store.layout.root
for name in deps:
if name.startswith(spack_root):
- tty.debug("Spack-installed dependency for {0}: {1}"
- .format(abspath, name))
+ tty.debug("Spack-installed dependency for {0}: {1}".format(abspath, name))
(dirname, basename) = os.path.split(name)
if dirname != root or dirname in rpaths:
# Only change the rpath if it's a dependency *or* if the root
# rpath was already added to the library (this is to prevent
# GCC or similar getting rpaths when they weren't at all
# configured)
- args += ['-change', name, '@rpath/' + basename]
- add_rpaths.add(dirname.rstrip('/'))
+ args += ["-change", name, "@rpath/" + basename]
+ add_rpaths.add(dirname.rstrip("/"))
# Check for nonexistent rpaths (often added by spack linker overzealousness
# with both lib/ and lib64/) and duplicate rpaths
for (rpath, count) in rpaths.items():
- if (rpath.startswith('@loader_path')
- or rpath.startswith('@executable_path')):
+ if rpath.startswith("@loader_path") or rpath.startswith("@executable_path"):
# Allowable relative paths
pass
elif not _exists_dir(rpath):
@@ -1054,25 +1018,23 @@ def fixup_macos_rpath(root, filename):
# duplicated between Spack's compiler and libtool. If there are
# more copies of the same one, something is very odd....
tty_debug = tty.debug if count == 2 else tty.warn
- tty_debug("Rpath appears {0} times in {1}: {2}".format(
- count, abspath, rpath
- ))
+ tty_debug("Rpath appears {0} times in {1}: {2}".format(count, abspath, rpath))
del_rpaths.add(rpath)
# Delete bad rpaths
for rpath in del_rpaths:
- args += ['-delete_rpath', rpath]
+ args += ["-delete_rpath", rpath]
# Add missing rpaths that are not set for deletion
for rpath in add_rpaths - del_rpaths - set(rpaths):
- args += ['-add_rpath', rpath]
+ args += ["-add_rpath", rpath]
if not args:
# No fixes needed
return False
args.append(abspath)
- executable.Executable('install_name_tool')(*args)
+ executable.Executable("install_name_tool")(*args)
return True
@@ -1086,23 +1048,21 @@ def fixup_macos_rpaths(spec):
-delete_rpath``.
"""
if spec.external or spec.virtual:
- tty.warn('external or virtual package cannot be fixed up: {0!s}'
- .format(spec))
+ tty.warn("external or virtual package cannot be fixed up: {0!s}".format(spec))
return False
- if 'platform=darwin' not in spec:
- raise NotImplementedError('fixup_macos_rpaths requires macOS')
+ if "platform=darwin" not in spec:
+ raise NotImplementedError("fixup_macos_rpaths requires macOS")
applied = 0
- libs = frozenset(['lib', 'lib64', 'libexec', 'plugins',
- 'Library', 'Frameworks'])
+ libs = frozenset(["lib", "lib64", "libexec", "plugins", "Library", "Frameworks"])
prefix = spec.prefix
if not os.path.exists(prefix):
raise RuntimeError(
- 'Could not fix up install prefix spec {0} because it does '
- 'not exist: {1!s}'.format(prefix, spec.name)
+ "Could not fix up install prefix spec {0} because it does "
+ "not exist: {1!s}".format(prefix, spec.name)
)
# Explore the installation prefix of the spec
@@ -1112,18 +1072,17 @@ def fixup_macos_rpaths(spec):
try:
needed_fix = fixup_macos_rpath(root, name)
except Exception as e:
- tty.warn("Failed to apply library fixups to: {0}/{1}: {2!s}"
- .format(root, name, e))
+ tty.warn("Failed to apply library fixups to: {0}/{1}: {2!s}".format(root, name, e))
needed_fix = False
if needed_fix:
applied += 1
- specname = spec.format('{name}{/hash:7}')
+ specname = spec.format("{name}{/hash:7}")
if applied:
- tty.info('Fixed rpaths for {0:d} {1} installed to {2}'.format(
- applied,
- "binary" if applied == 1 else "binaries",
- specname
- ))
+ tty.info(
+ "Fixed rpaths for {0:d} {1} installed to {2}".format(
+ applied, "binary" if applied == 1 else "binaries", specname
+ )
+ )
else:
- tty.debug('No rpath fixup needed for ' + specname)
+ tty.debug("No rpath fixup needed for " + specname)
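
To make the argument-building above concrete, a fabricated example of the install_name_tool invocation that fixup_macos_rpath assembles; every library name and rpath here is invented.

args = []
args += ["-change", "/opt/spack/openssl-3.0.5/lib/libssl.3.dylib", "@rpath/libssl.3.dylib"]
args += ["-delete_rpath", "/opt/spack/openssl-3.0.5/lib64"]    # nonexistent rpath
args += ["-add_rpath", "/opt/spack/openssl-3.0.5/lib"]
args.append("/opt/spack/curl-7.84.0/lib/libcurl.4.dylib")      # the binary being fixed
print(["install_name_tool"] + args)
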
diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py
index 8d10de4818..11a4e14416 100644
--- a/lib/spack/spack/repo.py
+++ b/lib/spack/spack/repo.py
@@ -42,7 +42,7 @@ import spack.util.path
from spack.util.executable import which
#: Package modules are imported as spack.pkg.<repo-namespace>.<pkg-name>
-ROOT_PYTHON_NAMESPACE = 'spack.pkg'
+ROOT_PYTHON_NAMESPACE = "spack.pkg"
def python_package_for_repo(namespace):
@@ -55,7 +55,7 @@ def python_package_for_repo(namespace):
Args:
namespace (str): repo namespace
"""
- return '{0}.{1}'.format(ROOT_PYTHON_NAMESPACE, namespace)
+ return "{0}.{1}".format(ROOT_PYTHON_NAMESPACE, namespace)
def namespace_from_fullname(fullname):
@@ -68,10 +68,10 @@ def namespace_from_fullname(fullname):
Args:
fullname (str): full name for the Python module
"""
- namespace, dot, module = fullname.rpartition('.')
- prefix_and_dot = '{0}.'.format(ROOT_PYTHON_NAMESPACE)
+ namespace, dot, module = fullname.rpartition(".")
+ prefix_and_dot = "{0}.".format(ROOT_PYTHON_NAMESPACE)
if namespace.startswith(prefix_and_dot):
- namespace = namespace[len(prefix_and_dot):]
+ namespace = namespace[len(prefix_and_dot) :]
return namespace
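
A quick illustration of namespace_from_fullname on a made-up module path:

fullname = "spack.pkg.builtin.zlib"            # hypothetical module path
namespace, _, module = fullname.rpartition(".")
prefix_and_dot = "spack.pkg."
if namespace.startswith(prefix_and_dot):
    namespace = namespace[len(prefix_and_dot):]
print(namespace, module)                       # builtin zlib
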
@@ -121,12 +121,12 @@ if sys.version_info[0] == 2:
See the ``importlib``-based importer for a faster way to do this in
later versions of python.
"""
- text = kwargs.get('text', None)
+ text = kwargs.get("text", None)
with open(f, *args) as f:
- with tempfile.NamedTemporaryFile(mode='w+') as tf:
+ with tempfile.NamedTemporaryFile(mode="w+") as tf:
if text:
- tf.write(text + '\n')
+ tf.write(text + "\n")
tf.write(f.read())
tf.seek(0)
yield tf.file
@@ -141,14 +141,12 @@ if sys.version_info[0] == 2:
def package_module(self):
try:
- module = load_source(
- self.fullname, self.package_py, prepend=self._package_prepend
- )
+ module = load_source(self.fullname, self.package_py, prepend=self._package_prepend)
except SyntaxError as e:
# SyntaxError strips the path from the filename, so we need to
# manually construct the error message in order to give the
# user the correct package.py where the syntax error is located
- msg = 'invalid syntax in {0:}, line {1:}'
+ msg = "invalid syntax in {0:}, line {1:}"
raise SyntaxError(msg.format(self.package_py, e.lineno))
module.__package__ = self.repo.full_namespace
@@ -160,7 +158,7 @@ if sys.version_info[0] == 2:
if fullname in sys.modules:
return sys.modules[fullname]
- namespace, dot, module_name = fullname.rpartition('.')
+ namespace, dot, module_name = fullname.rpartition(".")
try:
module = self.package_module()
@@ -200,13 +198,13 @@ else:
class RepoLoader(_PrependFileLoader):
"""Loads a Python module associated with a package in specific repository"""
+
#: Code in ``_package_prepend`` is prepended to imported packages.
#:
#: Spack packages are expected to call `from spack.package import *`
#: themselves, but we are allowing a deprecation period before breaking
#: external repos that don't do this yet.
- _package_prepend = ('from __future__ import absolute_import;'
- 'from spack.package import *')
+ _package_prepend = "from __future__ import absolute_import;" "from spack.package import *"
def __init__(self, fullname, repo, package_name):
self.repo = repo
@@ -232,7 +230,7 @@ class SpackNamespaceLoader(object):
module = SpackNamespace(fullname)
self.exec_module(module)
- namespace, dot, module_name = fullname.rpartition('.')
+ namespace, dot, module_name = fullname.rpartition(".")
sys.modules[fullname] = module
if namespace != fullname:
parent = sys.modules[namespace]
@@ -247,6 +245,7 @@ class ReposFinder(object):
Return a loader based on the inspection of the current global repository list.
"""
+
def find_spec(self, fullname, python_path, target=None):
# This function is Python 3 only and will not be called by Python 2.7
import importlib.util
@@ -266,7 +265,7 @@ class ReposFinder(object):
def compute_loader(self, fullname):
# namespaces are added to repo, and package modules are leaves.
- namespace, dot, module_name = fullname.rpartition('.')
+ namespace, dot, module_name = fullname.rpartition(".")
# If it's a module in some repo, or if it is the repo's
# namespace, let the repo handle it.
@@ -299,10 +298,10 @@ class ReposFinder(object):
#
# These names describe how repos should be laid out in the filesystem.
#
-repo_config_name = 'repo.yaml' # Top-level filename for repo config.
-repo_index_name = 'index.yaml' # Top-level filename for repository index.
-packages_dir_name = 'packages' # Top-level repo directory containing pkgs.
-package_file_name = 'package.py' # Filename for packages in a repository.
+repo_config_name = "repo.yaml" # Top-level filename for repo config.
+repo_index_name = "index.yaml" # Top-level filename for repository index.
+packages_dir_name = "packages" # Top-level repo directory containing pkgs.
+package_file_name = "package.py" # Filename for packages in a repository.
#: Guaranteed unused default value for some functions.
NOT_PROVIDED = object()
@@ -311,9 +310,9 @@ NOT_PROVIDED = object()
def packages_path():
"""Get the test repo if it is active, otherwise the builtin repo."""
try:
- return spack.repo.path.get_repo('builtin.mock').packages_path
+ return spack.repo.path.get_repo("builtin.mock").packages_path
except spack.repo.UnknownNamespaceError:
- return spack.repo.path.get_repo('builtin').packages_path
+ return spack.repo.path.get_repo("builtin").packages_path
class GitExe:
@@ -322,7 +321,7 @@ class GitExe:
#
# Not using -C as that is not supported for git < 1.8.5.
def __init__(self):
- self._git_cmd = which('git', required=True)
+ self._git_cmd = which("git", required=True)
def __call__(self, *args, **kwargs):
with working_dir(packages_path()):
@@ -345,11 +344,11 @@ def list_packages(rev):
git = get_git()
# git ls-tree does not support ... merge-base syntax, so do it manually
- if rev.endswith('...'):
- ref = rev.replace('...', '')
- rev = git('merge-base', ref, 'HEAD', output=str).strip()
+ if rev.endswith("..."):
+ ref = rev.replace("...", "")
+ rev = git("merge-base", ref, "HEAD", output=str).strip()
- output = git('ls-tree', '-r', '--name-only', rev, output=str)
+ output = git("ls-tree", "-r", "--name-only", rev, output=str)
# recursively list the packages directory
package_paths = [
@@ -364,14 +363,14 @@ def list_packages(rev):
def diff_packages(rev1, rev2):
"""Compute packages lists for the two revisions and return a tuple
- containing all the packages in rev1 but not in rev2 and all the
- packages in rev2 but not in rev1."""
+ containing all the packages in rev1 but not in rev2 and all the
+ packages in rev2 but not in rev1."""
p1 = set(list_packages(rev1))
p2 = set(list_packages(rev2))
return p1.difference(p2), p2.difference(p1)
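
A tiny illustration of the set arithmetic in diff_packages, with placeholder package names:

p1 = {"zlib", "cmake", "removed-pkg"}          # packages at rev1 (placeholders)
p2 = {"zlib", "cmake", "added-pkg"}            # packages at rev2
removed, added = p1 - p2, p2 - p1
print(removed, added)                          # {'removed-pkg'} {'added-pkg'}
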
-def get_all_package_diffs(type, rev1='HEAD^1', rev2='HEAD'):
+def get_all_package_diffs(type, rev1="HEAD^1", rev2="HEAD"):
"""Show packages changed, added, or removed (or any combination of those)
since a commit.
@@ -386,17 +385,18 @@ def get_all_package_diffs(type, rev1='HEAD^1', rev2='HEAD'):
        A set containing the names of affected packages.
"""
lower_type = type.lower()
- if not re.match('^[arc]*$', lower_type):
- tty.die("Invald change type: '%s'." % type,
- "Can contain only A (added), R (removed), or C (changed)")
+ if not re.match("^[arc]*$", lower_type):
+ tty.die(
+            "Invalid change type: '%s'." % type,
+ "Can contain only A (added), R (removed), or C (changed)",
+ )
removed, added = diff_packages(rev1, rev2)
git = get_git()
- out = git('diff', '--relative', '--name-only', rev1, rev2,
- output=str).strip()
+ out = git("diff", "--relative", "--name-only", rev1, rev2, output=str).strip()
- lines = [] if not out else re.split(r'\s+', out)
+ lines = [] if not out else re.split(r"\s+", out)
changed = set()
for path in lines:
pkg_name, _, _ = path.partition(os.sep)
@@ -404,11 +404,11 @@ def get_all_package_diffs(type, rev1='HEAD^1', rev2='HEAD'):
changed.add(pkg_name)
packages = set()
- if 'a' in lower_type:
+ if "a" in lower_type:
packages |= added
- if 'r' in lower_type:
+ if "r" in lower_type:
packages |= removed
- if 'c' in lower_type:
+ if "c" in lower_type:
packages |= changed
return packages
@@ -421,21 +421,22 @@ def add_package_to_git_stage(packages):
for pkg_name in packages:
filename = spack.repo.path.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
- tty.die("No such package: %s. Path does not exist:" %
- pkg_name, filename)
+ tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
- git('add', filename)
+ git("add", filename)
def autospec(function):
"""Decorator that automatically converts the first argument of a
function to a Spec.
"""
+
@functools.wraps(function)
def converter(self, spec_like, *args, **kwargs):
if not isinstance(spec_like, spack.spec.Spec):
spec_like = spack.spec.Spec(spec_like)
return function(self, spec_like, *args, **kwargs)
+
return converter
@@ -445,15 +446,19 @@ def is_package_file(filename):
# We have to remove the file extension because it can be .py and can be
# .pyc depending on context, and can differ between the files
import spack.package_base # break cycle
+
filename_noext = os.path.splitext(filename)[0]
- packagebase_filename_noext = os.path.splitext(
- inspect.getfile(spack.package_base.PackageBase))[0]
- return (filename_noext != packagebase_filename_noext and
- os.path.basename(filename_noext) == 'package')
+ packagebase_filename_noext = os.path.splitext(inspect.getfile(spack.package_base.PackageBase))[
+ 0
+ ]
+ return (
+ filename_noext != packagebase_filename_noext
+ and os.path.basename(filename_noext) == "package"
+ )
class SpackNamespace(types.ModuleType):
- """ Allow lazy loading of modules."""
+ """Allow lazy loading of modules."""
def __init__(self, namespace):
super(SpackNamespace, self).__init__(namespace)
@@ -465,7 +470,7 @@ class SpackNamespace(types.ModuleType):
def __getattr__(self, name):
"""Getattr lazily loads modules if they're not already loaded."""
- submodule = self.__package__ + '.' + name
+ submodule = self.__package__ + "." + name
try:
setattr(self, name, __import__(submodule))
except ImportError:
@@ -482,6 +487,7 @@ class FastPackageChecker(Mapping):
all instances referring to it. Update of the global cache is done lazily
during instance initialization.
"""
+
#: Global cache, reused by every instance
_paths_cache = {} # type: Dict[str, Dict[str, os.stat_result]]
@@ -518,16 +524,15 @@ class FastPackageChecker(Mapping):
# Warn about invalid names that look like packages.
if not nm.valid_module_name(pkg_name):
- if not pkg_name.startswith('.'):
- tty.warn('Skipping package at {0}. "{1}" is not '
- 'a valid Spack module name.'.format(
- pkg_dir, pkg_name))
+ if not pkg_name.startswith("."):
+ tty.warn(
+ 'Skipping package at {0}. "{1}" is not '
+ "a valid Spack module name.".format(pkg_dir, pkg_name)
+ )
continue
# Construct the file name from the directory
- pkg_file = os.path.join(
- self.packages_path, pkg_name, package_file_name
- )
+ pkg_file = os.path.join(self.packages_path, pkg_name, package_file_name)
# Use stat here to avoid lots of calls to the filesystem.
try:
@@ -552,8 +557,7 @@ class FastPackageChecker(Mapping):
return cache
def last_mtime(self):
- return max(
- sinfo.st_mtime for sinfo in self._packages_to_stats.values())
+ return max(sinfo.st_mtime for sinfo in self._packages_to_stats.values())
def __getitem__(self, item):
return self._packages_to_stats[item]
@@ -607,6 +611,7 @@ class Indexer(object):
class TagIndexer(Indexer):
"""Lifecycle methods for a TagIndex on a Repo."""
+
def _create(self):
return spack.tag.TagIndex()
@@ -622,6 +627,7 @@ class TagIndexer(Indexer):
class ProviderIndexer(Indexer):
"""Lifecycle methods for virtual package providers."""
+
def _create(self):
return spack.provider_index.ProviderIndex()
@@ -629,7 +635,7 @@ class ProviderIndexer(Indexer):
self.index = spack.provider_index.ProviderIndex.from_json(stream)
def update(self, pkg_fullname):
- name = pkg_fullname.split('.')[-1]
+ name = pkg_fullname.split(".")[-1]
if spack.repo.path.is_virtual(name, use_index=False):
return
self.index.remove_provider(pkg_fullname)
@@ -641,6 +647,7 @@ class ProviderIndexer(Indexer):
class PatchIndexer(Indexer):
"""Lifecycle methods for patch cache."""
+
def _create(self):
return spack.patch.PatchCache()
@@ -676,12 +683,12 @@ class RepoIndex(object):
Generated indexes are accessed by name via ``__getitem__()``.
"""
+
def __init__(self, package_checker, namespace):
self.checker = package_checker
self.packages_path = self.checker.packages_path
- if sys.platform == 'win32':
- self.packages_path = \
- spack.util.path.convert_to_posix_path(self.packages_path)
+ if sys.platform == "win32":
+ self.packages_path = spack.util.path.convert_to_posix_path(self.packages_path)
self.namespace = namespace
self.indexers = {}
@@ -703,7 +710,7 @@ class RepoIndex(object):
"""Get the index with the specified name, reindexing if needed."""
indexer = self.indexers.get(name)
if not indexer:
- raise KeyError('no such index: %s' % name)
+ raise KeyError("no such index: %s" % name)
if name not in self.indexes:
self._build_all_indexes()
@@ -727,16 +734,13 @@ class RepoIndex(object):
"""Determine which packages need an update, and update indexes."""
# Filename of the provider index cache (we assume they're all json)
- cache_filename = '{0}/{1}-index.json'.format(name, self.namespace)
+ cache_filename = "{0}/{1}-index.json".format(name, self.namespace)
        # Compute which packages need to be updated in the cache
misc_cache = spack.caches.misc_cache
index_mtime = misc_cache.mtime(cache_filename)
- needs_update = [
- x for x, sinfo in self.checker.items()
- if sinfo.st_mtime > index_mtime
- ]
+ needs_update = [x for x, sinfo in self.checker.items() if sinfo.st_mtime > index_mtime]
index_existed = misc_cache.init_entry(cache_filename)
if index_existed and not needs_update:
@@ -750,7 +754,7 @@ class RepoIndex(object):
indexer.read(old) if old else indexer.create()
for pkg_name in needs_update:
- namespaced_name = '%s.%s' % (self.namespace, pkg_name)
+ namespaced_name = "%s.%s" % (self.namespace, pkg_name)
indexer.update(namespaced_name)
indexer.write(new)
@@ -784,10 +788,12 @@ class RepoPath(object):
repo = Repo(repo)
self.put_last(repo)
except RepoError as e:
- tty.warn("Failed to initialize repository: '%s'." % repo,
- e.message,
- "To remove the bad repository, run this command:",
- " spack repo rm %s" % repo)
+ tty.warn(
+ "Failed to initialize repository: '%s'." % repo,
+ e.message,
+ "To remove the bad repository, run this command:",
+ " spack repo rm %s" % repo,
+ )
def put_first(self, repo):
"""Add repo first in the search path."""
@@ -905,9 +911,11 @@ class RepoPath(object):
@autospec
def extensions_for(self, extendee_spec):
- return [pkg_cls(spack.spec.Spec(pkg_cls.name))
- for pkg_cls in self.all_package_classes()
- if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)]
+ return [
+ pkg_cls(spack.spec.Spec(pkg_cls.name))
+ for pkg_cls in self.all_package_classes()
+ if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)
+ ]
def last_mtime(self):
"""Time a package file in this repo was last updated."""
@@ -923,7 +931,7 @@ class RepoPath(object):
name = spec.name
else:
# handle strings directly for speed instead of @_autospec'ing
- namespace, _, name = spec.rpartition('.')
+ namespace, _, name = spec.rpartition(".")
# If the spec already has a namespace, then return the
# corresponding repo if we know about it.
@@ -957,8 +965,8 @@ class RepoPath(object):
def dump_provenance(self, spec, path):
"""Dump provenance information for a spec to a particular path.
- This dumps the package file and any associated patch files.
- Raises UnknownPackageError if not found.
+ This dumps the package file and any associated patch files.
+ Raises UnknownPackageError if not found.
"""
return self.repo_for_pkg(spec).dump_provenance(spec, path)
@@ -982,13 +990,13 @@ class RepoPath(object):
be run during the computation of the provider index."""
have_name = pkg_name is not None
if have_name and not isinstance(pkg_name, str):
- raise ValueError(
- "is_virtual(): expected package name, got %s" % type(pkg_name))
+ raise ValueError("is_virtual(): expected package name, got %s" % type(pkg_name))
if use_index:
return have_name and pkg_name in self.provider_index
else:
- return have_name and (not self.exists(pkg_name) or
- self.get_pkg_class(pkg_name).virtual)
+ return have_name and (
+ not self.exists(pkg_name) or self.get_pkg_class(pkg_name).virtual
+ )
def __contains__(self, pkg_name):
return self.exists(pkg_name)
@@ -1024,29 +1032,33 @@ class Repo(object):
# Validate repository layout.
self.config_file = os.path.join(self.root, repo_config_name)
- check(os.path.isfile(self.config_file),
- "No %s found in '%s'" % (repo_config_name, root))
+ check(os.path.isfile(self.config_file), "No %s found in '%s'" % (repo_config_name, root))
self.packages_path = os.path.join(self.root, packages_dir_name)
- check(os.path.isdir(self.packages_path),
- "No directory '%s' found in '%s'" % (packages_dir_name, root))
+ check(
+ os.path.isdir(self.packages_path),
+ "No directory '%s' found in '%s'" % (packages_dir_name, root),
+ )
# Read configuration and validate namespace
config = self._read_config()
- check('namespace' in config, '%s must define a namespace.'
- % os.path.join(root, repo_config_name))
+ check(
+ "namespace" in config,
+ "%s must define a namespace." % os.path.join(root, repo_config_name),
+ )
- self.namespace = config['namespace']
- check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace),
- ("Invalid namespace '%s' in repo '%s'. "
- % (self.namespace, self.root)) +
- "Namespaces must be valid python identifiers separated by '.'")
+ self.namespace = config["namespace"]
+ check(
+ re.match(r"[a-zA-Z][a-zA-Z0-9_.]+", self.namespace),
+ ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root))
+ + "Namespaces must be valid python identifiers separated by '.'",
+ )
# Set up 'full_namespace' to include the super-namespace
self.full_namespace = python_package_for_repo(self.namespace)
# Keep name components around for checking prefixes.
- self._names = self.full_namespace.split('.')
+ self._names = self.full_namespace.split(".")
# These are internal cache variables.
self._modules = {}
@@ -1086,8 +1098,8 @@ class Repo(object):
def is_prefix(self, fullname):
"""True if fullname is a prefix of this Repo's namespace."""
- parts = fullname.split('.')
- return self._names[:len(parts)] == parts
+ parts = fullname.split(".")
+ return self._names[: len(parts)] == parts
def _read_config(self):
"""Check for a YAML config file in this db's root directory."""
@@ -1095,16 +1107,17 @@ class Repo(object):
with open(self.config_file) as reponame_file:
yaml_data = yaml.load(reponame_file)
- if (not yaml_data or 'repo' not in yaml_data or
- not isinstance(yaml_data['repo'], dict)):
- tty.die("Invalid %s in repository %s" % (
- repo_config_name, self.root))
+ if (
+ not yaml_data
+ or "repo" not in yaml_data
+ or not isinstance(yaml_data["repo"], dict)
+ ):
+ tty.die("Invalid %s in repository %s" % (repo_config_name, self.root))
- return yaml_data['repo']
+ return yaml_data["repo"]
except IOError:
- tty.die("Error reading %s when opening %s"
- % (self.config_file, self.root))
+ tty.die("Error reading %s when opening %s" % (self.config_file, self.root))
def get(self, spec):
"""Returns the package associated with the supplied spec."""
@@ -1131,7 +1144,7 @@ class Repo(object):
# Make sure other errors in constructors hit the error
# handler by wrapping them
- if spack.config.get('config:debug'):
+ if spack.config.get("config:debug"):
sys.excepthook(*sys.exc_info())
raise FailedConstructorError(spec.fullname, *sys.exc_info())
@@ -1139,18 +1152,17 @@ class Repo(object):
def dump_provenance(self, spec, path):
"""Dump provenance information for a spec to a particular path.
- This dumps the package file and any associated patch files.
- Raises UnknownPackageError if not found.
+ This dumps the package file and any associated patch files.
+ Raises UnknownPackageError if not found.
"""
if spec.namespace and spec.namespace != self.namespace:
raise UnknownPackageError(
- "Repository %s does not contain package %s."
- % (self.namespace, spec.fullname))
+ "Repository %s does not contain package %s." % (self.namespace, spec.fullname)
+ )
# Install patch files needed by the package.
fs.mkdirp(path)
- for patch in itertools.chain.from_iterable(
- spec.package.patches.values()):
+ for patch in itertools.chain.from_iterable(spec.package.patches.values()):
if patch.path:
if os.path.exists(patch.path):
@@ -1170,25 +1182,25 @@ class Repo(object):
"""Construct the index for this repo lazily."""
if self._repo_index is None:
self._repo_index = RepoIndex(self._pkg_checker, self.namespace)
- self._repo_index.add_indexer('providers', ProviderIndexer())
- self._repo_index.add_indexer('tags', TagIndexer())
- self._repo_index.add_indexer('patches', PatchIndexer())
+ self._repo_index.add_indexer("providers", ProviderIndexer())
+ self._repo_index.add_indexer("tags", TagIndexer())
+ self._repo_index.add_indexer("patches", PatchIndexer())
return self._repo_index
@property
def provider_index(self):
"""A provider index with names *specific* to this repo."""
- return self.index['providers']
+ return self.index["providers"]
@property
def tag_index(self):
"""Index of tags and which packages they're defined on."""
- return self.index['tags']
+ return self.index["tags"]
@property
def patch_index(self):
"""Index of patches and packages they're defined on."""
- return self.index['patches']
+ return self.index["patches"]
@autospec
def providers_for(self, vpkg_spec):
@@ -1199,23 +1211,25 @@ class Repo(object):
@autospec
def extensions_for(self, extendee_spec):
- return [pkg_cls(spack.spec.Spec(pkg_cls.name))
- for pkg_cls in self.all_package_classes()
- if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)]
+ return [
+ pkg_cls(spack.spec.Spec(pkg_cls.name))
+ for pkg_cls in self.all_package_classes()
+ if pkg_cls(spack.spec.Spec(pkg_cls.name)).extends(extendee_spec)
+ ]
def dirname_for_package_name(self, pkg_name):
"""Get the directory name for a particular package. This is the
- directory that contains its package.py file."""
+ directory that contains its package.py file."""
return os.path.join(self.packages_path, pkg_name)
def filename_for_package_name(self, pkg_name):
"""Get the filename for the module we should load for a particular
- package. Packages for a Repo live in
- ``$root/<package_name>/package.py``
+ package. Packages for a Repo live in
+ ``$root/<package_name>/package.py``
- This will return a proper package.py path even if the
- package doesn't exist yet, so callers will need to ensure
- the package exists before importing.
+ This will return a proper package.py path even if the
+ package doesn't exist yet, so callers will need to ensure
+ the package exists before importing.
"""
pkg_dir = self.dirname_for_package_name(pkg_name)
return os.path.join(pkg_dir, package_file_name)
@@ -1279,10 +1293,11 @@ class Repo(object):
package. Then extracts the package class from the module
according to Spack's naming convention.
"""
- namespace, _, pkg_name = pkg_name.rpartition('.')
+ namespace, _, pkg_name = pkg_name.rpartition(".")
if namespace and (namespace != self.namespace):
- raise InvalidNamespaceError('Invalid namespace for %s repo: %s'
- % (self.namespace, namespace))
+ raise InvalidNamespaceError(
+ "Invalid namespace for %s repo: %s" % (self.namespace, namespace)
+ )
class_name = nm.mod_to_class(pkg_name)
@@ -1311,45 +1326,38 @@ class Repo(object):
def create_repo(root, namespace=None):
"""Create a new repository in root with the specified namespace.
- If the namespace is not provided, use basename of root.
- Return the canonicalized path and namespace of the created repository.
+ If the namespace is not provided, use basename of root.
+ Return the canonicalized path and namespace of the created repository.
"""
root = spack.util.path.canonicalize_path(root)
if not namespace:
namespace = os.path.basename(root)
- if not re.match(r'\w[\.\w-]*', namespace):
- raise InvalidNamespaceError(
- "'%s' is not a valid namespace." % namespace)
+ if not re.match(r"\w[\.\w-]*", namespace):
+ raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
existed = False
if os.path.exists(root):
if os.path.isfile(root):
- raise BadRepoError('File %s already exists and is not a directory'
- % root)
+ raise BadRepoError("File %s already exists and is not a directory" % root)
elif os.path.isdir(root):
if not os.access(root, os.R_OK | os.W_OK):
- raise BadRepoError(
- 'Cannot create new repo in %s: cannot access directory.'
- % root)
+ raise BadRepoError("Cannot create new repo in %s: cannot access directory." % root)
if os.listdir(root):
- raise BadRepoError(
- 'Cannot create new repo in %s: directory is not empty.'
- % root)
+ raise BadRepoError("Cannot create new repo in %s: directory is not empty." % root)
existed = True
full_path = os.path.realpath(root)
parent = os.path.dirname(full_path)
if not os.access(parent, os.R_OK | os.W_OK):
- raise BadRepoError(
- "Cannot create repository in %s: can't access parent!" % root)
+ raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
try:
config_path = os.path.join(root, repo_config_name)
packages_path = os.path.join(root, packages_dir_name)
fs.mkdirp(packages_path)
- with open(config_path, 'w') as config:
+ with open(config_path, "w") as config:
config.write("repo:\n")
config.write(" namespace: '%s'\n" % namespace)
@@ -1361,8 +1369,9 @@ def create_repo(root, namespace=None):
else:
shutil.rmtree(root, ignore_errors=True)
- raise BadRepoError('Failed to create new repository in %s.' % root,
- "Caused by %s: %s" % (type(e), e))
+ raise BadRepoError(
+ "Failed to create new repository in %s." % root, "Caused by %s: %s" % (type(e), e)
+ )
return full_path, namespace
@@ -1377,10 +1386,9 @@ def create_or_construct(path, namespace=None):
def _path(repo_dirs=None):
"""Get the singleton RepoPath instance for Spack."""
- repo_dirs = repo_dirs or spack.config.get('repos')
+ repo_dirs = repo_dirs or spack.config.get("repos")
if not repo_dirs:
- raise NoRepoConfiguredError(
- "Spack configuration contains no package repositories.")
+ raise NoRepoConfiguredError("Spack configuration contains no package repositories.")
return RepoPath(*repo_dirs)
@@ -1481,8 +1489,7 @@ class UnknownNamespaceError(UnknownEntityError):
"""Raised when we encounter an unknown namespace"""
def __init__(self, namespace):
- super(UnknownNamespaceError, self).__init__(
- "Unknown namespace: %s" % namespace)
+ super(UnknownNamespaceError, self).__init__("Unknown namespace: %s" % namespace)
class FailedConstructorError(RepoError):
@@ -1491,7 +1498,8 @@ class FailedConstructorError(RepoError):
def __init__(self, name, exc_type, exc_obj, exc_tb):
super(FailedConstructorError, self).__init__(
"Class constructor failed for package '%s'." % name,
- '\nCaused by:\n' +
- ('%s: %s\n' % (exc_type.__name__, exc_obj)) +
- ''.join(traceback.format_tb(exc_tb)))
+ "\nCaused by:\n"
+ + ("%s: %s\n" % (exc_type.__name__, exc_obj))
+ + "".join(traceback.format_tb(exc_tb)),
+ )
self.name = name
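
As a rough sketch of the on-disk layout create_repo produces (the root is chosen with tempfile here and the namespace is invented, matching repo_config_name and packages_dir_name above):

import os
import tempfile

root = tempfile.mkdtemp(prefix="my-spack-repo-")             # stand-in for the requested root
os.makedirs(os.path.join(root, "packages"))                  # packages_dir_name
with open(os.path.join(root, "repo.yaml"), "w") as config:   # repo_config_name
    config.write("repo:\n")
    config.write("  namespace: 'myrepo'\n")                  # invented namespace
print(root)
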
diff --git a/lib/spack/spack/report.py b/lib/spack/spack/report.py
index 8e50ebece6..ec81502887 100644
--- a/lib/spack/spack/report.py
+++ b/lib/spack/spack/report.py
@@ -20,33 +20,24 @@ from spack.reporter import Reporter
from spack.reporters.cdash import CDash
from spack.reporters.junit import JUnit
-report_writers = {
- None: Reporter,
- 'junit': JUnit,
- 'cdash': CDash
-}
+report_writers = {None: Reporter, "junit": JUnit, "cdash": CDash}
#: Allowed report formats
valid_formats = list(report_writers.keys())
-__all__ = [
- 'valid_formats',
- 'collect_info'
-]
+__all__ = ["valid_formats", "collect_info"]
def fetch_log(pkg, do_fn, dir):
log_files = {
- '_install_task': pkg.build_log_path,
- 'do_test': os.path.join(dir, TestSuite.test_log_name(pkg.spec)),
+ "_install_task": pkg.build_log_path,
+ "do_test": os.path.join(dir, TestSuite.test_log_name(pkg.spec)),
}
try:
- with codecs.open(log_files[do_fn.__name__], 'r', 'utf-8') as f:
- return ''.join(f.readlines())
+ with codecs.open(log_files[do_fn.__name__], "r", "utf-8") as f:
+ return "".join(f.readlines())
except Exception:
- return 'Cannot open log for {0}'.format(
- pkg.spec.cshort_spec
- )
+ return "Cannot open log for {0}".format(pkg.spec.cshort_spec)
class InfoCollector(object):
@@ -64,6 +55,7 @@ class InfoCollector(object):
specs (list of Spec): specs whose install information will
be recorded
"""
+
def __init__(self, wrap_class, do_fn, specs, dir):
#: Class for which to wrap a function
self.wrap_class = wrap_class
@@ -82,44 +74,38 @@ class InfoCollector(object):
def __enter__(self):
# Initialize the spec report with the data that is available upfront.
for input_spec in self.input_specs:
- name_fmt = '{0}_{1}'
- name = name_fmt.format(input_spec.name,
- input_spec.dag_hash(length=7))
+ name_fmt = "{0}_{1}"
+ name = name_fmt.format(input_spec.name, input_spec.dag_hash(length=7))
spec = {
- 'name': name,
- 'nerrors': None,
- 'nfailures': None,
- 'npackages': None,
- 'time': None,
- 'timestamp': time.strftime(
- "%a, %d %b %Y %H:%M:%S", time.gmtime()
- ),
- 'properties': [],
- 'packages': []
+ "name": name,
+ "nerrors": None,
+ "nfailures": None,
+ "npackages": None,
+ "time": None,
+ "timestamp": time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime()),
+ "properties": [],
+ "packages": [],
}
self.specs.append(spec)
- Property = collections.namedtuple('Property', ['name', 'value'])
- spec['properties'].append(
- Property('architecture', input_spec.architecture)
- )
- spec['properties'].append(
- Property('compiler', input_spec.compiler))
+ Property = collections.namedtuple("Property", ["name", "value"])
+ spec["properties"].append(Property("architecture", input_spec.architecture))
+ spec["properties"].append(Property("compiler", input_spec.compiler))
# Check which specs are already installed and mark them as skipped
# only for install_task
- if self.do_fn == '_install_task':
+ if self.do_fn == "_install_task":
for dep in filter(lambda x: x.installed, input_spec.traverse()):
package = {
- 'name': dep.name,
- 'id': dep.dag_hash(),
- 'elapsed_time': '0.0',
- 'result': 'skipped',
- 'message': 'Spec already installed'
+ "name": dep.name,
+ "id": dep.dag_hash(),
+ "elapsed_time": "0.0",
+ "result": "skipped",
+ "message": "Spec already installed",
}
- spec['packages'].append(package)
+ spec["packages"].append(package)
def gather_info(do_fn):
"""Decorates do_fn to gather useful information for
@@ -128,11 +114,12 @@ class InfoCollector(object):
It's defined here to capture the environment and build
this context as the installations proceed.
"""
+
@functools.wraps(do_fn)
def wrapper(instance, *args, **kwargs):
if isinstance(instance, spack.package_base.PackageBase):
pkg = instance
- elif hasattr(args[0], 'pkg'):
+ elif hasattr(args[0], "pkg"):
pkg = args[0].pkg
else:
raise Exception
@@ -141,12 +128,12 @@ class InfoCollector(object):
installed_already = pkg.spec.installed
package = {
- 'name': pkg.name,
- 'id': pkg.spec.dag_hash(),
- 'elapsed_time': None,
- 'result': None,
- 'message': None,
- 'installed_from_binary_cache': False
+ "name": pkg.name,
+ "id": pkg.spec.dag_hash(),
+ "elapsed_time": None,
+ "result": None,
+ "message": None,
+ "installed_from_binary_cache": False,
}
# Append the package to the correct spec report. In some
@@ -157,11 +144,8 @@ class InfoCollector(object):
for s in llnl.util.lang.dedupe([pkg.spec.root, pkg.spec]):
name = name_fmt.format(s.name, s.dag_hash(length=7))
try:
- item = next((
- x for x in self.specs
- if x['name'] == name
- ))
- item['packages'].append(package)
+ item = next((x for x in self.specs if x["name"] == name))
+ item["packages"].append(package)
except StopIteration:
pass
@@ -170,47 +154,44 @@ class InfoCollector(object):
try:
value = do_fn(instance, *args, **kwargs)
- externals = kwargs.get('externals', False)
+ externals = kwargs.get("externals", False)
skip_externals = pkg.spec.external and not externals
- if do_fn.__name__ == 'do_test' and skip_externals:
- package['result'] = 'skipped'
+ if do_fn.__name__ == "do_test" and skip_externals:
+ package["result"] = "skipped"
else:
- package['result'] = 'success'
- package['stdout'] = fetch_log(pkg, do_fn, self.dir)
- package['installed_from_binary_cache'] = \
- pkg.installed_from_binary_cache
- if do_fn.__name__ == '_install_task' and installed_already:
+ package["result"] = "success"
+ package["stdout"] = fetch_log(pkg, do_fn, self.dir)
+ package["installed_from_binary_cache"] = pkg.installed_from_binary_cache
+ if do_fn.__name__ == "_install_task" and installed_already:
return
except spack.build_environment.InstallError as e:
# An InstallError is considered a failure (the recipe
# didn't work correctly)
- package['result'] = 'failure'
- package['message'] = e.message or 'Installation failure'
- package['stdout'] = fetch_log(pkg, do_fn, self.dir)
- package['stdout'] += package['message']
- package['exception'] = e.traceback
+ package["result"] = "failure"
+ package["message"] = e.message or "Installation failure"
+ package["stdout"] = fetch_log(pkg, do_fn, self.dir)
+ package["stdout"] += package["message"]
+ package["exception"] = e.traceback
raise
except (Exception, BaseException) as e:
# Everything else is an error (the installation
# failed outside of the child process)
- package['result'] = 'error'
- package['stdout'] = fetch_log(pkg, do_fn, self.dir)
- package['message'] = str(e) or 'Unknown error'
- package['exception'] = traceback.format_exc()
+ package["result"] = "error"
+ package["stdout"] = fetch_log(pkg, do_fn, self.dir)
+ package["message"] = str(e) or "Unknown error"
+ package["exception"] = traceback.format_exc()
raise
finally:
- package['elapsed_time'] = time.time() - start_time
+ package["elapsed_time"] = time.time() - start_time
return value
return wrapper
- setattr(self.wrap_class, self.do_fn, gather_info(
- getattr(self.wrap_class, self.do_fn)
- ))
+ setattr(self.wrap_class, self.do_fn, gather_info(getattr(self.wrap_class, self.do_fn)))
def __exit__(self, exc_type, exc_val, exc_tb):
@@ -218,16 +199,10 @@ class InfoCollector(object):
setattr(self.wrap_class, self.do_fn, self._backup_do_fn)
for spec in self.specs:
- spec['npackages'] = len(spec['packages'])
- spec['nfailures'] = len(
- [x for x in spec['packages'] if x['result'] == 'failure']
- )
- spec['nerrors'] = len(
- [x for x in spec['packages'] if x['result'] == 'error']
- )
- spec['time'] = sum([
- float(x['elapsed_time']) for x in spec['packages']
- ])
+ spec["npackages"] = len(spec["packages"])
+ spec["nfailures"] = len([x for x in spec["packages"] if x["result"] == "failure"])
+ spec["nerrors"] = len([x for x in spec["packages"] if x["result"] == "error"])
+ spec["time"] = sum([float(x["elapsed_time"]) for x in spec["packages"]])
class collect_info(object):
@@ -265,19 +240,19 @@ class collect_info(object):
Raises:
ValueError: when ``format_name`` is not in ``valid_formats``
"""
+
def __init__(self, cls, function, format_name, args):
self.cls = cls
self.function = function
self.filename = None
if args.cdash_upload_url:
- self.format_name = 'cdash'
- self.filename = 'cdash_report'
+ self.format_name = "cdash"
+ self.filename = "cdash_report"
else:
self.format_name = format_name
# Check that the format is valid.
if self.format_name not in valid_formats:
- raise ValueError('invalid report type: {0}'
- .format(self.format_name))
+ raise ValueError("invalid report type: {0}".format(self.format_name))
self.report_writer = report_writers[self.format_name](args)
def __call__(self, type, dir=None):
@@ -291,8 +266,7 @@ class collect_info(object):
def __enter__(self):
if self.format_name:
# Start the collector and patch self.function on appropriate class
- self.collector = InfoCollector(
- self.cls, self.function, self.specs, self.dir)
+ self.collector = InfoCollector(self.cls, self.function, self.specs, self.dir)
self.collector.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
@@ -301,6 +275,6 @@ class collect_info(object):
# original PackageInstaller._install_task
self.collector.__exit__(exc_type, exc_val, exc_tb)
- report_data = {'specs': self.collector.specs}
- report_fn = getattr(self.report_writer, '%s_report' % self.type)
+ report_data = {"specs": self.collector.specs}
+ report_fn = getattr(self.report_writer, "%s_report" % self.type)
report_fn(self.filename, report_data)
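For orientation, here is a minimal sketch of how this context manager is driven, based only on the signatures visible above (the constructor, __call__, and the filename and specs attributes). The argparse fields, the wrapped class/method, and the empty install loop are illustrative assumptions, not the actual Spack command code.

import argparse

import spack.installer
import spack.report

# Only cdash_upload_url is known (from __init__ above) to be read here.
args = argparse.Namespace(cdash_upload_url=None)

# Wrap PackageInstaller._install_task (per the comment above) and collect
# results into a JUnit report while the context is active.
reporter = spack.report.collect_info(
    spack.installer.PackageInstaller, "_install_task", "junit", args
)
reporter.filename = "install_report.xml"  # hypothetical output path
reporter.specs = []  # normally the concretized specs being installed

with reporter("build"):
    # ... run installs here; every wrapped _install_task call is recorded ...
    pass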
diff --git a/lib/spack/spack/reporter.py b/lib/spack/spack/reporter.py
index dcb552f749..6dc8cff2e0 100644
--- a/lib/spack/spack/reporter.py
+++ b/lib/spack/spack/reporter.py
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-__all__ = ['Reporter']
+__all__ = ["Reporter"]
class Reporter(object):
diff --git a/lib/spack/spack/reporters/cdash.py b/lib/spack/spack/reporters/cdash.py
index 388884039d..6107aaed50 100644
--- a/lib/spack/spack/reporters/cdash.py
+++ b/lib/spack/spack/reporters/cdash.py
@@ -29,21 +29,21 @@ from spack.util.crypto import checksum
from spack.util.executable import which
from spack.util.log_parse import parse_log_events
-__all__ = ['CDash']
+__all__ = ["CDash"]
# Mapping Spack phases to the corresponding CTest/CDash phase.
map_phases_to_cdash = {
- 'autoreconf': 'configure',
- 'cmake': 'configure',
- 'configure': 'configure',
- 'edit': 'configure',
- 'build': 'build',
- 'install': 'build'
+ "autoreconf": "configure",
+ "cmake": "configure",
+ "configure": "configure",
+ "edit": "configure",
+ "build": "build",
+ "install": "build",
}
# Initialize data structures common to each phase's report.
cdash_phases = set(map_phases_to_cdash.values())
-cdash_phases.add('update')
+cdash_phases.add("update")
class CDash(Reporter):
@@ -64,7 +64,7 @@ class CDash(Reporter):
self.success = True
# Posixpath is used here to support the underlying template engine
# Jinja2, which expects `/` path separators
- self.template_dir = posixpath.join('reports', 'cdash')
+ self.template_dir = posixpath.join("reports", "cdash")
self.cdash_upload_url = args.cdash_upload_url
if self.cdash_upload_url:
@@ -72,21 +72,21 @@ class CDash(Reporter):
self.phase_regexp = re.compile(r"Executing phase: '(.*)'")
self.authtoken = None
- if 'SPACK_CDASH_AUTH_TOKEN' in os.environ:
+ if "SPACK_CDASH_AUTH_TOKEN" in os.environ:
tty.verbose("Using CDash auth token from environment")
- self.authtoken = os.environ.get('SPACK_CDASH_AUTH_TOKEN')
+ self.authtoken = os.environ.get("SPACK_CDASH_AUTH_TOKEN")
- if getattr(args, 'spec', ''):
+ if getattr(args, "spec", ""):
packages = args.spec
- elif getattr(args, 'specs', ''):
+ elif getattr(args, "specs", ""):
packages = args.specs
else:
packages = []
for file in args.specfiles:
- with open(file, 'r') as f:
+ with open(file, "r") as f:
s = spack.spec.Spec.from_yaml(f)
packages.append(s.format())
- self.install_command = ' '.join(packages)
+ self.install_command = " ".join(packages)
self.base_buildname = args.cdash_build or self.install_command
self.site = args.cdash_site or socket.gethostname()
self.osname = platform.system()
@@ -95,60 +95,56 @@ class CDash(Reporter):
self.buildstamp = args.cdash_buildstamp
else:
buildstamp_format = "%Y%m%d-%H%M-{0}".format(args.cdash_track)
- self.buildstamp = time.strftime(buildstamp_format,
- time.localtime(self.endtime))
+ self.buildstamp = time.strftime(buildstamp_format, time.localtime(self.endtime))
self.buildIds = collections.OrderedDict()
- self.revision = ''
- git = which('git')
+ self.revision = ""
+ git = which("git")
with working_dir(spack.paths.spack_root):
- self.revision = git('rev-parse', 'HEAD', output=str).strip()
+ self.revision = git("rev-parse", "HEAD", output=str).strip()
self.multiple_packages = False
def build_report_for_package(self, directory_name, package, duration):
- if 'stdout' not in package:
+ if "stdout" not in package:
# Skip reporting on packages that did not generate any output.
return
- self.current_package_name = package['name']
+ self.current_package_name = package["name"]
if self.multiple_packages:
- self.buildname = "{0} - {1}".format(
- self.base_buildname, package['name'])
+ self.buildname = "{0} - {1}".format(self.base_buildname, package["name"])
else:
self.buildname = self.base_buildname
report_data = self.initialize_report(directory_name)
for phase in cdash_phases:
report_data[phase] = {}
- report_data[phase]['loglines'] = []
- report_data[phase]['status'] = 0
- report_data[phase]['endtime'] = self.endtime
+ report_data[phase]["loglines"] = []
+ report_data[phase]["status"] = 0
+ report_data[phase]["endtime"] = self.endtime
# Track the phases we perform so we know what reports to create.
# We always report the update step because this is how we tell CDash
# what revision of Spack we are using.
- phases_encountered = ['update']
+ phases_encountered = ["update"]
# Generate a report for this package.
- current_phase = ''
- cdash_phase = ''
- for line in package['stdout'].splitlines():
+ current_phase = ""
+ cdash_phase = ""
+ for line in package["stdout"].splitlines():
match = None
if line.find("Executing phase: '") != -1:
match = self.phase_regexp.search(line)
if match:
current_phase = match.group(1)
if current_phase not in map_phases_to_cdash:
- current_phase = ''
+ current_phase = ""
continue
- cdash_phase = \
- map_phases_to_cdash[current_phase]
+ cdash_phase = map_phases_to_cdash[current_phase]
if cdash_phase not in phases_encountered:
phases_encountered.append(cdash_phase)
- report_data[cdash_phase]['loglines'].append(
- text_type("{0} output for {1}:".format(
- cdash_phase, package['name'])))
+ report_data[cdash_phase]["loglines"].append(
+ text_type("{0} output for {1}:".format(cdash_phase, package["name"]))
+ )
elif cdash_phase:
- report_data[cdash_phase]['loglines'].append(
- xml.sax.saxutils.escape(line))
+ report_data[cdash_phase]["loglines"].append(xml.sax.saxutils.escape(line))
# Move the build phase to the front of the list if it occurred.
# This supports older versions of CDash that expect this phase
@@ -159,13 +155,12 @@ class CDash(Reporter):
self.starttime = self.endtime - duration
for phase in phases_encountered:
- report_data[phase]['starttime'] = self.starttime
- report_data[phase]['log'] = \
- '\n'.join(report_data[phase]['loglines'])
- errors, warnings = parse_log_events(report_data[phase]['loglines'])
+ report_data[phase]["starttime"] = self.starttime
+ report_data[phase]["log"] = "\n".join(report_data[phase]["loglines"])
+ errors, warnings = parse_log_events(report_data[phase]["loglines"])
# Convert errors to warnings if the package reported success.
- if package['result'] == 'success':
+ if package["result"] == "success":
warnings = errors + warnings
errors = []
@@ -176,53 +171,51 @@ class CDash(Reporter):
if nerrors > 0:
self.success = False
- if phase == 'configure':
- report_data[phase]['status'] = 1
+ if phase == "configure":
+ report_data[phase]["status"] = 1
- if phase == 'build':
+ if phase == "build":
# Convert log output from ASCII to Unicode and escape for XML.
def clean_log_event(event):
event = vars(event)
- event['text'] = xml.sax.saxutils.escape(event['text'])
- event['pre_context'] = xml.sax.saxutils.escape(
- '\n'.join(event['pre_context']))
- event['post_context'] = xml.sax.saxutils.escape(
- '\n'.join(event['post_context']))
+ event["text"] = xml.sax.saxutils.escape(event["text"])
+ event["pre_context"] = xml.sax.saxutils.escape("\n".join(event["pre_context"]))
+ event["post_context"] = xml.sax.saxutils.escape(
+ "\n".join(event["post_context"])
+ )
# source_file and source_line_no are either strings or
# the tuple (None,). Distinguish between these two cases.
- if event['source_file'][0] is None:
- event['source_file'] = ''
- event['source_line_no'] = ''
+ if event["source_file"][0] is None:
+ event["source_file"] = ""
+ event["source_line_no"] = ""
else:
- event['source_file'] = xml.sax.saxutils.escape(
- event['source_file'])
+ event["source_file"] = xml.sax.saxutils.escape(event["source_file"])
return event
- report_data[phase]['errors'] = []
- report_data[phase]['warnings'] = []
+ report_data[phase]["errors"] = []
+ report_data[phase]["warnings"] = []
for error in errors:
- report_data[phase]['errors'].append(clean_log_event(error))
+ report_data[phase]["errors"].append(clean_log_event(error))
for warning in warnings:
- report_data[phase]['warnings'].append(
- clean_log_event(warning))
+ report_data[phase]["warnings"].append(clean_log_event(warning))
- if phase == 'update':
- report_data[phase]['revision'] = self.revision
+ if phase == "update":
+ report_data[phase]["revision"] = self.revision
# Write the report.
report_name = phase.capitalize() + ".xml"
if self.multiple_packages:
- report_file_name = package['name'] + "_" + report_name
+ report_file_name = package["name"] + "_" + report_name
else:
report_file_name = report_name
phase_report = os.path.join(directory_name, report_file_name)
- with codecs.open(phase_report, 'w', 'utf-8') as f:
+ with codecs.open(phase_report, "w", "utf-8") as f:
env = spack.tengine.make_environment()
- if phase != 'update':
+ if phase != "update":
# Update.xml stores site information differently
# than the rest of the CTest XML files.
- site_template = posixpath.join(self.template_dir, 'Site.xml')
+ site_template = posixpath.join(self.template_dir, "Site.xml")
t = env.get_template(site_template)
f.write(t.render(report_data))
@@ -236,16 +229,16 @@ class CDash(Reporter):
# than one package. When we're only reporting on a single package we
# do not explicitly include the package's name in the CDash build name.
num_packages = 0
- for spec in input_data['specs']:
+ for spec in input_data["specs"]:
# Do not generate reports for packages that were installed
# from the binary cache.
- spec['packages'] = [
- x for x in spec['packages']
- if 'installed_from_binary_cache' not in x or
- not x['installed_from_binary_cache']
+ spec["packages"] = [
+ x
+ for x in spec["packages"]
+ if "installed_from_binary_cache" not in x or not x["installed_from_binary_cache"]
]
- for package in spec['packages']:
- if 'stdout' in package:
+ for package in spec["packages"]:
+ if "stdout" in package:
num_packages += 1
if num_packages > 1:
self.multiple_packages = True
@@ -254,102 +247,96 @@ class CDash(Reporter):
break
# Generate reports for each package in each spec.
- for spec in input_data['specs']:
+ for spec in input_data["specs"]:
duration = 0
- if 'time' in spec:
- duration = int(spec['time'])
- for package in spec['packages']:
- self.build_report_for_package(
- directory_name, package, duration)
+ if "time" in spec:
+ duration = int(spec["time"])
+ for package in spec["packages"]:
+ self.build_report_for_package(directory_name, package, duration)
self.finalize_report()
def test_report_for_package(self, directory_name, package, duration):
- if 'stdout' not in package:
+ if "stdout" not in package:
# Skip reporting on packages that did not generate any output.
return
- self.current_package_name = package['name']
- self.buildname = "{0} - {1}".format(
- self.base_buildname, package['name'])
+ self.current_package_name = package["name"]
+ self.buildname = "{0} - {1}".format(self.base_buildname, package["name"])
report_data = self.initialize_report(directory_name)
- for phase in ('test', 'update'):
+ for phase in ("test", "update"):
report_data[phase] = {}
- report_data[phase]['loglines'] = []
- report_data[phase]['status'] = 0
- report_data[phase]['endtime'] = self.endtime
+ report_data[phase]["loglines"] = []
+ report_data[phase]["status"] = 0
+ report_data[phase]["endtime"] = self.endtime
# Track the phases we perform so we know what reports to create.
# We always report the update step because this is how we tell CDash
# what revision of Spack we are using.
- phases_encountered = ['test', 'update']
+ phases_encountered = ["test", "update"]
# Generate a report for this package.
# The first line just says "Testing package name-hash"
- report_data['test']['loglines'].append(
- text_type("{0} output for {1}:".format(
- 'test', package['name'])))
- for line in package['stdout'].splitlines()[1:]:
- report_data['test']['loglines'].append(
- xml.sax.saxutils.escape(line))
+ report_data["test"]["loglines"].append(
+ text_type("{0} output for {1}:".format("test", package["name"]))
+ )
+ for line in package["stdout"].splitlines()[1:]:
+ report_data["test"]["loglines"].append(xml.sax.saxutils.escape(line))
self.starttime = self.endtime - duration
for phase in phases_encountered:
- report_data[phase]['starttime'] = self.starttime
- report_data[phase]['log'] = \
- '\n'.join(report_data[phase]['loglines'])
- errors, warnings = parse_log_events(report_data[phase]['loglines'])
+ report_data[phase]["starttime"] = self.starttime
+ report_data[phase]["log"] = "\n".join(report_data[phase]["loglines"])
+ errors, warnings = parse_log_events(report_data[phase]["loglines"])
# Cap the number of errors and warnings at 50 each.
errors = errors[0:49]
warnings = warnings[0:49]
- if phase == 'test':
+ if phase == "test":
# Convert log output from ASCII to Unicode and escape for XML.
def clean_log_event(event):
event = vars(event)
- event['text'] = xml.sax.saxutils.escape(event['text'])
- event['pre_context'] = xml.sax.saxutils.escape(
- '\n'.join(event['pre_context']))
- event['post_context'] = xml.sax.saxutils.escape(
- '\n'.join(event['post_context']))
+ event["text"] = xml.sax.saxutils.escape(event["text"])
+ event["pre_context"] = xml.sax.saxutils.escape("\n".join(event["pre_context"]))
+ event["post_context"] = xml.sax.saxutils.escape(
+ "\n".join(event["post_context"])
+ )
# source_file and source_line_no are either strings or
# the tuple (None,). Distinguish between these two cases.
- if event['source_file'][0] is None:
- event['source_file'] = ''
- event['source_line_no'] = ''
+ if event["source_file"][0] is None:
+ event["source_file"] = ""
+ event["source_line_no"] = ""
else:
- event['source_file'] = xml.sax.saxutils.escape(
- event['source_file'])
+ event["source_file"] = xml.sax.saxutils.escape(event["source_file"])
return event
# Convert errors to warnings if the package reported success.
- if package['result'] == 'success':
+ if package["result"] == "success":
warnings = errors + warnings
errors = []
- report_data[phase]['errors'] = []
- report_data[phase]['warnings'] = []
+ report_data[phase]["errors"] = []
+ report_data[phase]["warnings"] = []
for error in errors:
- report_data[phase]['errors'].append(clean_log_event(error))
+ report_data[phase]["errors"].append(clean_log_event(error))
for warning in warnings:
- report_data[phase]['warnings'].append(
- clean_log_event(warning))
+ report_data[phase]["warnings"].append(clean_log_event(warning))
- if phase == 'update':
- report_data[phase]['revision'] = self.revision
+ if phase == "update":
+ report_data[phase]["revision"] = self.revision
# Write the report.
report_name = phase.capitalize() + ".xml"
- report_file_name = package['name'] + "_" + report_name
+ report_file_name = package["name"] + "_" + report_name
phase_report = os.path.join(directory_name, report_file_name)
- with codecs.open(phase_report, 'w', 'utf-8') as f:
+ with codecs.open(phase_report, "w", "utf-8") as f:
env = spack.tengine.make_environment()
- if phase != 'update':
+ if phase != "update":
# Update.xml stores site information differently
# than the rest of the CTest XML files.
- site_template = posixpath.join(self.template_dir, 'Site.xml')
+ site_template = posixpath.join(self.template_dir, "Site.xml")
t = env.get_template(site_template)
f.write(t.render(report_data))
@@ -360,29 +347,28 @@ class CDash(Reporter):
def test_report(self, directory_name, input_data):
# Generate reports for each package in each spec.
- for spec in input_data['specs']:
+ for spec in input_data["specs"]:
duration = 0
- if 'time' in spec:
- duration = int(spec['time'])
- for package in spec['packages']:
- self.test_report_for_package(
- directory_name, package, duration)
+ if "time" in spec:
+ duration = int(spec["time"])
+ for package in spec["packages"]:
+ self.test_report_for_package(directory_name, package, duration)
self.finalize_report()
def concretization_report(self, directory_name, msg):
self.buildname = self.base_buildname
report_data = self.initialize_report(directory_name)
- report_data['update'] = {}
- report_data['update']['starttime'] = self.endtime
- report_data['update']['endtime'] = self.endtime
- report_data['update']['revision'] = self.revision
- report_data['update']['log'] = msg
+ report_data["update"] = {}
+ report_data["update"]["starttime"] = self.endtime
+ report_data["update"]["endtime"] = self.endtime
+ report_data["update"]["revision"] = self.revision
+ report_data["update"]["log"] = msg
env = spack.tengine.make_environment()
- update_template = posixpath.join(self.template_dir, 'Update.xml')
+ update_template = posixpath.join(self.template_dir, "Update.xml")
t = env.get_template(update_template)
- output_filename = os.path.join(directory_name, 'Update.xml')
- with open(output_filename, 'w') as f:
+ output_filename = os.path.join(directory_name, "Update.xml")
+ with open(output_filename, "w") as f:
f.write(t.render(report_data))
# We don't have a current package when reporting on concretization
# errors so refer to this report with the base buildname instead.
@@ -395,11 +381,11 @@ class CDash(Reporter):
if not os.path.exists(directory_name):
os.mkdir(directory_name)
report_data = {}
- report_data['buildname'] = self.buildname
- report_data['buildstamp'] = self.buildstamp
- report_data['install_command'] = self.install_command
- report_data['osname'] = self.osname
- report_data['site'] = self.site
+ report_data["buildname"] = self.buildname
+ report_data["buildstamp"] = self.buildstamp
+ report_data["install_command"] = self.install_command
+ report_data["osname"] = self.osname
+ report_data["site"] = self.site
return report_data
def upload(self, filename):
@@ -410,30 +396,29 @@ class CDash(Reporter):
md5sum = checksum(hashlib.md5, filename, block_size=8192)
opener = build_opener(HTTPHandler)
- with open(filename, 'rb') as f:
+ with open(filename, "rb") as f:
params_dict = {
- 'build': self.buildname,
- 'site': self.site,
- 'stamp': self.buildstamp,
- 'MD5': md5sum,
+ "build": self.buildname,
+ "site": self.site,
+ "stamp": self.buildstamp,
+ "MD5": md5sum,
}
encoded_params = urlencode(params_dict)
url = "{0}&{1}".format(self.cdash_upload_url, encoded_params)
request = Request(url, data=f)
- request.add_header('Content-Type', 'text/xml')
- request.add_header('Content-Length', os.path.getsize(filename))
+ request.add_header("Content-Type", "text/xml")
+ request.add_header("Content-Length", os.path.getsize(filename))
if self.authtoken:
- request.add_header('Authorization',
- 'Bearer {0}'.format(self.authtoken))
+ request.add_header("Authorization", "Bearer {0}".format(self.authtoken))
try:
# By default, urllib2 only supports GET and POST.
# CDash expects this file to be uploaded via PUT.
- request.get_method = lambda: 'PUT'
+ request.get_method = lambda: "PUT"
response = opener.open(request)
if self.current_package_name not in self.buildIds:
resp_value = response.read()
if isinstance(resp_value, bytes):
- resp_value = resp_value.decode('utf-8')
+ resp_value = resp_value.decode("utf-8")
match = self.buildid_regexp.search(resp_value)
if match:
buildid = match.group(1)
@@ -448,7 +433,7 @@ class CDash(Reporter):
# Construct and display a helpful link if CDash responded with
# a buildId.
build_url = self.cdash_upload_url
- build_url = build_url[0:build_url.find("submit.php")]
+ build_url = build_url[0 : build_url.find("submit.php")]
build_url += "buildSummary.php?buildid={0}".format(buildid)
print("{0}: {1}".format(package_name, build_url))
if not self.success:
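As a side note, the PUT-over-urllib pattern used by upload() can be exercised on its own. This is a hedged sketch with a made-up submit URL, parameters, and report file, written against the Python 3 stdlib rather than the reporter's actual imports:

import os
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener

upload_url = "https://cdash.example.com/submit.php?project=Demo"  # assumption
filename = "Build.xml"                                            # assumption

params = urlencode({"build": "demo-build", "site": "demo-site"})
with open(filename, "rb") as f:
    request = Request("{0}&{1}".format(upload_url, params), data=f)
    request.add_header("Content-Type", "text/xml")
    request.add_header("Content-Length", os.path.getsize(filename))
    # urllib only issues GET/POST by default; CDash expects a PUT.
    request.get_method = lambda: "PUT"
    response = build_opener(HTTPHandler).open(request)
    print(response.getcode())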
diff --git a/lib/spack/spack/reporters/junit.py b/lib/spack/spack/reporters/junit.py
index fa19bdd2bd..f845974d9d 100644
--- a/lib/spack/spack/reporters/junit.py
+++ b/lib/spack/spack/reporters/junit.py
@@ -10,7 +10,7 @@ import spack.fetch_strategy
import spack.package_base
from spack.reporter import Reporter
-__all__ = ['JUnit']
+__all__ = ["JUnit"]
class JUnit(Reporter):
@@ -20,11 +20,11 @@ class JUnit(Reporter):
Reporter.__init__(self, args)
# Posixpath is used here to support the underlying template engine
# Jinja2, which expects `/` path separators
- self.template_file = posixpath.join('reports', 'junit.xml')
+ self.template_file = posixpath.join("reports", "junit.xml")
def build_report(self, filename, report_data):
# Write the report
- with open(filename, 'w') as f:
+ with open(filename, "w") as f:
env = spack.tengine.make_environment()
t = env.get_template(self.template_file)
f.write(t.render(report_data))
diff --git a/lib/spack/spack/rewiring.py b/lib/spack/spack/rewiring.py
index c91485219b..8a2dcad035 100644
--- a/lib/spack/spack/rewiring.py
+++ b/lib/spack/spack/rewiring.py
@@ -27,7 +27,7 @@ def _relocate_spliced_links(links, orig_prefix, new_prefix):
because it expects the new directory structure to be in place."""
for link in links:
link_target = os.readlink(os.path.join(orig_prefix, link))
- link_target = re.sub('^' + orig_prefix, new_prefix, link_target)
+ link_target = re.sub("^" + orig_prefix, new_prefix, link_target)
new_link_path = os.path.join(new_prefix, link)
os.unlink(new_link_path)
symlink(link_target, new_link_path)
@@ -37,13 +37,11 @@ def rewire(spliced_spec):
"""Given a spliced spec, this function conducts all the rewiring on all
nodes in the DAG of that spec."""
assert spliced_spec.spliced
- for spec in spliced_spec.traverse(order='post', root=True):
+ for spec in spliced_spec.traverse(order="post", root=True):
if not spec.build_spec.installed:
# TODO: May want to change this at least for the root spec...
# spec.build_spec.package.do_install(force=True)
- raise PackageNotInstalledError(spliced_spec,
- spec.build_spec,
- spec)
+ raise PackageNotInstalledError(spliced_spec, spec.build_spec, spec)
if spec.build_spec is not spec and not spec.installed:
explicit = spec is spliced_spec
rewire_node(spec, explicit)
@@ -55,8 +53,7 @@ def rewire_node(spec, explicit):
the splice. The resulting package is then 'installed.'"""
tempdir = tempfile.mkdtemp()
# copy anything installed to a temporary directory
- shutil.copytree(spec.build_spec.prefix,
- os.path.join(tempdir, spec.dag_hash()))
+ shutil.copytree(spec.build_spec.prefix, os.path.join(tempdir, spec.dag_hash()))
spack.hooks.pre_install(spec)
# compute prefix-to-prefix for every node from the build spec to the spliced
@@ -68,42 +65,50 @@ def rewire_node(spec, explicit):
manifest = bindist.get_buildfile_manifest(spec.build_spec)
platform = spack.platforms.by_name(spec.platform)
- text_to_relocate = [os.path.join(tempdir, spec.dag_hash(), rel_path)
- for rel_path in manifest.get('text_to_relocate', [])]
+ text_to_relocate = [
+ os.path.join(tempdir, spec.dag_hash(), rel_path)
+ for rel_path in manifest.get("text_to_relocate", [])
+ ]
if text_to_relocate:
- relocate.relocate_text(files=text_to_relocate,
- prefixes=prefix_to_prefix)
+ relocate.relocate_text(files=text_to_relocate, prefixes=prefix_to_prefix)
- bins_to_relocate = [os.path.join(tempdir, spec.dag_hash(), rel_path)
- for rel_path in manifest.get('binary_to_relocate', [])]
+ bins_to_relocate = [
+ os.path.join(tempdir, spec.dag_hash(), rel_path)
+ for rel_path in manifest.get("binary_to_relocate", [])
+ ]
if bins_to_relocate:
- if 'macho' in platform.binary_formats:
- relocate.relocate_macho_binaries(bins_to_relocate,
- str(spack.store.layout.root),
- str(spack.store.layout.root),
- prefix_to_prefix,
- False,
- spec.build_spec.prefix,
- spec.prefix)
- if 'elf' in platform.binary_formats:
- relocate.relocate_elf_binaries(bins_to_relocate,
- str(spack.store.layout.root),
- str(spack.store.layout.root),
- prefix_to_prefix,
- False,
- spec.build_spec.prefix,
- spec.prefix)
- relocate.relocate_text_bin(binaries=bins_to_relocate,
- prefixes=prefix_to_prefix)
+ if "macho" in platform.binary_formats:
+ relocate.relocate_macho_binaries(
+ bins_to_relocate,
+ str(spack.store.layout.root),
+ str(spack.store.layout.root),
+ prefix_to_prefix,
+ False,
+ spec.build_spec.prefix,
+ spec.prefix,
+ )
+ if "elf" in platform.binary_formats:
+ relocate.relocate_elf_binaries(
+ bins_to_relocate,
+ str(spack.store.layout.root),
+ str(spack.store.layout.root),
+ prefix_to_prefix,
+ False,
+ spec.build_spec.prefix,
+ spec.prefix,
+ )
+ relocate.relocate_text_bin(binaries=bins_to_relocate, prefixes=prefix_to_prefix)
# Copy package into place, except for spec.json (because spec.json
# describes the old spec and not the new spliced spec).
- shutil.copytree(os.path.join(tempdir, spec.dag_hash()), spec.prefix,
- ignore=shutil.ignore_patterns('spec.json',
- 'install_manifest.json'))
- if manifest.get('link_to_relocate'):
- _relocate_spliced_links(manifest.get('link_to_relocate'),
- spec.build_spec.prefix,
- spec.prefix)
+ shutil.copytree(
+ os.path.join(tempdir, spec.dag_hash()),
+ spec.prefix,
+ ignore=shutil.ignore_patterns("spec.json", "install_manifest.json"),
+ )
+ if manifest.get("link_to_relocate"):
+ _relocate_spliced_links(
+ manifest.get("link_to_relocate"), spec.build_spec.prefix, spec.prefix
+ )
shutil.rmtree(tempdir)
# Above, we did not copy spec.json: instead, here we write the new
# (spliced) spec into spec.json; without this, Database.add would fail on
@@ -119,14 +124,19 @@ def rewire_node(spec, explicit):
class RewireError(spack.error.SpackError):
"""Raised when something goes wrong with rewiring."""
+
def __init__(self, message, long_msg=None):
super(RewireError, self).__init__(message, long_msg)
class PackageNotInstalledError(RewireError):
"""Raised when the build_spec for a splice was not installed."""
+
def __init__(self, spliced_spec, build_spec, dep):
super(PackageNotInstalledError, self).__init__(
"""Rewire of {0}
failed due to missing install of build spec {1}
- for spec {2}""".format(spliced_spec, build_spec, dep))
+ for spec {2}""".format(
+ spliced_spec, build_spec, dep
+ )
+ )
diff --git a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py
index 904c7adfd2..c867642603 100644
--- a/lib/spack/spack/s3_handler.py
+++ b/lib/spack/spack/s3_handler.py
@@ -51,14 +51,14 @@ def _s3_open(url):
bucket = parsed.netloc
key = parsed.path
- if key.startswith('/'):
+ if key.startswith("/"):
key = key[1:]
obj = s3.get_object(Bucket=bucket, Key=key)
# NOTE(opadron): Apply workaround here (see above)
- stream = WrapStream(obj['Body'])
- headers = obj['ResponseMetadata']['HTTPHeaders']
+ stream = WrapStream(obj["Body"])
+ headers = obj["ResponseMetadata"]["HTTPHeaders"]
return url, headers, stream
@@ -67,21 +67,20 @@ class UrllibS3Handler(urllib_request.HTTPSHandler):
def s3_open(self, req):
orig_url = req.get_full_url()
from botocore.exceptions import ClientError # type: ignore[import]
+
try:
url, headers, stream = _s3_open(orig_url)
return urllib_response.addinfourl(stream, headers, url)
except ClientError as err:
# if no such [KEY], but [KEY]/index.html exists,
# return that instead.
- if err.response['Error']['Code'] == 'NoSuchKey':
+ if err.response["Error"]["Code"] == "NoSuchKey":
try:
- _, headers, stream = _s3_open(
- url_util.join(orig_url, 'index.html'))
- return urllib_response.addinfourl(
- stream, headers, orig_url)
+ _, headers, stream = _s3_open(url_util.join(orig_url, "index.html"))
+ return urllib_response.addinfourl(stream, headers, orig_url)
except ClientError as err2:
- if err.response['Error']['Code'] == 'NoSuchKey':
+ if err.response["Error"]["Code"] == "NoSuchKey":
# raise original error
raise six.raise_from(urllib_error.URLError(err), err)
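For context on why defining s3_open is enough: urllib dispatches each request to a handler method named "<scheme>_open" for the URL's scheme, so installing a handler via build_opener teaches the opener a new scheme. A toy sketch with a made-up "echo" scheme (not Spack code):

import io
from urllib.request import BaseHandler, build_opener
from urllib.response import addinfourl

class EchoHandler(BaseHandler):
    # For a request whose scheme is "echo", OpenerDirector calls echo_open().
    def echo_open(self, req):
        body = io.BytesIO(b"echoing " + req.get_full_url().encode())
        return addinfourl(body, headers={}, url=req.get_full_url())

opener = build_opener(EchoHandler())
print(opener.open("echo://bucket/key").read())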
diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py
index 45514d4a7c..54f645fd5c 100644
--- a/lib/spack/spack/schema/__init__.py
+++ b/lib/spack/spack/schema/__init__.py
@@ -22,6 +22,7 @@ def _make_validator():
def _validate_spec(validator, is_spec, instance, schema):
"""Check if the attributes on instance are valid specs."""
import jsonschema
+
if not validator.is_type(instance, "object"):
return
@@ -34,19 +35,16 @@ def _make_validator():
)
def _deprecated_properties(validator, deprecated, instance, schema):
- if not (validator.is_type(instance, "object") or
- validator.is_type(instance, "array")):
+ if not (validator.is_type(instance, "object") or validator.is_type(instance, "array")):
return
# Get a list of the deprecated properties, return if there is none
- deprecated_properties = [
- x for x in instance if x in deprecated['properties']
- ]
+ deprecated_properties = [x for x in instance if x in deprecated["properties"]]
if not deprecated_properties:
return
# Retrieve the template message
- msg_str_or_func = deprecated['message']
+ msg_str_or_func = deprecated["message"]
if isinstance(msg_str_or_func, six.string_types):
msg = msg_str_or_func.format(properties=deprecated_properties)
else:
@@ -54,18 +52,17 @@ def _make_validator():
if msg is None:
return
- is_error = deprecated['error']
+ is_error = deprecated["error"]
if not is_error:
warnings.warn(msg)
else:
import jsonschema
+
yield jsonschema.ValidationError(msg)
return jsonschema.validators.extend(
- jsonschema.Draft4Validator, {
- "validate_spec": _validate_spec,
- "deprecatedProperties": _deprecated_properties
- }
+ jsonschema.Draft4Validator,
+ {"validate_spec": _validate_spec, "deprecatedProperties": _deprecated_properties},
)
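A brief usage sketch of the extension mechanism shown above: jsonschema.validators.extend attaches a custom keyword to a base validator class, and the per-keyword callable yields ValidationError objects (or warns) when the keyword applies. The schema and instance below are made-up examples; only the wiring mirrors the code above.

import warnings

import jsonschema

def deprecated_properties(validator, deprecated, instance, schema):
    if not validator.is_type(instance, "object"):
        return
    hits = [key for key in instance if key in deprecated["properties"]]
    if not hits:
        return
    msg = deprecated["message"].format(properties=hits)
    if deprecated["error"]:
        yield jsonschema.ValidationError(msg)
    else:
        warnings.warn(msg)

Validator = jsonschema.validators.extend(
    jsonschema.Draft4Validator, {"deprecatedProperties": deprecated_properties}
)

schema = {
    "type": "object",
    "deprecatedProperties": {
        "properties": ["module_roots"],
        "message": "config:{properties} is deprecated and is ignored",
        "error": False,
    },
}

Validator(schema).validate({"module_roots": "/opt/modules"})  # emits a warning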
diff --git a/lib/spack/spack/schema/bootstrap.py b/lib/spack/spack/schema/bootstrap.py
index ee5cf98a9a..c31942c81d 100644
--- a/lib/spack/spack/schema/bootstrap.py
+++ b/lib/spack/spack/schema/bootstrap.py
@@ -6,40 +6,29 @@
#: Schema of a single source
_source_schema = {
- 'type': 'object',
- 'properties': {
- 'name': {'type': 'string'},
- 'metadata': {'type': 'string'}
- },
- 'additionalProperties': False,
- 'required': ['name', 'metadata']
+ "type": "object",
+ "properties": {"name": {"type": "string"}, "metadata": {"type": "string"}},
+ "additionalProperties": False,
+ "required": ["name", "metadata"],
}
properties = {
- 'bootstrap': {
- 'type': 'object',
- 'properties': {
- 'enable': {'type': 'boolean'},
- 'root': {
- 'type': 'string'
- },
- 'sources': {
- 'type': 'array',
- 'items': _source_schema
- },
- 'trusted': {
- 'type': 'object',
- 'patternProperties': {r'\w[\w-]*': {'type': 'boolean'}}
- }
- }
+ "bootstrap": {
+ "type": "object",
+ "properties": {
+ "enable": {"type": "boolean"},
+ "root": {"type": "string"},
+ "sources": {"type": "array", "items": _source_schema},
+ "trusted": {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "boolean"}}},
+ },
}
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack bootstrap configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack bootstrap configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
diff --git a/lib/spack/spack/schema/buildcache_spec.py b/lib/spack/spack/schema/buildcache_spec.py
index 8ae1112381..fdf4b423af 100644
--- a/lib/spack/spack/schema/buildcache_spec.py
+++ b/lib/spack/spack/schema/buildcache_spec.py
@@ -11,32 +11,32 @@
import spack.schema.spec
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack buildcache specfile schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'buildinfo': {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['relative_prefix'],
- 'properties': {
- 'relative_prefix': {'type': 'string'},
- 'relative_rpaths': {'type': 'boolean'},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack buildcache specfile schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "buildinfo": {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["relative_prefix"],
+ "properties": {
+ "relative_prefix": {"type": "string"},
+ "relative_rpaths": {"type": "boolean"},
},
},
- 'spec': {
- 'type': 'object',
- 'additionalProperties': True,
- 'items': spack.schema.spec.properties,
+ "spec": {
+ "type": "object",
+ "additionalProperties": True,
+ "items": spack.schema.spec.properties,
},
- 'binary_cache_checksum': {
- 'type': 'object',
- 'properties': {
- 'hash_algorithm': {'type': 'string'},
- 'hash': {'type': 'string'},
+ "binary_cache_checksum": {
+ "type": "object",
+ "properties": {
+ "hash_algorithm": {"type": "string"},
+ "hash": {"type": "string"},
},
},
- 'buildcache_layout_version': {'type': 'number'}
+ "buildcache_layout_version": {"type": "number"},
},
}
diff --git a/lib/spack/spack/schema/cdash.py b/lib/spack/spack/schema/cdash.py
index 0b6b1982bc..2c6c9283b5 100644
--- a/lib/spack/spack/schema/cdash.py
+++ b/lib/spack/spack/schema/cdash.py
@@ -12,15 +12,15 @@
#: Properties for inclusion in other schemas
properties = {
- 'cdash': {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['build-group', 'url', 'project', 'site'],
- 'patternProperties': {
- r'build-group': {'type': 'string'},
- r'url': {'type': 'string'},
- r'project': {'type': 'string'},
- r'site': {'type': 'string'},
+ "cdash": {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["build-group", "url", "project", "site"],
+ "patternProperties": {
+ r"build-group": {"type": "string"},
+ r"url": {"type": "string"},
+ r"project": {"type": "string"},
+ r"site": {"type": "string"},
},
},
}
@@ -28,9 +28,9 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack cdash configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack cdash configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
diff --git a/lib/spack/spack/schema/compilers.py b/lib/spack/spack/schema/compilers.py
index 0acbce75d8..e9c500eff9 100644
--- a/lib/spack/spack/schema/compilers.py
+++ b/lib/spack/spack/schema/compilers.py
@@ -12,81 +12,74 @@ import spack.schema.environment
#: Properties for inclusion in other schemas
properties = {
- 'compilers': {
- 'type': 'array',
- 'items': [{
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'compiler': {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': [
- 'paths', 'spec', 'modules', 'operating_system'],
- 'properties': {
- 'paths': {
- 'type': 'object',
- 'required': ['cc', 'cxx', 'f77', 'fc'],
- 'additionalProperties': False,
- 'properties': {
- 'cc': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'cxx': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'f77': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'fc': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]}}},
- 'flags': {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'cflags': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'cxxflags': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'fflags': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'cppflags': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'ldflags': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'ldlibs': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]}}},
- 'spec': {'type': 'string'},
- 'operating_system': {'type': 'string'},
- 'target': {'type': 'string'},
- 'alias': {'anyOf': [{'type': 'string'},
- {'type': 'null'}]},
- 'modules': {'anyOf': [{'type': 'string'},
- {'type': 'null'},
- {'type': 'array'}]},
- 'implicit_rpaths': {
- 'anyOf': [
- {'type': 'array',
- 'items': {'type': 'string'}},
- {'type': 'boolean'}
- ]
+ "compilers": {
+ "type": "array",
+ "items": [
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "compiler": {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["paths", "spec", "modules", "operating_system"],
+ "properties": {
+ "paths": {
+ "type": "object",
+ "required": ["cc", "cxx", "f77", "fc"],
+ "additionalProperties": False,
+ "properties": {
+ "cc": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "cxx": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "f77": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "fc": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ },
+ },
+ "flags": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "cflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "cxxflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "fflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "cppflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "ldflags": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "ldlibs": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ },
+ },
+ "spec": {"type": "string"},
+ "operating_system": {"type": "string"},
+ "target": {"type": "string"},
+ "alias": {"anyOf": [{"type": "string"}, {"type": "null"}]},
+ "modules": {
+ "anyOf": [{"type": "string"}, {"type": "null"}, {"type": "array"}]
+ },
+ "implicit_rpaths": {
+ "anyOf": [
+ {"type": "array", "items": {"type": "string"}},
+ {"type": "boolean"},
+ ]
+ },
+ "environment": spack.schema.environment.definition,
+ "extra_rpaths": {
+ "type": "array",
+ "default": [],
+ "items": {"type": "string"},
+ },
},
- 'environment': spack.schema.environment.definition,
- 'extra_rpaths': {
- 'type': 'array',
- 'default': [],
- 'items': {'type': 'string'}
- }
}
- }
+ },
}
- }]
+ ],
}
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack compiler configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack compiler configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
diff --git a/lib/spack/spack/schema/concretizer.py b/lib/spack/spack/schema/concretizer.py
index 63a1692411..e628519d56 100644
--- a/lib/spack/spack/schema/concretizer.py
+++ b/lib/spack/spack/schema/concretizer.py
@@ -10,37 +10,31 @@
"""
properties = {
- 'concretizer': {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'reuse': {'type': 'boolean'},
- 'targets': {
- 'type': 'object',
- 'properties': {
- 'host_compatible': {'type': 'boolean'},
- 'granularity': {
- 'type': 'string',
- 'enum': ['generic', 'microarchitectures']
- }
- }
+ "concretizer": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "reuse": {"type": "boolean"},
+ "targets": {
+ "type": "object",
+ "properties": {
+ "host_compatible": {"type": "boolean"},
+ "granularity": {"type": "string", "enum": ["generic", "microarchitectures"]},
+ },
},
- 'unify': {
- 'oneOf': [
- {'type': 'boolean'},
- {'type': 'string', 'enum': ['when_possible']}
- ]
- }
- }
+ "unify": {
+ "oneOf": [{"type": "boolean"}, {"type": "string", "enum": ["when_possible"]}]
+ },
+ },
}
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack concretizer configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack concretizer configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
index 2de54a6179..4ed6e7fc2d 100644
--- a/lib/spack/spack/schema/config.py
+++ b/lib/spack/spack/schema/config.py
@@ -16,100 +16,81 @@ import spack.schema.projections
#: Properties for inclusion in other schemas
properties = {
- 'config': {
- 'type': 'object',
- 'default': {},
- 'properties': {
- 'shared_linking': {
- 'type': 'string',
- 'enum': ['rpath', 'runpath']
- },
- 'install_tree': {
- 'anyOf': [
+ "config": {
+ "type": "object",
+ "default": {},
+ "properties": {
+ "shared_linking": {"type": "string", "enum": ["rpath", "runpath"]},
+ "install_tree": {
+ "anyOf": [
{
- 'type': 'object',
- 'properties': union_dicts(
- {'root': {'type': 'string'}},
- {'padded_length': {'oneOf': [
- {'type': 'integer', 'minimum': 0},
- {'type': 'boolean'}]}},
+ "type": "object",
+ "properties": union_dicts(
+ {"root": {"type": "string"}},
+ {
+ "padded_length": {
+ "oneOf": [
+ {"type": "integer", "minimum": 0},
+ {"type": "boolean"},
+ ]
+ }
+ },
spack.schema.projections.properties,
),
},
- {'type': 'string'} # deprecated
+ {"type": "string"}, # deprecated
],
},
- 'install_hash_length': {'type': 'integer', 'minimum': 1},
- 'install_path_scheme': {'type': 'string'}, # deprecated
- 'build_stage': {
- 'oneOf': [
- {'type': 'string'},
- {'type': 'array',
- 'items': {'type': 'string'}}],
- },
- 'test_stage': {'type': 'string'},
- 'extensions': {
- 'type': 'array',
- 'items': {'type': 'string'}
+ "install_hash_length": {"type": "integer", "minimum": 1},
+ "install_path_scheme": {"type": "string"}, # deprecated
+ "build_stage": {
+ "oneOf": [{"type": "string"}, {"type": "array", "items": {"type": "string"}}],
},
- 'template_dirs': {
- 'type': 'array',
- 'items': {'type': 'string'}
+ "test_stage": {"type": "string"},
+ "extensions": {"type": "array", "items": {"type": "string"}},
+ "template_dirs": {"type": "array", "items": {"type": "string"}},
+ "license_dir": {"type": "string"},
+ "source_cache": {"type": "string"},
+ "misc_cache": {"type": "string"},
+ "connect_timeout": {"type": "integer", "minimum": 0},
+ "verify_ssl": {"type": "boolean"},
+ "suppress_gpg_warnings": {"type": "boolean"},
+ "install_missing_compilers": {"type": "boolean"},
+ "debug": {"type": "boolean"},
+ "checksum": {"type": "boolean"},
+ "deprecated": {"type": "boolean"},
+ "locks": {"type": "boolean"},
+ "dirty": {"type": "boolean"},
+ "build_language": {"type": "string"},
+ "build_jobs": {"type": "integer", "minimum": 1},
+ "ccache": {"type": "boolean"},
+ "concretizer": {"type": "string", "enum": ["original", "clingo"]},
+ "db_lock_timeout": {"type": "integer", "minimum": 1},
+ "package_lock_timeout": {
+ "anyOf": [{"type": "integer", "minimum": 1}, {"type": "null"}],
},
- 'license_dir': {'type': 'string'},
- 'source_cache': {'type': 'string'},
- 'misc_cache': {'type': 'string'},
- 'connect_timeout': {'type': 'integer', 'minimum': 0},
- 'verify_ssl': {'type': 'boolean'},
- 'suppress_gpg_warnings': {'type': 'boolean'},
- 'install_missing_compilers': {'type': 'boolean'},
- 'debug': {'type': 'boolean'},
- 'checksum': {'type': 'boolean'},
- 'deprecated': {'type': 'boolean'},
- 'locks': {'type': 'boolean'},
- 'dirty': {'type': 'boolean'},
- 'build_language': {'type': 'string'},
- 'build_jobs': {'type': 'integer', 'minimum': 1},
- 'ccache': {'type': 'boolean'},
- 'concretizer': {
- 'type': 'string',
- 'enum': ['original', 'clingo']
- },
- 'db_lock_timeout': {'type': 'integer', 'minimum': 1},
- 'package_lock_timeout': {
- 'anyOf': [
- {'type': 'integer', 'minimum': 1},
- {'type': 'null'}
- ],
- },
- 'allow_sgid': {'type': 'boolean'},
- 'binary_index_root': {'type': 'string'},
- 'url_fetch_method': {
- 'type': 'string',
- 'enum': ['urllib', 'curl']
- },
- 'additional_external_search_paths': {
- 'type': 'array',
- 'items': {'type': 'string'}
- }
+ "allow_sgid": {"type": "boolean"},
+ "binary_index_root": {"type": "string"},
+ "url_fetch_method": {"type": "string", "enum": ["urllib", "curl"]},
+ "additional_external_search_paths": {"type": "array", "items": {"type": "string"}},
+ },
+ "deprecatedProperties": {
+ "properties": ["module_roots"],
+ "message": "config:module_roots has been replaced by "
+ "modules:[module set]:roots and is ignored",
+ "error": False,
},
- 'deprecatedProperties': {
- 'properties': ['module_roots'],
- 'message': 'config:module_roots has been replaced by '
- 'modules:[module set]:roots and is ignored',
- 'error': False
- }
},
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack core configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack core configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
@@ -130,32 +111,28 @@ def update(data):
# root replaces install_tree, projections replace install_path_scheme
changed = False
- install_tree = data.get('install_tree', None)
+ install_tree = data.get("install_tree", None)
if isinstance(install_tree, six.string_types):
# deprecated short-form install tree
# add value as `root` in updated install_tree
- data['install_tree'] = {'root': install_tree}
+ data["install_tree"] = {"root": install_tree}
changed = True
- install_path_scheme = data.pop('install_path_scheme', None)
+ install_path_scheme = data.pop("install_path_scheme", None)
if install_path_scheme:
- projections_data = {
- 'projections': {
- 'all': install_path_scheme
- }
- }
+ projections_data = {"projections": {"all": install_path_scheme}}
# update projections with install_scheme
# whether install_tree was updated or not
# we merge the yaml to ensure we don't invalidate other projections
- update_data = data.get('install_tree', {})
+ update_data = data.get("install_tree", {})
update_data = spack.config.merge_yaml(update_data, projections_data)
- data['install_tree'] = update_data
+ data["install_tree"] = update_data
changed = True
- use_curl = data.pop('use_curl', None)
+ use_curl = data.pop("use_curl", None)
if use_curl is not None:
- data['url_fetch_method'] = 'curl' if use_curl else 'urllib'
+ data["url_fetch_method"] = "curl" if use_curl else "urllib"
changed = True
return changed
diff --git a/lib/spack/spack/schema/container.py b/lib/spack/spack/schema/container.py
index 4befb07fd7..b05199f8bb 100644
--- a/lib/spack/spack/schema/container.py
+++ b/lib/spack/spack/schema/container.py
@@ -5,103 +5,90 @@
"""Schema for the 'container' subsection of Spack environments."""
_stages_from_dockerhub = {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'os': {'type': 'string'},
- 'spack': {'anyOf': [
- {'type': 'string'},
- {'type': 'object',
- 'additional_properties': False,
- 'properties': {
- 'url': {'type': 'string'},
- 'ref': {'type': 'string'},
- 'resolve_sha': {'type': 'boolean', 'default': False},
- 'verify': {'type': 'boolean', 'default': False}
- }}
- ]},
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "os": {"type": "string"},
+ "spack": {
+ "anyOf": [
+ {"type": "string"},
+ {
+ "type": "object",
+ "additional_properties": False,
+ "properties": {
+ "url": {"type": "string"},
+ "ref": {"type": "string"},
+ "resolve_sha": {"type": "boolean", "default": False},
+ "verify": {"type": "boolean", "default": False},
+ },
+ },
+ ]
+ },
},
- 'required': ['os', 'spack']
+ "required": ["os", "spack"],
}
_custom_stages = {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'build': {'type': 'string'},
- 'final': {'type': 'string'}
- },
- 'required': ['build', 'final']
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {"build": {"type": "string"}, "final": {"type": "string"}},
+ "required": ["build", "final"],
}
#: List of packages for the schema below
-_list_of_packages = {
- 'type': 'array',
- 'items': {
- 'type': 'string'
- }
-}
+_list_of_packages = {"type": "array", "items": {"type": "string"}}
#: Schema for the container attribute included in Spack environments
container_schema = {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
# The recipe formats that are currently supported by the command
- 'format': {
- 'type': 'string',
- 'enum': ['docker', 'singularity']
- },
+ "format": {"type": "string", "enum": ["docker", "singularity"]},
# Describes the base image to start from and the version
# of Spack to be used
- 'images': {'anyOf': [_stages_from_dockerhub, _custom_stages]},
+ "images": {"anyOf": [_stages_from_dockerhub, _custom_stages]},
# Whether or not to strip installed binaries
- 'strip': {
- 'type': 'boolean',
- 'default': True
- },
+ "strip": {"type": "boolean", "default": True},
# Additional system packages that are needed at runtime
- 'os_packages': {
- 'type': 'object',
- 'properties': {
- 'command': {'type': 'string', 'enum': ['apt', 'yum']},
- 'update': {'type': 'boolean'},
- 'build': _list_of_packages,
- 'final': _list_of_packages
+ "os_packages": {
+ "type": "object",
+ "properties": {
+ "command": {"type": "string", "enum": ["apt", "yum"]},
+ "update": {"type": "boolean"},
+ "build": _list_of_packages,
+ "final": _list_of_packages,
},
- 'additionalProperties': False
+ "additionalProperties": False,
},
# Add labels to the image
- 'labels': {
- 'type': 'object',
+ "labels": {
+ "type": "object",
},
# Add a custom extra section at the bottom of a stage
- 'extra_instructions': {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'build': {'type': 'string'},
- 'final': {'type': 'string'}
- }
+ "extra_instructions": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {"build": {"type": "string"}, "final": {"type": "string"}},
},
# Reserved for properties that are specific to each format
- 'singularity': {
- 'type': 'object',
- 'additionalProperties': False,
- 'default': {},
- 'properties': {
- 'runscript': {'type': 'string'},
- 'startscript': {'type': 'string'},
- 'test': {'type': 'string'},
- 'help': {'type': 'string'}
- }
+ "singularity": {
+ "type": "object",
+ "additionalProperties": False,
+ "default": {},
+ "properties": {
+ "runscript": {"type": "string"},
+ "startscript": {"type": "string"},
+ "test": {"type": "string"},
+ "help": {"type": "string"},
+ },
},
- 'docker': {
- 'type': 'object',
- 'additionalProperties': False,
- 'default': {},
- }
- }
+ "docker": {
+ "type": "object",
+ "additionalProperties": False,
+ "default": {},
+ },
+ },
}
-properties = {'container': container_schema}
+properties = {"container": container_schema}
diff --git a/lib/spack/spack/schema/cray_manifest.py b/lib/spack/spack/schema/cray_manifest.py
index 89619a6aaa..c0f57eef11 100644
--- a/lib/spack/spack/schema/cray_manifest.py
+++ b/lib/spack/spack/schema/cray_manifest.py
@@ -29,7 +29,7 @@ schema = {
# Older schemas did not have "cpe-version", just the
# schema version; in that case it was just called "version"
"version": {"type": "string", "minLength": 1},
- }
+ },
},
"compilers": {
"type": "array",
@@ -46,8 +46,8 @@ schema = {
"properties": {
"cc": {"type": "string", "minLength": 1},
"cxx": {"type": "string", "minLength": 1},
- "fc": {"type": "string", "minLength": 1}
- }
+ "fc": {"type": "string", "minLength": 1},
+ },
},
"arch": {
"type": "object",
@@ -55,23 +55,17 @@ schema = {
"additionalProperties": False,
"properties": {
"os": {"type": "string", "minLength": 1},
- "target": {"type": "string", "minLength": 1}
- }
- }
- }
- }
+ "target": {"type": "string", "minLength": 1},
+ },
+ },
+ },
+ },
},
"specs": {
"type": "array",
"items": {
"type": "object",
- "required": [
- "name",
- "version",
- "arch",
- "compiler",
- "prefix",
- "hash"],
+ "required": ["name", "version", "arch", "compiler", "prefix", "hash"],
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
@@ -87,11 +81,9 @@ schema = {
"type": "object",
"additionalProperties": False,
"required": ["name"],
- "properties": {
- "name": {"type": "string", "minLength": 1}
- }
- }
- }
+ "properties": {"name": {"type": "string", "minLength": 1}},
+ },
+ },
},
"compiler": {
"type": "object",
@@ -99,8 +91,8 @@ schema = {
"additionalProperties": False,
"properties": {
"name": {"type": "string", "minLength": 1},
- "version": {"type": "string", "minLength": 1}
- }
+ "version": {"type": "string", "minLength": 1},
+ },
},
"dependencies": {
"type": "object",
@@ -113,22 +105,20 @@ schema = {
"hash": {"type": "string", "minLength": 1},
"type": {
"type": "array",
- "items": {
- "type": "string", "minLength": 1}
- }
- }
+ "items": {"type": "string", "minLength": 1},
+ },
+ },
}
- }
+ },
},
- "prefix": {
- "type": "string", "minLength": 1},
+ "prefix": {"type": "string", "minLength": 1},
"rpm": {"type": "string", "minLength": 1},
"hash": {"type": "string", "minLength": 1},
"parameters": {
"type": "object",
- }
- }
- }
- }
- }
+ },
+ },
+ },
+ },
+ },
}
diff --git a/lib/spack/spack/schema/database_index.py b/lib/spack/spack/schema/database_index.py
index e5e11a765b..4b0d3202a5 100644
--- a/lib/spack/spack/schema/database_index.py
+++ b/lib/spack/spack/schema/database_index.py
@@ -14,45 +14,45 @@ import spack.schema.spec
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack spec schema',
- 'type': 'object',
- 'required': ['database'],
- 'additionalProperties': False,
- 'properties': {
- 'database': {
- 'type': 'object',
- 'required': ['installs', 'version'],
- 'additionalProperties': False,
- 'properties': {
- 'installs': {
- 'type': 'object',
- 'patternProperties': {
- r'^[\w\d]{32}$': {
- 'type': 'object',
- 'properties': {
- 'spec': spack.schema.spec.properties,
- 'path': {
- 'oneOf': [
- {'type': 'string'},
- {'type': 'null'},
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack spec schema",
+ "type": "object",
+ "required": ["database"],
+ "additionalProperties": False,
+ "properties": {
+ "database": {
+ "type": "object",
+ "required": ["installs", "version"],
+ "additionalProperties": False,
+ "properties": {
+ "installs": {
+ "type": "object",
+ "patternProperties": {
+ r"^[\w\d]{32}$": {
+ "type": "object",
+ "properties": {
+ "spec": spack.schema.spec.properties,
+ "path": {
+ "oneOf": [
+ {"type": "string"},
+ {"type": "null"},
],
},
- 'installed': {'type': 'boolean'},
- 'ref_count': {
- 'type': 'integer',
- 'minimum': 0,
+ "installed": {"type": "boolean"},
+ "ref_count": {
+ "type": "integer",
+ "minimum": 0,
+ },
+ "explicit": {"type": "boolean"},
+ "installation_time": {
+ "type": "number",
},
- 'explicit': {'type': 'boolean'},
- 'installation_time': {
- 'type': 'number',
- }
},
},
},
},
- 'version': {'type': 'string'},
- }
+ "version": {"type": "string"},
+ },
},
},
}
diff --git a/lib/spack/spack/schema/env.py b/lib/spack/spack/schema/env.py
index f338ac177d..21a738e6b8 100644
--- a/lib/spack/spack/schema/env.py
+++ b/lib/spack/spack/schema/env.py
@@ -24,162 +24,139 @@ def deprecate_concretization(instance, props):
if warned_about_concretization:
return None
# Deprecate `spack:concretization` in favor of `spack:concretizer:unify`.
- concretization_to_unify = {'together': 'true', 'separately': 'false'}
- concretization = instance['concretization']
+ concretization_to_unify = {"together": "true", "separately": "false"}
+ concretization = instance["concretization"]
unify = concretization_to_unify[concretization]
return (
- 'concretization:{} is deprecated and will be removed in Spack 0.19 in favor of '
- 'the new concretizer:unify:{} config option.'.format(concretization, unify)
+ "concretization:{} is deprecated and will be removed in Spack 0.19 in favor of "
+ "the new concretizer:unify:{} config option.".format(concretization, unify)
)
#: legal first keys in the schema
-keys = ('spack', 'env')
+keys = ("spack", "env")
spec_list_schema = {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'anyOf': [
- {'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'matrix': {
- 'type': 'array',
- 'items': {
- 'type': 'array',
- 'items': {
- 'type': 'string',
- }
- }
- },
- 'exclude': {
- 'type': 'array',
- 'items': {
- 'type': 'string'
- }
- }
- }},
- {'type': 'string'},
- {'type': 'null'}
+ "type": "array",
+ "default": [],
+ "items": {
+ "anyOf": [
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "matrix": {
+ "type": "array",
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "string",
+ },
+ },
+ },
+ "exclude": {"type": "array", "items": {"type": "string"}},
+ },
+ },
+ {"type": "string"},
+ {"type": "null"},
]
- }
+ },
}
-projections_scheme = spack.schema.projections.properties['projections']
+projections_scheme = spack.schema.projections.properties["projections"]
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack environment file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- '^env|spack$': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'deprecatedProperties': {
- 'properties': ['concretization'],
- 'message': deprecate_concretization,
- 'error': False
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack environment file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "patternProperties": {
+ "^env|spack$": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "deprecatedProperties": {
+ "properties": ["concretization"],
+ "message": deprecate_concretization,
+ "error": False,
},
- 'properties': union_dicts(
+ "properties": union_dicts(
# merged configuration scope schemas
spack.schema.merged.properties,
# extra environment schema properties
{
- 'include': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'
- },
+ "include": {
+ "type": "array",
+ "default": [],
+ "items": {"type": "string"},
},
- 'develop': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'spec': {
- 'type': 'string'
- },
- 'path': {
- 'type': 'string'
- },
+ "develop": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "patternProperties": {
+ r"\w[\w-]*": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "spec": {"type": "string"},
+ "path": {"type": "string"},
},
},
},
},
- 'definitions': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'object',
- 'properties': {
- 'when': {
- 'type': 'string'
- }
- },
- 'patternProperties': {
- r'^(?!when$)\w*': spec_list_schema
- }
- }
+ "definitions": {
+ "type": "array",
+ "default": [],
+ "items": {
+ "type": "object",
+ "properties": {"when": {"type": "string"}},
+ "patternProperties": {r"^(?!when$)\w*": spec_list_schema},
+ },
},
- 'specs': spec_list_schema,
- 'view': {
- 'anyOf': [
- {'type': 'boolean'},
- {'type': 'string'},
+ "specs": spec_list_schema,
+ "view": {
+ "anyOf": [
+ {"type": "boolean"},
+ {"type": "string"},
{
- 'type': 'object',
- 'patternProperties': {
- r'\w+': {
- 'required': ['root'],
- 'additionalProperties': False,
- 'properties': {
- 'root': {
- 'type': 'string'
+ "type": "object",
+ "patternProperties": {
+ r"\w+": {
+ "required": ["root"],
+ "additionalProperties": False,
+ "properties": {
+ "root": {"type": "string"},
+ "link": {
+ "type": "string",
+ "pattern": "(roots|all|run)",
},
- 'link': {
- 'type': 'string',
- 'pattern': '(roots|all|run)',
+ "link_type": {"type": "string"},
+ "select": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'link_type': {
- 'type': 'string'
+ "exclude": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'select': {
- 'type': 'array',
- 'items': {
- 'type': 'string'
- }
- },
- 'exclude': {
- 'type': 'array',
- 'items': {
- 'type': 'string'
- }
- },
- 'projections': projections_scheme
- }
+ "projections": projections_scheme,
+ },
}
- }
- }
+ },
+ },
]
},
- 'concretization': {
- 'type': 'string',
- 'enum': ['together', 'separately'],
- 'default': 'separately'
- }
- }
- )
+ "concretization": {
+ "type": "string",
+ "enum": ["together", "separately"],
+ "default": "separately",
+ },
+ },
+ ),
}
- }
+ },
}
@@ -193,29 +170,30 @@ def update(data):
True if data was changed, False otherwise
"""
updated = False
- if 'include' in data:
- msg = ("included configuration files should be updated manually"
- " [files={0}]")
- warnings.warn(msg.format(', '.join(data['include'])))
+ if "include" in data:
+ msg = "included configuration files should be updated manually" " [files={0}]"
+ warnings.warn(msg.format(", ".join(data["include"])))
# Spack 0.19 drops support for `spack:concretization` in favor of
# `spack:concretizer:unify`. Here we provide an upgrade path that changes the former
# into the latter, or warns when there's an ambiguity. Note that Spack 0.17 is not
# forward compatible with `spack:concretizer:unify`.
- if 'concretization' in data:
- has_unify = 'unify' in data.get('concretizer', {})
- to_unify = {'together': True, 'separately': False}
- unify = to_unify[data['concretization']]
+ if "concretization" in data:
+ has_unify = "unify" in data.get("concretizer", {})
+ to_unify = {"together": True, "separately": False}
+ unify = to_unify[data["concretization"]]
- if has_unify and data['concretizer']['unify'] != unify:
+ if has_unify and data["concretizer"]["unify"] != unify:
warnings.warn(
- 'The following configuration conflicts: '
- '`spack:concretization:{}` and `spack:concretizer:unify:{}`'
- '. Please update manually.'.format(
- data['concretization'], data['concretizer']['unify']))
+ "The following configuration conflicts: "
+ "`spack:concretization:{}` and `spack:concretizer:unify:{}`"
+ ". Please update manually.".format(
+ data["concretization"], data["concretizer"]["unify"]
+ )
+ )
else:
- data.update({'concretizer': {'unify': unify}})
- data.pop('concretization')
+ data.update({"concretizer": {"unify": unify}})
+ data.pop("concretization")
updated = True
return updated
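
As a hedged sketch of the upgrade path implemented above: given a mapping that still uses the deprecated key, update() moves it to the new concretizer:unify form. The dictionary contents here are hypothetical, and a Spack checkout is assumed to be on sys.path.

    import spack.schema.env

    data = {"concretization": "together"}
    changed = spack.schema.env.update(data)
    assert changed
    assert data == {"concretizer": {"unify": True}}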
diff --git a/lib/spack/spack/schema/environment.py b/lib/spack/spack/schema/environment.py
index 3bb02d33d0..2a295764a2 100644
--- a/lib/spack/spack/schema/environment.py
+++ b/lib/spack/spack/schema/environment.py
@@ -7,26 +7,27 @@ schemas.
"""
array_of_strings_or_num = {
- 'type': 'array', 'default': [], 'items':
- {'anyOf': [{'type': 'string'}, {'type': 'number'}]}
+ "type": "array",
+ "default": [],
+ "items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
}
dictionary_of_strings_or_num = {
- 'type': 'object', 'patternProperties':
- {r'\w[\w-]*': {'anyOf': [{'type': 'string'}, {'type': 'number'}]}}
+ "type": "object",
+ "patternProperties": {r"\w[\w-]*": {"anyOf": [{"type": "string"}, {"type": "number"}]}},
}
definition = {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'set': dictionary_of_strings_or_num,
- 'unset': array_of_strings_or_num,
- 'prepend_path': dictionary_of_strings_or_num,
- 'append_path': dictionary_of_strings_or_num,
- 'remove_path': dictionary_of_strings_or_num
- }
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": {
+ "set": dictionary_of_strings_or_num,
+ "unset": array_of_strings_or_num,
+ "prepend_path": dictionary_of_strings_or_num,
+ "append_path": dictionary_of_strings_or_num,
+ "remove_path": dictionary_of_strings_or_num,
+ },
}
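
For illustration, a fragment of the kind the definition schema above accepts, as it might appear under an environment-modifications key in a config file; the variable names and path are placeholders.

    import jsonschema
    import spack.schema.environment

    env_mods = {
        "set": {"CC": "gcc"},
        "prepend_path": {"PATH": "/opt/tools/bin"},
        "unset": ["F77"],
    }
    jsonschema.validate(env_mods, spack.schema.environment.definition)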
diff --git a/lib/spack/spack/schema/gitlab_ci.py b/lib/spack/spack/schema/gitlab_ci.py
index 80e40dd72b..eb8abc9682 100644
--- a/lib/spack/spack/schema/gitlab_ci.py
+++ b/lib/spack/spack/schema/gitlab_ci.py
@@ -12,17 +12,16 @@
from llnl.util.lang import union_dicts
image_schema = {
- 'oneOf': [
+ "oneOf": [
+ {"type": "string"},
{
- 'type': 'string'
- }, {
- 'type': 'object',
- 'properties': {
- 'name': {'type': 'string'},
- 'entrypoint': {
- 'type': 'array',
- 'items': {
- 'type': 'string',
+ "type": "object",
+ "properties": {
+ "name": {"type": "string"},
+ "entrypoint": {
+ "type": "array",
+ "items": {
+ "type": "string",
},
},
},
@@ -31,115 +30,104 @@ image_schema = {
}
runner_attributes_schema_items = {
- 'image': image_schema,
- 'tags': {
- 'type': 'array',
- 'items': {'type': 'string'}
- },
- 'variables': {
- 'type': 'object',
- 'patternProperties': {
- r'[\w\d\-_\.]+': {
- 'type': 'string',
+ "image": image_schema,
+ "tags": {"type": "array", "items": {"type": "string"}},
+ "variables": {
+ "type": "object",
+ "patternProperties": {
+ r"[\w\d\-_\.]+": {
+ "type": "string",
},
},
},
- 'before_script': {
- 'type': 'array',
- 'items': {'type': 'string'}
- },
- 'script': {
- 'type': 'array',
- 'items': {'type': 'string'}
- },
- 'after_script': {
- 'type': 'array',
- 'items': {'type': 'string'}
- },
+ "before_script": {"type": "array", "items": {"type": "string"}},
+ "script": {"type": "array", "items": {"type": "string"}},
+ "after_script": {"type": "array", "items": {"type": "string"}},
}
runner_selector_schema = {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['tags'],
- 'properties': runner_attributes_schema_items,
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["tags"],
+ "properties": runner_attributes_schema_items,
}
core_shared_properties = union_dicts(
runner_attributes_schema_items,
{
- 'bootstrap': {
- 'type': 'array',
- 'items': {
- 'anyOf': [
+ "bootstrap": {
+ "type": "array",
+ "items": {
+ "anyOf": [
+ {
+ "type": "string",
+ },
{
- 'type': 'string',
- }, {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['name'],
- 'properties': {
- 'name': {
- 'type': 'string',
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["name"],
+ "properties": {
+ "name": {
+ "type": "string",
},
- 'compiler-agnostic': {
- 'type': 'boolean',
- 'default': False,
+ "compiler-agnostic": {
+ "type": "boolean",
+ "default": False,
},
},
},
],
},
},
- 'mappings': {
- 'type': 'array',
- 'items': {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['match'],
- 'properties': {
- 'match': {
- 'type': 'array',
- 'items': {
- 'type': 'string',
+ "mappings": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["match"],
+ "properties": {
+ "match": {
+ "type": "array",
+ "items": {
+ "type": "string",
},
},
- 'runner-attributes': runner_selector_schema,
+ "runner-attributes": runner_selector_schema,
},
},
},
- 'service-job-attributes': runner_selector_schema,
- 'signing-job-attributes': runner_selector_schema,
- 'rebuild-index': {'type': 'boolean'},
- 'broken-specs-url': {'type': 'string'},
+ "service-job-attributes": runner_selector_schema,
+ "signing-job-attributes": runner_selector_schema,
+ "rebuild-index": {"type": "boolean"},
+ "broken-specs-url": {"type": "string"},
},
)
gitlab_ci_properties = {
- 'anyOf': [
+ "anyOf": [
{
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['mappings'],
- 'properties': union_dicts(
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["mappings"],
+ "properties": union_dicts(
core_shared_properties,
{
- 'enable-artifacts-buildcache': {
- 'type': 'boolean',
+ "enable-artifacts-buildcache": {
+ "type": "boolean",
},
},
),
},
{
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['mappings'],
- 'properties': union_dicts(
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["mappings"],
+ "properties": union_dicts(
core_shared_properties,
{
- 'temporary-storage-url-prefix': {
- 'type': 'string',
+ "temporary-storage-url-prefix": {
+ "type": "string",
},
},
),
@@ -149,14 +137,14 @@ gitlab_ci_properties = {
#: Properties for inclusion in other schemas
properties = {
- 'gitlab-ci': gitlab_ci_properties,
+ "gitlab-ci": gitlab_ci_properties,
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack gitlab-ci configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack gitlab-ci configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
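
For illustration, a minimal gitlab-ci entry that should satisfy the first anyOf branch above: mappings is required, and each runner-attributes block must carry tags. Image and tag names are placeholders.

    gitlab_ci_section = {
        "mappings": [
            {
                "match": ["os=ubuntu18.04"],
                "runner-attributes": {
                    "tags": ["spack", "x86_64"],
                    "image": "ghcr.io/example/builder:latest",  # placeholder image
                },
            }
        ],
        "rebuild-index": True,
    }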
diff --git a/lib/spack/spack/schema/merged.py b/lib/spack/spack/schema/merged.py
index 8f9768cfd0..e7687c1f18 100644
--- a/lib/spack/spack/schema/merged.py
+++ b/lib/spack/spack/schema/merged.py
@@ -36,15 +36,15 @@ properties = union_dicts(
spack.schema.modules.properties,
spack.schema.packages.properties,
spack.schema.repos.properties,
- spack.schema.upstreams.properties
+ spack.schema.upstreams.properties,
)
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack merged configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack merged configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
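
As a rough sketch of how the merged schema is assembled: union_dicts (from llnl.util.lang, as imported in gitlab_ci.py above) combines the per-file properties mappings into one dictionary. The toy inputs below are hypothetical.

    from llnl.util.lang import union_dicts

    merged = union_dicts({"mirrors": {"type": "object"}}, {"repos": {"type": "array"}})
    assert set(merged) == {"mirrors", "repos"}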
diff --git a/lib/spack/spack/schema/mirrors.py b/lib/spack/spack/schema/mirrors.py
index 05f0cfc331..eb00a699f0 100644
--- a/lib/spack/spack/schema/mirrors.py
+++ b/lib/spack/spack/schema/mirrors.py
@@ -12,22 +12,22 @@
#: Properties for inclusion in other schemas
properties = {
- 'mirrors': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'anyOf': [
- {'type': 'string'},
+ "mirrors": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "patternProperties": {
+ r"\w[\w-]*": {
+ "anyOf": [
+ {"type": "string"},
{
- 'type': 'object',
- 'required': ['fetch', 'push'],
- 'properties': {
- 'fetch': {'type': ['string', 'object']},
- 'push': {'type': ['string', 'object']}
- }
- }
+ "type": "object",
+ "required": ["fetch", "push"],
+ "properties": {
+ "fetch": {"type": ["string", "object"]},
+ "push": {"type": ["string", "object"]},
+ },
+ },
]
},
},
@@ -37,9 +37,9 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack mirror configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack mirror configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
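
For illustration, two mirror entries of the shape the schema above accepts, one in the short string form and one in the fetch/push object form; all URLs are placeholders.

    mirrors_section = {
        "local-cache": "file:///tmp/spack-mirror",
        "shared": {
            "fetch": "https://mirror.example.com/spack",
            "push": "s3://example-bucket/spack",
        },
    }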
diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py
index 03bca3d347..d355970f30 100644
--- a/lib/spack/spack/schema/modules.py
+++ b/lib/spack/spack/schema/modules.py
@@ -19,176 +19,139 @@ import spack.schema.projections
#: THIS NEEDS TO BE UPDATED FOR EVERY NEW KEYWORD THAT
#: IS ADDED IMMEDIATELY BELOW THE MODULE TYPE ATTRIBUTE
spec_regex = (
- r'(?!hierarchy|core_specs|verbose|hash_length|defaults|'
- r'whitelist|blacklist|' # DEPRECATED: remove in 0.20.
- r'include|exclude|' # use these more inclusive/consistent options
- r'projections|naming_scheme|core_compilers|all)(^\w[\w-]*)'
-
+ r"(?!hierarchy|core_specs|verbose|hash_length|defaults|"
+ r"whitelist|blacklist|" # DEPRECATED: remove in 0.20.
+ r"include|exclude|" # use these more inclusive/consistent options
+ r"projections|naming_scheme|core_compilers|all)(^\w[\w-]*)"
)
#: Matches a valid name for a module set
-valid_module_set_name = r'^(?!arch_folder$|lmod$|roots$|enable$|prefix_inspections$|'\
- r'tcl$|use_view$)\w[\w-]*$'
+valid_module_set_name = (
+ r"^(?!arch_folder$|lmod$|roots$|enable$|prefix_inspections$|" r"tcl$|use_view$)\w[\w-]*$"
+)
#: Matches an anonymous spec, i.e. a spec without a root name
-anonymous_spec_regex = r'^[\^@%+~]'
+anonymous_spec_regex = r"^[\^@%+~]"
#: Definitions for parts of module schema
-array_of_strings = {
- 'type': 'array', 'default': [], 'items': {'type': 'string'}
-}
+array_of_strings = {"type": "array", "default": [], "items": {"type": "string"}}
-dictionary_of_strings = {
- 'type': 'object', 'patternProperties': {r'\w[\w-]*': {'type': 'string'}}
-}
+dictionary_of_strings = {"type": "object", "patternProperties": {r"\w[\w-]*": {"type": "string"}}}
-dependency_selection = {'type': 'string', 'enum': ['none', 'direct', 'all']}
+dependency_selection = {"type": "string", "enum": ["none", "direct", "all"]}
module_file_configuration = {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'filter': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": {
+ "filter": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": {
# DEPRECATED: remove in 0.20.
- 'environment_blacklist': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'
- }
+ "environment_blacklist": {
+ "type": "array",
+ "default": [],
+ "items": {"type": "string"},
},
# use exclude_env_vars instead
- 'exclude_env_vars': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'
- }
- }
- }
- },
- 'template': {
- 'type': 'string'
+ "exclude_env_vars": {"type": "array", "default": [], "items": {"type": "string"}},
+ },
},
- 'autoload': dependency_selection,
- 'prerequisites': dependency_selection,
- 'conflict': array_of_strings,
- 'load': array_of_strings,
- 'suffixes': {
- 'type': 'object',
- 'validate_spec': True,
- 'patternProperties': {
- r'\w[\w-]*': { # key
- 'type': 'string'
- }
- }
+ "template": {"type": "string"},
+ "autoload": dependency_selection,
+ "prerequisites": dependency_selection,
+ "conflict": array_of_strings,
+ "load": array_of_strings,
+ "suffixes": {
+ "type": "object",
+ "validate_spec": True,
+ "patternProperties": {r"\w[\w-]*": {"type": "string"}}, # key
},
- 'environment': spack.schema.environment.definition
- }
+ "environment": spack.schema.environment.definition,
+ },
}
-projections_scheme = spack.schema.projections.properties['projections']
+projections_scheme = spack.schema.projections.properties["projections"]
module_type_configuration = {
- 'type': 'object',
- 'default': {},
- 'allOf': [
- {'properties': {
- 'verbose': {
- 'type': 'boolean',
- 'default': False
- },
- 'hash_length': {
- 'type': 'integer',
- 'minimum': 0,
- 'default': 7
- },
- # DEPRECATED: remove in 0.20.
- 'whitelist': array_of_strings,
- 'blacklist': array_of_strings,
- 'blacklist_implicits': {
- 'type': 'boolean',
- 'default': False
- },
- # whitelist/blacklist have been replaced with include/exclude
- 'include': array_of_strings,
- 'exclude': array_of_strings,
- 'exclude_implicits': {
- 'type': 'boolean',
- 'default': False
- },
- 'defaults': array_of_strings,
- 'naming_scheme': {
- 'type': 'string' # Can we be more specific here?
+ "type": "object",
+ "default": {},
+ "allOf": [
+ {
+ "properties": {
+ "verbose": {"type": "boolean", "default": False},
+ "hash_length": {"type": "integer", "minimum": 0, "default": 7},
+ # DEPRECATED: remove in 0.20.
+ "whitelist": array_of_strings,
+ "blacklist": array_of_strings,
+ "blacklist_implicits": {"type": "boolean", "default": False},
+ # whitelist/blacklist have been replaced with include/exclude
+ "include": array_of_strings,
+ "exclude": array_of_strings,
+ "exclude_implicits": {"type": "boolean", "default": False},
+ "defaults": array_of_strings,
+ "naming_scheme": {"type": "string"}, # Can we be more specific here?
+ "projections": projections_scheme,
+ "all": module_file_configuration,
+ }
+ },
+ {
+ "validate_spec": True,
+ "patternProperties": {
+ spec_regex: module_file_configuration,
+ anonymous_spec_regex: module_file_configuration,
},
- 'projections': projections_scheme,
- 'all': module_file_configuration,
- }
},
- {'validate_spec': True,
- 'patternProperties': {
- spec_regex: module_file_configuration,
- anonymous_spec_regex: module_file_configuration,
- }
- }
- ]
+ ],
}
module_config_properties = {
- 'use_view': {'anyOf': [
- {'type': 'string'},
- {'type': 'boolean'}
- ]},
- 'arch_folder': {'type': 'boolean'},
- 'roots': {
- 'type': 'object',
- 'properties': {
- 'tcl': {'type': 'string'},
- 'lmod': {'type': 'string'},
+ "use_view": {"anyOf": [{"type": "string"}, {"type": "boolean"}]},
+ "arch_folder": {"type": "boolean"},
+ "roots": {
+ "type": "object",
+ "properties": {
+ "tcl": {"type": "string"},
+ "lmod": {"type": "string"},
},
},
- 'enable': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string',
- 'enum': ['tcl', 'lmod']
- }
+ "enable": {
+ "type": "array",
+ "default": [],
+ "items": {"type": "string", "enum": ["tcl", "lmod"]},
},
- 'lmod': {
- 'allOf': [
+ "lmod": {
+ "allOf": [
# Base configuration
module_type_configuration,
{
- 'type': 'object',
- 'properties': {
- 'core_compilers': array_of_strings,
- 'hierarchy': array_of_strings,
- 'core_specs': array_of_strings,
+ "type": "object",
+ "properties": {
+ "core_compilers": array_of_strings,
+ "hierarchy": array_of_strings,
+ "core_specs": array_of_strings,
},
- } # Specific lmod extensions
+ }, # Specific lmod extensions
]
},
- 'tcl': {
- 'allOf': [
+ "tcl": {
+ "allOf": [
# Base configuration
module_type_configuration,
- {} # Specific tcl extensions
+ {}, # Specific tcl extensions
]
},
- 'prefix_inspections': {
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
+ "prefix_inspections": {
+ "type": "object",
+ "additionalProperties": False,
+ "patternProperties": {
# prefix-relative path to be inspected for existence
- r'^[\w-]*': array_of_strings
- }
+ r"^[\w-]*": array_of_strings
+ },
},
}
@@ -197,51 +160,51 @@ def deprecation_msg_default_module_set(instance, props):
return (
'Top-level properties "{0}" in module config are ignored as of Spack v0.18. '
'They should be set on the "default" module set. Run\n\n'
- '\t$ spack config update modules\n\n'
- 'to update the file to the new format'.format('", "'.join(instance))
+ "\t$ spack config update modules\n\n"
+ "to update the file to the new format".format('", "'.join(instance))
)
# Properties for inclusion into other schemas (requires definitions)
properties = {
- 'modules': {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'prefix_inspections': {
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
+ "modules": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "prefix_inspections": {
+ "type": "object",
+ "additionalProperties": False,
+ "patternProperties": {
# prefix-relative path to be inspected for existence
- r'^[\w-]*': array_of_strings
- }
+ r"^[\w-]*": array_of_strings
+ },
},
},
- 'patternProperties': {
+ "patternProperties": {
valid_module_set_name: {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': module_config_properties
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": module_config_properties,
},
# Deprecated top-level keys (ignored in 0.18 with a warning)
- '^(arch_folder|lmod|roots|enable|tcl|use_view)$': {}
+ "^(arch_folder|lmod|roots|enable|tcl|use_view)$": {},
+ },
+ "deprecatedProperties": {
+ "properties": ["arch_folder", "lmod", "roots", "enable", "tcl", "use_view"],
+ "message": deprecation_msg_default_module_set,
+ "error": False,
},
- 'deprecatedProperties': {
- 'properties': ['arch_folder', 'lmod', 'roots', 'enable', 'tcl', 'use_view'],
- 'message': deprecation_msg_default_module_set,
- 'error': False
- }
}
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack module file configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack module file configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
@@ -293,15 +256,16 @@ def update_default_module_set(data):
"""
changed = False
- deprecated_top_level_keys = ('arch_folder', 'lmod', 'roots', 'enable',
- 'tcl', 'use_view')
+ deprecated_top_level_keys = ("arch_folder", "lmod", "roots", "enable", "tcl", "use_view")
# Don't update when we already have a default module set
- if 'default' in data:
+ if "default" in data:
if any(key in data for key in deprecated_top_level_keys):
- warnings.warn('Did not move top-level module properties into "default" '
- 'module set, because the "default" module set is already '
- 'defined')
+ warnings.warn(
+ 'Did not move top-level module properties into "default" '
+ 'module set, because the "default" module set is already '
+ "defined"
+ )
return changed
default = {}
@@ -313,7 +277,7 @@ def update_default_module_set(data):
if default:
changed = True
- data['default'] = default
+ data["default"] = default
return changed
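
For illustration, a small "default" module set of the shape the schema above accepts (this is the value that sits under the top-level modules: key); compiler and hierarchy values are placeholders.

    modules_section = {
        "default": {
            "enable": ["lmod"],
            "lmod": {
                "hash_length": 0,
                "core_compilers": ["gcc@9.4.0"],  # placeholder compiler
                "hierarchy": ["mpi"],
            },
        }
    }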
diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py
index 33d7238fe7..87d489028e 100644
--- a/lib/spack/spack/schema/packages.py
+++ b/lib/spack/spack/schema/packages.py
@@ -11,82 +11,85 @@
#: Properties for inclusion in other schemas
properties = {
- 'packages': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': { # package name
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'version': {
- 'type': 'array',
- 'default': [],
+ "packages": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "patternProperties": {
+ r"\w[\w-]*": { # package name
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": {
+ "version": {
+ "type": "array",
+ "default": [],
# version strings
- 'items': {'anyOf': [{'type': 'string'},
- {'type': 'number'}]}},
- 'target': {
- 'type': 'array',
- 'default': [],
+ "items": {"anyOf": [{"type": "string"}, {"type": "number"}]},
+ },
+ "target": {
+ "type": "array",
+ "default": [],
# target names
- 'items': {'type': 'string'},
+ "items": {"type": "string"},
},
- 'compiler': {
- 'type': 'array',
- 'default': [],
- 'items': {'type': 'string'}}, # compiler specs
- 'buildable': {
- 'type': 'boolean',
- 'default': True,
+ "compiler": {
+ "type": "array",
+ "default": [],
+ "items": {"type": "string"},
+ }, # compiler specs
+ "buildable": {
+ "type": "boolean",
+ "default": True,
},
- 'permissions': {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'read': {
- 'type': 'string',
- 'enum': ['user', 'group', 'world'],
+ "permissions": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "read": {
+ "type": "string",
+ "enum": ["user", "group", "world"],
},
- 'write': {
- 'type': 'string',
- 'enum': ['user', 'group', 'world'],
+ "write": {
+ "type": "string",
+ "enum": ["user", "group", "world"],
},
- 'group': {
- 'type': 'string',
+ "group": {
+ "type": "string",
},
},
},
- 'providers': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'array',
- 'default': [],
- 'items': {'type': 'string'}, }, }, },
- 'variants': {
- 'oneOf': [
- {'type': 'string'},
- {'type': 'array',
- 'items': {'type': 'string'}}],
+ "providers": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "patternProperties": {
+ r"\w[\w-]*": {
+ "type": "array",
+ "default": [],
+ "items": {"type": "string"},
+ },
+ },
},
- 'externals': {
- 'type': 'array',
- 'items': {
- 'type': 'object',
- 'properties': {
- 'spec': {'type': 'string'},
- 'prefix': {'type': 'string'},
- 'modules': {'type': 'array',
- 'items': {'type': 'string'}},
- 'extra_attributes': {'type': 'object'}
+ "variants": {
+ "oneOf": [
+ {"type": "string"},
+ {"type": "array", "items": {"type": "string"}},
+ ],
+ },
+ "externals": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "spec": {"type": "string"},
+ "prefix": {"type": "string"},
+ "modules": {"type": "array", "items": {"type": "string"}},
+ "extra_attributes": {"type": "object"},
},
- 'additionalProperties': True,
- 'required': ['spec']
- }
+ "additionalProperties": True,
+ "required": ["spec"],
+ },
},
},
},
@@ -97,9 +100,9 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack package configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack package configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
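
For illustration, a packages entry of the shape the schema above accepts, combining a non-buildable external package with preferences under all; paths and versions are placeholders.

    packages_section = {
        "openssl": {
            "buildable": False,
            "externals": [{"spec": "openssl@1.1.1k", "prefix": "/usr"}],
        },
        "all": {
            "compiler": ["gcc@9.4.0"],
            "providers": {"mpi": ["openmpi"]},
        },
    }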
diff --git a/lib/spack/spack/schema/projections.py b/lib/spack/spack/schema/projections.py
index 0db940e5e4..b324fdd831 100644
--- a/lib/spack/spack/schema/projections.py
+++ b/lib/spack/spack/schema/projections.py
@@ -12,12 +12,10 @@
#: Properties for inclusion in other schemas
properties = {
- 'projections': {
- 'type': 'object',
- 'patternProperties': {
- r'all|\w[\w-]*': {
- 'type': 'string'
- },
+ "projections": {
+ "type": "object",
+ "patternProperties": {
+ r"all|\w[\w-]*": {"type": "string"},
},
},
}
@@ -25,9 +23,9 @@ properties = {
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack view projection configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack view projection configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
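
For illustration, a projections mapping of the form the schema accepts; the format tokens follow Spack's spec format syntax and the package name is a placeholder.

    projections_section = {
        "all": "{name}/{version}-{compiler.name}-{compiler.version}",
        "zlib": "{name}-{version}",
    }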
diff --git a/lib/spack/spack/schema/repos.py b/lib/spack/spack/schema/repos.py
index b44370ae36..0fc4cdb8f2 100644
--- a/lib/spack/spack/schema/repos.py
+++ b/lib/spack/spack/schema/repos.py
@@ -12,19 +12,19 @@
#: Properties for inclusion in other schemas
properties = {
- 'repos': {
- 'type': 'array',
- 'default': [],
- 'items': {'type': 'string'},
+ "repos": {
+ "type": "array",
+ "default": [],
+ "items": {"type": "string"},
},
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack repository configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack repository configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
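
For illustration, a repos list of the form this schema accepts; the second path is a placeholder.

    repos_section = ["$spack/var/spack/repos/builtin", "/home/user/my-package-repo"]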
diff --git a/lib/spack/spack/schema/spec.py b/lib/spack/spack/schema/spec.py
index 3e64f08502..a13f70a2e2 100644
--- a/lib/spack/spack/schema/spec.py
+++ b/lib/spack/spack/schema/spec.py
@@ -13,30 +13,31 @@ TODO: This needs to be updated? Especially the hashes under properties.
target = {
- 'oneOf': [
+ "oneOf": [
{
- 'type': 'string',
- }, {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': [
- 'name',
- 'vendor',
- 'features',
- 'generation',
- 'parents',
+ "type": "string",
+ },
+ {
+ "type": "object",
+ "additionalProperties": False,
+ "required": [
+ "name",
+ "vendor",
+ "features",
+ "generation",
+ "parents",
],
- 'properties': {
- 'name': {'type': 'string'},
- 'vendor': {'type': 'string'},
- 'features': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "properties": {
+ "name": {"type": "string"},
+ "vendor": {"type": "string"},
+ "features": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'generation': {'type': 'integer'},
- 'parents': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "generation": {"type": "integer"},
+ "parents": {
+ "type": "array",
+ "items": {"type": "string"},
},
},
},
@@ -44,25 +45,25 @@ target = {
}
arch = {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'platform': {},
- 'platform_os': {},
- 'target': target,
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "platform": {},
+ "platform_os": {},
+ "target": target,
},
}
dependencies = {
- 'type': 'object',
- 'patternProperties': {
- r'\w[\w-]*': { # package name
- 'type': 'object',
- 'properties': {
- 'hash': {'type': 'string'},
- 'type': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "type": "object",
+ "patternProperties": {
+ r"\w[\w-]*": { # package name
+ "type": "object",
+ "properties": {
+ "hash": {"type": "string"},
+ "type": {
+ "type": "array",
+ "items": {"type": "string"},
},
},
},
@@ -70,134 +71,121 @@ dependencies = {
}
build_spec = {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['name', 'hash'],
- 'properties': {
- 'name': {'type': 'string'},
- 'hash': {'type': 'string'}
- }
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["name", "hash"],
+ "properties": {"name": {"type": "string"}, "hash": {"type": "string"}},
}
#: Properties for inclusion in other schemas
properties = {
- 'spec': {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': [
- '_meta',
- 'nodes'
- ],
- 'properties': {
- '_meta': {
- 'type': 'object',
- 'properties': {
- 'version': {'type': 'number'}
- }
- },
- 'nodes': {
- 'type': 'array',
- 'items': {
- 'type': 'object',
- 'additionalProperties': False,
- 'required': [
- 'version',
- 'arch',
- 'compiler',
- 'namespace',
- 'parameters',
+ "spec": {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["_meta", "nodes"],
+ "properties": {
+ "_meta": {"type": "object", "properties": {"version": {"type": "number"}}},
+ "nodes": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": False,
+ "required": [
+ "version",
+ "arch",
+ "compiler",
+ "namespace",
+ "parameters",
],
- 'properties': {
- 'name': {'type': 'string'},
- 'hash': {'type': 'string'},
- 'package_hash': {'type': 'string'},
-
+ "properties": {
+ "name": {"type": "string"},
+ "hash": {"type": "string"},
+ "package_hash": {"type": "string"},
# these hashes were used on some specs prior to 0.18
- 'full_hash': {'type': 'string'},
- 'build_hash': {'type': 'string'},
-
- 'version': {
- 'oneOf': [
- {'type': 'string'},
- {'type': 'number'},
+ "full_hash": {"type": "string"},
+ "build_hash": {"type": "string"},
+ "version": {
+ "oneOf": [
+ {"type": "string"},
+ {"type": "number"},
],
},
- 'arch': arch,
- 'compiler': {
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': {
- 'name': {'type': 'string'},
- 'version': {'type': 'string'},
+ "arch": arch,
+ "compiler": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "name": {"type": "string"},
+ "version": {"type": "string"},
},
},
- 'develop': {
- 'anyOf': [
- {'type': 'boolean'},
- {'type': 'string'},
+ "develop": {
+ "anyOf": [
+ {"type": "boolean"},
+ {"type": "string"},
],
},
- 'namespace': {'type': 'string'},
- 'parameters': {
- 'type': 'object',
- 'required': [
- 'cflags',
- 'cppflags',
- 'cxxflags',
- 'fflags',
- 'ldflags',
- 'ldlibs',
+ "namespace": {"type": "string"},
+ "parameters": {
+ "type": "object",
+ "required": [
+ "cflags",
+ "cppflags",
+ "cxxflags",
+ "fflags",
+ "ldflags",
+ "ldlibs",
],
- 'additionalProperties': True,
- 'properties': {
- 'patches': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "additionalProperties": True,
+ "properties": {
+ "patches": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'cflags': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "cflags": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'cppflags': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "cppflags": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'cxxflags': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "cxxflags": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'fflags': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "fflags": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'ldflags': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "ldflags": {
+ "type": "array",
+ "items": {"type": "string"},
},
- 'ldlib': {
- 'type': 'array',
- 'items': {'type': 'string'},
+ "ldlib": {
+ "type": "array",
+ "items": {"type": "string"},
},
},
},
- 'patches': {
- 'type': 'array',
- 'items': {},
+ "patches": {
+ "type": "array",
+ "items": {},
},
- 'dependencies': dependencies,
- 'build_spec': build_spec,
+ "dependencies": dependencies,
+ "build_spec": build_spec,
},
- }
- }
- }
+ },
+ },
+ },
}
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack spec schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack spec schema",
+ "type": "object",
+ "additionalProperties": False,
+ "patternProperties": properties,
}
diff --git a/lib/spack/spack/schema/upstreams.py b/lib/spack/spack/schema/upstreams.py
index 9e45bc4a57..11deac3bbc 100644
--- a/lib/spack/spack/schema/upstreams.py
+++ b/lib/spack/spack/schema/upstreams.py
@@ -6,34 +6,31 @@
#: Properties for inclusion in other schemas
properties = {
- 'upstreams': {
- 'type': 'object',
- 'default': {},
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'install_tree': {'type': 'string'},
- 'modules': {
- 'type': 'object',
- 'properties': {
- 'tcl': {'type': 'string'},
- 'lmod': {'type': 'string'}
- }
- }
- }
+ "upstreams": {
+ "type": "object",
+ "default": {},
+ "patternProperties": {
+ r"\w[\w-]*": {
+ "type": "object",
+ "default": {},
+ "additionalProperties": False,
+ "properties": {
+ "install_tree": {"type": "string"},
+ "modules": {
+ "type": "object",
+ "properties": {"tcl": {"type": "string"}, "lmod": {"type": "string"}},
+ },
+ },
}
- }
+ },
}
}
#: Full schema with metadata
schema = {
- '$schema': 'http://json-schema.org/draft-07/schema#',
- 'title': 'Spack core configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'properties': properties,
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "title": "Spack core configuration file schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": properties,
}
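
For illustration, an upstreams entry of the shape the schema above accepts; the install tree and module root paths are placeholders.

    upstreams_section = {
        "site-installs": {
            "install_tree": "/opt/spack/opt/spack",
            "modules": {"lmod": "/opt/spack/share/spack/lmod"},
        }
    }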
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index dc1c93eacd..f8cba20adf 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -23,7 +23,7 @@ try:
import clingo # type: ignore[import]
# There may be a better way to detect this
- clingo_cffi = hasattr(clingo.Symbol, '_rep')
+ clingo_cffi = hasattr(clingo.Symbol, "_rep")
except ImportError:
clingo = None # type: ignore
clingo_cffi = False
@@ -64,6 +64,7 @@ def ast_getter(*names):
if result:
return result
raise KeyError("node has no such keys: %s" % names)
+
return getter
@@ -72,28 +73,24 @@ ast_sym = ast_getter("symbol", "term")
#: Order of precedence for version origins. Topmost types are preferred.
version_origin_fields = [
- 'spec',
- 'external',
- 'packages_yaml',
- 'package_py',
- 'installed',
+ "spec",
+ "external",
+ "packages_yaml",
+ "package_py",
+ "installed",
]
#: Look up version precedence strings by enum id
-version_origin_str = {
- i: name for i, name in enumerate(version_origin_fields)
-}
+version_origin_str = {i: name for i, name in enumerate(version_origin_fields)}
#: Enumeration like object to mark version provenance
version_provenance = collections.namedtuple( # type: ignore
- 'VersionProvenance',
+ "VersionProvenance",
version_origin_fields,
)(**{name: i for i, name in enumerate(version_origin_fields)})
#: Named tuple to contain information on declared versions
-DeclaredVersion = collections.namedtuple(
- 'DeclaredVersion', ['version', 'idx', 'origin']
-)
+DeclaredVersion = collections.namedtuple("DeclaredVersion", ["version", "idx", "origin"])
# Below numbers are used to map names of criteria to the order
# they appear in the solution. See concretize.lp
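
As a hedged aside on the reformatted definitions above: the namedtuple instance acts as a small enum, so origin ids and their names can be mapped in both directions. The snippet below simply replays those two definitions in isolation.

    import collections

    fields = ["spec", "external", "packages_yaml", "package_py", "installed"]
    version_provenance = collections.namedtuple("VersionProvenance", fields)(
        **{name: i for i, name in enumerate(fields)}
    )
    version_origin_str = {i: name for i, name in enumerate(fields)}
    assert version_provenance.external == 1
    assert version_origin_str[version_provenance.external] == "external"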
@@ -173,10 +170,11 @@ def build_criteria_names(costs, tuples):
indices = dict((p, i) for i, (p, n) in enumerate(priorities_names))
# make a list that has each name with its build and non-build costs
- criteria = [(cost, None, name) for cost, (p, name) in
- zip(costs[:build_start_idx], high_fixed)]
- criteria += [(cost, None, name) for cost, (p, name) in
- zip(costs[fixed_start_idx:installed_start_idx], fixed)]
+ criteria = [(cost, None, name) for cost, (p, name) in zip(costs[:build_start_idx], high_fixed)]
+ criteria += [
+ (cost, None, name)
+ for cost, (p, name) in zip(costs[fixed_start_idx:installed_start_idx], fixed)
+ ]
for (i, name), (b, _) in zip(installed, build):
criteria.append((costs[indices[i]], costs[indices[b]], name))
@@ -245,12 +243,11 @@ class AspFunction(AspObject):
return clingo.Number(arg)
else:
return clingo.String(str(arg))
- return clingo.Function(
- self.name, [argify(arg) for arg in self.args], positive=positive)
+
+ return clingo.Function(self.name, [argify(arg) for arg in self.args], positive=positive)
def __str__(self):
- return "%s(%s)" % (
- self.name, ', '.join(str(_id(arg)) for arg in self.args))
+ return "%s(%s)" % (self.name, ", ".join(str(_id(arg)) for arg in self.args))
def __repr__(self):
return str(self)
@@ -299,7 +296,7 @@ def check_packages_exist(specs):
try:
check_passed = repo.exists(s.name) or repo.is_virtual(s.name)
except Exception as e:
- msg = 'Cannot find package: {0}'.format(str(e))
+ msg = "Cannot find package: {0}".format(str(e))
check_passed = False
tty.debug(msg)
@@ -309,6 +306,7 @@ def check_packages_exist(specs):
class Result(object):
"""Result of an ASP solve."""
+
def __init__(self, specs, asp=None):
self.asp = asp
self.satisfiable = None
@@ -345,10 +343,7 @@ class Result(object):
"""
assert self.control
- symbols = dict(
- (a.literal, a.symbol)
- for a in self.control.symbolic_atoms
- )
+ symbols = dict((a.literal, a.symbol) for a in self.control.symbolic_atoms)
core_symbols = []
for atom in core:
@@ -390,7 +385,7 @@ class Result(object):
string_list = []
for core in self.minimal_cores():
if string_list:
- string_list.append('\n')
+ string_list.append("\n")
string_list.extend(self.format_core(core))
return string_list
@@ -403,7 +398,7 @@ class Result(object):
string_list = []
for core in self.cores:
if string_list:
- string_list.append('\n')
+ string_list.append("\n")
string_list.extend(self.format_core(core))
return string_list
@@ -460,8 +455,7 @@ class Result(object):
for input_spec in self.abstract_specs:
key = input_spec.name
if input_spec.virtual:
- providers = [spec.name for spec in answer.values()
- if spec.package.provides(key)]
+ providers = [spec.name for spec in answer.values() if spec.package.provides(key)]
key = providers[0]
candidate = answer.get(key)
@@ -476,22 +470,22 @@ def _normalize_packages_yaml(packages_yaml):
normalized_yaml = copy.copy(packages_yaml)
for pkg_name in packages_yaml:
is_virtual = spack.repo.path.is_virtual(pkg_name)
- if pkg_name == 'all' or not is_virtual:
+ if pkg_name == "all" or not is_virtual:
continue
# Remove the virtual entry from the normalized configuration
data = normalized_yaml.pop(pkg_name)
- is_buildable = data.get('buildable', True)
+ is_buildable = data.get("buildable", True)
if not is_buildable:
for provider in spack.repo.path.providers_for(pkg_name):
entry = normalized_yaml.setdefault(provider.name, {})
- entry['buildable'] = False
+ entry["buildable"] = False
- externals = data.get('externals', [])
- keyfn = lambda x: spack.spec.Spec(x['spec']).name
+ externals = data.get("externals", [])
+ keyfn = lambda x: spack.spec.Spec(x["spec"]).name
for provider, specs in itertools.groupby(externals, key=keyfn):
entry = normalized_yaml.setdefault(provider, {})
- entry.setdefault('externals', []).extend(specs)
+ entry.setdefault("externals", []).extend(specs)
return normalized_yaml
@@ -505,6 +499,7 @@ def bootstrap_clingo():
import clingo
from clingo.ast import ASTType
+
try:
from clingo.ast import parse_files
except ImportError:
@@ -526,12 +521,12 @@ class PyclingoDriver(object):
self.cores = cores
def title(self, name, char):
- self.out.write('\n')
+ self.out.write("\n")
self.out.write("%" + (char * 76))
- self.out.write('\n')
+ self.out.write("\n")
self.out.write("%% %s\n" % name)
self.out.write("%" + (char * 76))
- self.out.write('\n')
+ self.out.write("\n")
def h1(self, name):
self.title(name, "=")
@@ -540,7 +535,7 @@ class PyclingoDriver(object):
self.title(name, "-")
def newline(self):
- self.out.write('\n')
+ self.out.write("\n")
def fact(self, head):
"""ASP fact (a rule without a body).
@@ -548,7 +543,7 @@ class PyclingoDriver(object):
Arguments:
head (AspFunction): ASP function to generate as fact
"""
- symbol = head.symbol() if hasattr(head, 'symbol') else head
+ symbol = head.symbol() if hasattr(head, "symbol") else head
self.out.write("%s.\n" % str(symbol))
@@ -556,21 +551,21 @@ class PyclingoDriver(object):
# Only functions relevant for constructing bug reports for bad error messages
# are assumptions, and only when using cores.
- choice = self.cores and symbol.name == 'internal_error'
+ choice = self.cores and symbol.name == "internal_error"
self.backend.add_rule([atom], [], choice=choice)
if choice:
self.assumptions.append(atom)
def solve(
- self,
- setup,
- specs,
- nmodels=0,
- reuse=None,
- timers=False,
- stats=False,
- out=None,
- setup_only=False
+ self,
+ setup,
+ specs,
+ nmodels=0,
+ reuse=None,
+ timers=False,
+ stats=False,
+ out=None,
+ setup_only=False,
):
"""Set up the input and solve for dependencies of ``specs``.
@@ -592,10 +587,10 @@ class PyclingoDriver(object):
# Initialize the control object for the solver
self.control = clingo.Control()
- self.control.configuration.configuration = 'tweety'
+ self.control.configuration.configuration = "tweety"
self.control.configuration.solve.models = nmodels
- self.control.configuration.solver.heuristic = 'Domain'
- self.control.configuration.solve.parallel_mode = '1'
+ self.control.configuration.solver.heuristic = "Domain"
+ self.control.configuration.solve.parallel_mode = "1"
self.control.configuration.solver.opt_strategy = "usc,one"
# set up the problem -- this generates facts and rules
@@ -611,18 +606,19 @@ class PyclingoDriver(object):
# extract error messages from concretize.lp by inspecting its AST
with self.backend:
+
def visit(node):
if ast_type(node) == ASTType.Rule:
for term in node.body:
if ast_type(term) == ASTType.Literal:
if ast_type(term.atom) == ASTType.SymbolicAtom:
name = ast_sym(term.atom).name
- if name == 'internal_error':
+ if name == "internal_error":
arg = ast_sym(ast_sym(term.atom).arguments[0])
self.fact(AspFunction(name)(arg.string))
self.h1("Error messages")
- path = os.path.join(parent_dir, 'concretize.lp')
+ path = os.path.join(parent_dir, "concretize.lp")
parse_files([path], visit)
# If we're only doing setup, just return an empty solve result
@@ -630,7 +626,7 @@ class PyclingoDriver(object):
return Result(specs)
# Load the file itself
- self.control.load(os.path.join(parent_dir, 'concretize.lp'))
+ self.control.load(os.path.join(parent_dir, "concretize.lp"))
self.control.load(os.path.join(parent_dir, "os_compatibility.lp"))
self.control.load(os.path.join(parent_dir, "display.lp"))
timer.phase("load")
@@ -643,14 +639,16 @@ class PyclingoDriver(object):
# With a grounded program, we can run the solve.
result = Result(specs)
models = [] # stable models if things go well
- cores = [] # unsatisfiable cores if they do not
+ cores = [] # unsatisfiable cores if they do not
def on_model(model):
models.append((model.cost, model.symbols(shown=True, terms=True)))
- solve_kwargs = {"assumptions": self.assumptions,
- "on_model": on_model,
- "on_core": cores.append}
+ solve_kwargs = {
+ "assumptions": self.assumptions,
+ "on_model": on_model,
+ "on_core": cores.append,
+ }
if clingo_cffi:
solve_kwargs["on_unsat"] = cores.append
@@ -674,10 +672,7 @@ class PyclingoDriver(object):
# build spec from the best model
builder = SpecBuilder(specs, reuse=reuse)
min_cost, best_model = min(models)
- tuples = [
- (sym.name, [stringify(a) for a in sym.arguments])
- for sym in best_model
- ]
+ tuples = [(sym.name, [stringify(a) for a in sym.arguments]) for sym in best_model]
answers = builder.build_specs(tuples)
# add best spec to the results
@@ -742,6 +737,7 @@ class SpackSolverSetup(object):
This uses self.declared_versions so that we include any versions
that arise from a spec.
"""
+
def key_fn(version):
# Origins are sorted by precedence defined in `version_origin_str`,
# then by order added.
@@ -753,15 +749,19 @@ class SpackSolverSetup(object):
most_to_least_preferred = []
for _, group in itertools.groupby(partially_sorted_versions, key=key_fn):
- most_to_least_preferred.extend(list(sorted(
- group, reverse=True, key=lambda x: spack.version.ver(x.version)
- )))
+ most_to_least_preferred.extend(
+ list(sorted(group, reverse=True, key=lambda x: spack.version.ver(x.version)))
+ )
for weight, declared_version in enumerate(most_to_least_preferred):
- self.gen.fact(fn.version_declared(
- pkg.name, declared_version.version, weight,
- version_origin_str[declared_version.origin]
- ))
+ self.gen.fact(
+ fn.version_declared(
+ pkg.name,
+ declared_version.version,
+ weight,
+ version_origin_str[declared_version.origin],
+ )
+ )
# Declare deprecated versions for this package, if any
deprecated = self.deprecated_versions[pkg.name]
@@ -798,8 +798,7 @@ class SpackSolverSetup(object):
no_constraint_msg = "{0} conflicts with '{1}'"
for trigger, constraints in pkg.conflicts.items():
trigger_msg = "conflict trigger %s" % str(trigger)
- trigger_id = self.condition(
- spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
+ trigger_id = self.condition(spack.spec.Spec(trigger), name=pkg.name, msg=trigger_msg)
for constraint, conflict_msg in constraints:
if conflict_msg is None:
@@ -808,10 +807,8 @@ class SpackSolverSetup(object):
else:
conflict_msg = default_msg.format(pkg.name, trigger, constraint)
constraint_msg = "conflict constraint %s" % str(constraint)
- constraint_id = self.condition(
- constraint, name=pkg.name, msg=constraint_msg)
- self.gen.fact(
- fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
+ constraint_id = self.condition(constraint, name=pkg.name, msg=constraint_msg)
+ self.gen.fact(fn.conflict(pkg.name, trigger_id, constraint_id, conflict_msg))
self.gen.newline()
def available_compilers(self):
@@ -835,9 +832,8 @@ class SpackSolverSetup(object):
self.gen.h2("Default compiler preferences")
compiler_list = self.possible_compilers.copy()
- compiler_list = sorted(
- compiler_list, key=lambda x: (x.name, x.version), reverse=True)
- ppk = spack.package_prefs.PackagePrefs("all", 'compiler', all=False)
+ compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
+ ppk = spack.package_prefs.PackagePrefs("all", "compiler", all=False)
matches = sorted(compiler_list, key=ppk)
for i, cspec in enumerate(matches):
@@ -847,23 +843,21 @@ class SpackSolverSetup(object):
# Enumerate target families. This may be redundant, but compilers with
# custom versions will be able to concretize properly.
for entry in spack.compilers.all_compilers_config():
- compiler_entry = entry['compiler']
- cspec = spack.spec.CompilerSpec(compiler_entry['spec'])
- if not compiler_entry.get('target', None):
+ compiler_entry = entry["compiler"]
+ cspec = spack.spec.CompilerSpec(compiler_entry["spec"])
+ if not compiler_entry.get("target", None):
continue
- self.gen.fact(fn.compiler_supports_target(
- cspec.name, cspec.version, compiler_entry['target']
- ))
+ self.gen.fact(
+ fn.compiler_supports_target(cspec.name, cspec.version, compiler_entry["target"])
+ )
def compiler_supports_os(self):
compilers_yaml = spack.compilers.all_compilers_config()
for entry in compilers_yaml:
- c = spack.spec.CompilerSpec(entry['compiler']['spec'])
- operating_system = entry['compiler']['operating_system']
- self.gen.fact(fn.compiler_supports_os(
- c.name, c.version, operating_system
- ))
+ c = spack.spec.CompilerSpec(entry["compiler"]["spec"])
+ operating_system = entry["compiler"]["operating_system"]
+ self.gen.fact(fn.compiler_supports_os(c.name, c.version, operating_system))
def package_compiler_defaults(self, pkg):
"""Facts about packages' compiler prefs."""
@@ -874,15 +868,14 @@ class SpackSolverSetup(object):
return
compiler_list = self.possible_compilers.copy()
- compiler_list = sorted(
- compiler_list, key=lambda x: (x.name, x.version), reverse=True)
- ppk = spack.package_prefs.PackagePrefs(pkg.name, 'compiler', all=False)
+ compiler_list = sorted(compiler_list, key=lambda x: (x.name, x.version), reverse=True)
+ ppk = spack.package_prefs.PackagePrefs(pkg.name, "compiler", all=False)
matches = sorted(compiler_list, key=ppk)
for i, cspec in enumerate(reversed(matches)):
- self.gen.fact(fn.node_compiler_preference(
- pkg.name, cspec.name, cspec.version, -i * 100
- ))
+ self.gen.fact(
+ fn.node_compiler_preference(pkg.name, cspec.name, cspec.version, -i * 100)
+ )
def pkg_rules(self, pkg, tests):
pkg = packagize(pkg)
@@ -912,17 +905,13 @@ class SpackSolverSetup(object):
if single_value:
self.gen.fact(fn.variant_single_value(pkg.name, name))
self.gen.fact(
- fn.variant_default_value_from_package_py(
- pkg.name, name, variant.default)
+ fn.variant_default_value_from_package_py(pkg.name, name, variant.default)
)
else:
spec_variant = variant.make_default()
defaults = spec_variant.value
for val in sorted(defaults):
- self.gen.fact(
- fn.variant_default_value_from_package_py(
- pkg.name, name, val)
- )
+ self.gen.fact(fn.variant_default_value_from_package_py(pkg.name, name, val))
values = variant.values
if values is None:
@@ -932,9 +921,9 @@ class SpackSolverSetup(object):
# Encode the disjoint sets in the logic program
for sid, s in enumerate(values.sets):
for value in s:
- self.gen.fact(fn.variant_value_from_disjoint_sets(
- pkg.name, name, value, sid
- ))
+ self.gen.fact(
+ fn.variant_value_from_disjoint_sets(pkg.name, name, value, sid)
+ )
union.update(s)
values = union
@@ -944,14 +933,15 @@ class SpackSolverSetup(object):
for value in sorted(values):
self.gen.fact(fn.variant_possible_value(pkg.name, name, value))
- if hasattr(value, 'when'):
- required = spack.spec.Spec('{0}={1}'.format(name, value))
+ if hasattr(value, "when"):
+ required = spack.spec.Spec("{0}={1}".format(name, value))
imposed = spack.spec.Spec(value.when)
imposed.name = pkg.name
self.condition(
- required_spec=required, imposed_spec=imposed, name=pkg.name,
- msg="%s variant %s value %s when %s" % (
- pkg.name, name, value, when)
+ required_spec=required,
+ imposed_spec=imposed,
+ name=pkg.name,
+ msg="%s variant %s value %s when %s" % (pkg.name, name, value, when),
)
if variant.sticky:
@@ -973,10 +963,7 @@ class SpackSolverSetup(object):
# virtual preferences
self.virtual_preferences(
- pkg.name,
- lambda v, p, i: self.gen.fact(
- fn.pkg_provider_preference(pkg.name, v, p, i)
- )
+ pkg.name, lambda v, p, i: self.gen.fact(fn.pkg_provider_preference(pkg.name, v, p, i))
)
def condition(self, required_spec, imposed_spec=None, name=None, msg=None):
@@ -1000,12 +987,9 @@ class SpackSolverSetup(object):
self.gen.fact(fn.condition(condition_id, msg))
# requirements trigger the condition
- requirements = self.spec_clauses(
- named_cond, body=True, required_from=name)
+ requirements = self.spec_clauses(named_cond, body=True, required_from=name)
for pred in requirements:
- self.gen.fact(
- fn.condition_requirement(condition_id, pred.name, *pred.args)
- )
+ self.gen.fact(fn.condition_requirement(condition_id, pred.name, *pred.args))
if imposed_spec:
self.impose(condition_id, imposed_spec, node=False, name=name)
@@ -1013,15 +997,12 @@ class SpackSolverSetup(object):
return condition_id
def impose(self, condition_id, imposed_spec, node=True, name=None, body=False):
- imposed_constraints = self.spec_clauses(
- imposed_spec, body=body, required_from=name)
+ imposed_constraints = self.spec_clauses(imposed_spec, body=body, required_from=name)
for pred in imposed_constraints:
# imposed "node"-like conditions are no-ops
if not node and pred.name in ("node", "virtual_node"):
continue
- self.gen.fact(
- fn.imposed_constraint(condition_id, pred.name, *pred.args)
- )
+ self.gen.fact(fn.imposed_constraint(condition_id, pred.name, *pred.args))
def package_provider_rules(self, pkg):
for provider_name in sorted(set(s.name for s in pkg.provided.keys())):
@@ -1029,11 +1010,9 @@ class SpackSolverSetup(object):
for provided, whens in pkg.provided.items():
for when in whens:
- msg = '%s provides %s when %s' % (pkg.name, provided, when)
+ msg = "%s provides %s when %s" % (pkg.name, provided, when)
condition_id = self.condition(when, provided, pkg.name, msg)
- self.gen.fact(fn.provider_condition(
- condition_id, when.name, provided.name
- ))
+ self.gen.fact(fn.provider_condition(condition_id, when.name, provided.name))
self.gen.newline()
def package_dependencies_rules(self, pkg):
@@ -1054,14 +1033,12 @@ class SpackSolverSetup(object):
if not deptypes:
continue
- msg = '%s depends on %s' % (pkg.name, dep.spec.name)
+ msg = "%s depends on %s" % (pkg.name, dep.spec.name)
if cond != spack.spec.Spec():
- msg += ' when %s' % cond
+ msg += " when %s" % cond
condition_id = self.condition(cond, dep.spec, pkg.name, msg)
- self.gen.fact(fn.dependency_condition(
- condition_id, pkg.name, dep.spec.name
- ))
+ self.gen.fact(fn.dependency_condition(condition_id, pkg.name, dep.spec.name))
for t in sorted(deptypes):
# there is a declared dependency of type t
@@ -1085,9 +1062,7 @@ class SpackSolverSetup(object):
self.gen.h2("Default virtual providers")
assert self.possible_virtuals is not None
self.virtual_preferences(
- "all",
- lambda v, p, i: self.gen.fact(
- fn.default_provider_preference(v, p, i))
+ "all", lambda v, p, i: self.gen.fact(fn.default_provider_preference(v, p, i))
)
def external_packages(self):
@@ -1098,51 +1073,45 @@ class SpackSolverSetup(object):
packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
- self.gen.h1('External packages')
+ self.gen.h1("External packages")
for pkg_name, data in packages_yaml.items():
- if pkg_name == 'all':
+ if pkg_name == "all":
continue
# This package does not appear in any repository
if pkg_name not in spack.repo.path:
continue
- self.gen.h2('External package: {0}'.format(pkg_name))
+ self.gen.h2("External package: {0}".format(pkg_name))
# Check if the external package is buildable. If it is
# not then "external(<pkg>)" is a fact.
- external_buildable = data.get('buildable', True)
+ external_buildable = data.get("buildable", True)
if not external_buildable:
self.gen.fact(fn.external_only(pkg_name))
# Read a list of all the specs for this package
- externals = data.get('externals', [])
- external_specs = [spack.spec.Spec(x['spec']) for x in externals]
+ externals = data.get("externals", [])
+ external_specs = [spack.spec.Spec(x["spec"]) for x in externals]
# Order the external versions to prefer more recent versions
# even if specs in packages.yaml are not ordered that way
external_versions = [
- (x.version, external_id)
- for external_id, x in enumerate(external_specs)
+ (x.version, external_id) for external_id, x in enumerate(external_specs)
]
external_versions = [
(v, idx, external_id)
- for idx, (v, external_id) in
- enumerate(sorted(external_versions, reverse=True))
+ for idx, (v, external_id) in enumerate(sorted(external_versions, reverse=True))
]
for version, idx, external_id in external_versions:
- self.declared_versions[pkg_name].append(DeclaredVersion(
- version=version,
- idx=idx,
- origin=version_provenance.external
- ))
+ self.declared_versions[pkg_name].append(
+ DeclaredVersion(version=version, idx=idx, origin=version_provenance.external)
+ )
# Declare external conditions with a local index into packages.yaml
for local_idx, spec in enumerate(external_specs):
- msg = '%s available as external when satisfying %s' % (spec.name, spec)
+ msg = "%s available as external when satisfying %s" % (spec.name, spec)
condition_id = self.condition(spec, msg=msg)
- self.gen.fact(
- fn.possible_external(condition_id, pkg_name, local_idx)
- )
+ self.gen.fact(fn.possible_external(condition_id, pkg_name, local_idx))
self.possible_versions[spec.name].add(spec.version)
self.gen.newline()
@@ -1165,19 +1134,17 @@ class SpackSolverSetup(object):
spec.update_variant_validate(variant_name, values)
for value in values:
- self.variant_values_from_specs.add(
- (pkg_name, variant.name, value)
+ self.variant_values_from_specs.add((pkg_name, variant.name, value))
+ self.gen.fact(
+ fn.variant_default_value_from_packages_yaml(pkg_name, variant.name, value)
)
- self.gen.fact(fn.variant_default_value_from_packages_yaml(
- pkg_name, variant.name, value
- ))
def target_preferences(self, pkg_name):
- key_fn = spack.package_prefs.PackagePrefs(pkg_name, 'target')
+ key_fn = spack.package_prefs.PackagePrefs(pkg_name, "target")
if not self.target_specs_cache:
self.target_specs_cache = [
- spack.spec.Spec('target={0}'.format(target_name))
+ spack.spec.Spec("target={0}".format(target_name))
for _, target_name in self.default_targets
]
@@ -1189,9 +1156,9 @@ class SpackSolverSetup(object):
for i, preferred in enumerate(package_targets):
if str(preferred.architecture.target) == best_default and i != 0:
offset = 100
- self.gen.fact(fn.target_weight(
- pkg_name, str(preferred.architecture.target), i + offset
- ))
+ self.gen.fact(
+ fn.target_weight(pkg_name, str(preferred.architecture.target), i + offset)
+ )
def flag_defaults(self):
self.gen.h2("Compiler flag defaults")
@@ -1206,14 +1173,15 @@ class SpackSolverSetup(object):
for compiler in compilers:
for name, flags in compiler.flags.items():
for flag in flags:
- self.gen.fact(fn.compiler_version_flag(
- compiler.name, compiler.version, name, flag))
+ self.gen.fact(
+ fn.compiler_version_flag(compiler.name, compiler.version, name, flag)
+ )
def spec_clauses(self, *args, **kwargs):
"""Wrap a call to `_spec_clauses()` into a try/except block that
raises a comprehensible error message in case of failure.
"""
- requestor = kwargs.pop('required_from', None)
+ requestor = kwargs.pop("required_from", None)
try:
clauses = self._spec_clauses(*args, **kwargs)
except RuntimeError as exc:
@@ -1224,12 +1192,12 @@ class SpackSolverSetup(object):
return clauses
def _spec_clauses(
- self,
- spec,
- body=False,
- transitive=True,
- expand_hashes=False,
- concrete_build_deps=False,
+ self,
+ spec,
+ body=False,
+ transitive=True,
+ expand_hashes=False,
+ concrete_build_deps=False,
):
"""Return a list of clauses for a spec mandates are true.
@@ -1278,9 +1246,7 @@ class SpackSolverSetup(object):
f = Body if body else Head
if spec.name:
- clauses.append(
- f.node(spec.name) if not spec.virtual
- else f.virtual_node(spec.name))
+ clauses.append(f.node(spec.name) if not spec.virtual else f.virtual_node(spec.name))
clauses.extend(self.spec_versions(spec))
@@ -1303,7 +1269,7 @@ class SpackSolverSetup(object):
for value in values:
# * is meaningless for concretization -- just for matching
- if value == '*':
+ if value == "*":
continue
# validate variant value only if spec not concrete
@@ -1333,13 +1299,16 @@ class SpackSolverSetup(object):
clauses.append(f.node_compiler(spec.name, spec.compiler.name))
if spec.compiler.concrete:
- clauses.append(f.node_compiler_version(
- spec.name, spec.compiler.name, spec.compiler.version))
+ clauses.append(
+ f.node_compiler_version(spec.name, spec.compiler.name, spec.compiler.version)
+ )
elif spec.compiler.versions:
clauses.append(
fn.node_compiler_version_satisfies(
- spec.name, spec.compiler.name, spec.compiler.versions))
+ spec.name, spec.compiler.name, spec.compiler.versions
+ )
+ )
self.compiler_version_constraints.add(spec.compiler)
# compiler flags
@@ -1404,17 +1373,15 @@ class SpackSolverSetup(object):
# When COMPARING VERSIONS, the '@develop' version is always
# larger than other versions. BUT when CONCRETIZING, the largest
# NON-develop version is selected by default.
- return info.get('preferred', False), not version.isdevelop(), version
+ return info.get("preferred", False), not version.isdevelop(), version
- for idx, item in enumerate(sorted(
- pkg_cls.versions.items(), key=key_fn, reverse=True
- )):
+ for idx, item in enumerate(sorted(pkg_cls.versions.items(), key=key_fn, reverse=True)):
v, version_info = item
self.possible_versions[pkg_name].add(v)
- self.declared_versions[pkg_name].append(DeclaredVersion(
- version=v, idx=idx, origin=version_provenance.package_py
- ))
- deprecated = version_info.get('deprecated', False)
+ self.declared_versions[pkg_name].append(
+ DeclaredVersion(version=v, idx=idx, origin=version_provenance.package_py)
+ )
+ deprecated = version_info.get("deprecated", False)
if deprecated:
self.deprecated_versions[pkg_name].add(v)
@@ -1422,9 +1389,9 @@ class SpackSolverSetup(object):
# specs will be computed later
version_preferences = packages_yaml.get(pkg_name, {}).get("version", [])
for idx, v in enumerate(version_preferences):
- self.declared_versions[pkg_name].append(DeclaredVersion(
- version=v, idx=idx, origin=version_provenance.packages_yaml
- ))
+ self.declared_versions[pkg_name].append(
+ DeclaredVersion(version=v, idx=idx, origin=version_provenance.packages_yaml)
+ )
for spec in specs:
for dep in spec.traverse():
@@ -1432,8 +1399,9 @@ class SpackSolverSetup(object):
continue
known_versions = self.possible_versions[dep.name]
- if (not isinstance(dep.version, spack.version.GitVersion) and
- any(v.satisfies(dep.version) for v in known_versions)):
+ if not isinstance(dep.version, spack.version.GitVersion) and any(
+ v.satisfies(dep.version) for v in known_versions
+ ):
# some version we know about satisfies this constraint, so we
            # should use that one. e.g., if the user asks for qt@5 and we
# know about qt@5.5. This ensures we don't add under-specified
@@ -1447,11 +1415,9 @@ class SpackSolverSetup(object):
# if there is a concrete version on the CLI *that we know nothing
# about*, add it to the known versions. Use idx=0, which is the
# best possible, so they're guaranteed to be used preferentially.
- self.declared_versions[dep.name].append(DeclaredVersion(
- version=dep.version,
- idx=0,
- origin=version_provenance.spec
- ))
+ self.declared_versions[dep.name].append(
+ DeclaredVersion(version=dep.version, idx=0, origin=version_provenance.spec)
+ )
self.possible_versions[dep.name].add(dep.version)
def _supported_targets(self, compiler_name, compiler_version, targets):
@@ -1475,12 +1441,12 @@ class SpackSolverSetup(object):
return sorted(supported, reverse=True)
def platform_defaults(self):
- self.gen.h2('Default platform')
+ self.gen.h2("Default platform")
platform = spack.platforms.host()
self.gen.fact(fn.node_platform_default(platform))
def os_defaults(self, specs):
- self.gen.h2('Possible operating systems')
+ self.gen.h2("Possible operating systems")
platform = spack.platforms.host()
# create set of OS's to consider
@@ -1500,8 +1466,8 @@ class SpackSolverSetup(object):
def keyfun(os):
return (
os == platform.default_os, # prefer default
- os not in buildable, # then prefer buildables
- os, # then sort by name
+ os not in buildable, # then prefer buildables
+ os, # then sort by name
)
all_oses = buildable.union(self.possible_oses)
@@ -1513,32 +1479,36 @@ class SpackSolverSetup(object):
def target_defaults(self, specs):
"""Add facts about targets and target compatibility."""
- self.gen.h2('Default target')
+ self.gen.h2("Default target")
platform = spack.platforms.host()
uarch = archspec.cpu.TARGETS.get(platform.default)
- self.gen.h2('Target compatibility')
+ self.gen.h2("Target compatibility")
# Construct the list of targets which are compatible with the host
candidate_targets = [uarch] + uarch.ancestors
# Get configuration options
- granularity = spack.config.get('concretizer:targets:granularity')
- host_compatible = spack.config.get('concretizer:targets:host_compatible')
+ granularity = spack.config.get("concretizer:targets:granularity")
+ host_compatible = spack.config.get("concretizer:targets:host_compatible")
# Add targets which are not compatible with the current host
if not host_compatible:
- additional_targets_in_family = sorted([
- t for t in archspec.cpu.TARGETS.values()
- if (t.family.name == uarch.family.name and
- t not in candidate_targets)
- ], key=lambda x: len(x.ancestors), reverse=True)
+ additional_targets_in_family = sorted(
+ [
+ t
+ for t in archspec.cpu.TARGETS.values()
+ if (t.family.name == uarch.family.name and t not in candidate_targets)
+ ],
+ key=lambda x: len(x.ancestors),
+ reverse=True,
+ )
candidate_targets += additional_targets_in_family
# Check if we want only generic architecture
- if granularity == 'generic':
- candidate_targets = [t for t in candidate_targets if t.vendor == 'generic']
+ if granularity == "generic":
+ candidate_targets = [t for t in candidate_targets if t.vendor == "generic"]
compilers = self.possible_compilers
@@ -1560,9 +1530,7 @@ class SpackSolverSetup(object):
best_targets = set([uarch.family.name])
for compiler in sorted(compilers):
- supported = self._supported_targets(
- compiler.name, compiler.version, candidate_targets
- )
+ supported = self._supported_targets(compiler.name, compiler.version, candidate_targets)
# If we can't find supported targets it may be due to custom
# versions in the spec, e.g. gcc@foo. Try to match the
@@ -1572,9 +1540,7 @@ class SpackSolverSetup(object):
compiler_obj = spack.compilers.compilers_for_spec(compiler)
compiler_obj = compiler_obj[0]
supported = self._supported_targets(
- compiler.name,
- compiler_obj.real_version,
- candidate_targets
+ compiler.name, compiler_obj.real_version, candidate_targets
)
if not supported:
@@ -1582,13 +1548,13 @@ class SpackSolverSetup(object):
for target in supported:
best_targets.add(target.name)
- self.gen.fact(fn.compiler_supports_target(
- compiler.name, compiler.version, target.name
- ))
+ self.gen.fact(
+ fn.compiler_supports_target(compiler.name, compiler.version, target.name)
+ )
- self.gen.fact(fn.compiler_supports_target(
- compiler.name, compiler.version, uarch.family.name
- ))
+ self.gen.fact(
+ fn.compiler_supports_target(compiler.name, compiler.version, uarch.family.name)
+ )
i = 0 # TODO compute per-target offset?
for target in candidate_targets:
@@ -1637,16 +1603,12 @@ class SpackSolverSetup(object):
if strict and s.compiler not in cspecs:
if not s.concrete:
- raise spack.concretize.UnavailableCompilerVersionError(
- s.compiler
- )
+ raise spack.concretize.UnavailableCompilerVersionError(s.compiler)
# Allow unknown compilers to exist if the associated spec
# is already built
else:
cspecs.add(s.compiler)
- self.gen.fact(fn.allow_compiler(
- s.compiler.name, s.compiler.version
- ))
+ self.gen.fact(fn.allow_compiler(s.compiler.name, s.compiler.version))
return cspecs
@@ -1655,8 +1617,7 @@ class SpackSolverSetup(object):
for pkg_name, versions in sorted(self.version_constraints):
# version must be *one* of the ones the spec allows.
allowed_versions = [
- v for v in sorted(self.possible_versions[pkg_name])
- if v.satisfies(versions)
+ v for v in sorted(self.possible_versions[pkg_name]) if v.satisfies(versions)
]
# This is needed to account for a variable number of
@@ -1701,9 +1662,7 @@ class SpackSolverSetup(object):
# that `version_satisfies(Package, Constraint, Version)` has the
# same semantics for virtuals as for regular packages.
for pkg_name, versions in sorted(constraint_map.items()):
- possible_versions = set(
- sum([versions_for(v) for v in versions], [])
- )
+ possible_versions = set(sum([versions_for(v) for v in versions], []))
for version in sorted(possible_versions):
self.possible_versions[pkg_name].add(version)
@@ -1713,20 +1672,21 @@ class SpackSolverSetup(object):
for constraint in sorted(self.compiler_version_constraints):
for compiler in compiler_list:
if compiler.satisfies(constraint):
- self.gen.fact(fn.compiler_version_satisfies(
- constraint.name, constraint.versions, compiler.version
- ))
+ self.gen.fact(
+ fn.compiler_version_satisfies(
+ constraint.name, constraint.versions, compiler.version
+ )
+ )
self.gen.newline()
def define_target_constraints(self):
-
def _all_targets_satisfiying(single_constraint):
allowed_targets = []
- if ':' not in single_constraint:
+ if ":" not in single_constraint:
return [single_constraint]
- t_min, _, t_max = single_constraint.partition(':')
+ t_min, _, t_max = single_constraint.partition(":")
for test_target in archspec.cpu.TARGETS.values():
# Check lower bound
if t_min and not t_min <= test_target:
@@ -1743,11 +1703,9 @@ class SpackSolverSetup(object):
for target_constraint in sorted(self.target_constraints):
# Construct the list of allowed targets for this constraint
allowed_targets = []
- for single_constraint in str(target_constraint).split(','):
+ for single_constraint in str(target_constraint).split(","):
if single_constraint not in cache:
- cache[single_constraint] = _all_targets_satisfiying(
- single_constraint
- )
+ cache[single_constraint] = _all_targets_satisfiying(single_constraint)
allowed_targets.extend(cache[single_constraint])
for target in allowed_targets:
@@ -1790,11 +1748,11 @@ class SpackSolverSetup(object):
# - Add OS to possible OS's
for dep in spec.traverse():
self.possible_versions[dep.name].add(dep.version)
- self.declared_versions[dep.name].append(DeclaredVersion(
- version=dep.version,
- idx=0,
- origin=version_provenance.installed
- ))
+ self.declared_versions[dep.name].append(
+ DeclaredVersion(
+ version=dep.version, idx=0, origin=version_provenance.installed
+ )
+ )
self.possible_oses.add(dep.os)
# add the hash to the one seen so far
@@ -1825,19 +1783,16 @@ class SpackSolverSetup(object):
check_packages_exist(specs)
# get list of all possible dependencies
- self.possible_virtuals = set(
- x.name for x in specs if x.virtual
- )
+ self.possible_virtuals = set(x.name for x in specs if x.virtual)
possible = spack.package_base.possible_dependencies(
- *specs,
- virtuals=self.possible_virtuals,
- deptype=spack.dependency.all_deptypes
+ *specs, virtuals=self.possible_virtuals, deptype=spack.dependency.all_deptypes
)
# Fail if we already know an unreachable node is requested
for spec in specs:
- missing_deps = [str(d) for d in spec.traverse()
- if d.name not in possible and not d.virtual]
+ missing_deps = [
+ str(d) for d in spec.traverse() if d.name not in possible and not d.virtual
+ ]
if missing_deps:
raise spack.spec.InvalidDependencyError(spec.name, missing_deps)
@@ -1862,7 +1817,7 @@ class SpackSolverSetup(object):
for reusable_spec in reuse:
self._facts_from_concrete_spec(reusable_spec, possible)
- self.gen.h1('General Constraints')
+ self.gen.h1("General Constraints")
self.available_compilers()
self.compiler_defaults()
self.compiler_supports_os()
@@ -1877,11 +1832,11 @@ class SpackSolverSetup(object):
self.external_packages()
self.flag_defaults()
- self.gen.h1('Package Constraints')
+ self.gen.h1("Package Constraints")
for pkg in sorted(pkgs):
- self.gen.h2('Package rules: %s' % pkg)
+ self.gen.h2("Package rules: %s" % pkg)
self.pkg_rules(pkg, tests=self.tests)
- self.gen.h2('Package preferences: %s' % pkg)
+ self.gen.h2("Package preferences: %s" % pkg)
self.preferred_variants(pkg)
self.target_preferences(pkg)
@@ -1892,7 +1847,7 @@ class SpackSolverSetup(object):
for dep in spec.traverse():
_develop_specs_from_env(dep, env)
- self.gen.h1('Spec Constraints')
+ self.gen.h1("Spec Constraints")
self.literal_specs(specs)
self.gen.h1("Variant Values defined in specs")
@@ -1912,17 +1867,15 @@ class SpackSolverSetup(object):
def literal_specs(self, specs):
for idx, spec in enumerate(specs):
- self.gen.h2('Spec: %s' % str(spec))
+ self.gen.h2("Spec: %s" % str(spec))
self.gen.fact(fn.literal(idx))
root_fn = fn.virtual_root(spec.name) if spec.virtual else fn.root(spec.name)
self.gen.fact(fn.literal(idx, root_fn.name, *root_fn.args))
for clause in self.spec_clauses(spec):
self.gen.fact(fn.literal(idx, clause.name, *clause.args))
- if clause.name == 'variant_set':
- self.gen.fact(fn.literal(
- idx, "variant_default_value_from_cli", *clause.args
- ))
+ if clause.name == "variant_set":
+ self.gen.fact(fn.literal(idx, "variant_default_value_from_cli", *clause.args))
if self.concretize_everything:
self.gen.fact(fn.concretize_everything())
@@ -1930,6 +1883,7 @@ class SpackSolverSetup(object):
class SpecBuilder(object):
"""Class with actions to rebuild a spec from ASP results."""
+
#: Attributes that don't need actions
ignored_attributes = ["opt_criterion"]
@@ -1978,24 +1932,22 @@ class SpecBuilder(object):
# For variant formatting, we sometimes have to construct specs
        # to format values properly. Find/replace all occurrences of
# Spec(...) with the string representation of the spec mentioned
- specs_to_construct = re.findall(r'Spec\(([^)]*)\)', msg)
+ specs_to_construct = re.findall(r"Spec\(([^)]*)\)", msg)
for spec_str in specs_to_construct:
- msg = msg.replace('Spec(%s)' % spec_str, str(spack.spec.Spec(spec_str)))
+ msg = msg.replace("Spec(%s)" % spec_str, str(spack.spec.Spec(spec_str)))
raise UnsatisfiableSpecError(msg)
def variant_value(self, pkg, name, value):
# FIXME: is there a way not to special case 'dev_path' everywhere?
- if name == 'dev_path':
+ if name == "dev_path":
self._specs[pkg].variants.setdefault(
- name,
- spack.variant.SingleValuedVariant(name, value)
+ name, spack.variant.SingleValuedVariant(name, value)
)
return
- if name == 'patches':
+ if name == "patches":
self._specs[pkg].variants.setdefault(
- name,
- spack.variant.MultiValuedVariant(name, value)
+ name, spack.variant.MultiValuedVariant(name, value)
)
return
@@ -2008,8 +1960,7 @@ class SpecBuilder(object):
self._specs[pkg].compiler = spack.spec.CompilerSpec(compiler)
def node_compiler_version(self, pkg, compiler, version):
- self._specs[pkg].compiler.versions = spack.version.VersionList(
- [version])
+ self._specs[pkg].compiler.versions = spack.version.VersionList([version])
def node_flag_compiler_default(self, pkg):
self._flag_compiler_defaults.add(pkg)
@@ -2027,23 +1978,20 @@ class SpecBuilder(object):
"""This means that the external spec and index idx
has been selected for this package.
"""
- packages_yaml = spack.config.get('packages')
+ packages_yaml = spack.config.get("packages")
packages_yaml = _normalize_packages_yaml(packages_yaml)
- spec_info = packages_yaml[pkg]['externals'][int(idx)]
- self._specs[pkg].external_path = spec_info.get('prefix', None)
- self._specs[pkg].external_modules = (
- spack.spec.Spec._format_module_list(spec_info.get('modules', None))
- )
- self._specs[pkg].extra_attributes = spec_info.get(
- 'extra_attributes', {}
+ spec_info = packages_yaml[pkg]["externals"][int(idx)]
+ self._specs[pkg].external_path = spec_info.get("prefix", None)
+ self._specs[pkg].external_modules = spack.spec.Spec._format_module_list(
+ spec_info.get("modules", None)
)
+ self._specs[pkg].extra_attributes = spec_info.get("extra_attributes", {})
def depends_on(self, pkg, dep, type):
dependencies = self._specs[pkg].edges_to_dependencies(name=dep)
# TODO: assertion to be removed when cross-compilation is handled correctly
- msg = ("Current solver does not handle multiple dependency edges "
- "of the same name")
+ msg = "Current solver does not handle multiple dependency edges " "of the same name"
assert len(dependencies) < 2, msg
if not dependencies:
@@ -2072,10 +2020,7 @@ class SpecBuilder(object):
spec.compiler_flags.update(compiler_flags)
# index of all specs (and deps) from the command line by name
- cmd_specs = dict(
- (s.name, s)
- for spec in self._command_line_specs
- for s in spec.traverse())
+ cmd_specs = dict((s.name, s) for spec in self._command_line_specs for s in spec.traverse())
# iterate through specs with specified flags
for pkg, sources in self._flag_sources.items():
@@ -2083,13 +2028,10 @@ class SpecBuilder(object):
# order is determined by the DAG. A spec's flags come after
# any from its ancestors on the compile line.
- order = [
- s.name
- for s in spec.traverse(order='post', direction='parents')]
+ order = [s.name for s in spec.traverse(order="post", direction="parents")]
# sort the sources in our DAG order
- sorted_sources = sorted(
- sources, key=lambda s: order.index(s))
+ sorted_sources = sorted(sources, key=lambda s: order.index(s))
# add flags from each source, lowest to highest precedence
flags = collections.defaultdict(lambda: [])
@@ -2108,14 +2050,14 @@ class SpecBuilder(object):
@staticmethod
def sort_fn(function_tuple):
name = function_tuple[0]
- if name == 'error':
+ if name == "error":
priority = function_tuple[1][0]
return (-4, priority)
- elif name == 'hash':
+ elif name == "hash":
return (-3, 0)
- elif name == 'node':
+ elif name == "node":
return (-2, 0)
- elif name == 'node_compiler':
+ elif name == "node_compiler":
return (-1, 0)
else:
return (0, 0)
@@ -2144,7 +2086,7 @@ class SpecBuilder(object):
# ignore predicates on virtual packages, as they're used for
# solving but don't construct anything. Do not ignore error
# predicates on virtual packages.
- if name != 'error':
+ if name != "error":
pkg = args[0]
if spack.repo.path.is_virtual(pkg):
continue
@@ -2206,15 +2148,13 @@ def _develop_specs_from_env(spec, env):
if not dev_info:
return
- path = os.path.normpath(os.path.join(env.path, dev_info['path']))
+ path = os.path.normpath(os.path.join(env.path, dev_info["path"]))
- if 'dev_path' in spec.variants:
- assert spec.variants['dev_path'].value == path
+ if "dev_path" in spec.variants:
+ assert spec.variants["dev_path"].value == path
else:
- spec.variants.setdefault(
- 'dev_path', spack.variant.SingleValuedVariant('dev_path', path)
- )
- spec.constrain(dev_info['spec'])
+ spec.variants.setdefault("dev_path", spack.variant.SingleValuedVariant("dev_path", path))
+ spec.constrain(dev_info["spec"])
class Solver(object):
@@ -2229,6 +2169,7 @@ class Solver(object):
Whether to try to reuse existing installs/binaries
"""
+
def __init__(self):
self.driver = PyclingoDriver()
@@ -2253,17 +2194,18 @@ class Solver(object):
if self.reuse:
# Specs from the local Database
with spack.store.db.read_transaction():
- reusable_specs.extend([
- s for s in spack.store.db.query(installed=True)
- if not s.satisfies('dev_path=*')
- ])
+ reusable_specs.extend(
+ [
+ s
+ for s in spack.store.db.query(installed=True)
+ if not s.satisfies("dev_path=*")
+ ]
+ )
# Specs from buildcaches
try:
index = spack.binary_distribution.update_cache_and_get_specs()
- reusable_specs.extend([
- s for s in index if not s.satisfies('dev_path=*')
- ])
+ reusable_specs.extend([s for s in index if not s.satisfies("dev_path=*")])
except (spack.binary_distribution.FetchCacheError, IndexError):
# this is raised when no mirrors had indices.
@@ -2273,14 +2215,14 @@ class Solver(object):
return reusable_specs
def solve(
- self,
- specs,
- out=None,
- models=0,
- timers=False,
- stats=False,
- tests=False,
- setup_only=False,
+ self,
+ specs,
+ out=None,
+ models=0,
+ timers=False,
+ stats=False,
+ tests=False,
+ setup_only=False,
):
"""
Arguments:
@@ -2310,13 +2252,13 @@ class Solver(object):
)
def solve_in_rounds(
- self,
- specs,
- out=None,
- models=0,
- timers=False,
- stats=False,
- tests=False,
+ self,
+ specs,
+ out=None,
+ models=0,
+ timers=False,
+ stats=False,
+ tests=False,
):
"""Solve for a stable model of specs in multiple rounds.
@@ -2351,7 +2293,7 @@ class Solver(object):
timers=timers,
stats=stats,
out=out,
- setup_only=False
+ setup_only=False,
)
yield result
@@ -2372,6 +2314,7 @@ class UnsatisfiableSpecError(spack.error.UnsatisfiableSpecError):
"""
Subclass for new constructor signature for new concretizer
"""
+
def __init__(self, msg):
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
self.provided = None
@@ -2383,13 +2326,14 @@ class InternalConcretizerError(spack.error.UnsatisfiableSpecError):
"""
Subclass for new constructor signature for new concretizer
"""
+
def __init__(self, provided, conflicts):
- indented = [' %s\n' % conflict for conflict in conflicts]
- error_msg = ''.join(indented)
- msg = 'Spack concretizer internal error. Please submit a bug report'
- msg += '\n Please include the command, environment if applicable,'
- msg += '\n and the following error message.'
- msg = '\n %s is unsatisfiable, errors are:\n%s' % (provided, error_msg)
+ indented = [" %s\n" % conflict for conflict in conflicts]
+ error_msg = "".join(indented)
+ msg = "Spack concretizer internal error. Please submit a bug report"
+ msg += "\n Please include the command, environment if applicable,"
+ msg += "\n and the following error message."
+        msg += "\n    %s is unsatisfiable, errors are:\n%s" % (provided, error_msg)
super(spack.error.UnsatisfiableSpecError, self).__init__(msg)
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 94559d001c..d1ee8a4325 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -120,75 +120,80 @@ import spack.variant as vt
import spack.version as vn
__all__ = [
- 'CompilerSpec',
- 'Spec',
- 'SpecParser',
- 'parse',
- 'SpecParseError',
- 'DuplicateDependencyError',
- 'DuplicateCompilerSpecError',
- 'UnsupportedCompilerError',
- 'DuplicateArchitectureError',
- 'InconsistentSpecError',
- 'InvalidDependencyError',
- 'NoProviderError',
- 'MultipleProviderError',
- 'UnsatisfiableSpecNameError',
- 'UnsatisfiableVersionSpecError',
- 'UnsatisfiableCompilerSpecError',
- 'UnsatisfiableCompilerFlagSpecError',
- 'UnsatisfiableArchitectureSpecError',
- 'UnsatisfiableProviderSpecError',
- 'UnsatisfiableDependencySpecError',
- 'AmbiguousHashError',
- 'InvalidHashError',
- 'NoSuchHashError',
- 'RedundantSpecError',
- 'SpecDeprecatedError',
+ "CompilerSpec",
+ "Spec",
+ "SpecParser",
+ "parse",
+ "SpecParseError",
+ "DuplicateDependencyError",
+ "DuplicateCompilerSpecError",
+ "UnsupportedCompilerError",
+ "DuplicateArchitectureError",
+ "InconsistentSpecError",
+ "InvalidDependencyError",
+ "NoProviderError",
+ "MultipleProviderError",
+ "UnsatisfiableSpecNameError",
+ "UnsatisfiableVersionSpecError",
+ "UnsatisfiableCompilerSpecError",
+ "UnsatisfiableCompilerFlagSpecError",
+ "UnsatisfiableArchitectureSpecError",
+ "UnsatisfiableProviderSpecError",
+ "UnsatisfiableDependencySpecError",
+ "AmbiguousHashError",
+ "InvalidHashError",
+ "NoSuchHashError",
+ "RedundantSpecError",
+ "SpecDeprecatedError",
]
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
#: Valid pattern for an identifier in Spack
-identifier_re = r'\w[\w-]*'
+identifier_re = r"\w[\w-]*"
-compiler_color = '@g' #: color for highlighting compilers
-version_color = '@c' #: color for highlighting versions
-architecture_color = '@m' #: color for highlighting architectures
-enabled_variant_color = '@B' #: color for highlighting enabled variants
-disabled_variant_color = '@r' #: color for highlighting disabled varaints
-dependency_color = '@.' #: color for highlighting dependencies
-hash_color = '@K' #: color for highlighting package hashes
+compiler_color = "@g" #: color for highlighting compilers
+version_color = "@c" #: color for highlighting versions
+architecture_color = "@m" #: color for highlighting architectures
+enabled_variant_color = "@B" #: color for highlighting enabled variants
+disabled_variant_color = "@r"  #: color for highlighting disabled variants
+dependency_color = "@." #: color for highlighting dependencies
+hash_color = "@K" #: color for highlighting package hashes
#: This map determines the coloring of specs when using color output.
#: We make the fields different colors to enhance readability.
#: See llnl.util.tty.color for descriptions of the color codes.
-color_formats = {'%': compiler_color,
- '@': version_color,
- '=': architecture_color,
- '+': enabled_variant_color,
- '~': disabled_variant_color,
- '^': dependency_color,
- '#': hash_color}
+color_formats = {
+ "%": compiler_color,
+ "@": version_color,
+ "=": architecture_color,
+ "+": enabled_variant_color,
+ "~": disabled_variant_color,
+ "^": dependency_color,
+ "#": hash_color,
+}
#: Regex used for splitting by spec field separators.
#: These need to be escaped to avoid metacharacters in
#: ``color_formats.keys()``.
-_separators = '[\\%s]' % '\\'.join(color_formats.keys())
+_separators = "[\\%s]" % "\\".join(color_formats.keys())
#: Versionlist constant so we don't have to build a list
#: every time we call str()
-_any_version = vn.VersionList([':'])
+_any_version = vn.VersionList([":"])
-default_format = '{name}{@version}'
-default_format += '{%compiler.name}{@compiler.version}{compiler_flags}'
-default_format += '{variants}{arch=architecture}'
+default_format = "{name}{@version}"
+default_format += "{%compiler.name}{@compiler.version}{compiler_flags}"
+default_format += "{variants}{arch=architecture}"
#: Regular expression to pull spec contents out of clearsigned signature
#: file.
CLEARSIGN_FILE_REGEX = re.compile(
- (r"^-----BEGIN PGP SIGNED MESSAGE-----"
- r"\s+Hash:\s+[^\s]+\s+(.+)-----BEGIN PGP SIGNATURE-----"),
- re.MULTILINE | re.DOTALL)
+ (
+ r"^-----BEGIN PGP SIGNED MESSAGE-----"
+ r"\s+Hash:\s+[^\s]+\s+(.+)-----BEGIN PGP SIGNATURE-----"
+ ),
+ re.MULTILINE | re.DOTALL,
+)
#: specfile format version. Must increase monotonically
specfile_format_version = 3
@@ -196,22 +201,22 @@ specfile_format_version = 3
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
- color_formats."""
- class insert_color:
+ color_formats."""
+ class insert_color:
def __init__(self):
self.last = None
def __call__(self, match):
# ignore compiler versions (color same as compiler)
sep = match.group(0)
- if self.last == '%' and sep == '@':
+ if self.last == "%" and sep == "@":
return clr.cescape(sep)
self.last = sep
- return '%s%s' % (color_formats[sep], clr.cescape(sep))
+ return "%s%s" % (color_formats[sep], clr.cescape(sep))
- return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + '@.')
+ return clr.colorize(re.sub(_separators, insert_color(), str(spec)) + "@.")
@lang.lazy_lexicographic_ordering
@@ -219,6 +224,7 @@ class ArchSpec(object):
"""Aggregate the target platform, the operating system and the target
    microarchitecture into an architecture spec.
"""
+
@staticmethod
def _return_arch(os_tag, target_tag):
platform = spack.platforms.host()
@@ -230,15 +236,15 @@ class ArchSpec(object):
@staticmethod
def default_arch():
"""Return the default architecture"""
- return ArchSpec._return_arch('default_os', 'default_target')
+ return ArchSpec._return_arch("default_os", "default_target")
@staticmethod
def frontend_arch():
"""Return the frontend architecture"""
- return ArchSpec._return_arch('frontend', 'frontend')
+ return ArchSpec._return_arch("frontend", "frontend")
def __init__(self, spec_or_platform_tuple=(None, None, None)):
- """ Architecture specification a package should be built with.
+ """Architecture specification a package should be built with.
Each ArchSpec is comprised of three elements: a platform (e.g. Linux),
an OS (e.g. RHEL6), and a target (e.g. x86_64).
@@ -252,7 +258,7 @@ class ArchSpec(object):
# If the argument to __init__ is a spec string, parse it
# and construct an ArchSpec
def _string_or_none(s):
- if s and s != 'None':
+ if s and s != "None":
return str(s)
return None
@@ -268,15 +274,11 @@ class ArchSpec(object):
if isinstance(spec_or_platform_tuple, six.string_types):
spec_fields = spec_or_platform_tuple.split("-")
if len(spec_fields) != 3:
- msg = 'cannot construct an ArchSpec from {0!s}'
+ msg = "cannot construct an ArchSpec from {0!s}"
raise ValueError(msg.format(spec_or_platform_tuple))
platform, operating_system, target = spec_fields
- platform_tuple = (
- _string_or_none(platform),
- _string_or_none(operating_system),
- target
- )
+ platform_tuple = (_string_or_none(platform), _string_or_none(operating_system), target)
self.platform, self.os, self.target = platform_tuple
@@ -324,8 +326,9 @@ class ArchSpec(object):
if self.platform != curr_platform:
raise ValueError(
"Can't set arch spec OS to reserved value '%s' when the "
- "arch platform (%s) isn't the current platform (%s)" %
- (value, self.platform, curr_platform))
+ "arch platform (%s) isn't the current platform (%s)"
+ % (value, self.platform, curr_platform)
+ )
spec_platform = spack.platforms.by_name(self.platform)
value = str(spec_platform.operating_system(value))
@@ -348,7 +351,7 @@ class ArchSpec(object):
def target_or_none(t):
if isinstance(t, spack.target.Target):
return t
- if t and t != 'None':
+ if t and t != "None":
return spack.target.Target(t)
return None
@@ -361,8 +364,9 @@ class ArchSpec(object):
if self.platform != curr_platform:
raise ValueError(
"Can't set arch spec target to reserved value '%s' when "
- "the arch platform (%s) isn't the current platform (%s)" %
- (value, self.platform, curr_platform))
+ "the arch platform (%s) isn't the current platform (%s)"
+ % (value, self.platform, curr_platform)
+ )
spec_platform = spack.platforms.by_name(self.platform)
value = spec_platform.target(value)
@@ -384,23 +388,23 @@ class ArchSpec(object):
other = self._autospec(other)
# Check platform and os
- for attribute in ('platform', 'os'):
+ for attribute in ("platform", "os"):
other_attribute = getattr(other, attribute)
self_attribute = getattr(self, attribute)
if strict or self.concrete:
if other_attribute and self_attribute != other_attribute:
return False
else:
- if other_attribute and self_attribute and \
- self_attribute != other_attribute:
+ if other_attribute and self_attribute and self_attribute != other_attribute:
return False
# Check target
return self.target_satisfies(other, strict=strict)
def target_satisfies(self, other, strict):
- need_to_check = bool(other.target) if strict or self.concrete \
- else bool(other.target and self.target)
+ need_to_check = (
+ bool(other.target) if strict or self.concrete else bool(other.target and self.target)
+ )
# If there's no need to check we are fine
if not need_to_check:
@@ -425,7 +429,7 @@ class ArchSpec(object):
# Compute the intersection of every combination of ranges in the lists
results = self.target_intersection(other)
# Do we need to dedupe here?
- self.target = ','.join(results)
+ self.target = ",".join(results)
def target_intersection(self, other):
results = []
@@ -433,10 +437,10 @@ class ArchSpec(object):
if not self.target or not other.target:
return results
- for s_target_range in str(self.target).split(','):
- s_min, s_sep, s_max = s_target_range.partition(':')
- for o_target_range in str(other.target).split(','):
- o_min, o_sep, o_max = o_target_range.partition(':')
+ for s_target_range in str(self.target).split(","):
+ s_min, s_sep, s_max = s_target_range.partition(":")
+ for o_target_range in str(other.target).split(","):
+ o_min, o_sep, o_max = o_target_range.partition(":")
if not s_sep:
# s_target_range is a concrete target
@@ -446,14 +450,12 @@ class ArchSpec(object):
if not o_sep:
if s_min == o_min:
results.append(s_min)
- elif (not o_min or s_comp >= o_min) and (
- not o_max or s_comp <= o_max):
+ elif (not o_min or s_comp >= o_min) and (not o_max or s_comp <= o_max):
results.append(s_min)
elif not o_sep:
# "cast" to microarchitecture
o_comp = spack.target.Target(o_min).microarchitecture
- if (not s_min or o_comp >= s_min) and (
- not s_max or o_comp <= s_max):
+ if (not s_min or o_comp >= s_min) and (not s_max or o_comp <= s_max):
results.append(o_min)
else:
# Take intersection of two ranges
@@ -470,7 +472,7 @@ class ArchSpec(object):
if _n_min == _n_max:
results.append(n_min)
elif not n_min or not n_max or _n_min < _n_max:
- results.append('%s:%s' % (n_min, n_max))
+ results.append("%s:%s" % (n_min, n_max))
return results
def constrain(self, other):
@@ -492,7 +494,7 @@ class ArchSpec(object):
raise UnsatisfiableArchitectureSpecError(other, self)
constrained = False
- for attr in ('platform', 'os'):
+ for attr in ("platform", "os"):
svalue, ovalue = getattr(self, attr), getattr(other, attr)
if svalue is None and ovalue is not None:
setattr(self, attr, ovalue)
@@ -510,33 +512,35 @@ class ArchSpec(object):
def concrete(self):
"""True if the spec is concrete, False otherwise"""
# return all(v for k, v in six.iteritems(self.to_cmp_dict()))
- return (self.platform and self.os and self.target and
- self.target_concrete)
+ return self.platform and self.os and self.target and self.target_concrete
@property
def target_concrete(self):
"""True if the target is not a range or list."""
- return ':' not in str(self.target) and ',' not in str(self.target)
+ return ":" not in str(self.target) and "," not in str(self.target)
def to_dict(self):
- d = syaml.syaml_dict([
- ('platform', self.platform),
- ('platform_os', self.os),
- ('target', self.target.to_dict_or_value())])
- return syaml.syaml_dict([('arch', d)])
+ d = syaml.syaml_dict(
+ [
+ ("platform", self.platform),
+ ("platform_os", self.os),
+ ("target", self.target.to_dict_or_value()),
+ ]
+ )
+ return syaml.syaml_dict([("arch", d)])
@staticmethod
def from_dict(d):
"""Import an ArchSpec from raw YAML/JSON data"""
- arch = d['arch']
- target = spack.target.Target.from_dict_or_value(arch['target'])
- return ArchSpec((arch['platform'], arch['platform_os'], target))
+ arch = d["arch"]
+ target = spack.target.Target.from_dict_or_value(arch["target"])
+ return ArchSpec((arch["platform"], arch["platform_os"], target))
def __str__(self):
return "%s-%s-%s" % (self.platform, self.os, self.target)
def __repr__(self):
- fmt = 'ArchSpec(({0.platform!r}, {0.os!r}, {1!r}))'
+ fmt = "ArchSpec(({0.platform!r}, {0.os!r}, {1!r}))"
return fmt.format(self, str(self.target))
def __contains__(self, string):
@@ -546,8 +550,8 @@ class ArchSpec(object):
@lang.lazy_lexicographic_ordering
class CompilerSpec(object):
"""The CompilerSpec field represents the compiler or range of compiler
- versions that a package should be built with. CompilerSpecs have a
- name and a version list. """
+ versions that a package should be built with. CompilerSpecs have a
+ name and a version list."""
def __init__(self, *args):
nargs = len(args)
@@ -566,8 +570,9 @@ class CompilerSpec(object):
else:
raise TypeError(
- "Can only build CompilerSpec from string or " +
- "CompilerSpec. Found %s" % type(arg))
+ "Can only build CompilerSpec from string or "
+ + "CompilerSpec. Found %s" % type(arg)
+ )
elif nargs == 2:
name, version = args
@@ -576,17 +581,16 @@ class CompilerSpec(object):
self.versions.add(vn.ver(version))
else:
- raise TypeError(
- "__init__ takes 1 or 2 arguments. (%d given)" % nargs)
+ raise TypeError("__init__ takes 1 or 2 arguments. (%d given)" % nargs)
def _add_versions(self, version_list):
# If it already has a non-trivial version list, this is an error
- if self.versions and self.versions != vn.VersionList(':'):
+ if self.versions and self.versions != vn.VersionList(":"):
# Note: This may be impossible to reach by the current parser
# Keeping it in case the implementation changes.
raise MultipleVersionError(
- 'A spec cannot contain multiple version signifiers.'
- ' Use a version list instead.')
+ "A spec cannot contain multiple version signifiers." " Use a version list instead."
+ )
self.versions = vn.VersionList()
for version in version_list:
self.versions.add(version)
@@ -598,8 +602,7 @@ class CompilerSpec(object):
def satisfies(self, other, strict=False):
other = self._autospec(other)
- return (self.name == other.name and
- self.versions.satisfies(other.versions, strict=strict))
+ return self.name == other.name and self.versions.satisfies(other.versions, strict=strict)
def constrain(self, other):
"""Intersect self's versions with other.
@@ -617,7 +620,7 @@ class CompilerSpec(object):
@property
def concrete(self):
"""A CompilerSpec is concrete if its versions are concrete and there
- is an available compiler with the right version."""
+ is an available compiler with the right version."""
return self.versions.concrete
@property
@@ -637,15 +640,15 @@ class CompilerSpec(object):
yield self.versions
def to_dict(self):
- d = syaml.syaml_dict([('name', self.name)])
+ d = syaml.syaml_dict([("name", self.name)])
d.update(self.versions.to_dict())
- return syaml.syaml_dict([('compiler', d)])
+ return syaml.syaml_dict([("compiler", d)])
@staticmethod
def from_dict(d):
- d = d['compiler']
- return CompilerSpec(d['name'], vn.VersionList.from_dict(d))
+ d = d["compiler"]
+ return CompilerSpec(d["name"], vn.VersionList.from_dict(d))
def __str__(self):
out = self.name
@@ -692,9 +695,7 @@ class DependencySpec(object):
return DependencySpec(self.parent, self.spec, self.deptypes)
def add_type(self, type):
- self.deptypes = dp.canonical_deptype(
- self.deptypes + dp.canonical_deptype(type)
- )
+ self.deptypes = dp.canonical_deptype(self.deptypes + dp.canonical_deptype(type))
def _cmp_iter(self):
yield self.parent.name if self.parent else None
@@ -702,31 +703,31 @@ class DependencySpec(object):
yield self.deptypes
def __str__(self):
- return "%s %s--> %s" % (self.parent.name if self.parent else None,
- self.deptypes,
- self.spec.name if self.spec else None)
+ return "%s %s--> %s" % (
+ self.parent.name if self.parent else None,
+ self.deptypes,
+ self.spec.name if self.spec else None,
+ )
def canonical(self):
return self.parent.dag_hash(), self.spec.dag_hash(), self.deptypes
-_valid_compiler_flags = [
- 'cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']
+_valid_compiler_flags = ["cflags", "cxxflags", "fflags", "ldflags", "ldlibs", "cppflags"]
class FlagMap(lang.HashableMap):
-
def __init__(self, spec):
super(FlagMap, self).__init__()
self.spec = spec
def satisfies(self, other, strict=False):
if strict or (self.spec and self.spec._concrete):
- return all(f in self and set(self[f]) == set(other[f])
- for f in other)
+ return all(f in self and set(self[f]) == set(other[f]) for f in other)
else:
- return all(set(self[f]) == set(other[f])
- for f in other if (other[f] != [] and f in self))
+ return all(
+ set(self[f]) == set(other[f]) for f in other if (other[f] != [] and f in self)
+ )
def constrain(self, other):
"""Add all flags in other that aren't in self to self.
@@ -736,15 +737,14 @@ class FlagMap(lang.HashableMap):
if other.spec and other.spec._concrete:
for k in self:
if k not in other:
- raise UnsatisfiableCompilerFlagSpecError(
- self[k], '<absent>')
+ raise UnsatisfiableCompilerFlagSpecError(self[k], "<absent>")
changed = False
for k in other:
if k in self and not set(self[k]) <= set(other[k]):
raise UnsatisfiableCompilerFlagSpecError(
- ' '.join(f for f in self[k]),
- ' '.join(f for f in other[k]))
+ " ".join(f for f in self[k]), " ".join(f for f in other[k])
+ )
elif k not in self:
self[k] = other[k]
changed = True
@@ -772,11 +772,14 @@ class FlagMap(lang.HashableMap):
def __str__(self):
sorted_keys = [k for k in sorted(self.keys()) if self[k] != []]
- cond_symbol = ' ' if len(sorted_keys) > 0 else ''
- return cond_symbol + ' '.join(
- str(key) + '=\"' + ' '.join(
- str(f) for f in self[key]) + '\"'
- for key in sorted_keys) + cond_symbol
+ cond_symbol = " " if len(sorted_keys) > 0 else ""
+ return (
+ cond_symbol
+ + " ".join(
+ str(key) + '="' + " ".join(str(f) for f in self[key]) + '"' for key in sorted_keys
+ )
+ + cond_symbol
+ )
def _sort_by_dep_types(dspec):
@@ -798,6 +801,7 @@ class _EdgeMap(Mapping):
Edges are stored in a dictionary and keyed by package name.
"""
+
def __init__(self, store_by=EdgeDirection.child):
# Sanitize input arguments
msg = 'unexpected value for "store_by" argument'
@@ -806,7 +810,7 @@ class _EdgeMap(Mapping):
#: This dictionary maps a package name to a list of edges
#: i.e. to a list of DependencySpec objects
self.edges = {}
- self.store_by_child = (store_by == EdgeDirection.child)
+ self.store_by_child = store_by == EdgeDirection.child
def __getitem__(self, key):
return self.edges[key]
@@ -829,7 +833,7 @@ class _EdgeMap(Mapping):
current_list.sort(key=_sort_by_dep_types)
def __str__(self):
- return "{deps: %s}" % ', '.join(str(d) for d in sorted(self.values()))
+ return "{deps: %s}" % ", ".join(str(d) for d in sorted(self.values()))
def _cmp_iter(self):
for item in sorted(itertools.chain.from_iterable(self.edges.values())):
@@ -882,9 +886,9 @@ class _EdgeMap(Mapping):
# Filter by allowed dependency types
if deptypes:
selected = (
- dep for dep in selected
- if not dep.deptypes or
- any(d in deptypes for d in dep.deptypes)
+ dep
+ for dep in selected
+ if not dep.deptypes or any(d in deptypes for d in dep.deptypes)
)
return list(selected)
@@ -910,13 +914,13 @@ def _command_default_handler(descriptor, spec, cls):
Raises:
RuntimeError: If the command is not found
"""
- home = getattr(spec.package, 'home')
+ home = getattr(spec.package, "home")
path = os.path.join(home.bin, spec.name)
if fs.is_exe(path):
return spack.util.executable.Executable(path)
else:
- msg = 'Unable to locate {0} command in {1}'
+ msg = "Unable to locate {0} command in {1}"
raise RuntimeError(msg.format(spec.name, home.bin))
@@ -938,15 +942,14 @@ def _headers_default_handler(descriptor, spec, cls):
Raises:
NoHeadersError: If no headers are found
"""
- home = getattr(spec.package, 'home')
- headers = fs.find_headers('*', root=home.include, recursive=True)
+ home = getattr(spec.package, "home")
+ headers = fs.find_headers("*", root=home.include, recursive=True)
if headers:
return headers
else:
- msg = 'Unable to locate {0} headers in {1}'
- raise spack.error.NoHeadersError(
- msg.format(spec.name, home))
+ msg = "Unable to locate {0} headers in {1}"
+ raise spack.error.NoHeadersError(msg.format(spec.name, home))
def _libs_default_handler(descriptor, spec, cls):
@@ -979,25 +982,25 @@ def _libs_default_handler(descriptor, spec, cls):
# depending on which one exists (there is a possibility, of course, to
    # get something like 'libabcXabc.so', but for now we consider this
# unlikely).
- name = spec.name.replace('-', '?')
- home = getattr(spec.package, 'home')
+ name = spec.name.replace("-", "?")
+ home = getattr(spec.package, "home")
# Avoid double 'lib' for packages whose names already start with lib
- if not name.startswith('lib'):
- name = 'lib' + name
+ if not name.startswith("lib"):
+ name = "lib" + name
# If '+shared' search only for shared library; if '~shared' search only for
# static library; otherwise, first search for shared and then for static.
- search_shared = [True] if ('+shared' in spec) else \
- ([False] if ('~shared' in spec) else [True, False])
+ search_shared = (
+ [True] if ("+shared" in spec) else ([False] if ("~shared" in spec) else [True, False])
+ )
for shared in search_shared:
- libs = fs.find_libraries(
- name, home, shared=shared, recursive=True)
+ libs = fs.find_libraries(name, home, shared=shared, recursive=True)
if libs:
return libs
- msg = 'Unable to recursively locate {0} libraries in {1}'
+ msg = "Unable to recursively locate {0} libraries in {1}"
raise spack.error.NoLibrariesError(msg.format(spec.name, home))
@@ -1051,9 +1054,7 @@ class ForwardQueryToPackage(object):
callbacks_chain = []
# First in the chain : specialized attribute for virtual packages
if query.isvirtual:
- specialized_name = '{0}_{1}'.format(
- query.name, self.attribute_name
- )
+ specialized_name = "{0}_{1}".format(query.name, self.attribute_name)
callbacks_chain.append(lambda: getattr(pkg, specialized_name))
# Try to get the generic method from Package
callbacks_chain.append(lambda: getattr(pkg, self.attribute_name))
@@ -1071,14 +1072,17 @@ class ForwardQueryToPackage(object):
# A callback can return None to trigger an error indicating
# that the query failed.
if value is None:
- msg = "Query of package '{name}' for '{attrib}' failed\n"
+ msg = "Query of package '{name}' for '{attrib}' failed\n"
msg += "\tprefix : {spec.prefix}\n"
msg += "\tspec : {spec}\n"
msg += "\tqueried as : {query.name}\n"
msg += "\textra parameters : {query.extra_parameters}"
message = msg.format(
- name=pkg.name, attrib=self.attribute_name,
- spec=instance, query=instance.last_query)
+ name=pkg.name,
+ attrib=self.attribute_name,
+ spec=instance,
+ query=instance.last_query,
+ )
else:
return value
break
@@ -1095,15 +1099,11 @@ class ForwardQueryToPackage(object):
# properties defined and no default handler, or that all callbacks
# raised AttributeError. In this case, we raise AttributeError with an
# appropriate message.
- fmt = '\'{name}\' package has no relevant attribute \'{query}\'\n'
- fmt += '\tspec : \'{spec}\'\n'
- fmt += '\tqueried as : \'{spec.last_query.name}\'\n'
- fmt += '\textra parameters : \'{spec.last_query.extra_parameters}\'\n'
- message = fmt.format(
- name=pkg.name,
- query=self.attribute_name,
- spec=instance
- )
+ fmt = "'{name}' package has no relevant attribute '{query}'\n"
+ fmt += "\tspec : '{spec}'\n"
+ fmt += "\tqueried as : '{spec.last_query.name}'\n"
+ fmt += "\textra parameters : '{spec.last_query.extra_parameters}'\n"
+ message = fmt.format(name=pkg.name, query=self.attribute_name, spec=instance)
raise AttributeError(message)
def __set__(self, instance, value):
@@ -1113,42 +1113,29 @@ class ForwardQueryToPackage(object):
# Represents a query state in a BuildInterface object
-QueryState = collections.namedtuple(
- 'QueryState', ['name', 'extra_parameters', 'isvirtual']
-)
+QueryState = collections.namedtuple("QueryState", ["name", "extra_parameters", "isvirtual"])
class SpecBuildInterface(lang.ObjectWrapper):
# home is available in the base Package so no default is needed
- home = ForwardQueryToPackage('home', default_handler=None)
+ home = ForwardQueryToPackage("home", default_handler=None)
- command = ForwardQueryToPackage(
- 'command',
- default_handler=_command_default_handler
- )
+ command = ForwardQueryToPackage("command", default_handler=_command_default_handler)
- headers = ForwardQueryToPackage(
- 'headers',
- default_handler=_headers_default_handler
- )
+ headers = ForwardQueryToPackage("headers", default_handler=_headers_default_handler)
- libs = ForwardQueryToPackage(
- 'libs',
- default_handler=_libs_default_handler
- )
+ libs = ForwardQueryToPackage("libs", default_handler=_libs_default_handler)
def __init__(self, spec, name, query_parameters):
super(SpecBuildInterface, self).__init__(spec)
# Adding new attributes goes after super() call since the ObjectWrapper
# resets __dict__ to behave like the passed object
- original_spec = getattr(spec, 'wrapped_obj', spec)
+ original_spec = getattr(spec, "wrapped_obj", spec)
self.wrapped_obj = original_spec
self.token = original_spec, name, query_parameters
is_virtual = spack.repo.path.is_virtual(name)
self.last_query = QueryState(
- name=name,
- extra_parameters=query_parameters,
- isvirtual=is_virtual
+ name=name, extra_parameters=query_parameters, isvirtual=is_virtual
)
def __reduce__(self):
@@ -1171,8 +1158,14 @@ class Spec(object):
s.architecture = ArchSpec.default_arch()
return s
- def __init__(self, spec_like=None, normal=False,
- concrete=False, external_path=None, external_modules=None):
+ def __init__(
+ self,
+ spec_like=None,
+ normal=False,
+ concrete=False,
+ external_path=None,
+ external_modules=None,
+ ):
"""Create a new Spec.
Arguments:
@@ -1195,7 +1188,7 @@ class Spec(object):
# init an empty spec that matches anything.
self.name = None
- self.versions = vn.VersionList(':')
+ self.versions = vn.VersionList(":")
self.variants = vt.VariantMap(self)
self.architecture = None
self.compiler = None
@@ -1277,7 +1270,7 @@ class Spec(object):
self._dependencies.clear()
self._dependents.clear()
- def detach(self, deptype='all'):
+ def detach(self, deptype="all"):
"""Remove any reference that dependencies have of this node.
Args:
@@ -1309,7 +1302,7 @@ class Spec(object):
raise spack.error.SpecError(err_msg.format(name, len(deps)))
return deps[0]
- def edges_from_dependents(self, name=None, deptype='all'):
+ def edges_from_dependents(self, name=None, deptype="all"):
"""Return a list of edges connecting this node in the DAG
to parents.
@@ -1318,12 +1311,9 @@ class Spec(object):
deptype (str or tuple): allowed dependency types
"""
deptype = dp.canonical_deptype(deptype)
- return [
- d for d in
- self._dependents.select(parent=name, deptypes=deptype)
- ]
+ return [d for d in self._dependents.select(parent=name, deptypes=deptype)]
- def edges_to_dependencies(self, name=None, deptype='all'):
+ def edges_to_dependencies(self, name=None, deptype="all"):
"""Return a list of edges connecting this node in the DAG
to children.
@@ -1332,12 +1322,9 @@ class Spec(object):
deptype (str or tuple): allowed dependency types
"""
deptype = dp.canonical_deptype(deptype)
- return [
- d for d in
- self._dependencies.select(child=name, deptypes=deptype)
- ]
+ return [d for d in self._dependencies.select(child=name, deptypes=deptype)]
- def dependencies(self, name=None, deptype='all'):
+ def dependencies(self, name=None, deptype="all"):
"""Return a list of direct dependencies (nodes in the DAG).
Args:
@@ -1346,7 +1333,7 @@ class Spec(object):
"""
return [d.spec for d in self.edges_to_dependencies(name, deptype=deptype)]
- def dependents(self, name=None, deptype='all'):
+ def dependents(self, name=None, deptype="all"):
"""Return a list of direct dependents (nodes in the DAG).
Args:
@@ -1355,7 +1342,7 @@ class Spec(object):
"""
return [d.parent for d in self.edges_from_dependents(name, deptype=deptype)]
- def _dependencies_dict(self, deptype='all'):
+ def _dependencies_dict(self, deptype="all"):
"""Return a dictionary, keyed by package name, of the direct
dependencies.
@@ -1369,9 +1356,7 @@ class Spec(object):
deptype = dp.canonical_deptype(deptype)
selected_edges = self._dependencies.select(deptypes=deptype)
result = {}
- for key, group in itertools.groupby(
- sorted(selected_edges, key=_sort_fn), key=_group_fn
- ):
+ for key, group in itertools.groupby(sorted(selected_edges, key=_sort_fn), key=_group_fn):
result[key] = list(group)
return result
@@ -1381,10 +1366,10 @@ class Spec(object):
def _add_versions(self, version_list):
"""Called by the parser to add an allowable version."""
# If it already has a non-trivial version list, this is an error
- if self.versions and self.versions != vn.VersionList(':'):
+ if self.versions and self.versions != vn.VersionList(":"):
raise MultipleVersionError(
- 'A spec cannot contain multiple version signifiers.'
- ' Use a version list instead.')
+ "A spec cannot contain multiple version signifiers." " Use a version list instead."
+ )
self.versions = vn.VersionList()
for version in version_list:
self.versions.add(version)
@@ -1394,47 +1379,48 @@ class Spec(object):
Known flags currently include "arch"
"""
valid_flags = FlagMap.valid_compiler_flags()
- if name == 'arch' or name == 'architecture':
- parts = tuple(value.split('-'))
+ if name == "arch" or name == "architecture":
+ parts = tuple(value.split("-"))
plat, os, tgt = parts if len(parts) == 3 else (None, None, value)
self._set_architecture(platform=plat, os=os, target=tgt)
- elif name == 'platform':
+ elif name == "platform":
self._set_architecture(platform=value)
- elif name == 'os' or name == 'operating_system':
+ elif name == "os" or name == "operating_system":
self._set_architecture(os=value)
- elif name == 'target':
+ elif name == "target":
self._set_architecture(target=value)
elif name in valid_flags:
- assert(self.compiler_flags is not None)
+ assert self.compiler_flags is not None
self.compiler_flags[name] = spack.compiler.tokenize_flags(value)
else:
# FIXME:
# All other flags represent variants. 'foo=true' and 'foo=false'
# map to '+foo' and '~foo' respectively. As such they need a
# BoolValuedVariant instance.
- if str(value).upper() == 'TRUE' or str(value).upper() == 'FALSE':
+ if str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
self.variants[name] = vt.BoolValuedVariant(name, value)
else:
self.variants[name] = vt.AbstractVariant(name, value)
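# Sketch of how key=value tokens are routed by the code above (the package name
# and the platform-os-target triple are placeholders):
from spack.spec import Spec

s = Spec("hdf5 cflags=-O3 shared=true arch=linux-ubuntu20.04-x86_64")
print(s.compiler_flags["cflags"])  # ['-O3'] -- tokenized compiler flag
print(s.variants["shared"])        # +shared -- a BoolValuedVariant
print(s.architecture)              # ArchSpec built from the platform-os-target triple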
def _set_architecture(self, **kwargs):
"""Called by the parser to set the architecture."""
- arch_attrs = ['platform', 'os', 'target']
+ arch_attrs = ["platform", "os", "target"]
if self.architecture and self.architecture.concrete:
raise DuplicateArchitectureError(
- "Spec for '%s' cannot have two architectures." % self.name)
+ "Spec for '%s' cannot have two architectures." % self.name
+ )
if not self.architecture:
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
self.architecture = ArchSpec(new_vals)
else:
- new_attrvals = [(a, v) for a, v in six.iteritems(kwargs)
- if a in arch_attrs]
+ new_attrvals = [(a, v) for a, v in six.iteritems(kwargs) if a in arch_attrs]
for new_attr, new_value in new_attrvals:
if getattr(self.architecture, new_attr):
raise DuplicateArchitectureError(
"Spec for '%s' cannot have two '%s' specified "
- "for its architecture" % (self.name, new_attr))
+ "for its architecture" % (self.name, new_attr)
+ )
else:
setattr(self.architecture, new_attr, new_value)
@@ -1442,14 +1428,14 @@ class Spec(object):
"""Called by the parser to set the compiler."""
if self.compiler:
raise DuplicateCompilerSpecError(
- "Spec for '%s' cannot have two compilers." % self.name)
+ "Spec for '%s' cannot have two compilers." % self.name
+ )
self.compiler = compiler
def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
if spec.name in self._dependencies:
- raise DuplicateDependencyError(
- "Cannot depend on '%s' twice" % spec)
+ raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)
self.add_dependency_edge(spec, deptypes)
@@ -1466,8 +1452,10 @@ class Spec(object):
selected = self._dependencies.select(child=dependency_spec.name)
for edge in selected:
if any(d in edge.deptypes for d in deptype):
- msg = ('cannot add a dependency on "{0.spec}" of {1} type '
- 'when the "{0.parent}" has the edge {0!s} already')
+ msg = (
+ 'cannot add a dependency on "{0.spec}" of {1} type '
+ 'when the "{0.parent}" has the edge {0!s} already'
+ )
raise spack.error.SpecError(msg.format(edge, deptype))
for edge in selected:
@@ -1499,8 +1487,10 @@ class Spec(object):
@property
def fullname(self):
return (
- ('%s.%s' % (self.namespace, self.name)) if self.namespace else
- (self.name if self.name else ''))
+ ("%s.%s" % (self.namespace, self.name))
+ if self.namespace
+ else (self.name if self.name else "")
+ )
@property
def root(self):
@@ -1525,7 +1515,7 @@ class Spec(object):
@property
def package_class(self):
"""Internal package call gets only the class object for a package.
- Use this to just get package metadata.
+ Use this to just get package metadata.
"""
return spack.repo.path.get_pkg_class(self.fullname)
@@ -1533,10 +1523,10 @@ class Spec(object):
def virtual(self):
"""Right now, a spec is virtual if no package exists with its name.
- TODO: revisit this -- might need to use a separate namespace and
- be more explicit about this.
- Possible idea: just use conventin and make virtual deps all
- caps, e.g., MPI vs mpi.
+ TODO: revisit this -- might need to use a separate namespace and
+ be more explicit about this.
+        Possible idea: just use convention and make virtual deps all
+ caps, e.g., MPI vs mpi.
"""
# This method can be called while regenerating the provider index
# So we turn off using the index to detect virtuals
@@ -1596,11 +1586,11 @@ class Spec(object):
return upstream
def traverse(self, **kwargs):
- direction = kwargs.get('direction', 'children')
- depth = kwargs.get('depth', False)
+ direction = kwargs.get("direction", "children")
+ depth = kwargs.get("depth", False)
get_spec = lambda s: s.spec
- if direction == 'parents':
+ if direction == "parents":
get_spec = lambda s: s.parent
if depth:
@@ -1610,66 +1600,65 @@ class Spec(object):
for dspec in self.traverse_edges(**kwargs):
yield get_spec(dspec)
- def traverse_edges(self, visited=None, d=0, deptype='all',
- dep_spec=None, **kwargs):
+ def traverse_edges(self, visited=None, d=0, deptype="all", dep_spec=None, **kwargs):
"""Generic traversal of the DAG represented by this spec.
- This yields ``DependencySpec`` objects as they are traversed.
+ This yields ``DependencySpec`` objects as they are traversed.
- When traversing top-down, an imaginary incoming edge to the root
- is yielded first as ``DependencySpec(None, root, ())``. When
- traversing bottom-up, imaginary edges to leaves are yielded first
- as ``DependencySpec(left, None, ())`` objects.
+ When traversing top-down, an imaginary incoming edge to the root
+ is yielded first as ``DependencySpec(None, root, ())``. When
+ traversing bottom-up, imaginary edges to leaves are yielded first
+ as ``DependencySpec(left, None, ())`` objects.
- Options:
+ Options:
- order [=pre|post]
- Order to traverse spec nodes. Defaults to preorder traversal.
- Options are:
+ order [=pre|post]
+ Order to traverse spec nodes. Defaults to preorder traversal.
+ Options are:
- 'pre': Pre-order traversal; each node is yielded before its
- children in the dependency DAG.
- 'post': Post-order traversal; each node is yielded after its
- children in the dependency DAG.
+ 'pre': Pre-order traversal; each node is yielded before its
+ children in the dependency DAG.
+ 'post': Post-order traversal; each node is yielded after its
+ children in the dependency DAG.
- cover [=nodes|edges|paths]
- Determines how extensively to cover the dag. Possible values:
+ cover [=nodes|edges|paths]
+ Determines how extensively to cover the dag. Possible values:
- 'nodes': Visit each node in the dag only once. Every node
- yielded by this function will be unique.
- 'edges': If a node has been visited once but is reached along a
- new path from the root, yield it but do not descend
- into it. This traverses each 'edge' in the DAG once.
- 'paths': Explore every unique path reachable from the root.
- This descends into visited subtrees and will yield
- nodes twice if they're reachable by multiple paths.
+ 'nodes': Visit each node in the dag only once. Every node
+ yielded by this function will be unique.
+ 'edges': If a node has been visited once but is reached along a
+ new path from the root, yield it but do not descend
+ into it. This traverses each 'edge' in the DAG once.
+ 'paths': Explore every unique path reachable from the root.
+ This descends into visited subtrees and will yield
+ nodes twice if they're reachable by multiple paths.
- depth [=False]
- Defaults to False. When True, yields not just nodes in the
- spec, but also their depth from the root in a (depth, node)
- tuple.
+ depth [=False]
+ Defaults to False. When True, yields not just nodes in the
+ spec, but also their depth from the root in a (depth, node)
+ tuple.
- key [=id]
- Allow a custom key function to track the identity of nodes
- in the traversal.
+ key [=id]
+ Allow a custom key function to track the identity of nodes
+ in the traversal.
- root [=True]
- If False, this won't yield the root node, just its descendents.
+ root [=True]
+            If False, this won't yield the root node, just its descendants.
- direction [=children|parents]
- If 'children', does a traversal of this spec's children. If
- 'parents', traverses upwards in the DAG towards the root.
+ direction [=children|parents]
+ If 'children', does a traversal of this spec's children. If
+ 'parents', traverses upwards in the DAG towards the root.
"""
# get initial values for kwargs
- depth = kwargs.get('depth', False)
- key_fun = kwargs.get('key', id)
+ depth = kwargs.get("depth", False)
+ key_fun = kwargs.get("key", id)
if isinstance(key_fun, six.string_types):
key_fun = operator.attrgetter(key_fun)
- yield_root = kwargs.get('root', True)
- cover = kwargs.get('cover', 'nodes')
- direction = kwargs.get('direction', 'children')
- order = kwargs.get('order', 'pre')
+ yield_root = kwargs.get("root", True)
+ cover = kwargs.get("cover", "nodes")
+ direction = kwargs.get("direction", "children")
+ order = kwargs.get("order", "pre")
# we don't want to run canonical_deptype every time through
# traverse, because it is somewhat expensive. This ensures we
@@ -1684,24 +1673,27 @@ class Spec(object):
# Make sure kwargs have legal values; raise ValueError if not.
def validate(name, val, allowed_values):
if val not in allowed_values:
- raise ValueError("Invalid value for %s: %s. Choices are %s"
- % (name, val, ",".join(allowed_values)))
- validate('cover', cover, ('nodes', 'edges', 'paths'))
- validate('direction', direction, ('children', 'parents'))
- validate('order', order, ('pre', 'post'))
+ raise ValueError(
+ "Invalid value for %s: %s. Choices are %s"
+ % (name, val, ",".join(allowed_values))
+ )
+
+ validate("cover", cover, ("nodes", "edges", "paths"))
+ validate("direction", direction, ("children", "parents"))
+ validate("order", order, ("pre", "post"))
if visited is None:
visited = set()
key = key_fun(self)
# Node traversal does not yield visited nodes.
- if key in visited and cover == 'nodes':
+ if key in visited and cover == "nodes":
return
def return_val(dspec):
if not dspec:
# make a fake dspec for the root.
- if direction == 'parents':
+ if direction == "parents":
dspec = DependencySpec(self, None, ())
else:
dspec = DependencySpec(None, self, ())
@@ -1710,52 +1702,50 @@ class Spec(object):
yield_me = yield_root or d > 0
# Preorder traversal yields before successors
- if yield_me and order == 'pre':
+ if yield_me and order == "pre":
yield return_val(dep_spec)
# Edge traversal yields but skips children of visited nodes
- if not (key in visited and cover == 'edges'):
+ if not (key in visited and cover == "edges"):
visited.add(key)
# This code determines direction and yields the children/parents
- if direction == 'children':
+ if direction == "children":
edges = self.edges_to_dependencies
key_fn = lambda dspec: dspec.spec.name
succ = lambda dspec: dspec.spec
- elif direction == 'parents':
+ elif direction == "parents":
edges = self.edges_from_dependents
key_fn = lambda dspec: dspec.parent.name
succ = lambda dspec: dspec.parent
else:
- raise ValueError('Invalid traversal direction: %s' % direction)
+ raise ValueError("Invalid traversal direction: %s" % direction)
for dspec in sorted(edges(), key=key_fn):
dt = dspec.deptypes
if dt and not any(d in deptype for d in dt):
continue
- for child in succ(dspec).traverse_edges(
- visited, d + 1, deptype, dspec, **kwargs
- ):
+ for child in succ(dspec).traverse_edges(visited, d + 1, deptype, dspec, **kwargs):
yield child
# Postorder traversal yields after successors
- if yield_me and order == 'post':
+ if yield_me and order == "post":
yield return_val(dep_spec)
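# A small sketch of the traversal options documented above, assuming a
# configured Spack installation where "hdf5" concretizes.
from spack.spec import Spec

root = Spec("hdf5").concretized()

# Node-wise, post-order, restricted to build and link edges, with depths:
for depth, node in root.traverse(order="post", depth=True, deptype=("build", "link")):
    print("  " * depth + node.name)

# Edge-wise traversal yields DependencySpec objects instead of Spec nodes:
for edge in root.traverse_edges(cover="edges", root=False):
    print(edge.parent.name, "->", edge.spec.name, sorted(edge.deptypes))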
@property
def short_spec(self):
"""Returns a version of the spec with the dependencies hashed
- instead of completely enumerated."""
- spec_format = '{name}{@version}{%compiler}'
- spec_format += '{variants}{arch=architecture}{/hash:7}'
+ instead of completely enumerated."""
+ spec_format = "{name}{@version}{%compiler}"
+ spec_format += "{variants}{arch=architecture}{/hash:7}"
return self.format(spec_format)
@property
def cshort_spec(self):
"""Returns an auto-colorized version of ``self.short_spec``."""
- spec_format = '{name}{@version}{%compiler}'
- spec_format += '{variants}{arch=architecture}{/hash:7}'
+ spec_format = "{name}{@version}{%compiler}"
+ spec_format += "{variants}{arch=architecture}{/hash:7}"
return self.cformat(spec_format)
@property
@@ -1764,8 +1754,7 @@ class Spec(object):
raise spack.error.SpecError("Spec is not concrete: " + str(self))
if self._prefix is None:
- upstream, record = spack.store.db.query_by_spec_hash(
- self.dag_hash())
+ upstream, record = spack.store.db.query_by_spec_hash(self.dag_hash())
if record and record.path:
self.prefix = record.path
else:
@@ -1904,10 +1893,10 @@ class Spec(object):
Arguments:
hash (spack.hash_types.SpecHashDescriptor) type of hash to generate.
- """
+ """
d = syaml.syaml_dict()
- d['name'] = self.name
+ d["name"] = self.name
if self.versions:
d.update(self.versions.to_dict())
@@ -1919,32 +1908,30 @@ class Spec(object):
d.update(self.compiler.to_dict())
if self.namespace:
- d['namespace'] = self.namespace
+ d["namespace"] = self.namespace
- params = syaml.syaml_dict(
- sorted(
- v.yaml_entry() for _, v in self.variants.items()
- )
- )
+ params = syaml.syaml_dict(sorted(v.yaml_entry() for _, v in self.variants.items()))
params.update(sorted(self.compiler_flags.items()))
if params:
- d['parameters'] = params
+ d["parameters"] = params
if self.external:
- d['external'] = syaml.syaml_dict([
- ('path', self.external_path),
- ('module', self.external_modules),
- ('extra_attributes', self.extra_attributes)
- ])
+ d["external"] = syaml.syaml_dict(
+ [
+ ("path", self.external_path),
+ ("module", self.external_modules),
+ ("extra_attributes", self.extra_attributes),
+ ]
+ )
if not self._concrete:
- d['concrete'] = False
+ d["concrete"] = False
- if 'patches' in self.variants:
- variant = self.variants['patches']
- if hasattr(variant, '_patches_in_order_of_appearance'):
- d['patches'] = variant._patches_in_order_of_appearance
+ if "patches" in self.variants:
+ variant = self.variants["patches"]
+ if hasattr(variant, "_patches_in_order_of_appearance"):
+ d["patches"] = variant._patches_in_order_of_appearance
if self._concrete and hash.package_hash and self._package_hash:
# We use the attribute here instead of `self.package_hash()` because this
@@ -1955,31 +1942,29 @@ class Spec(object):
package_hash = self._package_hash
# Full hashes are in bytes
- if (not isinstance(package_hash, six.text_type)
- and isinstance(package_hash, six.binary_type)):
- package_hash = package_hash.decode('utf-8')
- d['package_hash'] = package_hash
+ if not isinstance(package_hash, six.text_type) and isinstance(
+ package_hash, six.binary_type
+ ):
+ package_hash = package_hash.decode("utf-8")
+ d["package_hash"] = package_hash
# Note: Relies on sorting dict by keys later in algorithm.
deps = self._dependencies_dict(deptype=hash.deptype)
if deps:
deps_list = []
for name, edges_for_name in sorted(deps.items()):
- name_tuple = ('name', name)
+ name_tuple = ("name", name)
for dspec in edges_for_name:
hash_tuple = (hash.name, dspec.spec._cached_hash(hash))
- type_tuple = ('type', sorted(str(s) for s in dspec.deptypes))
- deps_list.append(syaml.syaml_dict(
- [name_tuple, hash_tuple, type_tuple]
- ))
- d['dependencies'] = deps_list
+ type_tuple = ("type", sorted(str(s) for s in dspec.deptypes))
+ deps_list.append(syaml.syaml_dict([name_tuple, hash_tuple, type_tuple]))
+ d["dependencies"] = deps_list
# Name is included in case this is replacing a virtual.
if self._build_spec:
- d['build_spec'] = syaml.syaml_dict([
- ('name', self.build_spec.name),
- (hash.name, self.build_spec._cached_hash(hash))
- ])
+ d["build_spec"] = syaml.syaml_dict(
+ [("name", self.build_spec.name), (hash.name, self.build_spec._cached_hash(hash))]
+ )
return d
def to_dict(self, hash=ht.dag_hash):
@@ -2066,7 +2051,7 @@ class Spec(object):
"""
node_list = [] # Using a list to preserve preorder traversal for hash.
hash_set = set()
- for s in self.traverse(order='pre', deptype=hash.deptype):
+ for s in self.traverse(order="pre", deptype=hash.deptype):
spec_hash = s._cached_hash(hash)
if spec_hash not in hash_set:
@@ -2074,22 +2059,22 @@ class Spec(object):
hash_set.add(spec_hash)
if s.build_spec is not s:
- build_spec_list = s.build_spec.to_dict(hash)['spec']['nodes']
+ build_spec_list = s.build_spec.to_dict(hash)["spec"]["nodes"]
for node in build_spec_list:
node_hash = node[hash.name]
if node_hash not in hash_set:
node_list.append(node)
hash_set.add(node_hash)
- meta_dict = syaml.syaml_dict([('version', specfile_format_version)])
- inner_dict = syaml.syaml_dict([('_meta', meta_dict), ('nodes', node_list)])
- spec_dict = syaml.syaml_dict([('spec', inner_dict)])
+ meta_dict = syaml.syaml_dict([("version", specfile_format_version)])
+ inner_dict = syaml.syaml_dict([("_meta", meta_dict), ("nodes", node_list)])
+ spec_dict = syaml.syaml_dict([("spec", inner_dict)])
return spec_dict
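# A quick look at the specfile layout produced above, assuming a configured
# Spack installation where "zlib" concretizes.
from spack.spec import Spec
import spack.hash_types as ht

s = Spec("zlib").concretized()
d = s.to_dict(hash=ht.dag_hash)
print(list(d["spec"].keys()))         # ['_meta', 'nodes']
print(d["spec"]["_meta"]["version"])  # specfile_format_version
print(d["spec"]["nodes"][0]["name"])  # 'zlib' -- the root node comes first (preorder)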
def node_dict_with_hashes(self, hash=ht.dag_hash):
- """ Returns a node_dict of this spec with the dag hash added. If this
+ """Returns a node_dict of this spec with the dag hash added. If this
spec is concrete, the full hash is added as well. If 'build' is in
- the hash_type, the build hash is also added. """
+ the hash_type, the build hash is also added."""
node = self.to_node_dict(hash)
node[ht.dag_hash.name] = self.dag_hash()
@@ -2101,7 +2086,7 @@ class Spec(object):
node[ht.dag_hash.name] = self.dag_hash()
else:
- node['concrete'] = False
+ node["concrete"] = False
# we can also give them other hash types if we want
if hash.name != ht.dag_hash.name:
@@ -2110,8 +2095,7 @@ class Spec(object):
return node
def to_yaml(self, stream=None, hash=ht.dag_hash):
- return syaml.dump(
- self.to_dict(hash), stream=stream, default_flow_style=False)
+ return syaml.dump(self.to_dict(hash), stream=stream, default_flow_style=False)
def to_json(self, stream=None, hash=ht.dag_hash):
return sjson.dump(self.to_dict(hash), stream)
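# Round-trip sketch using the serializers above, assuming a configured Spack
# installation where "zlib" concretizes.
from spack.spec import Spec

original = Spec("zlib").concretized()
restored = Spec.from_json(original.to_json())
assert restored.dag_hash() == original.dag_hash()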
@@ -2119,18 +2103,18 @@ class Spec(object):
@staticmethod
def from_specfile(path):
"""Construct a spec from aJSON or YAML spec file path"""
- with open(path, 'r') as fd:
+ with open(path, "r") as fd:
file_content = fd.read()
- if path.endswith('.json'):
+ if path.endswith(".json"):
return Spec.from_json(file_content)
return Spec.from_yaml(file_content)
@staticmethod
def from_node_dict(node):
spec = Spec()
- if 'name' in node.keys():
+ if "name" in node.keys():
# New format
- name = node['name']
+ name = node["name"]
else:
# Old format
name = next(iter(node))
@@ -2139,59 +2123,54 @@ class Spec(object):
setattr(spec, h.attr, node.get(h.name, None))
spec.name = name
- spec.namespace = node.get('namespace', None)
+ spec.namespace = node.get("namespace", None)
- if 'version' in node or 'versions' in node:
+ if "version" in node or "versions" in node:
spec.versions = vn.VersionList.from_dict(node)
- if 'arch' in node:
+ if "arch" in node:
spec.architecture = ArchSpec.from_dict(node)
- if 'compiler' in node:
+ if "compiler" in node:
spec.compiler = CompilerSpec.from_dict(node)
else:
spec.compiler = None
- if 'parameters' in node:
- for name, value in node['parameters'].items():
+ if "parameters" in node:
+ for name, value in node["parameters"].items():
if name in _valid_compiler_flags:
spec.compiler_flags[name] = value
else:
- spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
- name, value)
- elif 'variants' in node:
- for name, value in node['variants'].items():
- spec.variants[name] = vt.MultiValuedVariant.from_node_dict(
- name, value
- )
+ spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
+ elif "variants" in node:
+ for name, value in node["variants"].items():
+ spec.variants[name] = vt.MultiValuedVariant.from_node_dict(name, value)
for name in FlagMap.valid_compiler_flags():
spec.compiler_flags[name] = []
spec.external_path = None
spec.external_modules = None
- if 'external' in node:
+ if "external" in node:
# This conditional is needed because sometimes this function is
# called with a node already constructed that contains a 'versions'
# and 'external' field. Related to virtual packages provider
# indexes.
- if node['external']:
- spec.external_path = node['external']['path']
- spec.external_modules = node['external']['module']
+ if node["external"]:
+ spec.external_path = node["external"]["path"]
+ spec.external_modules = node["external"]["module"]
if spec.external_modules is False:
spec.external_modules = None
- spec.extra_attributes = node['external'].get(
- 'extra_attributes', syaml.syaml_dict()
+ spec.extra_attributes = node["external"].get(
+ "extra_attributes", syaml.syaml_dict()
)
# specs read in are concrete unless marked abstract
- spec._concrete = node.get('concrete', True)
+ spec._concrete = node.get("concrete", True)
- if 'patches' in node:
- patches = node['patches']
+ if "patches" in node:
+ patches = node["patches"]
if len(patches) > 0:
- mvar = spec.variants.setdefault(
- 'patches', vt.MultiValuedVariant('patches', ())
- )
+ mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = patches
# FIXME: Monkey patches mvar to store patches order
mvar._patches_in_order_of_appearance = patches
@@ -2204,20 +2183,20 @@ class Spec(object):
@staticmethod
def build_spec_from_node_dict(node, hash_type=ht.dag_hash.name):
- build_spec_dict = node['build_spec']
- return build_spec_dict['name'], build_spec_dict[hash_type], hash_type
+ build_spec_dict = node["build_spec"]
+ return build_spec_dict["name"], build_spec_dict[hash_type], hash_type
@staticmethod
def dependencies_from_node_dict(node):
- if 'name' in node.keys():
+ if "name" in node.keys():
# New format
- name = node['name']
+ name = node["name"]
else:
name = next(iter(node))
node = node[name]
- if 'dependencies' not in node:
+ if "dependencies" not in node:
return
- for t in Spec.read_yaml_dep_specs(node['dependencies']):
+ for t in Spec.read_yaml_dep_specs(node["dependencies"]):
yield t
@staticmethod
@@ -2232,10 +2211,10 @@ class Spec(object):
dep_name, elt = dep
else:
elt = dep
- dep_name = dep['name']
+ dep_name = dep["name"]
if isinstance(elt, six.string_types):
# original format, elt is just the dependency hash.
- dep_hash, deptypes = elt, ['build', 'link']
+ dep_hash, deptypes = elt, ["build", "link"]
elif isinstance(elt, tuple):
# original deptypes format: (used tuples, not future-proof)
dep_hash, deptypes = elt
@@ -2243,14 +2222,13 @@ class Spec(object):
# new format: elements of dependency spec are keyed.
for h in ht.hashes:
if h.name in elt:
- dep_hash, deptypes = elt[h.name], elt['type']
+ dep_hash, deptypes = elt[h.name], elt["type"]
hash_type = h.name
break
else: # We never determined a hash type...
raise spack.error.SpecError("Couldn't parse dependency spec.")
else:
- raise spack.error.SpecError(
- "Couldn't parse dependency types in spec.")
+ raise spack.error.SpecError("Couldn't parse dependency types in spec.")
yield dep_name, dep_hash, list(deptypes), hash_type
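# Illustrative placeholders for the three dependency-entry layouts handled
# above (the hash strings are fake):
legacy_pair = ("zlib", "abc123def456")                        # original: (name, hash) only
legacy_typed = ("zlib", ("abc123def456", ["build", "link"]))  # (name, (hash, deptypes)) tuple
keyed_entry = {"name": "zlib", "hash": "abc123def456", "type": ["build", "link"]}  # current keyed form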
@staticmethod
@@ -2360,7 +2338,7 @@ class Spec(object):
s (str): key in the dictionary containing the literal
"""
- t = s.split(':')
+ t = s.split(":")
if len(t) > 2:
msg = 'more than one ":" separator in key "{0}"'
@@ -2368,7 +2346,7 @@ class Spec(object):
n = t[0]
if len(t) == 2:
- dtypes = tuple(dt.strip() for dt in t[1].split(','))
+ dtypes = tuple(dt.strip() for dt in t[1].split(","))
else:
dtypes = ()
@@ -2388,7 +2366,7 @@ class Spec(object):
return s, ()
spec_obj, dtypes = s
- return spec_obj, tuple(dt.strip() for dt in dtypes.split(','))
+ return spec_obj, tuple(dt.strip() for dt in dtypes.split(","))
# Recurse on dependencies
for s, s_dependencies in dep_like.items():
@@ -2462,7 +2440,7 @@ class Spec(object):
stream: string or file object to read from.
"""
data = stream
- if hasattr(stream, 'read'):
+ if hasattr(stream, "read"):
data = stream.read()
extracted_json = Spec.extract_json_from_clearsig(data)
@@ -2496,22 +2474,21 @@ class Spec(object):
"""
# Assert that _extra_attributes is a Mapping and not None,
# which likely means the spec was created with Spec.from_detection
- msg = ('cannot validate "{0}" since it was not created '
- 'using Spec.from_detection'.format(self))
+ msg = 'cannot validate "{0}" since it was not created ' "using Spec.from_detection".format(
+ self
+ )
assert isinstance(self.extra_attributes, Mapping), msg
# Validate the spec calling a package specific method
pkg_cls = spack.repo.path.get_pkg_class(self.name)
- validate_fn = getattr(
- pkg_cls, 'validate_detected_spec', lambda x, y: None
- )
+ validate_fn = getattr(pkg_cls, "validate_detected_spec", lambda x, y: None)
validate_fn(self, self.extra_attributes)
def _concretize_helper(self, concretizer, presets=None, visited=None):
"""Recursive helper function for concretize().
- This concretizes everything bottom-up. As things are
- concretized, they're added to the presets, and ancestors
- will prefer the settings of their children.
+ This concretizes everything bottom-up. As things are
+ concretized, they're added to the presets, and ancestors
+ will prefer the settings of their children.
"""
if presets is None:
presets = {}
@@ -2547,14 +2524,17 @@ class Spec(object):
# still need to select a concrete package later.
if not self.virtual:
changed |= any(
- (concretizer.concretize_develop(self), # special variant
- concretizer.concretize_architecture(self),
- concretizer.concretize_compiler(self),
- concretizer.adjust_target(self),
- # flags must be concretized after compiler
- concretizer.concretize_compiler_flags(self),
- concretizer.concretize_version(self),
- concretizer.concretize_variants(self)))
+ (
+ concretizer.concretize_develop(self), # special variant
+ concretizer.concretize_architecture(self),
+ concretizer.concretize_compiler(self),
+ concretizer.adjust_target(self),
+ # flags must be concretized after compiler
+ concretizer.concretize_compiler_flags(self),
+ concretizer.concretize_version(self),
+ concretizer.concretize_variants(self),
+ )
+ )
presets[self.name] = self
visited.add(self.name)
@@ -2577,24 +2557,23 @@ class Spec(object):
def _expand_virtual_packages(self, concretizer):
"""Find virtual packages in this spec, replace them with providers,
- and normalize again to include the provider's (potentially virtual)
- dependencies. Repeat until there are no virtual deps.
+ and normalize again to include the provider's (potentially virtual)
+ dependencies. Repeat until there are no virtual deps.
- Precondition: spec is normalized.
+ Precondition: spec is normalized.
- .. todo::
+ .. todo::
- If a provider depends on something that conflicts with
- other dependencies in the spec being expanded, this can
- produce a conflicting spec. For example, if mpich depends
- on hwloc@:1.3 but something in the spec needs hwloc1.4:,
- then we should choose an MPI other than mpich. Cases like
- this are infrequent, but should implement this before it is
- a problem.
+ If a provider depends on something that conflicts with
+ other dependencies in the spec being expanded, this can
+ produce a conflicting spec. For example, if mpich depends
+        on hwloc@:1.3 but something in the spec needs hwloc@1.4:,
+ then we should choose an MPI other than mpich. Cases like
+        this are infrequent, but we should implement this before it is
+ a problem.
"""
# Make an index of stuff this spec already provides
- self_index = spack.provider_index.ProviderIndex(
- self.traverse(), restrict=True)
+ self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
changed = False
done = False
@@ -2653,16 +2632,15 @@ class Spec(object):
return (not cfield) or (cfield == sfield)
if replacement is spec or (
- feq(replacement.name, spec.name) and
- feq(replacement.versions, spec.versions) and
- feq(replacement.compiler, spec.compiler) and
- feq(replacement.architecture, spec.architecture) and
- feq(replacement._dependencies, spec._dependencies) and
- feq(replacement.variants, spec.variants) and
- feq(replacement.external_path,
- spec.external_path) and
- feq(replacement.external_modules,
- spec.external_modules)):
+ feq(replacement.name, spec.name)
+ and feq(replacement.versions, spec.versions)
+ and feq(replacement.compiler, spec.compiler)
+ and feq(replacement.architecture, spec.architecture)
+ and feq(replacement._dependencies, spec._dependencies)
+ and feq(replacement.variants, spec.variants)
+ and feq(replacement.external_path, spec.external_path)
+ and feq(replacement.external_modules, spec.external_modules)
+ ):
continue
# Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in
@@ -2703,14 +2681,15 @@ class Spec(object):
# Add a warning message to inform users that the original concretizer
# will be removed
if deprecation_warning:
- msg = ('the original concretizer is currently being used.\n\tUpgrade to '
- '"clingo" at your earliest convenience. The original concretizer '
- 'will be removed from Spack in a future version.')
+ msg = (
+ "the original concretizer is currently being used.\n\tUpgrade to "
+ '"clingo" at your earliest convenience. The original concretizer '
+ "will be removed from Spack in a future version."
+ )
warnings.warn(msg)
if not self.name:
- raise spack.error.SpecError(
- "Attempting to concretize anonymous spec")
+ raise spack.error.SpecError("Attempting to concretize anonymous spec")
if self._concrete:
return
@@ -2721,10 +2700,11 @@ class Spec(object):
user_spec_deps = self.flat_dependencies(copy=False)
concretizer = spack.concretize.Concretizer(self.copy())
while changed:
- changes = (self.normalize(force, tests=tests,
- user_spec_deps=user_spec_deps),
- self._expand_virtual_packages(concretizer),
- self._concretize_helper(concretizer))
+ changes = (
+ self.normalize(force, tests=tests, user_spec_deps=user_spec_deps),
+ self._expand_virtual_packages(concretizer),
+ self._concretize_helper(concretizer),
+ )
changed = any(changes)
force = True
@@ -2786,8 +2766,9 @@ class Spec(object):
# ensure that patch state is consistent
patch_variant = self.variants["patches"]
- assert hasattr(patch_variant, "_patches_in_order_of_appearance"), \
- "patches should always be assigned with a patch variant."
+ assert hasattr(
+ patch_variant, "_patches_in_order_of_appearance"
+ ), "patches should always be assigned with a patch variant."
return True
@@ -2821,8 +2802,7 @@ class Spec(object):
spec_to_patches[id(s)] = patches
# Also record all patches required on dependencies by
# depends_on(..., patch=...)
- for dspec in root.traverse_edges(deptype=all,
- cover='edges', root=False):
+ for dspec in root.traverse_edges(deptype=all, cover="edges", root=False):
pkg_deps = dspec.parent.package_class.dependencies
if dspec.spec.name not in pkg_deps:
continue
@@ -2833,8 +2813,7 @@ class Spec(object):
patches = []
for cond, dependency in pkg_deps[dspec.spec.name].items():
for pcond, patch_list in dependency.patches.items():
- if (dspec.parent.satisfies(cond, strict=True)
- and dspec.spec.satisfies(pcond)):
+ if dspec.parent.satisfies(cond, strict=True) and dspec.spec.satisfies(pcond):
patches.extend(patch_list)
if patches:
all_patches = spec_to_patches.setdefault(id(dspec.spec), [])
@@ -2844,25 +2823,23 @@ class Spec(object):
continue
patches = list(lang.dedupe(spec_to_patches[id(spec)]))
- mvar = spec.variants.setdefault(
- 'patches', vt.MultiValuedVariant('patches', ())
- )
+ mvar = spec.variants.setdefault("patches", vt.MultiValuedVariant("patches", ()))
mvar.value = tuple(p.sha256 for p in patches)
# FIXME: Monkey patches mvar to store patches order
- full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p
- in patches)
+ full_order_keys = list(tuple(p.ordering_key) + (p.sha256,) for p in patches)
ordered_hashes = sorted(full_order_keys)
- tty.debug("Ordered hashes [{0}]: ".format(spec.name) +
- ', '.join('/'.join(str(e) for e in t)
- for t in ordered_hashes))
- mvar._patches_in_order_of_appearance = list(
- t[-1] for t in ordered_hashes)
+ tty.debug(
+ "Ordered hashes [{0}]: ".format(spec.name)
+ + ", ".join("/".join(str(e) for e in t) for t in ordered_hashes)
+ )
+ mvar._patches_in_order_of_appearance = list(t[-1] for t in ordered_hashes)
@staticmethod
def ensure_external_path_if_external(external_spec):
if external_spec.external_modules and not external_spec.external_path:
compiler = spack.compilers.compiler_for_spec(
- external_spec.compiler, external_spec.architecture)
+ external_spec.compiler, external_spec.architecture
+ )
for mod in compiler.modules:
md.load_module(mod)
@@ -2871,8 +2848,7 @@ class Spec(object):
pkg_cls = spack.repo.path.get_pkg_class(external_spec.name)
package = pkg_cls(external_spec)
external_spec.external_path = getattr(
- package, 'external_prefix',
- md.path_from_modules(external_spec.external_modules)
+ package, "external_prefix", md.path_from_modules(external_spec.external_modules)
)
@staticmethod
@@ -2895,8 +2871,8 @@ class Spec(object):
msg = "\n The following specs have been deprecated"
msg += " in favor of specs with the hashes shown:\n"
for rec in deprecated:
- msg += ' %s --> %s\n' % (rec.spec, rec.deprecated_for)
- msg += '\n'
+ msg += " %s --> %s\n" % (rec.spec, rec.deprecated_for)
+ msg += "\n"
msg += " For each package listed, choose another spec\n"
raise SpecDeprecatedError(msg)
@@ -2904,8 +2880,7 @@ class Spec(object):
import spack.solver.asp
if not self.name:
- raise spack.error.SpecError(
- "Spec has no name; cannot concretize an anonymous spec")
+ raise spack.error.SpecError("Spec has no name; cannot concretize an anonymous spec")
if self._concrete:
return
@@ -2919,8 +2894,7 @@ class Spec(object):
name = self.name
# TODO: Consolidate this code with similar code in solve.py
if self.virtual:
- providers = [spec.name for spec in answer.values()
- if spec.package.provides(name)]
+ providers = [spec.name for spec in answer.values() if spec.package.provides(name)]
name = providers[0]
assert name in answer
@@ -2936,7 +2910,7 @@ class Spec(object):
if a list of names activate them for the packages in the list,
if True activate 'test' dependencies for all packages.
"""
- if spack.config.get('config:concretizer') == "clingo":
+ if spack.config.get("config:concretizer") == "clingo":
self._new_concretize(tests)
else:
self._old_concretize(tests)
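# Minimal sketch of driving concretization, assuming a configured Spack
# installation; which branch above runs depends on the config:concretizer setting.
from spack.spec import Spec

s = Spec("zlib")
s.concretize()
print(s.concrete)    # True once versions, variants, compiler and hashes are pinned
print(s.short_spec)  # e.g. zlib@<version>%<compiler>... with a 7-character hash suffix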
@@ -3026,19 +3000,19 @@ class Spec(object):
def flat_dependencies(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
- dependencies with their constraints merged.
+ dependencies with their constraints merged.
- If copy is True, returns merged copies of its dependencies
- without modifying the spec it's called on.
+ If copy is True, returns merged copies of its dependencies
+ without modifying the spec it's called on.
- If copy is False, clears this spec's dependencies and
- returns them. This disconnects all dependency links including
- transitive dependencies, except for concrete specs: if a spec
- is concrete it will not be disconnected from its dependencies
- (although a non-concrete spec with concrete dependencies will
- be disconnected from those dependencies).
+ If copy is False, clears this spec's dependencies and
+ returns them. This disconnects all dependency links including
+ transitive dependencies, except for concrete specs: if a spec
+ is concrete it will not be disconnected from its dependencies
+ (although a non-concrete spec with concrete dependencies will
+ be disconnected from those dependencies).
"""
- copy = kwargs.get('copy', True)
+ copy = kwargs.get("copy", True)
flat_deps = {}
try:
@@ -3070,7 +3044,7 @@ class Spec(object):
e,
)
- def index(self, deptype='all'):
+ def index(self, deptype="all"):
"""Return a dictionary that points to all the dependencies in this
spec.
"""
@@ -3113,18 +3087,18 @@ class Spec(object):
"Cannot merge constraint"
"\n\n\t{1}\n\n"
"into"
- "\n\n\t{2}"
- .format(self, dependency.spec, dep.spec))
+ "\n\n\t{2}".format(self, dependency.spec, dep.spec)
+ )
raise e
return dep
def _find_provider(self, vdep, provider_index):
"""Find provider for a virtual spec in the provider index.
- Raise an exception if there is a conflicting virtual
- dependency already in this spec.
+ Raise an exception if there is a conflicting virtual
+ dependency already in this spec.
"""
- assert(vdep.virtual)
+ assert vdep.virtual
# note that this defensively copies.
providers = provider_index.providers_for(vdep)
@@ -3152,8 +3126,7 @@ class Spec(object):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
- def _merge_dependency(
- self, dependency, visited, spec_deps, provider_index, tests):
+ def _merge_dependency(self, dependency, visited, spec_deps, provider_index, tests):
"""Merge dependency information from a Package into this Spec.
Args:
@@ -3225,23 +3198,19 @@ class Spec(object):
else:
# merge package/vdep information into spec
try:
- tty.debug(
- "{0} applying constraint {1}".format(self.name, str(dep)))
+ tty.debug("{0} applying constraint {1}".format(self.name, str(dep)))
changed |= spec_deps[dep.name].constrain(dep)
except spack.error.UnsatisfiableSpecError as e:
- fmt = 'An unsatisfiable {0}'.format(e.constraint_type)
- fmt += ' constraint has been detected for spec:'
- fmt += '\n\n{0}\n\n'.format(spec_deps[dep.name].tree(indent=4))
- fmt += 'while trying to concretize the partial spec:'
- fmt += '\n\n{0}\n\n'.format(self.tree(indent=4))
- fmt += '{0} requires {1} {2} {3}, but spec asked for {4}'
+ fmt = "An unsatisfiable {0}".format(e.constraint_type)
+ fmt += " constraint has been detected for spec:"
+ fmt += "\n\n{0}\n\n".format(spec_deps[dep.name].tree(indent=4))
+ fmt += "while trying to concretize the partial spec:"
+ fmt += "\n\n{0}\n\n".format(self.tree(indent=4))
+ fmt += "{0} requires {1} {2} {3}, but spec asked for {4}"
e.message = fmt.format(
- self.name,
- dep.name,
- e.constraint_type,
- e.required,
- e.provided)
+ self.name, dep.name, e.constraint_type, e.required, e.provided
+ )
raise
@@ -3250,8 +3219,7 @@ class Spec(object):
if dep.name not in self._dependencies:
self._add_dependency(spec_dependency, dependency.type)
- changed |= spec_dependency._normalize_helper(
- visited, spec_deps, provider_index, tests)
+ changed |= spec_dependency._normalize_helper(visited, spec_deps, provider_index, tests)
return changed
def _normalize_helper(self, visited, spec_deps, provider_index, tests):
@@ -3286,37 +3254,40 @@ class Spec(object):
if dep:
merge = (
# caller requested test dependencies
- tests is True or (tests and self.name in tests) or
+ tests is True
+ or (tests and self.name in tests)
+ or
# this is not a test-only dependency
- dep.type - set(['test']))
+ dep.type - set(["test"])
+ )
if merge:
changed |= self._merge_dependency(
- dep, visited, spec_deps, provider_index, tests)
+ dep, visited, spec_deps, provider_index, tests
+ )
any_change |= changed
return any_change
def normalize(self, force=False, tests=False, user_spec_deps=None):
"""When specs are parsed, any dependencies specified are hanging off
- the root, and ONLY the ones that were explicitly provided are there.
- Normalization turns a partial flat spec into a DAG, where:
+ the root, and ONLY the ones that were explicitly provided are there.
+ Normalization turns a partial flat spec into a DAG, where:
- 1. Known dependencies of the root package are in the DAG.
- 2. Each node's dependencies dict only contains its known direct
- deps.
- 3. There is only ONE unique spec for each package in the DAG.
+ 1. Known dependencies of the root package are in the DAG.
+ 2. Each node's dependencies dict only contains its known direct
+ deps.
+ 3. There is only ONE unique spec for each package in the DAG.
- * This includes virtual packages. If there a non-virtual
- package that provides a virtual package that is in the spec,
- then we replace the virtual package with the non-virtual one.
+           * This includes virtual packages. If there is a non-virtual
+ package that provides a virtual package that is in the spec,
+ then we replace the virtual package with the non-virtual one.
- TODO: normalize should probably implement some form of cycle
- detection, to ensure that the spec is actually a DAG.
+ TODO: normalize should probably implement some form of cycle
+ detection, to ensure that the spec is actually a DAG.
"""
if not self.name:
- raise spack.error.SpecError(
- "Attempting to normalize anonymous spec")
+ raise spack.error.SpecError("Attempting to normalize anonymous spec")
# Set _normal and _concrete to False when forced
if force and not self._concrete:
@@ -3351,14 +3322,14 @@ class Spec(object):
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
provider_index = spack.provider_index.ProviderIndex(
- [s for s in all_spec_deps.values()], restrict=True)
+ [s for s in all_spec_deps.values()], restrict=True
+ )
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
visited = set()
- any_change = self._normalize_helper(
- visited, all_spec_deps, provider_index, tests)
+ any_change = self._normalize_helper(visited, all_spec_deps, provider_index, tests)
# Mark the spec as normal once done.
self._normal = True
@@ -3441,8 +3412,9 @@ class Spec(object):
for value in values:
if self.variants.get(variant_name):
- msg = ("Cannot append a value to a single-valued "
- "variant with an already set value")
+ msg = (
+ "Cannot append a value to a single-valued " "variant with an already set value"
+ )
assert pkg_variant.multi, msg
self.variants[variant_name].append(value)
else:
@@ -3464,20 +3436,18 @@ class Spec(object):
if self.satisfies(other):
return False
else:
- raise spack.error.UnsatisfiableSpecError(
- self, other, 'constrain a concrete spec'
- )
+ raise spack.error.UnsatisfiableSpecError(self, other, "constrain a concrete spec")
other = self._autospec(other)
- if not (self.name == other.name or
- (not self.name) or
- (not other.name)):
+ if not (self.name == other.name or (not self.name) or (not other.name)):
raise UnsatisfiableSpecNameError(self.name, other.name)
- if (other.namespace is not None and
- self.namespace is not None and
- other.namespace != self.namespace):
+ if (
+ other.namespace is not None
+ and self.namespace is not None
+ and other.namespace != self.namespace
+ ):
raise UnsatisfiableSpecNameError(self.fullname, other.fullname)
if not self.versions.overlaps(other.versions):
@@ -3485,9 +3455,7 @@ class Spec(object):
for v in [x for x in other.variants if x in self.variants]:
if not self.variants[v].compatible(other.variants[v]):
- raise vt.UnsatisfiableVariantSpecError(
- self.variants[v], other.variants[v]
- )
+ raise vt.UnsatisfiableVariantSpecError(self.variants[v], other.variants[v])
# TODO: Check out the logic here
sarch, oarch = self.architecture, other.architecture
@@ -3515,7 +3483,7 @@ class Spec(object):
if self.compiler is not None and other.compiler is not None:
changed |= self.compiler.constrain(other.compiler)
elif self.compiler is None:
- changed |= (self.compiler != other.compiler)
+ changed |= self.compiler != other.compiler
self.compiler = other.compiler
changed |= self.versions.intersect(other.versions)
@@ -3534,7 +3502,7 @@ class Spec(object):
sarch.os = sarch.os or oarch.os
if sarch.target is None or oarch.target is None:
sarch.target = sarch.target or oarch.target
- changed |= (str(self.architecture) != old)
+ changed |= str(self.architecture) != old
if deps:
changed |= self._constrain_dependencies(other)
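# Constraint-merging sketch for the method above; abstract specs can be parsed
# and constrained without a package repository.
from spack.spec import Spec

a = Spec("hdf5@1.10:")
changed = a.constrain(Spec("hdf5+mpi cflags=-O2"))
print(changed)  # True: the variant and the compiler flag were newly added
print(a)        # roughly hdf5@1.10:+mpi cflags="-O2" (exact ordering is up to the formatter)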
@@ -3570,7 +3538,8 @@ class Spec(object):
# WARNING: one edge from package "name"
edges_from_name = self._dependencies[name]
changed |= edges_from_name[0].update_deptypes(
- other._dependencies[name][0].deptypes)
+ other._dependencies[name][0].deptypes
+ )
# Update with additional constraints from other spec
for name in other.dep_difference(self):
@@ -3584,10 +3553,8 @@ class Spec(object):
def common_dependencies(self, other):
"""Return names of dependencies that self an other have in common."""
- common = set(
- s.name for s in self.traverse(root=False))
- common.intersection_update(
- s.name for s in other.traverse(root=False))
+ common = set(s.name for s in self.traverse(root=False))
+ common.intersection_update(s.name for s in other.traverse(root=False))
return common
def constrained(self, other, deps=True):
@@ -3599,8 +3566,7 @@ class Spec(object):
def dep_difference(self, other):
"""Returns dependencies in self that are not in other."""
mine = set(s.name for s in self.traverse(root=False))
- mine.difference_update(
- s.name for s in other.traverse(root=False))
+ mine.difference_update(s.name for s in other.traverse(root=False))
return mine
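# Quick sketch of the two set helpers above, assuming a configured Spack
# installation where both placeholder packages concretize.
from spack.spec import Spec

a = Spec("hdf5+mpi").concretized()
b = Spec("netcdf-c").concretized()
shared_names = a.common_dependencies(b)  # package names reachable from both roots
only_in_a = a.dep_difference(b)          # package names only reachable from a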
def _autospec(self, spec_like):
@@ -3648,16 +3614,19 @@ class Spec(object):
if pkg.provides(other.name):
for provided, when_specs in pkg.provided.items():
- if any(self.satisfies(when, deps=False, strict=strict)
- for when in when_specs):
+ if any(
+ self.satisfies(when, deps=False, strict=strict) for when in when_specs
+ ):
if provided.satisfies(other):
return True
return False
# namespaces either match, or other doesn't require one.
- if (other.namespace is not None and
- self.namespace is not None and
- self.namespace != other.namespace):
+ if (
+ other.namespace is not None
+ and self.namespace is not None
+ and self.namespace != other.namespace
+ ):
return False
if self.versions and other.versions:
if not self.versions.satisfies(other.versions, strict=strict):
@@ -3686,9 +3655,7 @@ class Spec(object):
elif strict and (other.architecture and not self.architecture):
return False
- if not self.compiler_flags.satisfies(
- other.compiler_flags,
- strict=strict):
+ if not self.compiler_flags.satisfies(other.compiler_flags, strict=strict):
return False
# If we need to descend into dependencies, do it, otherwise we're done.
@@ -3719,8 +3686,7 @@ class Spec(object):
# use list to prevent double-iteration
selfdeps = list(self.traverse(root=False))
otherdeps = list(other.traverse(root=False))
- if not all(any(d.satisfies(dep, strict=True) for d in selfdeps)
- for dep in otherdeps):
+ if not all(any(d.satisfies(dep, strict=True) for d in selfdeps) for dep in otherdeps):
return False
elif not self._dependencies:
@@ -3734,10 +3700,8 @@ class Spec(object):
return False
# For virtual dependencies, we need to dig a little deeper.
- self_index = spack.provider_index.ProviderIndex(
- self.traverse(), restrict=True)
- other_index = spack.provider_index.ProviderIndex(
- other.traverse(), restrict=True)
+ self_index = spack.provider_index.ProviderIndex(self.traverse(), restrict=True)
+ other_index = spack.provider_index.ProviderIndex(other.traverse(), restrict=True)
# This handles cases where there are already providers for both vpkgs
if not self_index.satisfies(other_index):
@@ -3747,8 +3711,7 @@ class Spec(object):
# vpkg in one spec for a provider in the other (e.g., mpi@3: is not
# compatible with mpich2)
for spec in self.virtual_dependencies():
- if (spec.name in other_index and
- not other_index.providers_for(spec)):
+ if spec.name in other_index and not other_index.providers_for(spec):
return False
for spec in other.virtual_dependencies():
@@ -3807,25 +3770,26 @@ class Spec(object):
"""
# We don't count dependencies as changes here
changed = True
- if hasattr(self, 'name'):
- changed = (self.name != other.name and
- self.versions != other.versions and
- self.architecture != other.architecture and
- self.compiler != other.compiler and
- self.variants != other.variants and
- self._normal != other._normal and
- self.concrete != other.concrete and
- self.external_path != other.external_path and
- self.external_modules != other.external_modules and
- self.compiler_flags != other.compiler_flags)
+ if hasattr(self, "name"):
+ changed = (
+ self.name != other.name
+ and self.versions != other.versions
+ and self.architecture != other.architecture
+ and self.compiler != other.compiler
+ and self.variants != other.variants
+ and self._normal != other._normal
+ and self.concrete != other.concrete
+ and self.external_path != other.external_path
+ and self.external_modules != other.external_modules
+ and self.compiler_flags != other.compiler_flags
+ )
self._package = None
# Local node attributes get copied first.
self.name = other.name
self.versions = other.versions.copy()
- self.architecture = other.architecture.copy() if other.architecture \
- else None
+ self.architecture = other.architecture.copy() if other.architecture else None
self.compiler = other.compiler.copy() if other.compiler else None
if cleardeps:
self._dependents = _EdgeMap(store_by=EdgeDirection.parent)
@@ -3839,7 +3803,7 @@ class Spec(object):
# to keep it from leaking out of spec.py, but we should figure
# out how to handle it more elegantly in the Variant classes.
for k, v in other.variants.items():
- patches = getattr(v, '_patches_in_order_of_appearance', None)
+ patches = getattr(v, "_patches_in_order_of_appearance", None)
if patches:
self.variants[k]._patches_in_order_of_appearance = patches
@@ -3880,14 +3844,12 @@ class Spec(object):
return id(spec)
new_specs = {spid(other): self}
- for edge in other.traverse_edges(cover='edges', root=False):
+ for edge in other.traverse_edges(cover="edges", root=False):
if edge.deptypes and not any(d in deptypes for d in edge.deptypes):
continue
if spid(edge.parent) not in new_specs:
- new_specs[spid(edge.parent)] = edge.parent.copy(
- deps=False
- )
+ new_specs[spid(edge.parent)] = edge.parent.copy(deps=False)
if spid(edge.spec) not in new_specs:
new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
@@ -3931,8 +3893,7 @@ class Spec(object):
@property
def version(self):
if not self.versions.concrete:
- raise spack.error.SpecError(
- "Spec version is not concrete: " + str(self))
+ raise spack.error.SpecError("Spec version is not concrete: " + str(self))
return self.versions[0]
def __getitem__(self, name):
@@ -3944,10 +3905,10 @@ class Spec(object):
Note that if a virtual package is queried a copy of the Spec is
returned while for non-virtual a reference is returned.
"""
- query_parameters = name.split(':')
+ query_parameters = name.split(":")
if len(query_parameters) > 2:
- msg = 'key has more than one \':\' symbol.'
- msg += ' At most one is admitted.'
+ msg = "key has more than one ':' symbol."
+ msg += " At most one is admitted."
raise KeyError(msg)
name, query_parameters = query_parameters[0], query_parameters[1:]
@@ -3955,15 +3916,14 @@ class Spec(object):
# We have extra query parameters, which are comma separated
# values
csv = query_parameters.pop().strip()
- query_parameters = re.split(r'\s*,\s*', csv)
+ query_parameters = re.split(r"\s*,\s*", csv)
try:
value = next(
itertools.chain(
# Regular specs
(x for x in self.traverse() if x.name == name),
- (x for x in self.traverse()
- if (not x.virtual) and x.package.provides(name))
+ (x for x in self.traverse() if (not x.virtual) and x.package.provides(name)),
)
)
except StopIteration:
@@ -4007,13 +3967,10 @@ class Spec(object):
if len(self._dependencies) != len(other._dependencies):
return False
- ssorted = [self._dependencies[name]
- for name in sorted(self._dependencies)]
- osorted = [other._dependencies[name]
- for name in sorted(other._dependencies)]
+ ssorted = [self._dependencies[name] for name in sorted(self._dependencies)]
+ osorted = [other._dependencies[name] for name in sorted(other._dependencies)]
for s_dspec, o_dspec in zip(
- itertools.chain.from_iterable(ssorted),
- itertools.chain.from_iterable(osorted)
+ itertools.chain.from_iterable(ssorted), itertools.chain.from_iterable(osorted)
):
if deptypes and s_dspec.deptypes != o_dspec.deptypes:
return False
@@ -4048,9 +4005,7 @@ class Spec(object):
def eq_node(self, other):
"""Equality with another spec, not including dependencies."""
- return (other is not None) and lang.lazy_eq(
- self._cmp_node, other._cmp_node
- )
+ return (other is not None) and lang.lazy_eq(self._cmp_node, other._cmp_node)
def _cmp_iter(self):
"""Lazily yield components of self for comparison."""
@@ -4058,12 +4013,11 @@ class Spec(object):
yield item
def deps():
- for dep in sorted(
- itertools.chain.from_iterable(self._dependencies.values())
- ):
+ for dep in sorted(itertools.chain.from_iterable(self._dependencies.values())):
yield dep.spec.name
yield tuple(sorted(dep.deptypes))
yield hash(dep.spec)
+
yield deps
def colorized(self):
@@ -4149,69 +4103,68 @@ class Spec(object):
"""
# If we have an unescaped $ sigil, use the deprecated format strings
- if re.search(r'[^\\]*\$', format_string):
+ if re.search(r"[^\\]*\$", format_string):
return self.old_format(format_string, **kwargs)
- color = kwargs.get('color', False)
- transform = kwargs.get('transform', {})
+ color = kwargs.get("color", False)
+ transform = kwargs.get("transform", {})
out = six.StringIO()
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
- f = color_formats[c] + f + '@.'
+ f = color_formats[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
def write_attribute(spec, attribute, color):
current = spec
- if attribute.startswith('^'):
+ if attribute.startswith("^"):
attribute = attribute[1:]
- dep, attribute = attribute.split('.', 1)
+ dep, attribute = attribute.split(".", 1)
current = self[dep]
- if attribute == '':
- raise SpecFormatStringError(
- 'Format string attributes must be non-empty')
+ if attribute == "":
+ raise SpecFormatStringError("Format string attributes must be non-empty")
attribute = attribute.lower()
- sig = ''
- if attribute[0] in '@%/':
+ sig = ""
+ if attribute[0] in "@%/":
# color sigils that are inside braces
sig = attribute[0]
attribute = attribute[1:]
- elif attribute.startswith('arch='):
- sig = ' arch=' # include space as separator
+ elif attribute.startswith("arch="):
+ sig = " arch=" # include space as separator
attribute = attribute[5:]
- parts = attribute.split('.')
+ parts = attribute.split(".")
assert parts
# check that the sigil is valid for the attribute.
- if sig == '@' and parts[-1] not in ('versions', 'version'):
- raise SpecFormatSigilError(sig, 'versions', attribute)
- elif sig == '%' and attribute not in ('compiler', 'compiler.name'):
- raise SpecFormatSigilError(sig, 'compilers', attribute)
- elif sig == '/' and not re.match(r'hash(:\d+)?$', attribute):
- raise SpecFormatSigilError(sig, 'DAG hashes', attribute)
- elif sig == ' arch=' and attribute not in ('architecture', 'arch'):
- raise SpecFormatSigilError(sig, 'the architecture', attribute)
+ if sig == "@" and parts[-1] not in ("versions", "version"):
+ raise SpecFormatSigilError(sig, "versions", attribute)
+ elif sig == "%" and attribute not in ("compiler", "compiler.name"):
+ raise SpecFormatSigilError(sig, "compilers", attribute)
+ elif sig == "/" and not re.match(r"hash(:\d+)?$", attribute):
+ raise SpecFormatSigilError(sig, "DAG hashes", attribute)
+ elif sig == " arch=" and attribute not in ("architecture", "arch"):
+ raise SpecFormatSigilError(sig, "the architecture", attribute)
# find the morph function for our attribute
morph = transform.get(attribute, lambda s, x: x)
# Special cases for non-spec attributes and hashes.
# These must be the only non-dep component of the format attribute
- if attribute == 'spack_root':
+ if attribute == "spack_root":
write(morph(spec, spack.paths.spack_root))
return
- elif attribute == 'spack_install':
+ elif attribute == "spack_install":
write(morph(spec, spack.store.layout.root))
return
- elif re.match(r'hash(:\d)?', attribute):
- col = '#'
- if ':' in attribute:
- _, length = attribute.split(':')
+ elif re.match(r"hash(:\d)?", attribute):
+ col = "#"
+ if ":" in attribute:
+ _, length = attribute.split(":")
write(sig + morph(spec, spec.dag_hash(int(length))), col)
else:
write(sig + morph(spec, spec.dag_hash()), col)
@@ -4220,31 +4173,27 @@ class Spec(object):
# Iterate over components using getattr to get next element
for idx, part in enumerate(parts):
if not part:
- raise SpecFormatStringError(
- 'Format string attributes must be non-empty'
- )
- if part.startswith('_'):
- raise SpecFormatStringError(
- 'Attempted to format private attribute'
- )
+ raise SpecFormatStringError("Format string attributes must be non-empty")
+ if part.startswith("_"):
+ raise SpecFormatStringError("Attempted to format private attribute")
else:
if isinstance(current, vt.VariantMap):
# subscript instead of getattr for variant names
current = current[part]
else:
# aliases
- if part == 'arch':
- part = 'architecture'
- elif part == 'version':
+ if part == "arch":
+ part = "architecture"
+ elif part == "version":
# Version requires concrete spec, versions does not
# when concrete, they print the same thing
- part = 'versions'
+ part = "versions"
try:
current = getattr(current, part)
except AttributeError:
- parent = '.'.join(parts[:idx])
- m = 'Attempted to format attribute %s.' % attribute
- m += 'Spec.%s has no attribute %s' % (parent, part)
+ parent = ".".join(parts[:idx])
+                            m = "Attempted to format attribute %s. " % attribute
+ m += "Spec.%s has no attribute %s" % (parent, part)
raise SpecFormatStringError(m)
if isinstance(current, vn.VersionList):
if current == _any_version:
@@ -4252,28 +4201,26 @@ class Spec(object):
return
if callable(current):
- raise SpecFormatStringError(
- 'Attempted to format callable object'
- )
+ raise SpecFormatStringError("Attempted to format callable object")
if not current:
# We're not printing anything
return
# Set color codes for various attributes
col = None
- if 'variants' in parts:
- col = '+'
- elif 'architecture' in parts:
- col = '='
- elif 'compiler' in parts or 'compiler_flags' in parts:
- col = '%'
- elif 'version' in parts:
- col = '@'
+ if "variants" in parts:
+ col = "+"
+ elif "architecture" in parts:
+ col = "="
+ elif "compiler" in parts or "compiler_flags" in parts:
+ col = "%"
+ elif "version" in parts:
+ col = "@"
# Finally, write the output
write(sig + morph(spec, str(current)), col)
- attribute = ''
+ attribute = ""
in_attribute = False
escape = False
@@ -4281,34 +4228,31 @@ class Spec(object):
if escape:
out.write(c)
escape = False
- elif c == '\\':
+ elif c == "\\":
escape = True
elif in_attribute:
- if c == '}':
+ if c == "}":
write_attribute(self, attribute, color)
- attribute = ''
+ attribute = ""
in_attribute = False
else:
attribute += c
else:
- if c == '}':
- raise SpecFormatStringError(
- 'Encountered closing } before opening {'
- )
- elif c == '{':
+ if c == "}":
+ raise SpecFormatStringError("Encountered closing } before opening {")
+ elif c == "{":
in_attribute = True
else:
out.write(c)
if in_attribute:
raise SpecFormatStringError(
- 'Format string terminated while reading attribute.'
- 'Missing terminating }.'
+ "Format string terminated while reading attribute." "Missing terminating }."
)
formatted_spec = out.getvalue()
return formatted_spec.strip()
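A brief usage sketch of the sigil handling implemented above, assuming a working Spack installation on PYTHONPATH; zlib is only a placeholder package:

    import spack.spec

    # Concretize a placeholder spec so version, compiler and arch are populated.
    s = spack.spec.Spec("zlib").concretized()

    # Each sigil must match its attribute: @ for versions, % for the compiler,
    # "arch=" for the architecture, and / for the DAG hash.
    print(s.format("{name}{@version}{%compiler}{variants}{arch=architecture}{/hash:7}"))

    # A mismatched sigil, e.g. s.format("{@compiler}"), raises SpecFormatSigilError.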
- def old_format(self, format_string='$_$@$%@+$+$=', **kwargs):
+ def old_format(self, format_string="$_$@$%@+$+$=", **kwargs):
"""
The format strings you can provide are::
@@ -4382,77 +4326,79 @@ class Spec(object):
TODO: allow, e.g., ``$//`` for full hash.
"""
- color = kwargs.get('color', False)
+ color = kwargs.get("color", False)
# Dictionary of transformations for named tokens
- token_transforms = dict(
- (k.upper(), v) for k, v in kwargs.get('transform', {}).items())
+ token_transforms = dict((k.upper(), v) for k, v in kwargs.get("transform", {}).items())
length = len(format_string)
out = six.StringIO()
named = escape = compiler = False
- named_str = fmt = ''
+ named_str = fmt = ""
def write(s, c=None):
f = clr.cescape(s)
if c is not None:
- f = color_formats[c] + f + '@.'
+ f = color_formats[c] + f + "@."
clr.cwrite(f, stream=out, color=color)
iterator = enumerate(format_string)
for i, c in iterator:
if escape:
- fmt = '%'
- if c == '-':
+ fmt = "%"
+ if c == "-":
fmt += c
i, c = next(iterator)
- while c in '0123456789':
+ while c in "0123456789":
fmt += c
i, c = next(iterator)
- fmt += 's'
+ fmt += "s"
- if c == '_':
- name = self.name if self.name else ''
+ if c == "_":
+ name = self.name if self.name else ""
out.write(fmt % name)
- elif c == '.':
- name = self.fullname if self.fullname else ''
+ elif c == ".":
+ name = self.fullname if self.fullname else ""
out.write(fmt % name)
- elif c == '@':
+ elif c == "@":
if self.versions and self.versions != _any_version:
write(fmt % (c + str(self.versions)), c)
- elif c == '%':
+ elif c == "%":
if self.compiler:
write(fmt % (c + str(self.compiler.name)), c)
compiler = True
- elif c == '+':
+ elif c == "+":
if self.variants:
write(fmt % str(self.variants), c)
- elif c == '=':
+ elif c == "=":
if self.architecture and str(self.architecture):
- a_str = ' arch' + c + str(self.architecture) + ' '
+ a_str = " arch" + c + str(self.architecture) + " "
write(fmt % (a_str), c)
- elif c == '/':
- out.write('/' + fmt % (self.dag_hash(7)))
- elif c == '$':
- if fmt != '%s':
+ elif c == "/":
+ out.write("/" + fmt % (self.dag_hash(7)))
+ elif c == "$":
+ if fmt != "%s":
raise ValueError("Can't use format width with $$.")
- out.write('$')
- elif c == '{':
+ out.write("$")
+ elif c == "{":
named = True
- named_str = ''
+ named_str = ""
escape = False
elif compiler:
- if c == '@':
- if (self.compiler and self.compiler.versions and
- self.compiler.versions != _any_version):
- write(c + str(self.compiler.versions), '%')
- elif c == '+':
+ if c == "@":
+ if (
+ self.compiler
+ and self.compiler.versions
+ and self.compiler.versions != _any_version
+ ):
+ write(c + str(self.compiler.versions), "%")
+ elif c == "+":
if self.compiler_flags:
- write(fmt % str(self.compiler_flags), '%')
+ write(fmt % str(self.compiler_flags), "%")
compiler = False
- elif c == '$':
+ elif c == "$":
escape = True
compiler = False
else:
@@ -4460,10 +4406,11 @@ class Spec(object):
compiler = False
elif named:
- if not c == '}':
+ if not c == "}":
if i == length - 1:
- raise ValueError("Error: unterminated ${ in format:"
- "'%s'" % format_string)
+ raise ValueError(
+ "Error: unterminated ${ in format:" "'%s'" % format_string
+ )
named_str += c
continue
named_str = named_str.upper()
@@ -4474,82 +4421,72 @@ class Spec(object):
# (`lambda x: x` is the identity function)
transform = token_transforms.get(named_str, lambda s, x: x)
- if named_str == 'PACKAGE':
- name = self.name if self.name else ''
+ if named_str == "PACKAGE":
+ name = self.name if self.name else ""
write(fmt % transform(self, name))
- elif named_str == 'FULLPACKAGE':
- name = self.fullname if self.fullname else ''
+ elif named_str == "FULLPACKAGE":
+ name = self.fullname if self.fullname else ""
write(fmt % transform(self, name))
- elif named_str == 'VERSION':
+ elif named_str == "VERSION":
if self.versions and self.versions != _any_version:
- write(fmt % transform(self, str(self.versions)), '@')
- elif named_str == 'COMPILER':
+ write(fmt % transform(self, str(self.versions)), "@")
+ elif named_str == "COMPILER":
if self.compiler:
- write(fmt % transform(self, self.compiler), '%')
- elif named_str == 'COMPILERNAME':
+ write(fmt % transform(self, self.compiler), "%")
+ elif named_str == "COMPILERNAME":
if self.compiler:
- write(fmt % transform(self, self.compiler.name), '%')
- elif named_str in ['COMPILERVER', 'COMPILERVERSION']:
+ write(fmt % transform(self, self.compiler.name), "%")
+ elif named_str in ["COMPILERVER", "COMPILERVERSION"]:
if self.compiler:
- write(
- fmt % transform(self, self.compiler.versions),
- '%'
- )
- elif named_str == 'COMPILERFLAGS':
+ write(fmt % transform(self, self.compiler.versions), "%")
+ elif named_str == "COMPILERFLAGS":
if self.compiler:
- write(
- fmt % transform(self, str(self.compiler_flags)),
- '%'
- )
- elif named_str == 'OPTIONS':
+ write(fmt % transform(self, str(self.compiler_flags)), "%")
+ elif named_str == "OPTIONS":
if self.variants:
- write(fmt % transform(self, str(self.variants)), '+')
+ write(fmt % transform(self, str(self.variants)), "+")
elif named_str in ["ARCHITECTURE", "PLATFORM", "TARGET", "OS"]:
if self.architecture and str(self.architecture):
if named_str == "ARCHITECTURE":
- write(
- fmt % transform(self, str(self.architecture)),
- '='
- )
+ write(fmt % transform(self, str(self.architecture)), "=")
elif named_str == "PLATFORM":
platform = str(self.architecture.platform)
- write(fmt % transform(self, platform), '=')
+ write(fmt % transform(self, platform), "=")
elif named_str == "OS":
operating_sys = str(self.architecture.os)
- write(fmt % transform(self, operating_sys), '=')
+ write(fmt % transform(self, operating_sys), "=")
elif named_str == "TARGET":
target = str(self.architecture.target)
- write(fmt % transform(self, target), '=')
- elif named_str == 'SHA1':
+ write(fmt % transform(self, target), "=")
+ elif named_str == "SHA1":
if self.dependencies:
out.write(fmt % transform(self, str(self.dag_hash(7))))
- elif named_str == 'SPACK_ROOT':
+ elif named_str == "SPACK_ROOT":
out.write(fmt % transform(self, spack.paths.prefix))
- elif named_str == 'SPACK_INSTALL':
+ elif named_str == "SPACK_INSTALL":
out.write(fmt % transform(self, spack.store.root))
- elif named_str == 'PREFIX':
+ elif named_str == "PREFIX":
out.write(fmt % transform(self, self.prefix))
- elif named_str.startswith('HASH'):
- if named_str.startswith('HASH:'):
- _, hashlen = named_str.split(':')
+ elif named_str.startswith("HASH"):
+ if named_str.startswith("HASH:"):
+ _, hashlen = named_str.split(":")
hashlen = int(hashlen)
else:
hashlen = None
out.write(fmt % (self.dag_hash(hashlen)))
- elif named_str == 'NAMESPACE':
+ elif named_str == "NAMESPACE":
out.write(fmt % transform(self, self.namespace))
- elif named_str.startswith('DEP:'):
- _, dep_name, dep_option = named_str.lower().split(':', 2)
+ elif named_str.startswith("DEP:"):
+ _, dep_name, dep_option = named_str.lower().split(":", 2)
dep_spec = self[dep_name]
- out.write(fmt % (dep_spec.format('${%s}' % dep_option)))
+ out.write(fmt % (dep_spec.format("${%s}" % dep_option)))
named = False
- elif c == '$':
+ elif c == "$":
escape = True
if i == length - 1:
- raise ValueError("Error: unterminated $ in format: '%s'"
- % format_string)
+ raise ValueError("Error: unterminated $ in format: '%s'" % format_string)
else:
out.write(c)
@@ -4559,13 +4496,11 @@ class Spec(object):
def cformat(self, *args, **kwargs):
"""Same as format, but color defaults to auto instead of False."""
kwargs = kwargs.copy()
- kwargs.setdefault('color', None)
+ kwargs.setdefault("color", None)
return self.format(*args, **kwargs)
def __str__(self):
- sorted_nodes = [self] + sorted(
- self.traverse(root=False), key=lambda x: x.name
- )
+ sorted_nodes = [self] + sorted(self.traverse(root=False), key=lambda x: x.name)
spec_str = " ^".join(d.format() for d in sorted_nodes)
return spec_str.strip()
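A small illustration of the rendering above; the spec string is made up and only the output shape matters:

    import spack.spec

    s = spack.spec.Spec("hdf5+mpi ^zlib@1.2.13")
    # str(s) prints the root followed by its dependencies, each prefixed with
    # " ^" and sorted by name, e.g. "hdf5+mpi ^zlib@1.2.13".
    print(str(s))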
@@ -4591,24 +4526,25 @@ class Spec(object):
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
- with indentation."""
- color = kwargs.pop('color', clr.get_color_when())
- depth = kwargs.pop('depth', False)
- hashes = kwargs.pop('hashes', False)
- hlen = kwargs.pop('hashlen', None)
- status_fn = kwargs.pop('status_fn', False)
- cover = kwargs.pop('cover', 'nodes')
- indent = kwargs.pop('indent', 0)
- fmt = kwargs.pop('format', default_format)
- prefix = kwargs.pop('prefix', None)
- show_types = kwargs.pop('show_types', False)
- deptypes = kwargs.pop('deptypes', 'all')
- recurse_dependencies = kwargs.pop('recurse_dependencies', True)
+ with indentation."""
+ color = kwargs.pop("color", clr.get_color_when())
+ depth = kwargs.pop("depth", False)
+ hashes = kwargs.pop("hashes", False)
+ hlen = kwargs.pop("hashlen", None)
+ status_fn = kwargs.pop("status_fn", False)
+ cover = kwargs.pop("cover", "nodes")
+ indent = kwargs.pop("indent", 0)
+ fmt = kwargs.pop("format", default_format)
+ prefix = kwargs.pop("prefix", None)
+ show_types = kwargs.pop("show_types", False)
+ deptypes = kwargs.pop("deptypes", "all")
+ recurse_dependencies = kwargs.pop("recurse_dependencies", True)
lang.check_kwargs(kwargs, self.tree)
out = ""
for d, dep_spec in self.traverse_edges(
- order='pre', cover=cover, depth=True, deptype=deptypes):
+ order="pre", cover=cover, depth=True, deptype=deptypes
+ ):
node = dep_spec.spec
if prefix is not None:
@@ -4630,25 +4566,22 @@ class Spec(object):
out += clr.colorize("@r{[-]} ", color=color) # missing
if hashes:
- out += clr.colorize(
- '@K{%s} ', color=color) % node.dag_hash(hlen)
+ out += clr.colorize("@K{%s} ", color=color) % node.dag_hash(hlen)
if show_types:
- if cover == 'nodes':
+ if cover == "nodes":
# when only covering nodes, we merge dependency types
# from all dependents before showing them.
- types = [
- ds.deptypes for ds in node.edges_from_dependents()
- ]
+ types = [ds.deptypes for ds in node.edges_from_dependents()]
else:
# when covering edges or paths, we show dependency
# types only for the edge through which we visited
types = [dep_spec.deptypes]
type_chars = dp.deptype_chars(*types)
- out += '[%s] ' % type_chars
+ out += "[%s] " % type_chars
- out += (" " * d)
+ out += " " * d
if d > 0:
out += "^"
out += node.format(fmt, color=color) + "\n"
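A hedged sketch of the tree rendering driven by the kwargs consumed above, reusing the concrete spec s from the earlier sketch:

    # hashes/hashlen/show_types/depth mirror the keyword arguments popped by tree().
    print(s.tree(hashes=True, hashlen=7, show_types=True, depth=True))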
@@ -4709,8 +4642,7 @@ class Spec(object):
assert self.concrete
assert other.concrete
- virtuals_to_replace = [v.name for v in other.package.virtuals_provided
- if v in self]
+ virtuals_to_replace = [v.name for v in other.package.virtuals_provided if v in self]
if virtuals_to_replace:
deps_to_replace = dict((self[v], other) for v in virtuals_to_replace)
# deps_to_replace = [self[v] for v in virtuals_to_replace]
@@ -4720,19 +4652,31 @@ class Spec(object):
# deps_to_replace = [self[other.name]]
for d in deps_to_replace:
- if not all(v in other.package.virtuals_provided or v not in self
- for v in d.package.virtuals_provided):
+ if not all(
+ v in other.package.virtuals_provided or v not in self
+ for v in d.package.virtuals_provided
+ ):
# There was something provided by the original that we don't
# get from its replacement.
- raise SpliceError(("Splice between {0} and {1} will not provide "
- "the same virtuals.").format(self.name, other.name))
+ raise SpliceError(
+ ("Splice between {0} and {1} will not provide " "the same virtuals.").format(
+ self.name, other.name
+ )
+ )
for n in d.traverse(root=False):
- if not all(any(v in other_n.package.virtuals_provided
- for other_n in other.traverse(root=False))
- or v not in self for v in n.package.virtuals_provided):
- raise SpliceError(("Splice between {0} and {1} will not provide "
- "the same virtuals."
- ).format(self.name, other.name))
+ if not all(
+ any(
+ v in other_n.package.virtuals_provided
+ for other_n in other.traverse(root=False)
+ )
+ or v not in self
+ for v in n.package.virtuals_provided
+ ):
+ raise SpliceError(
+ (
+ "Splice between {0} and {1} will not provide " "the same virtuals."
+ ).format(self.name, other.name)
+ )
# For now, check that we don't have DAG with multiple specs from the
# same package
@@ -4742,8 +4686,10 @@ class Spec(object):
return max_number > 1
if multiple_specs(self) or multiple_specs(other):
- msg = ('Either "{0}" or "{1}" contain multiple specs from the same '
- 'package, which cannot be handled by splicing at the moment')
+ msg = (
+ 'Either "{0}" or "{1}" contain multiple specs from the same '
+ "package, which cannot be handled by splicing at the moment"
+ )
raise ValueError(msg.format(self, other))
# Multiple unique specs with the same name will collide, so the
@@ -4763,23 +4709,28 @@ class Spec(object):
else:
if name == other.name:
return False
- if any(v in other.package.virtuals_provided
- for v in self[name].package.virtuals_provided):
+ if any(
+ v in other.package.virtuals_provided
+ for v in self[name].package.virtuals_provided
+ ):
return False
return True
- self_nodes = dict((s.name, s.copy(deps=False))
- for s in self.traverse(root=True)
- if from_self(s.name, transitive))
+ self_nodes = dict(
+ (s.name, s.copy(deps=False))
+ for s in self.traverse(root=True)
+ if from_self(s.name, transitive)
+ )
if transitive:
- other_nodes = dict((s.name, s.copy(deps=False))
- for s in other.traverse(root=True))
+ other_nodes = dict((s.name, s.copy(deps=False)) for s in other.traverse(root=True))
else:
# NOTE: Does not fully validate providers; loader races possible
- other_nodes = dict((s.name, s.copy(deps=False))
- for s in other.traverse(root=True)
- if s is other or s.name not in self)
+ other_nodes = dict(
+ (s.name, s.copy(deps=False))
+ for s in other.traverse(root=True)
+ if s is other or s.name not in self
+ )
nodes = other_nodes.copy()
nodes.update(self_nodes)
@@ -4788,30 +4739,24 @@ class Spec(object):
if name in self_nodes:
for edge in self[name].edges_to_dependencies():
dep_name = deps_to_replace.get(edge.spec, edge.spec).name
- nodes[name].add_dependency_edge(
- nodes[dep_name], edge.deptypes
- )
- if any(dep not in self_nodes
- for dep in self[name]._dependencies):
+ nodes[name].add_dependency_edge(nodes[dep_name], edge.deptypes)
+ if any(dep not in self_nodes for dep in self[name]._dependencies):
nodes[name].build_spec = self[name].build_spec
else:
for edge in other[name].edges_to_dependencies():
- nodes[name].add_dependency_edge(
- nodes[edge.spec.name], edge.deptypes
- )
- if any(dep not in other_nodes
- for dep in other[name]._dependencies):
+ nodes[name].add_dependency_edge(nodes[edge.spec.name], edge.deptypes)
+ if any(dep not in other_nodes for dep in other[name]._dependencies):
nodes[name].build_spec = other[name].build_spec
ret = nodes[self.name]
# Clear cached hashes for all affected nodes
# Do not touch unaffected nodes
- for dep in ret.traverse(root=True, order='post'):
+ for dep in ret.traverse(root=True, order="post"):
opposite = other_nodes if dep.name in self_nodes else self_nodes
if any(name in dep for name in opposite.keys()):
# package hash cannot be affected by splice
- dep.clear_cached_hashes(ignore=['package_hash'])
+ dep.clear_cached_hashes(ignore=["package_hash"])
dep.dag_hash()
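A rough sketch of how the splice logic above might be invoked; both specs must be concrete, and the variable names are placeholders:

    # transitive=True also takes the replacement's dependencies; cached hashes on
    # the affected nodes are cleared and recomputed, as in the loop above.
    spliced = installed_root.splice(rebuilt_dependency, transitive=True)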
@@ -4867,9 +4812,7 @@ def merge_abstract_anonymous_specs(*abstract_specs):
merged_spec.constrain(current_spec_constraint, deps=False)
for name in merged_spec.common_dependencies(current_spec_constraint):
- merged_spec[name].constrain(
- current_spec_constraint[name], deps=False
- )
+ merged_spec[name].constrain(current_spec_constraint[name], deps=False)
# Update with additional constraints from other spec
for name in current_spec_constraint.dep_difference(merged_spec):
@@ -4887,7 +4830,7 @@ def _spec_from_old_dict(data):
Parameters:
data -- a nested dict/list data structure read from YAML or JSON.
"""
- nodes = data['spec']
+ nodes = data["spec"]
# Read nodes out of list. Root spec is the first element;
# dependencies are the following elements.
@@ -4901,7 +4844,7 @@ def _spec_from_old_dict(data):
# get dependency dict from the node.
name = next(iter(node))
- if 'dependencies' not in node[name]:
+ if "dependencies" not in node[name]:
continue
for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
@@ -4920,17 +4863,17 @@ def _spec_from_dict(data):
Parameters:
data -- a nested dict/list data structure read from YAML or JSON.
"""
- if isinstance(data['spec'], list): # Legacy specfile format
+ if isinstance(data["spec"], list): # Legacy specfile format
return _spec_from_old_dict(data)
# Current specfile format
- nodes = data['spec']['nodes']
+ nodes = data["spec"]["nodes"]
hash_type = None
any_deps = False
# Pass 0: Determine hash type
for node in nodes:
- if 'dependencies' in node.keys():
+ if "dependencies" in node.keys():
any_deps = True
for _, _, _, dhash_type in Spec.dependencies_from_node_dict(node):
if dhash_type:
@@ -4940,8 +4883,9 @@ def _spec_from_dict(data):
if not any_deps: # If we never see a dependency...
hash_type = ht.dag_hash.name
elif not hash_type: # Seen a dependency, still don't know hash_type
- raise spack.error.SpecError("Spec dictionary contains malformed "
- "dependencies. Old format?")
+ raise spack.error.SpecError(
+ "Spec dictionary contains malformed " "dependencies. Old format?"
+ )
hash_dict = {}
root_spec_hash = None
@@ -4951,7 +4895,7 @@ def _spec_from_dict(data):
node_hash = node[hash_type]
node_spec = Spec.from_node_dict(node)
hash_dict[node_hash] = node
- hash_dict[node_hash]['node_spec'] = node_spec
+ hash_dict[node_hash]["node_spec"] = node_spec
if i == 0:
root_spec_hash = node_hash
if not root_spec_hash:
@@ -4959,21 +4903,21 @@ def _spec_from_dict(data):
# Pass 2: Finish construction of all DAG edges (including build specs)
for node_hash, node in hash_dict.items():
- node_spec = node['node_spec']
+ node_spec = node["node_spec"]
for _, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
- node_spec._add_dependency(hash_dict[dhash]['node_spec'], dtypes)
- if 'build_spec' in node.keys():
- _, bhash, _ = Spec.build_spec_from_node_dict(node,
- hash_type=hash_type)
- node_spec._build_spec = hash_dict[bhash]['node_spec']
+ node_spec._add_dependency(hash_dict[dhash]["node_spec"], dtypes)
+ if "build_spec" in node.keys():
+ _, bhash, _ = Spec.build_spec_from_node_dict(node, hash_type=hash_type)
+ node_spec._build_spec = hash_dict[bhash]["node_spec"]
- return hash_dict[root_spec_hash]['node_spec']
+ return hash_dict[root_spec_hash]["node_spec"]
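For orientation, a hedged round trip through the current specfile layout that _spec_from_dict reads, assuming a concrete spec s and the names already available in this module (Spec, ht):

    data = s.to_dict(hash=ht.dag_hash)   # {"spec": {"nodes": [...]}}
    same = Spec.from_dict(data)          # pass 1 builds nodes, pass 2 adds DAG edges
    assert same.dag_hash() == s.dag_hash()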
class LazySpecCache(collections.defaultdict):
"""Cache for Specs that uses a spec_like as key, and computes lazily
the corresponding value ``Spec(spec_like``.
"""
+
def __init__(self):
super(LazySpecCache, self).__init__(Spec)
@@ -4987,7 +4931,7 @@ class LazySpecCache(collections.defaultdict):
HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL, FILE = range(12)
#: Regex for fully qualified spec names. (e.g., builtin.hdf5)
-spec_id_re = r'\w[\w.-]*'
+spec_id_re = r"\w[\w.-]*"
class SpecLexer(spack.parse.Lexer):
@@ -4997,36 +4941,39 @@ class SpecLexer(spack.parse.Lexer):
def __init__(self):
# Spec strings require posix-style paths on Windows
# because the result is later passed to shlex
- filename_reg = r'[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*' if not is_windows\
- else r'([A-Za-z]:)*?[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*'
- super(SpecLexer, self).__init__([
- (r'\^', lambda scanner, val: self.token(DEP, val)),
- (r'\@', lambda scanner, val: self.token(AT, val)),
- (r'\:', lambda scanner, val: self.token(COLON, val)),
- (r'\,', lambda scanner, val: self.token(COMMA, val)),
- (r'\+', lambda scanner, val: self.token(ON, val)),
- (r'\-', lambda scanner, val: self.token(OFF, val)),
- (r'\~', lambda scanner, val: self.token(OFF, val)),
- (r'\%', lambda scanner, val: self.token(PCT, val)),
- (r'\=', lambda scanner, val: self.token(EQ, val)),
-
- # Filenames match before identifiers, so no initial filename
- # component is parsed as a spec (e.g., in subdir/spec.yaml/json)
- (filename_reg,
- lambda scanner, v: self.token(FILE, v)),
-
- # Hash match after filename. No valid filename can be a hash
- # (files end w/.yaml), but a hash can match a filename prefix.
- (r'/', lambda scanner, val: self.token(HASH, val)),
-
- # Identifiers match after filenames and hashes.
- (spec_id_re, lambda scanner, val: self.token(ID, val)),
-
- (r'\s+', lambda scanner, val: None)],
+ filename_reg = (
+ r"[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
+ if not is_windows
+ else r"([A-Za-z]:)*?[/\w.-]*/[/\w/-]+\.(yaml|json)[^\b]*"
+ )
+ super(SpecLexer, self).__init__(
+ [
+ (r"\^", lambda scanner, val: self.token(DEP, val)),
+ (r"\@", lambda scanner, val: self.token(AT, val)),
+ (r"\:", lambda scanner, val: self.token(COLON, val)),
+ (r"\,", lambda scanner, val: self.token(COMMA, val)),
+ (r"\+", lambda scanner, val: self.token(ON, val)),
+ (r"\-", lambda scanner, val: self.token(OFF, val)),
+ (r"\~", lambda scanner, val: self.token(OFF, val)),
+ (r"\%", lambda scanner, val: self.token(PCT, val)),
+ (r"\=", lambda scanner, val: self.token(EQ, val)),
+ # Filenames match before identifiers, so no initial filename
+ # component is parsed as a spec (e.g., in subdir/spec.yaml/json)
+ (filename_reg, lambda scanner, v: self.token(FILE, v)),
+ # Hash match after filename. No valid filename can be a hash
+ # (files end w/.yaml), but a hash can match a filename prefix.
+ (r"/", lambda scanner, val: self.token(HASH, val)),
+ # Identifiers match after filenames and hashes.
+ (spec_id_re, lambda scanner, val: self.token(ID, val)),
+ (r"\s+", lambda scanner, val: None),
+ ],
[EQ],
- [(r'[\S].*', lambda scanner, val: self.token(VAL, val)),
- (r'\s+', lambda scanner, val: None)],
- [VAL])
+ [
+ (r"[\S].*", lambda scanner, val: self.token(VAL, val)),
+ (r"\s+", lambda scanner, val: None),
+ ],
+ [VAL],
+ )
# Lexer is always the same for every parser.
@@ -5073,8 +5020,7 @@ class SpecParser(spack.parse.Parser):
else:
if specs[-1].concrete:
# Trying to add k-v pair to spec from hash
- raise RedundantSpecError(specs[-1],
- 'key-value pair')
+ raise RedundantSpecError(specs[-1], "key-value pair")
# We should never end up here.
# This requires starting a new spec with ID, EQ
# After another spec that is not concrete
@@ -5109,7 +5055,7 @@ class SpecParser(spack.parse.Parser):
# We're finding a dependency by hash for an
# anonymous spec
dep = self.spec_by_hash()
- dep = dep.copy(deps=('link', 'run'))
+ dep = dep.copy(deps=("link", "run"))
if not dep:
# We're adding a dependency to the last spec
@@ -5119,8 +5065,7 @@ class SpecParser(spack.parse.Parser):
# This is an anonymous dep with a key=value
# push tokens to be parsed as part of the
# dep spec
- self.push_tokens(
- [self.previous, self.token])
+ self.push_tokens([self.previous, self.token])
dep_name = None
else:
# named dep (standard)
@@ -5134,7 +5079,7 @@ class SpecParser(spack.parse.Parser):
# Raise an error if the previous spec is already
# concrete (assigned by hash)
if specs[-1].concrete:
- raise RedundantSpecError(specs[-1], 'dependency')
+ raise RedundantSpecError(specs[-1], "dependency")
# command line deps get empty deptypes now.
# Real deptypes are assigned later per packages.
specs[-1]._add_dependency(dep, ())
@@ -5145,9 +5090,7 @@ class SpecParser(spack.parse.Parser):
if self.next.type in (AT, ON, OFF, PCT):
# Raise an error if the previous spec is already concrete
if specs and specs[-1].concrete:
- raise RedundantSpecError(specs[-1],
- 'compiler, version, '
- 'or variant')
+ raise RedundantSpecError(specs[-1], "compiler, version, " "or variant")
specs.append(self.spec(None))
else:
self.unexpected_token()
@@ -5160,10 +5103,7 @@ class SpecParser(spack.parse.Parser):
# Cannot do lookups for versions in anonymous specs
# Only allow Version objects to use git for now
# Note: VersionRange(x, x) is currently concrete, hence isinstance(...).
- if (
- spec.name and spec.versions.concrete and
- isinstance(spec.version, vn.GitVersion)
- ):
+ if spec.name and spec.versions.concrete and isinstance(spec.version, vn.GitVersion):
spec.version.generate_git_lookup(spec.fullname)
return specs
@@ -5198,8 +5138,7 @@ class SpecParser(spack.parse.Parser):
# regex admits text *beyond* .yaml, and we raise a nice error for
# file names that don't end in .yaml.
if not (path.endswith(".yaml") or path.endswith(".json")):
- raise SpecFilenameError(
- "Spec filename must end in .yaml or .json: '{0}'".format(path))
+ raise SpecFilenameError("Spec filename must end in .yaml or .json: '{0}'".format(path))
if not os.path.exists(path):
raise NoSuchSpecFileError("No such spec file: '{0}'".format(path))
@@ -5216,6 +5155,7 @@ class SpecParser(spack.parse.Parser):
def spec_by_hash(self):
# TODO: Remove parser dependency on active environment and database.
import spack.environment
+
self.expect(ID)
dag_hash = self.token.value
matches = []
@@ -5228,18 +5168,18 @@ class SpecParser(spack.parse.Parser):
if len(matches) != 1:
raise AmbiguousHashError(
- "Multiple packages specify hash beginning '%s'."
- % dag_hash, *matches)
+ "Multiple packages specify hash beginning '%s'." % dag_hash, *matches
+ )
return matches[0]
def spec(self, name):
"""Parse a spec out of the input. If a spec is supplied, initialize
- and return it instead of creating a new one."""
+ and return it instead of creating a new one."""
spec_namespace = None
spec_name = None
if name:
- spec_namespace, dot, spec_name = name.rpartition('.')
+ spec_namespace, dot, spec_name = name.rpartition(".")
if not spec_namespace:
spec_namespace = None
self.check_identifier(spec_name)
@@ -5352,46 +5292,44 @@ class SpecParser(spack.parse.Parser):
vlist = self.version_list()
compiler._add_versions(vlist)
else:
- compiler.versions = vn.VersionList(':')
+ compiler.versions = vn.VersionList(":")
return compiler
def check_identifier(self, id=None):
"""The only identifiers that can contain '.' are versions, but version
- ids are context-sensitive so we have to check on a case-by-case
- basis. Call this if we detect a version id where it shouldn't be.
+ ids are context-sensitive so we have to check on a case-by-case
+ basis. Call this if we detect a version id where it shouldn't be.
"""
if not id:
id = self.token.value
- if '.' in id:
- self.last_token_error(
- "{0}: Identifier cannot contain '.'".format(id))
+ if "." in id:
+ self.last_token_error("{0}: Identifier cannot contain '.'".format(id))
def parse(string):
"""Returns a list of specs from an input string.
- For creating one spec, see Spec() constructor.
+ For creating one spec, see Spec() constructor.
"""
return SpecParser().parse(string)
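A short usage sketch for parse(); the spec string is illustrative:

    import spack.spec

    # "^" introduces a dependency constraint; a single top-level spec comes back here.
    specs = spack.spec.parse("hdf5@1.12 +mpi %gcc ^zlib@1.2.13")
    root = specs[0]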
def save_dependency_specfiles(
- root_spec_info, output_directory, dependencies=None,
- spec_format='json'):
+ root_spec_info, output_directory, dependencies=None, spec_format="json"
+):
"""Given a root spec (represented as a yaml object), index it with a subset
- of its dependencies, and write each dependency to a separate yaml file
- in the output directory. By default, all dependencies will be written
- out. To choose a smaller subset of dependencies to be written, pass a
- list of package names in the dependencies parameter. If the format of the
- incoming spec is not json, that can be specified with the spec_format
- parameter. This can be used to convert from yaml specfiles to the
- json format."""
- if spec_format == 'json':
+ of its dependencies, and write each dependency to a separate yaml file
+ in the output directory. By default, all dependencies will be written
+ out. To choose a smaller subset of dependencies to be written, pass a
+ list of package names in the dependencies parameter. If the format of the
+ incoming spec is not json, that can be specified with the spec_format
+ parameter. This can be used to convert from yaml specfiles to the
+ json format."""
+ if spec_format == "json":
root_spec = Spec.from_json(root_spec_info)
- elif spec_format == 'yaml':
+ elif spec_format == "yaml":
root_spec = Spec.from_yaml(root_spec_info)
else:
- raise SpecParseError('Unrecognized spec format {0}.'.format(
- spec_format))
+ raise SpecParseError("Unrecognized spec format {0}.".format(spec_format))
dep_list = dependencies
if not dep_list:
@@ -5399,18 +5337,18 @@ def save_dependency_specfiles(
for dep_name in dep_list:
if dep_name not in root_spec:
- msg = 'Dependency {0} does not exist in root spec {1}'.format(
- dep_name, root_spec.name)
+ msg = "Dependency {0} does not exist in root spec {1}".format(dep_name, root_spec.name)
raise SpecDependencyNotFoundError(msg)
dep_spec = root_spec[dep_name]
- json_path = os.path.join(output_directory, '{0}.json'.format(dep_name))
+ json_path = os.path.join(output_directory, "{0}.json".format(dep_name))
- with open(json_path, 'w') as fd:
+ with open(json_path, "w") as fd:
fd.write(dep_spec.to_json(hash=ht.dag_hash))
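A hypothetical call matching the docstring above; the output directory and dependency names are placeholders:

    # Writes /tmp/depfiles/zlib.json for each selected dependency of the root
    # spec; pass spec_format="yaml" if the incoming specfile is YAML.
    save_dependency_specfiles(root_spec_json, "/tmp/depfiles", dependencies=["zlib"])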
class SpecParseError(spack.error.SpecError):
"""Wrapper for ParseError for when we're parsing specs."""
+
def __init__(self, parse_error):
super(SpecParseError, self).__init__(parse_error.message)
self.string = parse_error.string
@@ -5441,9 +5379,11 @@ class DuplicateCompilerSpecError(spack.error.SpecError):
class UnsupportedCompilerError(spack.error.SpecError):
"""Raised when the user asks for a compiler spack doesn't know about."""
+
def __init__(self, compiler_name):
super(UnsupportedCompilerError, self).__init__(
- "The '%s' compiler is not yet supported." % compiler_name)
+ "The '%s' compiler is not yet supported." % compiler_name
+ )
class DuplicateArchitectureError(spack.error.SpecError):
@@ -5452,96 +5392,105 @@ class DuplicateArchitectureError(spack.error.SpecError):
class InconsistentSpecError(spack.error.SpecError):
"""Raised when two nodes in the same spec DAG have inconsistent
- constraints."""
+ constraints."""
class InvalidDependencyError(spack.error.SpecError):
"""Raised when a dependency in a spec is not actually a dependency
- of the package."""
+ of the package."""
+
def __init__(self, pkg, deps):
self.invalid_deps = deps
super(InvalidDependencyError, self).__init__(
- 'Package {0} does not depend on {1}'.format(
- pkg, spack.util.string.comma_or(deps)))
+ "Package {0} does not depend on {1}".format(pkg, spack.util.string.comma_or(deps))
+ )
class NoProviderError(spack.error.SpecError):
"""Raised when there is no package that provides a particular
- virtual dependency.
+ virtual dependency.
"""
+
def __init__(self, vpkg):
super(NoProviderError, self).__init__(
- "No providers found for virtual package: '%s'" % vpkg)
+ "No providers found for virtual package: '%s'" % vpkg
+ )
self.vpkg = vpkg
class MultipleProviderError(spack.error.SpecError):
"""Raised when there is no package that provides a particular
- virtual dependency.
+ virtual dependency.
"""
+
def __init__(self, vpkg, providers):
"""Takes the name of the vpkg"""
super(MultipleProviderError, self).__init__(
- "Multiple providers found for '%s': %s"
- % (vpkg, [str(s) for s in providers]))
+ "Multiple providers found for '%s': %s" % (vpkg, [str(s) for s in providers])
+ )
self.vpkg = vpkg
self.providers = providers
class UnsatisfiableSpecNameError(spack.error.UnsatisfiableSpecError):
"""Raised when two specs aren't even for the same package."""
+
def __init__(self, provided, required):
- super(UnsatisfiableSpecNameError, self).__init__(
- provided, required, "name")
+ super(UnsatisfiableSpecNameError, self).__init__(provided, required, "name")
class UnsatisfiableVersionSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec version conflicts with package constraints."""
+
def __init__(self, provided, required):
- super(UnsatisfiableVersionSpecError, self).__init__(
- provided, required, "version")
+ super(UnsatisfiableVersionSpecError, self).__init__(provided, required, "version")
class UnsatisfiableCompilerSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec comiler conflicts with package constraints."""
+
def __init__(self, provided, required):
- super(UnsatisfiableCompilerSpecError, self).__init__(
- provided, required, "compiler")
+ super(UnsatisfiableCompilerSpecError, self).__init__(provided, required, "compiler")
class UnsatisfiableCompilerFlagSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints."""
+
def __init__(self, provided, required):
super(UnsatisfiableCompilerFlagSpecError, self).__init__(
- provided, required, "compiler_flags")
+ provided, required, "compiler_flags"
+ )
class UnsatisfiableArchitectureSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints."""
+
def __init__(self, provided, required):
super(UnsatisfiableArchitectureSpecError, self).__init__(
- provided, required, "architecture")
+ provided, required, "architecture"
+ )
class UnsatisfiableProviderSpecError(spack.error.UnsatisfiableSpecError):
"""Raised when a provider is supplied but constraints don't match
- a vpkg requirement"""
+ a vpkg requirement"""
+
def __init__(self, provided, required):
- super(UnsatisfiableProviderSpecError, self).__init__(
- provided, required, "provider")
+ super(UnsatisfiableProviderSpecError, self).__init__(provided, required, "provider")
# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
class UnsatisfiableDependencySpecError(spack.error.UnsatisfiableSpecError):
"""Raised when some dependency of constrained specs are incompatible"""
+
def __init__(self, provided, required):
- super(UnsatisfiableDependencySpecError, self).__init__(
- provided, required, "dependency")
+ super(UnsatisfiableDependencySpecError, self).__init__(provided, required, "dependency")
class UnconstrainableDependencySpecError(spack.error.SpecError):
"""Raised when attempting to constrain by an anonymous dependency spec"""
+
def __init__(self, spec):
msg = "Cannot constrain by spec '%s'. Cannot constrain by a" % spec
msg += " spec containing anonymous dependencies"
@@ -5550,25 +5499,22 @@ class UnconstrainableDependencySpecError(spack.error.SpecError):
class AmbiguousHashError(spack.error.SpecError):
def __init__(self, msg, *specs):
- spec_fmt = '{namespace}.{name}{@version}{%compiler}{compiler_flags}'
- spec_fmt += '{variants}{arch=architecture}{/hash:7}'
- specs_str = '\n ' + '\n '.join(spec.format(spec_fmt)
- for spec in specs)
+ spec_fmt = "{namespace}.{name}{@version}{%compiler}{compiler_flags}"
+ spec_fmt += "{variants}{arch=architecture}{/hash:7}"
+ specs_str = "\n " + "\n ".join(spec.format(spec_fmt) for spec in specs)
super(AmbiguousHashError, self).__init__(msg + specs_str)
class InvalidHashError(spack.error.SpecError):
def __init__(self, spec, hash):
super(InvalidHashError, self).__init__(
- "The spec specified by %s does not match provided spec %s"
- % (hash, spec))
+ "The spec specified by %s does not match provided spec %s" % (hash, spec)
+ )
class NoSuchHashError(spack.error.SpecError):
def __init__(self, hash):
- super(NoSuchHashError, self).__init__(
- "No installed spec matches the hash: '%s'"
- % hash)
+ super(NoSuchHashError, self).__init__("No installed spec matches the hash: '%s'" % hash)
class SpecFilenameError(spack.error.SpecError):
@@ -5583,8 +5529,8 @@ class RedundantSpecError(spack.error.SpecError):
def __init__(self, spec, addition):
super(RedundantSpecError, self).__init__(
"Attempting to add %s to spec %s which is already concrete."
- " This is likely the result of adding to a spec specified by hash."
- % (addition, spec))
+ " This is likely the result of adding to a spec specified by hash." % (addition, spec)
+ )
class SpecFormatStringError(spack.error.SpecError):
@@ -5593,21 +5539,20 @@ class SpecFormatStringError(spack.error.SpecError):
class SpecFormatSigilError(SpecFormatStringError):
"""Called for mismatched sigils and attributes in format strings"""
+
def __init__(self, sigil, requirement, used):
- msg = 'The sigil %s may only be used for %s.' % (sigil, requirement)
- msg += ' It was used with the attribute %s.' % used
+ msg = "The sigil %s may only be used for %s." % (sigil, requirement)
+ msg += " It was used with the attribute %s." % used
super(SpecFormatSigilError, self).__init__(msg)
class ConflictsInSpecError(spack.error.SpecError, RuntimeError):
def __init__(self, spec, matches):
- message = 'Conflicts in concretized spec "{0}"\n'.format(
- spec.short_spec
- )
+ message = 'Conflicts in concretized spec "{0}"\n'.format(spec.short_spec)
visited = set()
- long_message = ''
+ long_message = ""
match_fmt_default = '{0}. "{1}" conflicts with "{2}"\n'
match_fmt_custom = '{0}. "{1}" conflicts with "{2}" [{3}]\n'
@@ -5616,8 +5561,8 @@ class ConflictsInSpecError(spack.error.SpecError, RuntimeError):
if s not in visited:
visited.add(s)
- long_message += 'List of matching conflicts for spec:\n\n'
- long_message += s.tree(indent=4) + '\n'
+ long_message += "List of matching conflicts for spec:\n\n"
+ long_message += s.tree(indent=4) + "\n"
if msg is None:
long_message += match_fmt_default.format(idx + 1, c, w)
diff --git a/lib/spack/spack/spec_list.py b/lib/spack/spack/spec_list.py
index 2582921e3e..aac099eb4d 100644
--- a/lib/spack/spack/spec_list.py
+++ b/lib/spack/spack/spec_list.py
@@ -12,8 +12,7 @@ from spack.spec import Spec
class SpecList(object):
-
- def __init__(self, name='specs', yaml_list=None, reference=None):
+ def __init__(self, name="specs", yaml_list=None, reference=None):
# Normalize input arguments
yaml_list = yaml_list or []
reference = reference or {}
@@ -22,11 +21,11 @@ class SpecList(object):
self._reference = reference # TODO: Do we need defensive copy here?
# Validate yaml_list before assigning
- if not all(isinstance(s, string_types) or isinstance(s, (list, dict))
- for s in yaml_list):
+ if not all(isinstance(s, string_types) or isinstance(s, (list, dict)) for s in yaml_list):
raise ValueError(
"yaml_list can contain only valid YAML types! Found:\n %s"
- % [type(s) for s in yaml_list])
+ % [type(s) for s in yaml_list]
+ )
self.yaml_list = yaml_list[:]
# Expansions can be expensive to compute and difficult to keep updated
@@ -82,13 +81,15 @@ class SpecList(object):
def remove(self, spec):
# Get spec to remove from list
- remove = [s for s in self.yaml_list
- if (isinstance(s, string_types) and not s.startswith('$'))
- and Spec(s) == Spec(spec)]
+ remove = [
+ s
+ for s in self.yaml_list
+ if (isinstance(s, string_types) and not s.startswith("$")) and Spec(s) == Spec(spec)
+ ]
if not remove:
- msg = 'Cannot remove %s from SpecList %s\n' % (spec, self.name)
- msg += 'Either %s is not in %s or %s is ' % (spec, self.name, spec)
- msg += 'expanded from a matrix and cannot be removed directly.'
+ msg = "Cannot remove %s from SpecList %s\n" % (spec, self.name)
+ msg += "Either %s is not in %s or %s is " % (spec, self.name, spec)
+ msg += "expanded from a matrix and cannot be removed directly."
raise SpecListError(msg)
assert len(remove) == 1
self.yaml_list.remove(remove[0])
@@ -114,19 +115,19 @@ class SpecList(object):
self._specs = None
def _parse_reference(self, name):
- sigil = ''
+ sigil = ""
name = name[1:]
# Parse specs as constraints
- if name.startswith('^') or name.startswith('%'):
+ if name.startswith("^") or name.startswith("%"):
sigil = name[0]
name = name[1:]
# Make sure the reference is valid
if name not in self._reference:
- msg = 'SpecList %s refers to ' % self.name
- msg += 'named list %s ' % name
- msg += 'which does not appear in its reference dict'
+ msg = "SpecList %s refers to " % self.name
+ msg += "named list %s " % name
+ msg += "which does not appear in its reference dict"
raise UndefinedReferenceError(msg)
return (name, sigil)
@@ -137,12 +138,11 @@ class SpecList(object):
for item in yaml:
# if it's a reference, expand it
- if isinstance(item, string_types) and item.startswith('$'):
+ if isinstance(item, string_types) and item.startswith("$"):
# replace the reference and apply the sigil if needed
name, sigil = self._parse_reference(item)
referent = [
- _sigilify(item, sigil)
- for item in self._reference[name].specs_as_yaml_list
+ _sigilify(item, sigil) for item in self._reference[name].specs_as_yaml_list
]
ret.extend(referent)
else:
@@ -151,8 +151,7 @@ class SpecList(object):
return ret
elif isinstance(yaml, dict):
# There can't be expansions in dicts
- return dict((name, self._expand_references(val))
- for (name, val) in yaml.items())
+ return dict((name, self._expand_references(val)) for (name, val) in yaml.items())
else:
# Strings are just returned
return yaml
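A minimal sketch of the "$"-reference expansion above; the list names are made up, and only items that are themselves references get expanded:

    compilers = SpecList("compilers", ["%gcc@11", "%clang@14"])
    specs = SpecList("specs", ["$compilers", "hdf5"], reference={"compilers": compilers})
    # specs.specs_as_yaml_list -> ["%gcc@11", "%clang@14", "hdf5"]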
@@ -167,27 +166,28 @@ class SpecList(object):
def _expand_matrix_constraints(matrix_config):
# recurse so we can handle nested matrices
expanded_rows = []
- for row in matrix_config['matrix']:
+ for row in matrix_config["matrix"]:
new_row = []
for r in row:
if isinstance(r, dict):
# Flatten the nested matrix into a single row of constraints
new_row.extend(
- [[' '.join([str(c) for c in expanded_constraint_list])]
- for expanded_constraint_list in _expand_matrix_constraints(r)]
+ [
+ [" ".join([str(c) for c in expanded_constraint_list])]
+ for expanded_constraint_list in _expand_matrix_constraints(r)
+ ]
)
else:
new_row.append([r])
expanded_rows.append(new_row)
- excludes = matrix_config.get('exclude', []) # only compute once
- sigil = matrix_config.get('sigil', '')
+ excludes = matrix_config.get("exclude", []) # only compute once
+ sigil = matrix_config.get("sigil", "")
results = []
for combo in itertools.product(*expanded_rows):
# Construct a combined spec to test against excludes
- flat_combo = [constraint for constraint_list in combo
- for constraint in constraint_list]
+ flat_combo = [constraint for constraint_list in combo for constraint in constraint_list]
flat_combo = [Spec(x) for x in flat_combo]
test_spec = flat_combo[0].copy()
@@ -219,7 +219,7 @@ def _expand_matrix_constraints(matrix_config):
def _sigilify(item, sigil):
if isinstance(item, dict):
if sigil:
- item['sigil'] = sigil
+ item["sigil"] = sigil
return item
else:
return sigil + item
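And a rough illustration of the matrix expansion in _expand_matrix_constraints; the package and compiler names are placeholders:

    config = {"matrix": [["hdf5", "zlib"], ["%gcc@11", "%clang@14"]]}
    # Cross product of the rows, minus anything matching an "exclude" entry,
    # returned as lists of Spec constraints roughly like
    #   [hdf5, %gcc@11], [hdf5, %clang@14], [zlib, %gcc@11], [zlib, %clang@14]
    combos = _expand_matrix_constraints(config)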
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index a5d29f2917..ddd5bba8b8 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -44,10 +44,10 @@ import spack.util.url as url_util
from spack.util.crypto import bit_length, prefix_bits
# The well-known stage source subdirectory name.
-_source_path_subdir = 'spack-src'
+_source_path_subdir = "spack-src"
# The temporary stage name prefix.
-stage_prefix = 'spack-stage-'
+stage_prefix = "spack-stage-"
def create_stage_root(path):
@@ -56,13 +56,12 @@ def create_stage_root(path):
"""Create the stage root directory and ensure appropriate access perms."""
assert os.path.isabs(path) and len(path.strip()) > 1
- err_msg = 'Cannot create stage root {0}: Access to {1} is denied'
+ err_msg = "Cannot create stage root {0}: Access to {1} is denied"
user_uid = getuid()
# Obtain lists of ancestor and descendant paths of the $user node, if any.
- group_paths, user_node, user_paths = partition_path(path,
- getpass.getuser())
+ group_paths, user_node, user_paths = partition_path(path, getpass.getuser())
for p in group_paths:
if not os.path.exists(p):
@@ -73,12 +72,18 @@ def create_stage_root(path):
p_stat = os.stat(p)
if par_stat.st_gid != p_stat.st_gid:
- tty.warn("Expected {0} to have group {1}, but it is {2}"
- .format(p, par_stat.st_gid, p_stat.st_gid))
+ tty.warn(
+ "Expected {0} to have group {1}, but it is {2}".format(
+ p, par_stat.st_gid, p_stat.st_gid
+ )
+ )
if par_stat.st_mode & p_stat.st_mode != par_stat.st_mode:
- tty.warn("Expected {0} to support mode {1}, but it is {2}"
- .format(p, par_stat.st_mode, p_stat.st_mode))
+ tty.warn(
+ "Expected {0} to support mode {1}, but it is {2}".format(
+ p, par_stat.st_mode, p_stat.st_mode
+ )
+ )
if not can_access(p):
raise OSError(errno.EACCES, err_msg.format(path, p))
@@ -93,8 +98,11 @@ def create_stage_root(path):
# restricted to the user.
owner_uid = get_owner_uid(p)
if user_uid != owner_uid:
- tty.warn("Expected user {0} to own {1}, but it is owned by {2}"
- .format(user_uid, p, owner_uid))
+ tty.warn(
+ "Expected user {0} to own {1}, but it is owned by {2}".format(
+ user_uid, p, owner_uid
+ )
+ )
spack_src_subdir = os.path.join(path, _source_path_subdir)
# When staging into a user-specified directory with `spack stage -p <PATH>`, we need
@@ -120,8 +128,7 @@ def _first_accessible_path(paths):
return path
except OSError as e:
- tty.debug('OSError while checking stage path %s: %s' % (
- path, str(e)))
+ tty.debug("OSError while checking stage path %s: %s" % (path, str(e)))
return None
@@ -133,7 +140,7 @@ def _resolve_paths(candidates):
Adjustments involve removing extra $user from $tempdir if $tempdir includes
$user and appending $user if it is not present in the path.
"""
- temp_path = sup.canonicalize_path('$tempdir')
+ temp_path = sup.canonicalize_path("$tempdir")
user = getpass.getuser()
tmp_has_usr = user in temp_path.split(os.path.sep)
@@ -141,7 +148,7 @@ def _resolve_paths(candidates):
for path in candidates:
# Remove the extra `$user` node from a `$tempdir/$user` entry for
# hosts that automatically append `$user` to `$tempdir`.
- if path.startswith(os.path.join('$tempdir', '$user')) and tmp_has_usr:
+ if path.startswith(os.path.join("$tempdir", "$user")) and tmp_has_usr:
path = path.replace("/$user", "", 1)
# Ensure the path is unique per user.
@@ -162,15 +169,14 @@ def get_stage_root():
global _stage_root
if _stage_root is None:
- candidates = spack.config.get('config:build_stage')
+ candidates = spack.config.get("config:build_stage")
if isinstance(candidates, string_types):
candidates = [candidates]
resolved_candidates = _resolve_paths(candidates)
path = _first_accessible_path(resolved_candidates)
if not path:
- raise StageError("No accessible stage paths in:",
- ' '.join(resolved_candidates))
+ raise StageError("No accessible stage paths in:", " ".join(resolved_candidates))
_stage_root = path
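For reference, a hedged look at where the stage-root candidates come from; the default values shown are only illustrative:

    import spack.config

    # Typically something like ["$tempdir/$user/spack-stage", "~/.spack/stage"];
    # _resolve_paths() then drops a duplicated $user and appends it where missing.
    candidates = spack.config.get("config:build_stage")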
@@ -178,11 +184,13 @@ def get_stage_root():
def _mirror_roots():
- mirrors = spack.config.get('mirrors')
+ mirrors = spack.config.get("mirrors")
return [
- sup.substitute_path_variables(root) if root.endswith(os.sep)
+ sup.substitute_path_variables(root)
+ if root.endswith(os.sep)
else sup.substitute_path_variables(root) + os.sep
- for root in mirrors.values()]
+ for root in mirrors.values()
+ ]
class Stage(object):
@@ -234,42 +242,48 @@ class Stage(object):
managed_by_spack = True
def __init__(
- self, url_or_fetch_strategy,
- name=None, mirror_paths=None, keep=False, path=None, lock=True,
- search_fn=None):
+ self,
+ url_or_fetch_strategy,
+ name=None,
+ mirror_paths=None,
+ keep=False,
+ path=None,
+ lock=True,
+ search_fn=None,
+ ):
"""Create a stage object.
- Parameters:
- url_or_fetch_strategy
- URL of the archive to be downloaded into this stage, OR
- a valid FetchStrategy.
-
- name
- If a name is provided, then this stage is a named stage
- and will persist between runs (or if you construct another
- stage object later). If name is not provided, then this
- stage will be given a unique name automatically.
-
- mirror_paths
- If provided, Stage will search Spack's mirrors for
- this archive at each of the provided relative mirror paths
- before using the default fetch strategy.
-
- keep
- By default, when used as a context manager, the Stage
- is deleted on exit when no exceptions are raised.
- Pass True to keep the stage intact even if no
- exceptions are raised.
-
- path
- If provided, the stage path to use for associated builds.
-
- lock
- True if the stage directory file lock is to be used, False
- otherwise.
-
- search_fn
- The search function that provides the fetch strategy
- instance.
+ Parameters:
+ url_or_fetch_strategy
+ URL of the archive to be downloaded into this stage, OR
+ a valid FetchStrategy.
+
+ name
+ If a name is provided, then this stage is a named stage
+ and will persist between runs (or if you construct another
+ stage object later). If name is not provided, then this
+ stage will be given a unique name automatically.
+
+ mirror_paths
+ If provided, Stage will search Spack's mirrors for
+ this archive at each of the provided relative mirror paths
+ before using the default fetch strategy.
+
+ keep
+ By default, when used as a context manager, the Stage
+ is deleted on exit when no exceptions are raised.
+ Pass True to keep the stage intact even if no
+ exceptions are raised.
+
+ path
+ If provided, the stage path to use for associated builds.
+
+ lock
+ True if the stage directory file lock is to be used, False
+ otherwise.
+
+ search_fn
+ The search function that provides the fetch strategy
+ instance.
"""
# TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough.
@@ -278,8 +292,7 @@ class Stage(object):
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
else:
- raise ValueError(
- "Can't construct Stage without url or fetch strategy")
+ raise ValueError("Can't construct Stage without url or fetch strategy")
self.fetcher.stage = self
# self.fetcher can change with mirrors.
self.default_fetcher = self.fetcher
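A hedged usage sketch for the parameters documented above; the URL and stage name are placeholders, and checksum/mirror setup is omitted:

    import spack.stage

    with spack.stage.Stage("https://example.com/foo-1.0.tar.gz", name="foo-stage", keep=True) as stage:
        stage.fetch()           # tries configured mirrors first, then the source URL
        stage.expand_archive()  # unpacks into stage.source_path
        print(stage.source_path)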
@@ -312,13 +325,14 @@ class Stage(object):
self._lock = None
if lock:
if self.name not in Stage.stage_locks:
- sha1 = hashlib.sha1(self.name.encode('utf-8')).digest()
+ sha1 = hashlib.sha1(self.name.encode("utf-8")).digest()
lock_id = prefix_bits(sha1, bit_length(sys.maxsize))
- stage_lock_path = os.path.join(get_stage_root(), '.lock')
+ stage_lock_path = os.path.join(get_stage_root(), ".lock")
tty.debug("Creating stage lock {0}".format(self.name))
Stage.stage_locks[self.name] = spack.util.lock.Lock(
- stage_lock_path, lock_id, 1, desc=self.name)
+ stage_lock_path, lock_id, 1, desc=self.name
+ )
self._lock = Stage.stage_locks[self.name]
@@ -367,8 +381,7 @@ class Stage(object):
expanded = True
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
expanded = self.default_fetcher.expand_archive
- clean_url = os.path.basename(
- sup.sanitize_file_path(self.default_fetcher.url))
+ clean_url = os.path.basename(sup.sanitize_file_path(self.default_fetcher.url))
fnames.append(clean_url)
if self.mirror_paths:
@@ -435,14 +448,16 @@ class Stage(object):
for rel_path in self.mirror_paths:
mirror_url = url_util.join(mirror.fetch_url, rel_path)
mirror_urls[mirror_url] = {}
- if mirror.get_access_pair("fetch") or \
- mirror.get_access_token("fetch") or \
- mirror.get_profile("fetch"):
+ if (
+ mirror.get_access_pair("fetch")
+ or mirror.get_access_token("fetch")
+ or mirror.get_profile("fetch")
+ ):
mirror_urls[mirror_url] = {
"access_token": mirror.get_access_token("fetch"),
"access_pair": mirror.get_access_pair("fetch"),
"access_profile": mirror.get_profile("fetch"),
- "endpoint_url": mirror.get_endpoint_url("fetch")
+ "endpoint_url": mirror.get_endpoint_url("fetch"),
}
# If this archive is normally fetched from a tarball URL,
@@ -464,15 +479,21 @@ class Stage(object):
# Insert fetchers in the order that the URLs are provided.
for url in reversed(list(mirror_urls.keys())):
fetchers.insert(
- 0, fs.from_url_scheme(
- url, digest, expand=expand, extension=extension,
- connection=mirror_urls[url]))
+ 0,
+ fs.from_url_scheme(
+ url,
+ digest,
+ expand=expand,
+ extension=extension,
+ connection=mirror_urls[url],
+ ),
+ )
if self.default_fetcher.cachable:
for rel_path in reversed(list(self.mirror_paths)):
cache_fetcher = spack.caches.fetch_cache.fetcher(
- rel_path, digest, expand=expand,
- extension=extension)
+ rel_path, digest, expand=expand, extension=extension
+ )
fetchers.insert(0, cache_fetcher)
def generate_fetchers():
@@ -500,14 +521,14 @@ class Stage(object):
# Don't bother reporting when something is not cached.
continue
except spack.error.SpackError as e:
- errors.append('Fetching from {0} failed.'.format(fetcher))
+ errors.append("Fetching from {0} failed.".format(fetcher))
tty.debug(e)
continue
else:
print_errors(errors)
self.fetcher = self.default_fetcher
- default_msg = 'All fetchers failed for {0}'.format(self.name)
+ default_msg = "All fetchers failed for {0}".format(self.name)
raise fs.FetchError(err_msg or default_msg, None)
print_errors(errors)
@@ -528,8 +549,8 @@ class Stage(object):
mkdirp(dest)
# glob all files and directories in the source path
- hidden_entries = glob.glob(os.path.join(self.source_path, '.*'))
- entries = glob.glob(os.path.join(self.source_path, '*'))
+ hidden_entries = glob.glob(os.path.join(self.source_path, ".*"))
+ entries = glob.glob(os.path.join(self.source_path, "*"))
# Move all files from stage to destination directory
# Include hidden files for VCS repo history
@@ -549,21 +570,21 @@ class Stage(object):
def check(self):
"""Check the downloaded archive against a checksum digest.
- No-op if this stage checks code out of a repository."""
- if self.fetcher is not self.default_fetcher and \
- self.skip_checksum_for_mirror:
- tty.warn("Fetching from mirror without a checksum!",
- "This package is normally checked out from a version "
- "control system, but it has been archived on a spack "
- "mirror. This means we cannot know a checksum for the "
- "tarball in advance. Be sure that your connection to "
- "this mirror is secure!")
- elif spack.config.get('config:checksum'):
+ No-op if this stage checks code out of a repository."""
+ if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
+ tty.warn(
+ "Fetching from mirror without a checksum!",
+ "This package is normally checked out from a version "
+ "control system, but it has been archived on a spack "
+ "mirror. This means we cannot know a checksum for the "
+ "tarball in advance. Be sure that your connection to "
+ "this mirror is secure!",
+ )
+ elif spack.config.get("config:checksum"):
self.fetcher.check()
def cache_local(self):
- spack.caches.fetch_cache.store(
- self.fetcher, self.mirror_paths.storage_path)
+ spack.caches.fetch_cache.store(self.fetcher, self.mirror_paths.storage_path)
def cache_mirror(self, mirror, stats):
"""Perform a fetch if the resource is not already cached
@@ -584,20 +605,17 @@ class Stage(object):
# must examine the type of the fetcher.
return
- if (mirror.skip_unstable_versions and
- not fs.stable_target(self.default_fetcher)):
+ if mirror.skip_unstable_versions and not fs.stable_target(self.default_fetcher):
return
- absolute_storage_path = os.path.join(
- mirror.root, self.mirror_paths.storage_path)
+ absolute_storage_path = os.path.join(mirror.root, self.mirror_paths.storage_path)
if os.path.exists(absolute_storage_path):
stats.already_existed(absolute_storage_path)
else:
self.fetch()
self.check()
- mirror.store(
- self.fetcher, self.mirror_paths.storage_path)
+ mirror.store(self.fetcher, self.mirror_paths.storage_path)
stats.added(absolute_storage_path)
mirror.symlink(self.mirror_paths)
@@ -608,13 +626,13 @@ class Stage(object):
downloaded."""
if not self.expanded:
self.fetcher.expand()
- tty.debug('Created stage in {0}'.format(self.path))
+ tty.debug("Created stage in {0}".format(self.path))
else:
- tty.debug('Already staged {0} in {1}'.format(self.name, self.path))
+ tty.debug("Already staged {0} in {1}".format(self.name, self.path))
def restage(self):
"""Removes the expanded archive path if it exists, then re-expands
- the archive.
+ the archive.
"""
self.fetcher.reset()
@@ -649,7 +667,6 @@ class Stage(object):
class ResourceStage(Stage):
-
def __init__(self, url_or_fetch_strategy, root, resource, **kwargs):
super(ResourceStage, self).__init__(url_or_fetch_strategy, **kwargs)
self.root_stage = root
@@ -678,10 +695,9 @@ class ResourceStage(Stage):
placement = self.source_path
if not isinstance(placement, dict):
- placement = {'': placement}
+ placement = {"": placement}
- target_path = os.path.join(
- root_stage.source_path, resource.destination)
+ target_path = os.path.join(root_stage.source_path, resource.destination)
try:
os.makedirs(target_path)
@@ -697,10 +713,12 @@ class ResourceStage(Stage):
source_path = os.path.join(self.source_path, key)
if not os.path.exists(destination_path):
- tty.info('Moving resource stage\n\tsource: '
- '{stage}\n\tdestination: {destination}'.format(
- stage=source_path, destination=destination_path
- ))
+ tty.info(
+ "Moving resource stage\n\tsource: "
+ "{stage}\n\tdestination: {destination}".format(
+ stage=source_path, destination=destination_path
+ )
+ )
src = os.path.realpath(source_path)
@@ -714,15 +732,27 @@ class StageComposite(pattern.Composite):
"""Composite for Stage type objects. The first item in this composite is
considered to be the root package, and operations that return a value are
forwarded to it."""
+
#
# __enter__ and __exit__ delegate to all stages in the composite.
#
def __init__(self):
- super(StageComposite, self).__init__([
- 'fetch', 'create', 'created', 'check', 'expand_archive', 'restage',
- 'destroy', 'cache_local', 'cache_mirror', 'steal_source',
- 'managed_by_spack'])
+ super(StageComposite, self).__init__(
+ [
+ "fetch",
+ "create",
+ "created",
+ "check",
+ "expand_archive",
+ "restage",
+ "destroy",
+ "cache_local",
+ "cache_mirror",
+ "steal_source",
+ "managed_by_spack",
+ ]
+ )
def __enter__(self):
for item in self:
@@ -731,7 +761,7 @@ class StageComposite(pattern.Composite):
def __exit__(self, exc_type, exc_val, exc_tb):
for item in reversed(self):
- item.keep = getattr(self, 'keep', False)
+ item.keep = getattr(self, "keep", False)
item.__exit__(exc_type, exc_val, exc_tb)
#
@@ -768,8 +798,7 @@ class DIYStage(object):
if path is None:
raise ValueError("Cannot construct DIYStage without a path.")
elif not os.path.isdir(path):
- raise StagePathError("The stage path directory does not exist:",
- path)
+ raise StagePathError("The stage path directory does not exist:", path)
self.archive_file = None
self.path = path
@@ -784,13 +813,13 @@ class DIYStage(object):
pass
def fetch(self, *args, **kwargs):
- tty.debug('No need to fetch for DIY.')
+ tty.debug("No need to fetch for DIY.")
def check(self):
- tty.debug('No checksum needed for DIY.')
+ tty.debug("No checksum needed for DIY.")
def expand_archive(self):
- tty.debug('Using source directory: {0}'.format(self.source_path))
+ tty.debug("Using source directory: {0}".format(self.source_path))
@property
def expanded(self):
@@ -808,7 +837,7 @@ class DIYStage(object):
pass
def cache_local(self):
- tty.debug('Sources for DIY stages are not cached')
+ tty.debug("Sources for DIY stages are not cached")
def ensure_access(file):
@@ -822,7 +851,7 @@ def purge():
root = get_stage_root()
if os.path.isdir(root):
for stage_dir in os.listdir(root):
- if stage_dir.startswith(stage_prefix) or stage_dir == '.lock':
+ if stage_dir.startswith(stage_prefix) or stage_dir == ".lock":
stage_path = os.path.join(root, stage_dir)
if os.path.isdir(stage_path):
remove_linked_tree(stage_path)
@@ -854,11 +883,11 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
(str): A multi-line string containing versions and corresponding hashes
"""
- batch = kwargs.get('batch', False)
- fetch_options = kwargs.get('fetch_options', None)
- first_stage_function = kwargs.get('first_stage_function', None)
- keep_stage = kwargs.get('keep_stage', False)
- latest = kwargs.get('latest', False)
+ batch = kwargs.get("batch", False)
+ fetch_options = kwargs.get("fetch_options", None)
+ first_stage_function = kwargs.get("first_stage_function", None)
+ keep_stage = kwargs.get("keep_stage", False)
+ latest = kwargs.get("latest", False)
sorted_versions = sorted(url_dict.keys(), reverse=True)
if latest:
@@ -868,19 +897,21 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
max_len = max(len(str(v)) for v in sorted_versions)
num_ver = len(sorted_versions)
- tty.msg('Found {0} version{1} of {2}:'.format(
- num_ver, '' if num_ver == 1 else 's', name),
- '',
- *llnl.util.lang.elide_list(
- ['{0:{1}} {2}'.format(str(v), max_len, url_dict[v])
- for v in sorted_versions]))
+ tty.msg(
+ "Found {0} version{1} of {2}:".format(num_ver, "" if num_ver == 1 else "s", name),
+ "",
+ *llnl.util.lang.elide_list(
+ ["{0:{1}} {2}".format(str(v), max_len, url_dict[v]) for v in sorted_versions]
+ )
+ )
print()
if batch or latest:
archives_to_fetch = len(sorted_versions)
else:
archives_to_fetch = tty.get_number(
- "How many would you like to checksum?", default=1, abort='q')
+ "How many would you like to checksum?", default=1, abort="q"
+ )
if not archives_to_fetch:
tty.die("Aborted.")
@@ -888,19 +919,18 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
versions = sorted_versions[:archives_to_fetch]
urls = [url_dict[v] for v in versions]
- tty.debug('Downloading...')
+ tty.debug("Downloading...")
version_hashes = []
i = 0
errors = []
for url, version in zip(urls, versions):
# Wheels should not be expanded during staging
- expand_arg = ''
- if url.endswith('.whl') or '.whl#' in url:
- expand_arg = ', expand=False'
+ expand_arg = ""
+ if url.endswith(".whl") or ".whl#" in url:
+ expand_arg = ", expand=False"
try:
if fetch_options:
- url_or_fs = fs.URLFetchStrategy(
- url, fetch_options=fetch_options)
+ url_or_fs = fs.URLFetchStrategy(url, fetch_options=fetch_options)
else:
url_or_fs = url
with Stage(url_or_fs, keep=keep_stage) as stage:
@@ -912,13 +942,14 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
first_stage_function(stage, url)
# Checksum the archive and add it to the list
- version_hashes.append((version, spack.util.crypto.checksum(
- hashlib.sha256, stage.archive_file)))
+ version_hashes.append(
+ (version, spack.util.crypto.checksum(hashlib.sha256, stage.archive_file))
+ )
i += 1
except FailedDownloadError:
- errors.append('Failed to fetch {0}'.format(url))
+ errors.append("Failed to fetch {0}".format(url))
except Exception as e:
- tty.msg('Something failed on {0}, skipping. ({1})'.format(url, e))
+ tty.msg("Something failed on {0}, skipping. ({1})".format(url, e))
for msg in errors:
tty.debug(msg)
@@ -930,28 +961,33 @@ def get_checksums_for_versions(url_dict, name, **kwargs):
max_len = max(len(str(v)) for v, h in version_hashes)
# Generate the version directives to put in a package.py
- version_lines = "\n".join([
- " version('{0}', {1}sha256='{2}'{3})".format(
- v, ' ' * (max_len - len(str(v))), h, expand_arg) for v, h in version_hashes
- ])
+ version_lines = "\n".join(
+ [
+ " version('{0}', {1}sha256='{2}'{3})".format(
+ v, " " * (max_len - len(str(v))), h, expand_arg
+ )
+ for v, h in version_hashes
+ ]
+ )
num_hash = len(version_hashes)
- tty.debug('Checksummed {0} version{1} of {2}:'.format(
- num_hash, '' if num_hash == 1 else 's', name))
+ tty.debug(
+ "Checksummed {0} version{1} of {2}:".format(num_hash, "" if num_hash == 1 else "s", name)
+ )
return version_lines
class StageError(spack.error.SpackError):
- """"Superclass for all errors encountered during staging."""
+    """Superclass for all errors encountered during staging."""
class StagePathError(StageError):
- """"Error encountered with stage path."""
+    """Error encountered with stage path."""
class RestageError(StageError):
- """"Error encountered during restaging."""
+    """Error encountered during restaging."""
class VersionFetchError(StageError):
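
For context, an illustrative aside not taken from this diff: the checksumming loop above drives Stage as a context manager. A minimal sketch of that pattern, assuming a hypothetical tarball URL and a default Spack configuration:

import hashlib

import spack.util.crypto
from spack.stage import Stage

url = "https://example.com/pkg-1.0.tar.gz"  # hypothetical URL, for illustration only
with Stage(url, keep=False) as stage:
    stage.fetch()           # download the archive into the stage directory
    stage.expand_archive()  # unpack it under stage.source_path
    sha256 = spack.util.crypto.checksum(hashlib.sha256, stage.archive_file)
print(sha256)
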
diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py
index eeb183e5b4..4e38d0c634 100644
--- a/lib/spack/spack/store.py
+++ b/lib/spack/spack/store.py
@@ -34,7 +34,7 @@ import spack.paths
import spack.util.path
#: default installation root, relative to the Spack install path
-default_install_tree_root = os.path.join(spack.paths.opt_path, 'spack')
+default_install_tree_root = os.path.join(spack.paths.opt_path, "spack")
def parse_install_tree(config_dict):
@@ -66,7 +66,7 @@ def parse_install_tree(config_dict):
# projections:
# all: '{name}-{version}'
- install_tree = config_dict.get('install_tree', {})
+ install_tree = config_dict.get("install_tree", {})
padded_length = False
if isinstance(install_tree, six.string_types):
@@ -75,38 +75,39 @@ def parse_install_tree(config_dict):
unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
# construct projection from previous values for backwards compatibility
all_projection = config_dict.get(
- 'install_path_scheme',
- spack.directory_layout.default_projections['all'])
+ "install_path_scheme", spack.directory_layout.default_projections["all"]
+ )
- projections = {'all': all_projection}
+ projections = {"all": all_projection}
else:
- unpadded_root = install_tree.get('root', default_install_tree_root)
+ unpadded_root = install_tree.get("root", default_install_tree_root)
unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
- padded_length = install_tree.get('padded_length', False)
+ padded_length = install_tree.get("padded_length", False)
if padded_length is True:
padded_length = spack.util.path.get_system_path_max()
padded_length -= spack.util.path.SPACK_MAX_INSTALL_PATH_LENGTH
- projections = install_tree.get(
- 'projections', spack.directory_layout.default_projections)
+ projections = install_tree.get("projections", spack.directory_layout.default_projections)
- path_scheme = config_dict.get('install_path_scheme', None)
+ path_scheme = config_dict.get("install_path_scheme", None)
if path_scheme:
- tty.warn("Deprecated config value 'install_path_scheme' ignored"
- " when using new install_tree syntax")
+ tty.warn(
+ "Deprecated config value 'install_path_scheme' ignored"
+ " when using new install_tree syntax"
+ )
# Handle backwards compatibility for padding
- old_pad = re.search(r'\$padding(:\d+)?|\${padding(:\d+)?}', unpadded_root)
+ old_pad = re.search(r"\$padding(:\d+)?|\${padding(:\d+)?}", unpadded_root)
if old_pad:
if padded_length:
msg = "Ignoring deprecated padding option in install_tree root "
msg += "because new syntax padding is present."
tty.warn(msg)
else:
- unpadded_root = unpadded_root.replace(old_pad.group(0), '')
+ unpadded_root = unpadded_root.replace(old_pad.group(0), "")
if old_pad.group(1) or old_pad.group(2):
- length_group = 2 if '{' in old_pad.group(0) else 1
+ length_group = 2 if "{" in old_pad.group(0) else 1
padded_length = int(old_pad.group(length_group)[1:])
else:
padded_length = spack.util.path.get_system_path_max()
@@ -148,17 +149,16 @@ class Store(object):
hash_length (int): length of the hashes used in the directory
layout; spec hash suffixes will be truncated to this length
"""
- def __init__(
- self, root, unpadded_root=None, projections=None, hash_length=None
- ):
+
+ def __init__(self, root, unpadded_root=None, projections=None, hash_length=None):
self.root = root
self.unpadded_root = unpadded_root or root
self.projections = projections
self.hash_length = hash_length
- self.db = spack.database.Database(
- root, upstream_dbs=retrieve_upstream_dbs())
+ self.db = spack.database.Database(root, upstream_dbs=retrieve_upstream_dbs())
self.layout = spack.directory_layout.DirectoryLayout(
- root, projections=projections, hash_length=hash_length)
+ root, projections=projections, hash_length=hash_length
+ )
def reindex(self):
"""Convenience function to reindex the store DB with its own layout."""
@@ -168,9 +168,7 @@ class Store(object):
"""Return a pickle-able object that can be used to reconstruct
a store.
"""
- return (
- self.root, self.unpadded_root, self.projections, self.hash_length
- )
+ return (self.root, self.unpadded_root, self.projections, self.hash_length)
@staticmethod
def deserialize(token):
@@ -189,24 +187,26 @@ class Store(object):
def _store():
"""Get the singleton store instance."""
import spack.bootstrap
- config_dict = spack.config.get('config')
+
+ config_dict = spack.config.get("config")
root, unpadded_root, projections = parse_install_tree(config_dict)
- hash_length = spack.config.get('config:install_hash_length')
+ hash_length = spack.config.get("config:install_hash_length")
# Check that the user is not trying to install software into the store
# reserved by Spack to bootstrap its own dependencies, since this would
# lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
# user installed software)
- enable_bootstrap = spack.config.get('bootstrap:enable', True)
+ enable_bootstrap = spack.config.get("bootstrap:enable", True)
if enable_bootstrap and spack.bootstrap.store_path() == root:
- msg = ('please change the install tree root "{0}" in your '
- 'configuration [path reserved for Spack internal use]')
+ msg = (
+ 'please change the install tree root "{0}" in your '
+ "configuration [path reserved for Spack internal use]"
+ )
raise ValueError(msg.format(root))
- return Store(root=root,
- unpadded_root=unpadded_root,
- projections=projections,
- hash_length=hash_length)
+ return Store(
+ root=root, unpadded_root=unpadded_root, projections=projections, hash_length=hash_length
+ )
#: Singleton store instance
@@ -262,22 +262,22 @@ def restore(token):
def retrieve_upstream_dbs():
- other_spack_instances = spack.config.get('upstreams', {})
+ other_spack_instances = spack.config.get("upstreams", {})
install_roots = []
for install_properties in other_spack_instances.values():
- install_roots.append(install_properties['install_tree'])
+ install_roots.append(install_properties["install_tree"])
return _construct_upstream_dbs_from_install_roots(install_roots)
-def _construct_upstream_dbs_from_install_roots(
- install_roots, _test=False):
+def _construct_upstream_dbs_from_install_roots(install_roots, _test=False):
accumulated_upstream_dbs = []
for install_root in reversed(install_roots):
upstream_dbs = list(accumulated_upstream_dbs)
next_db = spack.database.Database(
- install_root, is_upstream=True, upstream_dbs=upstream_dbs)
+ install_root, is_upstream=True, upstream_dbs=upstream_dbs
+ )
next_db._fail_when_missing_deps = _test
next_db._read()
accumulated_upstream_dbs.insert(0, next_db)
@@ -320,9 +320,7 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
# For each spec provided, make sure it refers to only one package.
if not multiple and len(current_matches) > 1:
msg_fmt = '"{0}" matches multiple packages: [{1}]'
- errors.append(
- msg_fmt.format(spec, ', '.join([m.format() for m in current_matches]))
- )
+ errors.append(msg_fmt.format(spec, ", ".join([m.format() for m in current_matches])))
# No installed package matches the query
if len(current_matches) == 0 and spec is not any:
@@ -334,7 +332,7 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
if errors:
raise MatchError(
message="errors occurred when looking for specs in the store",
- long_message='\n'.join(errors)
+ long_message="\n".join(errors),
)
return matching_specs
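
For orientation (not part of the commit): parse_install_tree() above accepts both the legacy string form and the newer dictionary form of the install_tree setting. A minimal sketch with example values only:

import spack.store

# Legacy syntax: install_tree is just a root path string.
legacy = {"install_tree": "$spack/opt/spack"}

# Newer syntax: an explicit root plus optional padding and projections.
modern = {
    "install_tree": {
        "root": "$spack/opt/spack",
        "padded_length": 128,  # or True to pad to the system path maximum
        "projections": {"all": "{name}-{version}-{hash}"},
    }
}

root, unpadded_root, projections = spack.store.parse_install_tree(modern)
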
diff --git a/lib/spack/spack/subprocess_context.py b/lib/spack/spack/subprocess_context.py
index 4251516f82..7199112eb7 100644
--- a/lib/spack/spack/subprocess_context.py
+++ b/lib/spack/spack/subprocess_context.py
@@ -27,8 +27,7 @@ import spack.platforms
import spack.repo
import spack.store
-_serialize = sys.platform == 'win32' or (sys.version_info >= (3, 8)
- and sys.platform == 'darwin')
+_serialize = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
patches = None
@@ -58,15 +57,14 @@ class SpackTestProcess(object):
def create(self):
test_state = TestState()
- return multiprocessing.Process(
- target=self._restore_and_run,
- args=(self.fn, test_state))
+ return multiprocessing.Process(target=self._restore_and_run, args=(self.fn, test_state))
class PackageInstallContext(object):
"""Captures the in-memory process state of a package installation that
needs to be transmitted to a child process.
"""
+
def __init__(self, pkg):
if _serialize:
self.serialized_pkg = serialize(pkg)
@@ -93,6 +91,7 @@ class TestState(object):
applied to a subprocess. This isn't needed outside of a testing environment
but this logic is designed to behave the same inside or outside of tests.
"""
+
def __init__(self):
if _serialize:
self.repo_dirs = list(r.root for r in spack.repo.path.repos)
@@ -119,10 +118,8 @@ class TestState(object):
class TestPatches(object):
def __init__(self, module_patches, class_patches):
- self.module_patches = list(
- (x, y, serialize(z)) for (x, y, z) in module_patches)
- self.class_patches = list(
- (x, y, serialize(z)) for (x, y, z) in class_patches)
+ self.module_patches = list((x, y, serialize(z)) for (x, y, z) in module_patches)
+ self.class_patches = list((x, y, serialize(z)) for (x, y, z) in class_patches)
def restore(self):
for module_name, attr_name, value in self.module_patches:
@@ -148,7 +145,7 @@ def store_patches():
module_patches.append((module_name, name, new_val))
elif isinstance(target, type):
new_val = getattr(target, name)
- class_fqn = '.'.join([target.__module__, target.__name__])
+ class_fqn = ".".join([target.__module__, target.__name__])
class_patches.append((class_fqn, name, new_val))
return TestPatches(module_patches, class_patches)
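
As an aside that is not part of the diff: SpackTestProcess above pairs a callable with the captured TestState and returns a plain multiprocessing.Process. A minimal sketch, assuming the constructor simply stores the callable on self.fn, as the create() method suggests:

import spack.subprocess_context as sc

def worker():
    # runs in the child with the parent's Spack state restored
    print("hello from the child process")

proc = sc.SpackTestProcess(worker).create()  # assumed constructor signature
proc.start()
proc.join()
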
diff --git a/lib/spack/spack/tag.py b/lib/spack/spack/tag.py
index 86988738b0..44c162ccc1 100644
--- a/lib/spack/spack/tag.py
+++ b/lib/spack/spack/tag.py
@@ -39,8 +39,9 @@ def packages_with_tags(tags, installed, skip_empty):
spec_names = _get_installed_package_names() if installed else []
keys = spack.repo.path.tag_index if tags is None else tags
for tag in keys:
- packages = [name for name in spack.repo.path.tag_index[tag] if
- not installed or name in spec_names]
+ packages = [
+ name for name in spack.repo.path.tag_index[tag] if not installed or name in spec_names
+ ]
if packages or not skip_empty:
tag_pkgs[tag] = packages
return tag_pkgs
@@ -57,7 +58,7 @@ class TagIndex(Mapping):
return self._tag_dict
def to_json(self, stream):
- sjson.dump({'tags': self._tag_dict}, stream)
+ sjson.dump({"tags": self._tag_dict}, stream)
@staticmethod
def from_json(stream):
@@ -66,12 +67,12 @@ class TagIndex(Mapping):
if not isinstance(d, dict):
raise TagIndexError("TagIndex data was not a dict.")
- if 'tags' not in d:
+ if "tags" not in d:
raise TagIndexError("TagIndex data does not start with 'tags'")
r = TagIndex()
- for tag, packages in d['tags'].items():
+ for tag, packages in d["tags"].items():
r[tag].extend(packages)
return r
@@ -101,7 +102,7 @@ class TagIndex(Mapping):
Args:
other (TagIndex): tag index to be merged
"""
- other = other.copy() # defensive copy.
+        other = other.copy()  # defensive copy.
for tag in other.tags:
if tag not in self.tags:
@@ -126,7 +127,7 @@ class TagIndex(Mapping):
pkg_list.remove(pkg_name)
# Add it again under the appropriate tags
- for tag in getattr(pkg_cls, 'tags', []):
+ for tag in getattr(pkg_cls, "tags", []):
tag = tag.lower()
self._tag_dict[tag].append(pkg_cls.name)
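
For context (an illustrative sketch, not from the diff): to_json()/from_json() above serialize a TagIndex as a single top-level "tags" mapping from tag name to package names. A small round-trip example with made-up package names:

import io

from spack.tag import TagIndex

stream = io.StringIO('{"tags": {"hpc": ["mpileaks", "hdf5"]}}')
index = TagIndex.from_json(stream)  # rebuilds the {tag: [package names]} mapping

out = io.StringIO()
index.to_json(out)                  # writes the same {"tags": ...} structure back
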
diff --git a/lib/spack/spack/target.py b/lib/spack/spack/target.py
index c7a166b9f8..a85baa4d45 100644
--- a/lib/spack/spack/target.py
+++ b/lib/spack/spack/target.py
@@ -20,6 +20,7 @@ def _ensure_other_is_target(method):
"""In a single argument method, ensure that the argument is an
instance of ``Target``.
"""
+
@functools.wraps(method)
def _impl(self, other):
if isinstance(other, six.string_types):
@@ -44,9 +45,7 @@ class Target(object):
like Cray (e.g. craype-compiler)
"""
if not isinstance(name, archspec.cpu.Microarchitecture):
- name = archspec.cpu.TARGETS.get(
- name, archspec.cpu.generic_microarchitecture(name)
- )
+ name = archspec.cpu.TARGETS.get(name, archspec.cpu.generic_microarchitecture(name))
self.microarchitecture = name
self.module_name = module_name
@@ -56,8 +55,10 @@ class Target(object):
@_ensure_other_is_target
def __eq__(self, other):
- return (self.microarchitecture == other.microarchitecture and
- self.module_name == other.module_name)
+ return (
+ self.microarchitecture == other.microarchitecture
+ and self.module_name == other.module_name
+ )
def __ne__(self, other):
# This method is necessary as long as we support Python 2. In Python 3
@@ -88,7 +89,7 @@ class Target(object):
# TODO: just the name. We can use that information to reconstruct an
# TODO: "old" micro-architecture or check the current definition.
target_info = dict_or_value
- return Target(target_info['name'])
+ return Target(target_info["name"])
def to_dict_or_value(self):
"""Returns a dict or a value representing the current target.
@@ -100,18 +101,15 @@ class Target(object):
"""
# Generic targets represent either an architecture
# family (like x86_64) or a custom micro-architecture
- if self.microarchitecture.vendor == 'generic':
+ if self.microarchitecture.vendor == "generic":
return str(self)
- return syaml.syaml_dict(
- self.microarchitecture.to_dict(return_list_of_items=True)
- )
+ return syaml.syaml_dict(self.microarchitecture.to_dict(return_list_of_items=True))
def __repr__(self):
cls_name = self.__class__.__name__
- fmt = cls_name + '({0}, {1})'
- return fmt.format(repr(self.microarchitecture),
- repr(self.module_name))
+ fmt = cls_name + "({0}, {1})"
+ return fmt.format(repr(self.microarchitecture), repr(self.module_name))
def __str__(self):
return str(self.microarchitecture)
@@ -130,20 +128,20 @@ class Target(object):
# Mixed toolchains are not supported yet
if isinstance(compiler, spack.compiler.Compiler):
if spack.compilers.is_mixed_toolchain(compiler):
- msg = ('microarchitecture specific optimizations are not '
- 'supported yet on mixed compiler toolchains [check'
- ' {0.name}@{0.version} for further details]')
+ msg = (
+ "microarchitecture specific optimizations are not "
+ "supported yet on mixed compiler toolchains [check"
+ " {0.name}@{0.version} for further details]"
+ )
tty.debug(msg.format(compiler))
- return ''
+ return ""
# Try to check if the current compiler comes with a version number or
# has an unexpected suffix. If so, treat it as a compiler with a
# custom spec.
compiler_version = compiler.version
- version_number, suffix = archspec.cpu.version_components(
- compiler.version
- )
- if not version_number or suffix not in ('', 'apple'):
+ version_number, suffix = archspec.cpu.version_components(compiler.version)
+ if not version_number or suffix not in ("", "apple"):
# Try to deduce the underlying version of the compiler, regardless
# of its name in compilers.yaml. Depending on where this function
# is called we might get either a CompilerSpec or a fully fledged
@@ -156,6 +154,4 @@ class Target(object):
# log this and just return compiler.version instead
tty.debug(str(e))
- return self.microarchitecture.optimization_flags(
- compiler.name, str(compiler_version)
- )
+ return self.microarchitecture.optimization_flags(compiler.name, str(compiler_version))
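
For context, a sketch not contained in the commit: the Target API reworked above is exercised the same way as in the tests further down; build a Target from a microarchitecture name and ask it for compiler-specific optimization flags. This assumes a matching gcc entry exists in compilers.yaml:

import spack.compilers
import spack.target

target = spack.target.Target("haswell")
compiler = spack.compilers.compilers_for_spec("gcc@9.2.0").pop()  # assumes this compiler is configured
print(target.optimization_flags(compiler))  # e.g. "-march=haswell -mtune=haswell" with a recent gcc
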
diff --git a/lib/spack/spack/tengine.py b/lib/spack/spack/tengine.py
index 0be15dd238..34db15d832 100644
--- a/lib/spack/spack/tengine.py
+++ b/lib/spack/spack/tengine.py
@@ -18,6 +18,7 @@ class ContextMeta(type):
"""Meta class for Context. It helps reducing the boilerplate in
client code.
"""
+
#: Keeps track of the context properties that have been added
#: by the class that is being defined
_new_context_properties = [] # type: List[str]
@@ -37,7 +38,7 @@ class ContextMeta(type):
cls._new_context_properties = []
# Attach the list to the class being created
- attr_dict['context_properties'] = context_properties
+ attr_dict["context_properties"] = context_properties
return super(ContextMeta, cls).__new__(cls, name, bases, attr_dict)
@@ -70,11 +71,9 @@ def make_environment(dirs=None):
"""Returns an configured environment for template rendering."""
if dirs is None:
# Default directories where to search for templates
- builtins = spack.config.get('config:template_dirs',
- ['$spack/share/spack/templates'])
+ builtins = spack.config.get("config:template_dirs", ["$spack/share/spack/templates"])
extensions = spack.extensions.get_template_dirs()
- dirs = [canonicalize_path(d)
- for d in itertools.chain(builtins, extensions)]
+ dirs = [canonicalize_path(d) for d in itertools.chain(builtins, extensions)]
# avoid importing this at the top level as it's used infrequently and
# slows down startup a bit.
@@ -83,9 +82,7 @@ def make_environment(dirs=None):
# Loader for the templates
loader = jinja2.FileSystemLoader(dirs)
# Environment of the template engine
- env = jinja2.Environment(
- loader=loader, trim_blocks=True, lstrip_blocks=True
- )
+ env = jinja2.Environment(loader=loader, trim_blocks=True, lstrip_blocks=True)
# Custom filters
_set_filters(env)
return env
@@ -93,6 +90,7 @@ def make_environment(dirs=None):
# Extra filters for template engine environment
+
def prepend_to_line(text, token):
"""Prepends a token to each line in text"""
return [token + line for line in text]
@@ -105,7 +103,7 @@ def quote(text):
def _set_filters(env):
"""Sets custom filters to the template engine environment"""
- env.filters['textwrap'] = textwrap.wrap
- env.filters['prepend_to_line'] = prepend_to_line
- env.filters['join'] = '\n'.join
- env.filters['quote'] = quote
+ env.filters["textwrap"] = textwrap.wrap
+ env.filters["prepend_to_line"] = prepend_to_line
+ env.filters["join"] = "\n".join
+ env.filters["quote"] = quote
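
To illustrate (this example is not part of the diff): a minimal sketch of how the template environment configured above might be used, assuming a hypothetical template file sits in one of the configured template directories:

import spack.tengine as tengine

env = tengine.make_environment()
template = env.get_template("example.txt")            # hypothetical template name
print(template.render(greeting="Hello from Spack"))   # render with arbitrary context variables
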
diff --git a/lib/spack/spack/test/abi.py b/lib/spack/spack/test/abi.py
index a390043efa..5c3d0fa83d 100644
--- a/lib/spack/spack/test/abi.py
+++ b/lib/spack/spack/test/abi.py
@@ -11,56 +11,56 @@ from spack.spec import Spec
@pytest.mark.parametrize(
- 'target,constraint,expected',
+ "target,constraint,expected",
[
- ('foo', 'bar', True),
- ('platform=linux', 'foo', True),
- ('foo', 'arch=linux-fedora31-x86_64', True),
- ('arch=linux-fedora31-skylake', 'arch=linux-fedora31-skylake', True),
- ('arch=linux-fedora31-skylake', 'arch=linux-fedora31-x86_64', False),
- ('platform=linux os=fedora31', 'arch=linux-fedora31-x86_64', True),
- ('platform=linux', 'arch=linux-fedora31-x86_64', True),
- ('platform=linux os=fedora31', 'platform=linux', True),
- ('platform=darwin', 'arch=linux-fedora31-x86_64', False),
- ('os=fedora31', 'platform=linux', False), # TODO should be true ?
- ])
+ ("foo", "bar", True),
+ ("platform=linux", "foo", True),
+ ("foo", "arch=linux-fedora31-x86_64", True),
+ ("arch=linux-fedora31-skylake", "arch=linux-fedora31-skylake", True),
+ ("arch=linux-fedora31-skylake", "arch=linux-fedora31-x86_64", False),
+ ("platform=linux os=fedora31", "arch=linux-fedora31-x86_64", True),
+ ("platform=linux", "arch=linux-fedora31-x86_64", True),
+ ("platform=linux os=fedora31", "platform=linux", True),
+ ("platform=darwin", "arch=linux-fedora31-x86_64", False),
+ ("os=fedora31", "platform=linux", False), # TODO should be true ?
+ ],
+)
def test_architecture_compatibility(target, constraint, expected):
- assert ABI().architecture_compatible(Spec(target),
- Spec(constraint)) == expected
+ assert ABI().architecture_compatible(Spec(target), Spec(constraint)) == expected
@pytest.mark.parametrize(
- 'target,constraint,loose,expected',
+ "target,constraint,loose,expected",
[
- ('foo', 'bar', False, True),
- ('%gcc', 'foo', False, True),
- ('foo', '%gcc', False, True),
- ('%gcc', '%gcc', False, True),
- ('%gcc', '%intel', False, False),
- ('%gcc', '%clang', False, False),
- ('%gcc@9.1', '%gcc@9.2', False, False), # TODO should be true ?
- ('%gcc@9.2.1', '%gcc@9.2.2', False, False), # TODO should be true ?
- ('%gcc@4.9', '%gcc@9.2', False, False),
- ('%clang@5', '%clang@6', False, False),
- ('%gcc@9.1', '%gcc@9.2', True, True),
- ('%gcc@9.2.1', '%gcc@9.2.2', True, True),
- ('%gcc@4.9', '%gcc@9.2', True, True),
- ('%clang@5', '%clang@6', True, True),
- ])
+ ("foo", "bar", False, True),
+ ("%gcc", "foo", False, True),
+ ("foo", "%gcc", False, True),
+ ("%gcc", "%gcc", False, True),
+ ("%gcc", "%intel", False, False),
+ ("%gcc", "%clang", False, False),
+ ("%gcc@9.1", "%gcc@9.2", False, False), # TODO should be true ?
+ ("%gcc@9.2.1", "%gcc@9.2.2", False, False), # TODO should be true ?
+ ("%gcc@4.9", "%gcc@9.2", False, False),
+ ("%clang@5", "%clang@6", False, False),
+ ("%gcc@9.1", "%gcc@9.2", True, True),
+ ("%gcc@9.2.1", "%gcc@9.2.2", True, True),
+ ("%gcc@4.9", "%gcc@9.2", True, True),
+ ("%clang@5", "%clang@6", True, True),
+ ],
+)
def test_compiler_compatibility(target, constraint, loose, expected):
- assert ABI().compiler_compatible(Spec(target),
- Spec(constraint),
- loose=loose) == expected
+ assert ABI().compiler_compatible(Spec(target), Spec(constraint), loose=loose) == expected
-@pytest.mark.parametrize('target,constraint,loose,expected', [
- ('foo', 'bar', False, True),
- ('%gcc', 'platform=linux', False, True),
- ('%gcc@9.2.1', '%gcc@8.3.1 platform=linux', False, False),
- ('%gcc@9.2.1', '%gcc@8.3.1 platform=linux', True, True),
- ('%gcc@9.2.1 arch=linux-fedora31-skylake', '%gcc@9.2.1 platform=linux',
- False, True),
-])
+@pytest.mark.parametrize(
+ "target,constraint,loose,expected",
+ [
+ ("foo", "bar", False, True),
+ ("%gcc", "platform=linux", False, True),
+ ("%gcc@9.2.1", "%gcc@8.3.1 platform=linux", False, False),
+ ("%gcc@9.2.1", "%gcc@8.3.1 platform=linux", True, True),
+ ("%gcc@9.2.1 arch=linux-fedora31-skylake", "%gcc@9.2.1 platform=linux", False, True),
+ ],
+)
def test_compatibility(target, constraint, loose, expected):
- assert ABI().compatible(Spec(target), Spec(constraint),
- loose=loose) == expected
+ assert ABI().compatible(Spec(target), Spec(constraint), loose=loose) == expected
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
index 18a8dbe1b6..64bb57d474 100644
--- a/lib/spack/spack/test/architecture.py
+++ b/lib/spack/spack/test/architecture.py
@@ -20,13 +20,13 @@ def current_host_platform():
"""Return the platform of the current host as detected by the
'platform' stdlib package.
"""
- if os.path.exists('/opt/cray/pe'):
+ if os.path.exists("/opt/cray/pe"):
current_platform = spack.platforms.Cray()
- elif 'Linux' in platform.system():
+ elif "Linux" in platform.system():
current_platform = spack.platforms.Linux()
- elif 'Darwin' in platform.system():
+ elif "Darwin" in platform.system():
current_platform = spack.platforms.Darwin()
- elif 'Windows' in platform.system():
+ elif "Windows" in platform.system():
current_platform = spack.platforms.Windows()
return current_platform
@@ -36,8 +36,7 @@ valid_keywords = ["fe", "be", "frontend", "backend"]
@pytest.fixture(
- params=([x for x in spack.platforms.Test().targets]
- + valid_keywords + ['default_target'])
+ params=([x for x in spack.platforms.Test().targets] + valid_keywords + ["default_target"])
)
def target_str(request):
"""All the possible strings that can be used for targets"""
@@ -45,8 +44,7 @@ def target_str(request):
@pytest.fixture(
- params=([x for x in spack.platforms.Test().operating_sys]
- + valid_keywords + ['default_os'])
+ params=([x for x in spack.platforms.Test().operating_sys] + valid_keywords + ["default_os"])
)
def os_str(request):
"""All the possible strings that can be used for operating systems"""
@@ -59,8 +57,7 @@ def test_platform(current_host_platform):
assert str(detected_platform) == str(current_host_platform)
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_user_input_combination(config, target_str, os_str):
"""Test for all the valid user input combinations that both the target and
the operating system match.
@@ -78,120 +75,136 @@ def test_user_input_combination(config, target_str, os_str):
def test_operating_system_conversion_to_dict():
- operating_system = spack.operating_systems.OperatingSystem('os', '1.0')
- assert operating_system.to_dict() == {
- 'name': 'os', 'version': '1.0'
- }
-
-
-@pytest.mark.parametrize('cpu_flag,target_name', [
- # Test that specific flags can be used in queries
- ('ssse3', 'haswell'),
- ('popcnt', 'nehalem'),
- ('avx512f', 'skylake_avx512'),
- ('avx512ifma', 'icelake'),
- # Test that proxy flags can be used in queries too
- ('sse3', 'nehalem'),
- ('avx512', 'skylake_avx512'),
- ('avx512', 'icelake'),
-])
+ operating_system = spack.operating_systems.OperatingSystem("os", "1.0")
+ assert operating_system.to_dict() == {"name": "os", "version": "1.0"}
+
+
+@pytest.mark.parametrize(
+ "cpu_flag,target_name",
+ [
+ # Test that specific flags can be used in queries
+ ("ssse3", "haswell"),
+ ("popcnt", "nehalem"),
+ ("avx512f", "skylake_avx512"),
+ ("avx512ifma", "icelake"),
+ # Test that proxy flags can be used in queries too
+ ("sse3", "nehalem"),
+ ("avx512", "skylake_avx512"),
+ ("avx512", "icelake"),
+ ],
+)
def test_target_container_semantic(cpu_flag, target_name):
target = spack.target.Target(target_name)
assert cpu_flag in target
-@pytest.mark.parametrize('item,architecture_str', [
- # We can search the architecture string representation
- ('linux', 'linux-ubuntu18.04-haswell'),
- ('ubuntu', 'linux-ubuntu18.04-haswell'),
- ('haswell', 'linux-ubuntu18.04-haswell'),
- # We can also search flags of the target,
- ('avx512', 'linux-ubuntu18.04-icelake'),
-])
+@pytest.mark.parametrize(
+ "item,architecture_str",
+ [
+ # We can search the architecture string representation
+ ("linux", "linux-ubuntu18.04-haswell"),
+ ("ubuntu", "linux-ubuntu18.04-haswell"),
+ ("haswell", "linux-ubuntu18.04-haswell"),
+ # We can also search flags of the target,
+ ("avx512", "linux-ubuntu18.04-icelake"),
+ ],
+)
def test_arch_spec_container_semantic(item, architecture_str):
architecture = spack.spec.ArchSpec(architecture_str)
assert item in architecture
-@pytest.mark.parametrize('compiler_spec,target_name,expected_flags', [
- # Check compilers with version numbers from a single toolchain
- ('gcc@4.7.2', 'ivybridge', '-march=core-avx-i -mtune=core-avx-i'),
- # Check mixed toolchains
- ('clang@8.0.0', 'broadwell', ''),
- ('clang@3.5', 'x86_64', '-march=x86-64 -mtune=generic'),
- # Check Apple's Clang compilers
- ('apple-clang@9.1.0', 'x86_64', '-march=x86-64')
-])
+@pytest.mark.parametrize(
+ "compiler_spec,target_name,expected_flags",
+ [
+ # Check compilers with version numbers from a single toolchain
+ ("gcc@4.7.2", "ivybridge", "-march=core-avx-i -mtune=core-avx-i"),
+ # Check mixed toolchains
+ ("clang@8.0.0", "broadwell", ""),
+ ("clang@3.5", "x86_64", "-march=x86-64 -mtune=generic"),
+ # Check Apple's Clang compilers
+ ("apple-clang@9.1.0", "x86_64", "-march=x86-64"),
+ ],
+)
@pytest.mark.filterwarnings("ignore:microarchitecture specific")
-def test_optimization_flags(
- compiler_spec, target_name, expected_flags, config
-):
+def test_optimization_flags(compiler_spec, target_name, expected_flags, config):
target = spack.target.Target(target_name)
compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
opt_flags = target.optimization_flags(compiler)
assert opt_flags == expected_flags
-@pytest.mark.parametrize('compiler,real_version,target_str,expected_flags', [
- (spack.spec.CompilerSpec('gcc@9.2.0'), None, 'haswell',
- '-march=haswell -mtune=haswell'),
- # Check that custom string versions are accepted
- (spack.spec.CompilerSpec('gcc@foo'), '9.2.0', 'icelake',
- '-march=icelake-client -mtune=icelake-client'),
- # Check that we run version detection (4.4.0 doesn't support icelake)
- (spack.spec.CompilerSpec('gcc@4.4.0-special'), '9.2.0', 'icelake',
- '-march=icelake-client -mtune=icelake-client'),
- # Check that the special case for Apple's clang is treated correctly
- # i.e. it won't try to detect the version again
- (spack.spec.CompilerSpec('apple-clang@9.1.0'), None, 'x86_64',
- '-march=x86-64'),
-])
+@pytest.mark.parametrize(
+ "compiler,real_version,target_str,expected_flags",
+ [
+ (spack.spec.CompilerSpec("gcc@9.2.0"), None, "haswell", "-march=haswell -mtune=haswell"),
+ # Check that custom string versions are accepted
+ (
+ spack.spec.CompilerSpec("gcc@foo"),
+ "9.2.0",
+ "icelake",
+ "-march=icelake-client -mtune=icelake-client",
+ ),
+ # Check that we run version detection (4.4.0 doesn't support icelake)
+ (
+ spack.spec.CompilerSpec("gcc@4.4.0-special"),
+ "9.2.0",
+ "icelake",
+ "-march=icelake-client -mtune=icelake-client",
+ ),
+ # Check that the special case for Apple's clang is treated correctly
+ # i.e. it won't try to detect the version again
+ (spack.spec.CompilerSpec("apple-clang@9.1.0"), None, "x86_64", "-march=x86-64"),
+ ],
+)
def test_optimization_flags_with_custom_versions(
- compiler, real_version, target_str, expected_flags, monkeypatch, config
+ compiler, real_version, target_str, expected_flags, monkeypatch, config
):
target = spack.target.Target(target_str)
if real_version:
- monkeypatch.setattr(
- spack.compiler.Compiler, 'get_real_version',
- lambda x: real_version)
+ monkeypatch.setattr(spack.compiler.Compiler, "get_real_version", lambda x: real_version)
opt_flags = target.optimization_flags(compiler)
assert opt_flags == expected_flags
-@pytest.mark.regression('15306')
-@pytest.mark.parametrize('architecture_tuple,constraint_tuple', [
- (('linux', 'ubuntu18.04', None), ('linux', None, 'x86_64')),
- (('linux', 'ubuntu18.04', None), ('linux', None, 'x86_64:')),
-])
-def test_satisfy_strict_constraint_when_not_concrete(
- architecture_tuple, constraint_tuple
-):
+@pytest.mark.regression("15306")
+@pytest.mark.parametrize(
+ "architecture_tuple,constraint_tuple",
+ [
+ (("linux", "ubuntu18.04", None), ("linux", None, "x86_64")),
+ (("linux", "ubuntu18.04", None), ("linux", None, "x86_64:")),
+ ],
+)
+def test_satisfy_strict_constraint_when_not_concrete(architecture_tuple, constraint_tuple):
architecture = spack.spec.ArchSpec(architecture_tuple)
constraint = spack.spec.ArchSpec(constraint_tuple)
assert not architecture.satisfies(constraint, strict=True)
-@pytest.mark.parametrize('root_target_range,dep_target_range,result', [
- ('x86_64:nocona', 'x86_64:core2', 'nocona'), # pref not in intersection
- ('x86_64:core2', 'x86_64:nocona', 'nocona'),
- ('x86_64:haswell', 'x86_64:mic_knl', 'core2'), # pref in intersection
- ('ivybridge', 'nocona:skylake', 'ivybridge'), # one side concrete
- ('haswell:icelake', 'broadwell', 'broadwell'),
- # multiple ranges in lists with multiple overlaps
- ('x86_64:nocona,haswell:broadwell', 'nocona:haswell,skylake:', 'nocona'),
- # lists with concrete targets, lists compared to ranges
- ('x86_64,haswell', 'core2:broadwell', 'haswell')
-])
-@pytest.mark.usefixtures('mock_packages', 'config')
-def test_concretize_target_ranges(
- root_target_range, dep_target_range, result
-):
+@pytest.mark.parametrize(
+ "root_target_range,dep_target_range,result",
+ [
+ ("x86_64:nocona", "x86_64:core2", "nocona"), # pref not in intersection
+ ("x86_64:core2", "x86_64:nocona", "nocona"),
+ ("x86_64:haswell", "x86_64:mic_knl", "core2"), # pref in intersection
+ ("ivybridge", "nocona:skylake", "ivybridge"), # one side concrete
+ ("haswell:icelake", "broadwell", "broadwell"),
+ # multiple ranges in lists with multiple overlaps
+ ("x86_64:nocona,haswell:broadwell", "nocona:haswell,skylake:", "nocona"),
+ # lists with concrete targets, lists compared to ranges
+ ("x86_64,haswell", "core2:broadwell", "haswell"),
+ ],
+)
+@pytest.mark.usefixtures("mock_packages", "config")
+def test_concretize_target_ranges(root_target_range, dep_target_range, result):
# use foobar=bar to make the problem simpler for the old concretizer
# the new concretizer should not need that help
- spec_str = ('a %%gcc@10 foobar=bar target=%s ^b target=%s' %
- (root_target_range, dep_target_range))
+ spec_str = "a %%gcc@10 foobar=bar target=%s ^b target=%s" % (
+ root_target_range,
+ dep_target_range,
+ )
spec = spack.spec.Spec(spec_str)
with spack.concretize.disable_compiler_existence_check():
spec.concretize()
- assert str(spec).count('arch=test-debian6-%s' % result) == 2
+ assert str(spec).count("arch=test-debian6-%s" % result) == 2
diff --git a/lib/spack/spack/test/audit.py b/lib/spack/spack/test/audit.py
index 9cb36b5047..890a8dcaf8 100644
--- a/lib/spack/spack/test/audit.py
+++ b/lib/spack/spack/test/audit.py
@@ -8,25 +8,28 @@ import spack.audit
import spack.config
-@pytest.mark.parametrize('packages,expected_error', [
- # A non existing variant is used in a conflict directive
- (['wrong-variant-in-conflicts'], 'PKG-DIRECTIVES'),
- # The package declares a non-existing dependency
- (['missing-dependency'], 'PKG-DIRECTIVES'),
- # The package use a non existing variant in a depends_on directive
- (['wrong-variant-in-depends-on'], 'PKG-DIRECTIVES'),
- # This package has a GitHub patch URL without full_index=1
- (['invalid-github-patch-url'], 'PKG-DIRECTIVES'),
- # This package has a stand-alone 'test' method in build-time callbacks
- (['test-build-callbacks'], 'PKG-DIRECTIVES'),
- # This package has no issues
- (['mpileaks'], None),
- # This package has a conflict with a trigger which cannot constrain the constraint
- # Should not raise an error
- (['unconstrainable-conflict'], None),
-])
+@pytest.mark.parametrize(
+ "packages,expected_error",
+ [
+ # A non existing variant is used in a conflict directive
+ (["wrong-variant-in-conflicts"], "PKG-DIRECTIVES"),
+ # The package declares a non-existing dependency
+ (["missing-dependency"], "PKG-DIRECTIVES"),
+        # The package uses a non-existent variant in a depends_on directive
+ (["wrong-variant-in-depends-on"], "PKG-DIRECTIVES"),
+ # This package has a GitHub patch URL without full_index=1
+ (["invalid-github-patch-url"], "PKG-DIRECTIVES"),
+ # This package has a stand-alone 'test' method in build-time callbacks
+ (["test-build-callbacks"], "PKG-DIRECTIVES"),
+ # This package has no issues
+ (["mpileaks"], None),
+ # This package has a conflict with a trigger which cannot constrain the constraint
+ # Should not raise an error
+ (["unconstrainable-conflict"], None),
+ ],
+)
def test_package_audits(packages, expected_error, mock_packages):
- reports = spack.audit.run_group('packages', pkgs=packages)
+ reports = spack.audit.run_group("packages", pkgs=packages)
# Check that errors were reported only for the expected failure
actual_errors = [check for check, errors in reports if errors]
@@ -38,53 +41,64 @@ def test_package_audits(packages, expected_error, mock_packages):
# Data used in the test below to audit the double definition of a compiler
_double_compiler_definition = [
- {'compiler': {
- 'spec': 'gcc@9.0.1',
- 'paths': {
- 'cc': '/usr/bin/gcc-9',
- 'cxx': '/usr/bin/g++-9',
- 'f77': '/usr/bin/gfortran-9',
- 'fc': '/usr/bin/gfortran-9'
- },
- 'flags': {},
- 'operating_system': 'ubuntu18.04',
- 'target': 'x86_64',
- 'modules': [],
- 'environment': {},
- 'extra_rpaths': []
- }},
- {'compiler': {
- 'spec': 'gcc@9.0.1',
- 'paths': {
- 'cc': '/usr/bin/gcc-9',
- 'cxx': '/usr/bin/g++-9',
- 'f77': '/usr/bin/gfortran-9',
- 'fc': '/usr/bin/gfortran-9'
- },
- 'flags': {"cflags": "-O3"},
- 'operating_system': 'ubuntu18.04',
- 'target': 'x86_64',
- 'modules': [],
- 'environment': {},
- 'extra_rpaths': []
- }}
+ {
+ "compiler": {
+ "spec": "gcc@9.0.1",
+ "paths": {
+ "cc": "/usr/bin/gcc-9",
+ "cxx": "/usr/bin/g++-9",
+ "f77": "/usr/bin/gfortran-9",
+ "fc": "/usr/bin/gfortran-9",
+ },
+ "flags": {},
+ "operating_system": "ubuntu18.04",
+ "target": "x86_64",
+ "modules": [],
+ "environment": {},
+ "extra_rpaths": [],
+ }
+ },
+ {
+ "compiler": {
+ "spec": "gcc@9.0.1",
+ "paths": {
+ "cc": "/usr/bin/gcc-9",
+ "cxx": "/usr/bin/g++-9",
+ "f77": "/usr/bin/gfortran-9",
+ "fc": "/usr/bin/gfortran-9",
+ },
+ "flags": {"cflags": "-O3"},
+ "operating_system": "ubuntu18.04",
+ "target": "x86_64",
+ "modules": [],
+ "environment": {},
+ "extra_rpaths": [],
+ }
+ },
]
-@pytest.mark.parametrize('config_section,data,failing_check', [
- # Double compiler definitions in compilers.yaml
- ('compilers', _double_compiler_definition, 'CFG-COMPILER'),
- # Multiple definitions of the same external spec in packages.yaml
- ('packages', {
- "mpileaks": {"externals": [
- {"spec": "mpileaks@1.0.0", "prefix": "/"},
- {"spec": "mpileaks@1.0.0", "prefix": "/usr"},
- ]}
- }, 'CFG-PACKAGES')
-])
+@pytest.mark.parametrize(
+ "config_section,data,failing_check",
+ [
+ # Double compiler definitions in compilers.yaml
+ ("compilers", _double_compiler_definition, "CFG-COMPILER"),
+ # Multiple definitions of the same external spec in packages.yaml
+ (
+ "packages",
+ {
+ "mpileaks": {
+ "externals": [
+ {"spec": "mpileaks@1.0.0", "prefix": "/"},
+ {"spec": "mpileaks@1.0.0", "prefix": "/usr"},
+ ]
+ }
+ },
+ "CFG-PACKAGES",
+ ),
+ ],
+)
def test_config_audits(config_section, data, failing_check):
with spack.config.override(config_section, data):
- reports = spack.audit.run_group('configs')
- assert any(
- (check == failing_check) and errors for check, errors in reports
- )
+ reports = spack.audit.run_group("configs")
+ assert any((check == failing_check) and errors for check, errors in reports)
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index 21b264b4dc..b2bbbe200c 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -24,20 +24,19 @@ from spack.directory_layout import DirectoryLayout
from spack.paths import test_path
from spack.spec import Spec
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-mirror_cmd = spack.main.SpackCommand('mirror')
-install_cmd = spack.main.SpackCommand('install')
-uninstall_cmd = spack.main.SpackCommand('uninstall')
-buildcache_cmd = spack.main.SpackCommand('buildcache')
+mirror_cmd = spack.main.SpackCommand("mirror")
+install_cmd = spack.main.SpackCommand("install")
+uninstall_cmd = spack.main.SpackCommand("uninstall")
+buildcache_cmd = spack.main.SpackCommand("buildcache")
-legacy_mirror_dir = os.path.join(test_path, 'data', 'mirrors', 'legacy_yaml')
+legacy_mirror_dir = os.path.join(test_path, "data", "mirrors", "legacy_yaml")
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def cache_directory(tmpdir):
- fetch_cache_dir = tmpdir.ensure('fetch_cache', dir=True)
+ fetch_cache_dir = tmpdir.ensure("fetch_cache", dir=True)
fsc = spack.fetch_strategy.FsCache(str(fetch_cache_dir))
spack.config.caches, old_cache_path = fsc, spack.caches.fetch_cache
@@ -47,96 +46,97 @@ def cache_directory(tmpdir):
spack.config.caches = old_cache_path
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def mirror_dir(tmpdir_factory):
- dir = tmpdir_factory.mktemp('mirror')
- dir.ensure('build_cache', dir=True)
+ dir = tmpdir_factory.mktemp("mirror")
+ dir.ensure("build_cache", dir=True)
yield str(dir)
- dir.join('build_cache').remove()
+ dir.join("build_cache").remove()
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def test_mirror(mirror_dir):
- mirror_url = 'file://%s' % mirror_dir
- mirror_cmd('add', '--scope', 'site', 'test-mirror-func', mirror_url)
+ mirror_url = "file://%s" % mirror_dir
+ mirror_cmd("add", "--scope", "site", "test-mirror-func", mirror_url)
yield mirror_dir
- mirror_cmd('rm', '--scope=site', 'test-mirror-func')
+ mirror_cmd("rm", "--scope=site", "test-mirror-func")
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def test_legacy_mirror(mutable_config, tmpdir):
- mirror_dir = tmpdir.join('legacy_yaml_mirror')
+ mirror_dir = tmpdir.join("legacy_yaml_mirror")
shutil.copytree(legacy_mirror_dir, mirror_dir.strpath)
- mirror_url = 'file://%s' % mirror_dir
- mirror_cmd('add', '--scope', 'site', 'test-legacy-yaml', mirror_url)
+ mirror_url = "file://%s" % mirror_dir
+ mirror_cmd("add", "--scope", "site", "test-legacy-yaml", mirror_url)
yield mirror_dir
- mirror_cmd('rm', '--scope=site', 'test-legacy-yaml')
+ mirror_cmd("rm", "--scope=site", "test-legacy-yaml")
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def config_directory(tmpdir_factory):
- tmpdir = tmpdir_factory.mktemp('test_configs')
+ tmpdir = tmpdir_factory.mktemp("test_configs")
# restore some sane defaults for packages and config
config_path = py.path.local(spack.paths.etc_path)
- modules_yaml = config_path.join('defaults', 'modules.yaml')
- os_modules_yaml = config_path.join('defaults', '%s' %
- platform.system().lower(),
- 'modules.yaml')
- packages_yaml = config_path.join('defaults', 'packages.yaml')
- config_yaml = config_path.join('defaults', 'config.yaml')
- repos_yaml = config_path.join('defaults', 'repos.yaml')
- tmpdir.ensure('site', dir=True)
- tmpdir.ensure('user', dir=True)
- tmpdir.ensure('site/%s' % platform.system().lower(), dir=True)
- modules_yaml.copy(tmpdir.join('site', 'modules.yaml'))
- os_modules_yaml.copy(tmpdir.join('site/%s' % platform.system().lower(),
- 'modules.yaml'))
- packages_yaml.copy(tmpdir.join('site', 'packages.yaml'))
- config_yaml.copy(tmpdir.join('site', 'config.yaml'))
- repos_yaml.copy(tmpdir.join('site', 'repos.yaml'))
+ modules_yaml = config_path.join("defaults", "modules.yaml")
+ os_modules_yaml = config_path.join(
+ "defaults", "%s" % platform.system().lower(), "modules.yaml"
+ )
+ packages_yaml = config_path.join("defaults", "packages.yaml")
+ config_yaml = config_path.join("defaults", "config.yaml")
+ repos_yaml = config_path.join("defaults", "repos.yaml")
+ tmpdir.ensure("site", dir=True)
+ tmpdir.ensure("user", dir=True)
+ tmpdir.ensure("site/%s" % platform.system().lower(), dir=True)
+ modules_yaml.copy(tmpdir.join("site", "modules.yaml"))
+ os_modules_yaml.copy(tmpdir.join("site/%s" % platform.system().lower(), "modules.yaml"))
+ packages_yaml.copy(tmpdir.join("site", "packages.yaml"))
+ config_yaml.copy(tmpdir.join("site", "config.yaml"))
+ repos_yaml.copy(tmpdir.join("site", "repos.yaml"))
yield tmpdir
tmpdir.remove()
-@pytest.fixture(scope='function')
-def default_config(
- tmpdir_factory, config_directory, monkeypatch,
- install_mockery_mutable_config
-):
+@pytest.fixture(scope="function")
+def default_config(tmpdir_factory, config_directory, monkeypatch, install_mockery_mutable_config):
# This fixture depends on install_mockery_mutable_config to ensure
# there is a clear order of initialization. The substitution of the
# config scopes here is done on top of the substitution that comes with
# install_mockery_mutable_config
- mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
+ mutable_dir = tmpdir_factory.mktemp("mutable_config").join("tmp")
config_directory.copy(mutable_dir)
cfg = spack.config.Configuration(
- *[spack.config.ConfigScope(name, str(mutable_dir))
- for name in ['site/%s' % platform.system().lower(),
- 'site', 'user']])
+ *[
+ spack.config.ConfigScope(name, str(mutable_dir))
+ for name in ["site/%s" % platform.system().lower(), "site", "user"]
+ ]
+ )
spack.config.config, old_config = cfg, spack.config.config
# This is essential, otherwise the cache will create weird side effects
# that will compromise subsequent tests if compilers.yaml is modified
- monkeypatch.setattr(spack.compilers, '_cache_config_file', [])
- njobs = spack.config.get('config:build_jobs')
+ monkeypatch.setattr(spack.compilers, "_cache_config_file", [])
+ njobs = spack.config.get("config:build_jobs")
if not njobs:
- spack.config.set('config:build_jobs', 4, scope='user')
- extensions = spack.config.get('config:template_dirs')
+ spack.config.set("config:build_jobs", 4, scope="user")
+ extensions = spack.config.get("config:template_dirs")
if not extensions:
- spack.config.set('config:template_dirs',
- [os.path.join(spack.paths.share_path, 'templates')],
- scope='user')
-
- mutable_dir.ensure('build_stage', dir=True)
- build_stage = spack.config.get('config:build_stage')
+ spack.config.set(
+ "config:template_dirs",
+ [os.path.join(spack.paths.share_path, "templates")],
+ scope="user",
+ )
+
+ mutable_dir.ensure("build_stage", dir=True)
+ build_stage = spack.config.get("config:build_stage")
if not build_stage:
- spack.config.set('config:build_stage',
- [str(mutable_dir.join('build_stage'))], scope='user')
- timeout = spack.config.get('config:connect_timeout')
+ spack.config.set(
+ "config:build_stage", [str(mutable_dir.join("build_stage"))], scope="user"
+ )
+ timeout = spack.config.get("config:connect_timeout")
if not timeout:
- spack.config.set('config:connect_timeout', 10, scope='user')
+ spack.config.set("config:connect_timeout", 10, scope="user")
yield spack.config.config
@@ -144,16 +144,14 @@ def default_config(
mutable_dir.remove()
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def install_dir_default_layout(tmpdir):
"""Hooks a fake install directory with a default layout"""
scheme = os.path.join(
- '${architecture}',
- '${compiler.name}-${compiler.version}',
- '${name}-${version}-${hash}'
+ "${architecture}", "${compiler.name}-${compiler.version}", "${name}-${version}-${hash}"
)
real_store, real_layout = spack.store.store, spack.store.layout
- opt_dir = tmpdir.join('opt')
+ opt_dir = tmpdir.join("opt")
spack.store.store = spack.store.Store(str(opt_dir))
spack.store.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
try:
@@ -163,15 +161,14 @@ def install_dir_default_layout(tmpdir):
spack.store.layout = real_layout
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def install_dir_non_default_layout(tmpdir):
"""Hooks a fake install directory with a non-default layout"""
scheme = os.path.join(
- '${name}', '${version}',
- '${architecture}-${compiler.name}-${compiler.version}-${hash}'
+ "${name}", "${version}", "${architecture}-${compiler.name}-${compiler.version}-${hash}"
)
real_store, real_layout = spack.store.store, spack.store.layout
- opt_dir = tmpdir.join('opt')
+ opt_dir = tmpdir.join("opt")
spack.store.store = spack.store.Store(str(opt_dir))
spack.store.layout = DirectoryLayout(str(opt_dir), path_scheme=scheme)
try:
@@ -181,181 +178,170 @@ def install_dir_non_default_layout(tmpdir):
spack.store.layout = real_layout
-args = ['strings', 'file']
-if sys.platform == 'darwin':
- args.extend(['/usr/bin/clang++', 'install_name_tool'])
+args = ["strings", "file"]
+if sys.platform == "darwin":
+ args.extend(["/usr/bin/clang++", "install_name_tool"])
else:
- args.extend(['/usr/bin/g++', 'patchelf'])
+ args.extend(["/usr/bin/g++", "patchelf"])
@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.usefixtures(
- 'default_config', 'cache_directory', 'install_dir_default_layout',
- 'test_mirror'
+ "default_config", "cache_directory", "install_dir_default_layout", "test_mirror"
)
def test_default_rpaths_create_install_default_layout(mirror_dir):
"""
Test the creation and installation of buildcaches with default rpaths
into the default directory layout scheme.
"""
- gspec, cspec = Spec('garply').concretized(), Spec('corge').concretized()
- sy_spec = Spec('symly').concretized()
+ gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
+ sy_spec = Spec("symly").concretized()
# Install 'corge' without using a cache
- install_cmd('--no-cache', cspec.name)
- install_cmd('--no-cache', sy_spec.name)
+ install_cmd("--no-cache", cspec.name)
+ install_cmd("--no-cache", sy_spec.name)
# Create a buildcache
- buildcache_cmd('create', '-au', '-d', mirror_dir, cspec.name, sy_spec.name)
+ buildcache_cmd("create", "-au", "-d", mirror_dir, cspec.name, sy_spec.name)
# Test force overwrite create buildcache (-f option)
- buildcache_cmd('create', '-auf', '-d', mirror_dir, cspec.name)
+ buildcache_cmd("create", "-auf", "-d", mirror_dir, cspec.name)
# Create mirror index
- mirror_url = 'file://{0}'.format(mirror_dir)
- buildcache_cmd('update-index', '-d', mirror_url)
+ mirror_url = "file://{0}".format(mirror_dir)
+ buildcache_cmd("update-index", "-d", mirror_url)
# List the buildcaches in the mirror
- buildcache_cmd('list', '-alv')
+ buildcache_cmd("list", "-alv")
# Uninstall the package and deps
- uninstall_cmd('-y', '--dependents', gspec.name)
+ uninstall_cmd("-y", "--dependents", gspec.name)
# Test installing from build caches
- buildcache_cmd('install', '-au', cspec.name, sy_spec.name)
+ buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
# This gives warning that spec is already installed
- buildcache_cmd('install', '-au', cspec.name)
+ buildcache_cmd("install", "-au", cspec.name)
# Test overwrite install
- buildcache_cmd('install', '-afu', cspec.name)
+ buildcache_cmd("install", "-afu", cspec.name)
- buildcache_cmd('keys', '-f')
- buildcache_cmd('list')
+ buildcache_cmd("keys", "-f")
+ buildcache_cmd("list")
- buildcache_cmd('list', '-a')
- buildcache_cmd('list', '-l', '-v')
+ buildcache_cmd("list", "-a")
+ buildcache_cmd("list", "-l", "-v")
@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
- 'default_config', 'cache_directory', 'install_dir_non_default_layout',
- 'test_mirror'
+ "default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
)
def test_default_rpaths_install_nondefault_layout(mirror_dir):
"""
Test the creation and installation of buildcaches with default rpaths
into the non-default directory layout scheme.
"""
- cspec = Spec('corge').concretized()
+ cspec = Spec("corge").concretized()
# This guy tests for symlink relocation
- sy_spec = Spec('symly').concretized()
+ sy_spec = Spec("symly").concretized()
# Install some packages with dependent packages
# test install in non-default install path scheme
- buildcache_cmd('install', '-au', cspec.name, sy_spec.name)
+ buildcache_cmd("install", "-au", cspec.name, sy_spec.name)
# Test force install in non-default install path scheme
- buildcache_cmd('install', '-auf', cspec.name)
+ buildcache_cmd("install", "-auf", cspec.name)
@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.nomockstage
-@pytest.mark.usefixtures(
- 'default_config', 'cache_directory', 'install_dir_default_layout'
-)
+@pytest.mark.usefixtures("default_config", "cache_directory", "install_dir_default_layout")
def test_relative_rpaths_create_default_layout(mirror_dir):
"""
Test the creation and installation of buildcaches with relative
rpaths into the default directory layout scheme.
"""
- gspec, cspec = Spec('garply').concretized(), Spec('corge').concretized()
+ gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
# Install 'corge' without using a cache
- install_cmd('--no-cache', cspec.name)
+ install_cmd("--no-cache", cspec.name)
# Create build cache with relative rpaths
- buildcache_cmd(
- 'create', '-aur', '-d', mirror_dir, cspec.name
- )
+ buildcache_cmd("create", "-aur", "-d", mirror_dir, cspec.name)
# Create mirror index
- mirror_url = 'file://%s' % mirror_dir
- buildcache_cmd('update-index', '-d', mirror_url)
+ mirror_url = "file://%s" % mirror_dir
+ buildcache_cmd("update-index", "-d", mirror_url)
# Uninstall the package and deps
- uninstall_cmd('-y', '--dependents', gspec.name)
+ uninstall_cmd("-y", "--dependents", gspec.name)
@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
- 'default_config', 'cache_directory', 'install_dir_default_layout',
- 'test_mirror'
+ "default_config", "cache_directory", "install_dir_default_layout", "test_mirror"
)
def test_relative_rpaths_install_default_layout(mirror_dir):
"""
Test the creation and installation of buildcaches with relative
rpaths into the default directory layout scheme.
"""
- gspec, cspec = Spec('garply').concretized(), Spec('corge').concretized()
+ gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
# Install buildcache created with relativized rpaths
- buildcache_cmd('install', '-auf', cspec.name)
+ buildcache_cmd("install", "-auf", cspec.name)
# This gives a warning that the spec is already installed
- buildcache_cmd('install', '-auf', cspec.name)
+ buildcache_cmd("install", "-auf", cspec.name)
# Uninstall the package and deps
- uninstall_cmd('-y', '--dependents', gspec.name)
+ uninstall_cmd("-y", "--dependents", gspec.name)
# Install build cache
- buildcache_cmd('install', '-auf', cspec.name)
+ buildcache_cmd("install", "-auf", cspec.name)
# Test overwrite install
- buildcache_cmd('install', '-auf', cspec.name)
+ buildcache_cmd("install", "-auf", cspec.name)
@pytest.mark.requires_executables(*args)
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
- 'default_config', 'cache_directory', 'install_dir_non_default_layout',
- 'test_mirror'
+ "default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
)
def test_relative_rpaths_install_nondefault(mirror_dir):
"""
Test the installation of buildcaches with relativized rpaths
into the non-default directory layout scheme.
"""
- cspec = Spec('corge').concretized()
+ cspec = Spec("corge").concretized()
# Test install in non-default install path scheme and relative path
- buildcache_cmd('install', '-auf', cspec.name)
+ buildcache_cmd("install", "-auf", cspec.name)
def test_push_and_fetch_keys(mock_gnupghome):
testpath = str(mock_gnupghome)
- mirror = os.path.join(testpath, 'mirror')
- mirrors = {'test-mirror': mirror}
+ mirror = os.path.join(testpath, "mirror")
+ mirrors = {"test-mirror": mirror}
mirrors = spack.mirror.MirrorCollection(mirrors)
- mirror = spack.mirror.Mirror('file://' + mirror)
+ mirror = spack.mirror.Mirror("file://" + mirror)
- gpg_dir1 = os.path.join(testpath, 'gpg1')
- gpg_dir2 = os.path.join(testpath, 'gpg2')
+ gpg_dir1 = os.path.join(testpath, "gpg1")
+ gpg_dir2 = os.path.join(testpath, "gpg2")
# dir 1: create a new key, record its fingerprint, and push it to a new
# mirror
with spack.util.gpg.gnupghome_override(gpg_dir1):
- spack.util.gpg.create(name='test-key',
- email='fake@test.key',
- expires='0',
- comment=None)
+ spack.util.gpg.create(name="test-key", email="fake@test.key", expires="0", comment=None)
keys = spack.util.gpg.public_keys()
assert len(keys) == 1
@@ -379,56 +365,54 @@ def test_push_and_fetch_keys(mock_gnupghome):
@pytest.mark.maybeslow
@pytest.mark.nomockstage
@pytest.mark.usefixtures(
- 'default_config', 'cache_directory', 'install_dir_non_default_layout',
- 'test_mirror'
+ "default_config", "cache_directory", "install_dir_non_default_layout", "test_mirror"
)
def test_built_spec_cache(mirror_dir):
- """ Because the buildcache list command fetches the buildcache index
+ """Because the buildcache list command fetches the buildcache index
and uses it to populate the binary_distribution built spec cache, when
this test calls get_mirrors_for_spec, it is testing the population of
- that cache from a buildcache index. """
- buildcache_cmd('list', '-a', '-l')
+ that cache from a buildcache index."""
+ buildcache_cmd("list", "-a", "-l")
- gspec, cspec = Spec('garply').concretized(), Spec('corge').concretized()
+ gspec, cspec = Spec("garply").concretized(), Spec("corge").concretized()
for s in [gspec, cspec]:
results = bindist.get_mirrors_for_spec(s)
- assert(any([r['spec'] == s for r in results]))
+ assert any([r["spec"] == s for r in results])
def fake_dag_hash(spec):
# Generate an arbitrary hash that is intended to be different than
# whatever a Spec reported before (to test actions that trigger when
# the hash changes)
- return 'tal4c7h4z0gqmixb1eqa92mjoybxn5l6'
+ return "tal4c7h4z0gqmixb1eqa92mjoybxn5l6"
@pytest.mark.usefixtures(
- 'install_mockery_mutable_config', 'mock_packages', 'mock_fetch',
- 'test_mirror'
+ "install_mockery_mutable_config", "mock_packages", "mock_fetch", "test_mirror"
)
def test_spec_needs_rebuild(monkeypatch, tmpdir):
"""Make sure needs_rebuild properly compares remote hash
against locally computed one, avoiding unnecessary rebuilds"""
# Create a temp mirror directory for buildcache usage
- mirror_dir = tmpdir.join('mirror_dir')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = tmpdir.join("mirror_dir")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
- s = Spec('libdwarf').concretized()
+ s = Spec("libdwarf").concretized()
# Install a package
install_cmd(s.name)
# Put installed package in the buildcache
- buildcache_cmd('create', '-u', '-a', '-d', mirror_dir.strpath, s.name)
+ buildcache_cmd("create", "-u", "-a", "-d", mirror_dir.strpath, s.name)
rebuild = bindist.needs_rebuild(s, mirror_url)
assert not rebuild
# Now monkey patch Spec to change the hash on the package
- monkeypatch.setattr(spack.spec.Spec, 'dag_hash', fake_dag_hash)
+ monkeypatch.setattr(spack.spec.Spec, "dag_hash", fake_dag_hash)
rebuild = bindist.needs_rebuild(s, mirror_url)
@@ -436,170 +420,174 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
@pytest.mark.usefixtures(
- 'install_mockery_mutable_config', 'mock_packages', 'mock_fetch',
+ "install_mockery_mutable_config",
+ "mock_packages",
+ "mock_fetch",
)
def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
"""Ensure spack buildcache index only reports available packages"""
# Create a temp mirror directory for buildcache usage
- mirror_dir = tmpdir.join('mirror_dir')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
- spack.config.set('mirrors', {'test': mirror_url})
+ mirror_dir = tmpdir.join("mirror_dir")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
+ spack.config.set("mirrors", {"test": mirror_url})
- s = Spec('libdwarf').concretized()
+ s = Spec("libdwarf").concretized()
# Install a package
- install_cmd('--no-cache', s.name)
+ install_cmd("--no-cache", s.name)
# Create a buildcache and update index
- buildcache_cmd('create', '-uad', mirror_dir.strpath, s.name)
- buildcache_cmd('update-index', '-d', mirror_dir.strpath)
+ buildcache_cmd("create", "-uad", mirror_dir.strpath, s.name)
+ buildcache_cmd("update-index", "-d", mirror_dir.strpath)
# Check package and dependency in buildcache
- cache_list = buildcache_cmd('list', '--allarch')
- assert 'libdwarf' in cache_list
- assert 'libelf' in cache_list
+ cache_list = buildcache_cmd("list", "--allarch")
+ assert "libdwarf" in cache_list
+ assert "libelf" in cache_list
# Remove dependency from cache
- libelf_files = glob.glob(
- os.path.join(mirror_dir.join('build_cache').strpath, '*libelf*'))
+ libelf_files = glob.glob(os.path.join(mirror_dir.join("build_cache").strpath, "*libelf*"))
os.remove(*libelf_files)
# Update index
- buildcache_cmd('update-index', '-d', mirror_dir.strpath)
+ buildcache_cmd("update-index", "-d", mirror_dir.strpath)
# Check dependency not in buildcache
- cache_list = buildcache_cmd('list', '--allarch')
- assert 'libdwarf' in cache_list
- assert 'libelf' not in cache_list
+ cache_list = buildcache_cmd("list", "--allarch")
+ assert "libdwarf" in cache_list
+ assert "libelf" not in cache_list
def test_generate_indices_key_error(monkeypatch, capfd):
-
def mock_list_url(url, recursive=False):
- print('mocked list_url({0}, {1})'.format(url, recursive))
- raise KeyError('Test KeyError handling')
+ print("mocked list_url({0}, {1})".format(url, recursive))
+ raise KeyError("Test KeyError handling")
- monkeypatch.setattr(web_util, 'list_url', mock_list_url)
+ monkeypatch.setattr(web_util, "list_url", mock_list_url)
- test_url = 'file:///fake/keys/dir'
+ test_url = "file:///fake/keys/dir"
# Make sure generate_key_index handles the KeyError
bindist.generate_key_index(test_url)
err = capfd.readouterr()[1]
- assert 'Warning: No keys at {0}'.format(test_url) in err
+ assert "Warning: No keys at {0}".format(test_url) in err
# Make sure generate_package_index handles the KeyError
bindist.generate_package_index(test_url)
err = capfd.readouterr()[1]
- assert 'Warning: No packages at {0}'.format(test_url) in err
+ assert "Warning: No packages at {0}".format(test_url) in err
def test_generate_indices_exception(monkeypatch, capfd):
-
def mock_list_url(url, recursive=False):
- print('mocked list_url({0}, {1})'.format(url, recursive))
- raise Exception('Test Exception handling')
+ print("mocked list_url({0}, {1})".format(url, recursive))
+ raise Exception("Test Exception handling")
- monkeypatch.setattr(web_util, 'list_url', mock_list_url)
+ monkeypatch.setattr(web_util, "list_url", mock_list_url)
- test_url = 'file:///fake/keys/dir'
+ test_url = "file:///fake/keys/dir"
# Make sure generate_key_index handles the Exception
bindist.generate_key_index(test_url)
err = capfd.readouterr()[1]
- expect = 'Encountered problem listing keys at {0}'.format(test_url)
+ expect = "Encountered problem listing keys at {0}".format(test_url)
assert expect in err
# Make sure generate_package_index handles the Exception
bindist.generate_package_index(test_url)
err = capfd.readouterr()[1]
- expect = 'Encountered problem listing packages at {0}'.format(test_url)
+ expect = "Encountered problem listing packages at {0}".format(test_url)
assert expect in err
-@pytest.mark.usefixtures('mock_fetch', 'install_mockery')
+@pytest.mark.usefixtures("mock_fetch", "install_mockery")
def test_update_sbang(tmpdir, test_mirror):
"""Test the creation and installation of buildcaches with default rpaths
into the non-default directory layout scheme, triggering an update of the
sbang.
"""
scheme = os.path.join(
- '${name}', '${version}',
- '${architecture}-${compiler.name}-${compiler.version}-${hash}'
+ "${name}", "${version}", "${architecture}-${compiler.name}-${compiler.version}-${hash}"
)
- spec_str = 'old-sbang'
+ spec_str = "old-sbang"
# Concretize a package with some old-fashioned sbang lines.
old_spec = Spec(spec_str).concretized()
- old_spec_hash_str = '/{0}'.format(old_spec.dag_hash())
+ old_spec_hash_str = "/{0}".format(old_spec.dag_hash())
# Need a fake mirror with *function* scope.
mirror_dir = test_mirror
- mirror_url = 'file://{0}'.format(mirror_dir)
+ mirror_url = "file://{0}".format(mirror_dir)
# Assume all commands will concretize old_spec the same way.
- install_cmd('--no-cache', old_spec.name)
+ install_cmd("--no-cache", old_spec.name)
# Create a buildcache with the installed spec.
- buildcache_cmd('create', '-u', '-a', '-d', mirror_dir, old_spec_hash_str)
+ buildcache_cmd("create", "-u", "-a", "-d", mirror_dir, old_spec_hash_str)
# Need to force an update of the buildcache index
- buildcache_cmd('update-index', '-d', mirror_url)
+ buildcache_cmd("update-index", "-d", mirror_url)
# Uninstall the original package.
- uninstall_cmd('-y', old_spec_hash_str)
+ uninstall_cmd("-y", old_spec_hash_str)
# Switch the store to the new install tree locations
- newtree_dir = tmpdir.join('newtree')
+ newtree_dir = tmpdir.join("newtree")
s = spack.store.Store(str(newtree_dir))
s.layout = DirectoryLayout(str(newtree_dir), path_scheme=scheme)
with spack.store.use_store(s):
- new_spec = Spec('old-sbang')
+ new_spec = Spec("old-sbang")
new_spec.concretize()
assert new_spec.dag_hash() == old_spec.dag_hash()
# Install package from buildcache
- buildcache_cmd('install', '-a', '-u', '-f', new_spec.name)
+ buildcache_cmd("install", "-a", "-u", "-f", new_spec.name)
# Continue blowing away caches
bindist.clear_spec_cache()
spack.stage.purge()
# test that the sbang was updated by the move
- sbang_style_1_expected = '''{0}
+ sbang_style_1_expected = """{0}
#!/usr/bin/env python
{1}
-'''.format(sbang.sbang_shebang_line(), new_spec.prefix.bin)
- sbang_style_2_expected = '''{0}
+""".format(
+ sbang.sbang_shebang_line(), new_spec.prefix.bin
+ )
+ sbang_style_2_expected = """{0}
#!/usr/bin/env python
{1}
-'''.format(sbang.sbang_shebang_line(), new_spec.prefix.bin)
+""".format(
+ sbang.sbang_shebang_line(), new_spec.prefix.bin
+ )
- installed_script_style_1_path = new_spec.prefix.bin.join('sbang-style-1.sh')
- assert sbang_style_1_expected == \
- open(str(installed_script_style_1_path)).read()
+ installed_script_style_1_path = new_spec.prefix.bin.join("sbang-style-1.sh")
+ assert sbang_style_1_expected == open(str(installed_script_style_1_path)).read()
- installed_script_style_2_path = new_spec.prefix.bin.join('sbang-style-2.sh')
- assert sbang_style_2_expected == \
- open(str(installed_script_style_2_path)).read()
+ installed_script_style_2_path = new_spec.prefix.bin.join("sbang-style-2.sh")
+ assert sbang_style_2_expected == open(str(installed_script_style_2_path)).read()
- uninstall_cmd('-y', '/%s' % new_spec.dag_hash())
+ uninstall_cmd("-y", "/%s" % new_spec.dag_hash())
# Need one where the platform has been changed to the test platform.
-def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config,
- mock_packages):
- install_cmd('--no-check-signature', '--cache-only', '-f', legacy_mirror_dir
- + '/build_cache/test-debian6-core2-gcc-4.5.0-zlib-' +
- '1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk.spec.yaml')
- uninstall_cmd('-y', '/t5mczux3tfqpxwmg7egp7axy2jvyulqk')
+def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config, mock_packages):
+ install_cmd(
+ "--no-check-signature",
+ "--cache-only",
+ "-f",
+ legacy_mirror_dir
+ + "/build_cache/test-debian6-core2-gcc-4.5.0-zlib-"
+ + "1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk.spec.yaml",
+ )
+ uninstall_cmd("-y", "/t5mczux3tfqpxwmg7egp7axy2jvyulqk")
def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
@@ -607,18 +595,20 @@ def test_install_legacy_buildcache_layout(install_mockery_mutable_config):
where the .spack file contained a repeated spec.json and another
compressed archive file containing the install tree. This test
makes sure we can still read that layout."""
- legacy_layout_dir = os.path.join(test_path, 'data', 'mirrors', 'legacy_layout')
+ legacy_layout_dir = os.path.join(test_path, "data", "mirrors", "legacy_layout")
mirror_url = "file://{0}".format(legacy_layout_dir)
- filename = ("test-debian6-core2-gcc-4.5.0-archive-files-2.0-"
- "l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json")
- spec_json_path = os.path.join(legacy_layout_dir, 'build_cache', filename)
- mirror_cmd('add', '--scope', 'site', 'test-legacy-layout', mirror_url)
- output = install_cmd(
- '--no-check-signature', '--cache-only', '-f', spec_json_path, output=str)
- mirror_cmd('rm', '--scope=site', 'test-legacy-layout')
- expect_line = ("Extracting archive-files-2.0-"
- "l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache")
- assert(expect_line in output)
+ filename = (
+ "test-debian6-core2-gcc-4.5.0-archive-files-2.0-"
+ "l3vdiqvbobmspwyb4q2b62fz6nitd4hk.spec.json"
+ )
+ spec_json_path = os.path.join(legacy_layout_dir, "build_cache", filename)
+ mirror_cmd("add", "--scope", "site", "test-legacy-layout", mirror_url)
+ output = install_cmd("--no-check-signature", "--cache-only", "-f", spec_json_path, output=str)
+ mirror_cmd("rm", "--scope=site", "test-legacy-layout")
+ expect_line = (
+ "Extracting archive-files-2.0-" "l3vdiqvbobmspwyb4q2b62fz6nitd4hk from binary cache"
+ )
+ assert expect_line in output
def test_FetchCacheError_only_accepts_lists_of_errors():
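The buildcache tests above all exercise the same round trip. A minimal sketch of that flow, assuming a throwaway mirror directory and the mock "corge" package used in these tests (mirror_dir below is a hypothetical scratch path):

import spack.main

install = spack.main.SpackCommand("install")
uninstall = spack.main.SpackCommand("uninstall")
buildcache = spack.main.SpackCommand("buildcache")

mirror_dir = "/tmp/test-mirror"  # hypothetical scratch mirror
install("--no-cache", "corge")  # build from source, bypassing any binary cache
buildcache("create", "-au", "-d", mirror_dir, "corge")  # push an unsigned (-u) tarball for the spec and its deps (-a)
buildcache("update-index", "-d", "file://" + mirror_dir)  # regenerate the mirror index
uninstall("-y", "--dependents", "corge")  # remove the local install and its dependents
buildcache("install", "-au", "corge")  # reinstall from the binary cache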
diff --git a/lib/spack/spack/test/bootstrap.py b/lib/spack/spack/test/bootstrap.py
index 183f0bc6f4..7170a4b791 100644
--- a/lib/spack/spack/test/bootstrap.py
+++ b/lib/spack/spack/test/bootstrap.py
@@ -16,20 +16,20 @@ import spack.util.path
@pytest.fixture
def active_mock_environment(mutable_config, mutable_mock_env_path):
- with spack.environment.create('bootstrap-test') as env:
+ with spack.environment.create("bootstrap-test") as env:
yield env
-@pytest.mark.regression('22294')
+@pytest.mark.regression("22294")
def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
# Prepare a custom store path. This should be in a writeable location
# since Spack needs to initialize the DB.
- user_path = str(tmpdir.join('store'))
+ user_path = str(tmpdir.join("store"))
# Reassign global variables in spack.store to the value
# they would have at Spack startup.
spack.store.reinitialize()
# Set the custom user path
- spack.config.set('config:install_tree:root', user_path)
+ spack.config.set("config:install_tree:root", user_path)
# Test that within the context manager we use the bootstrap store
# and that outside we restore the correct location
@@ -38,15 +38,18 @@ def test_store_is_restored_correctly_after_bootstrap(mutable_config, tmpdir):
assert spack.store.root == user_path
-@pytest.mark.parametrize('config_value,expected', [
- # Absolute path without expansion
- ('/opt/spack/bootstrap', '/opt/spack/bootstrap/store'),
- # Path with placeholder
- ('$spack/opt/bootstrap', '$spack/opt/bootstrap/store'),
-])
+@pytest.mark.parametrize(
+ "config_value,expected",
+ [
+ # Absolute path without expansion
+ ("/opt/spack/bootstrap", "/opt/spack/bootstrap/store"),
+ # Path with placeholder
+ ("$spack/opt/bootstrap", "$spack/opt/bootstrap/store"),
+ ],
+)
def test_store_path_customization(config_value, expected, mutable_config):
# Set the current configuration to a specific value
- spack.config.set('bootstrap:root', config_value)
+ spack.config.set("bootstrap:root", config_value)
# Check the store path
current = spack.bootstrap.store_path()
@@ -55,10 +58,10 @@ def test_store_path_customization(config_value, expected, mutable_config):
def test_raising_exception_if_bootstrap_disabled(mutable_config):
# Disable bootstrapping in config.yaml
- spack.config.set('bootstrap:enable', False)
+ spack.config.set("bootstrap:enable", False)
# Check the correct exception is raised
- with pytest.raises(RuntimeError, match='bootstrapping is currently disabled'):
+ with pytest.raises(RuntimeError, match="bootstrapping is currently disabled"):
spack.bootstrap.store_path()
@@ -78,7 +81,7 @@ def test_raising_exception_executables_in_path():
spack.bootstrap.ensure_executables_in_path_or_raise(["asdf", "fdsa"], "python")
-@pytest.mark.regression('25603')
+@pytest.mark.regression("25603")
def test_bootstrap_deactivates_environments(active_mock_environment):
assert spack.environment.active_environment() == active_mock_environment
with spack.bootstrap.ensure_bootstrap_configuration():
@@ -86,22 +89,22 @@ def test_bootstrap_deactivates_environments(active_mock_environment):
assert spack.environment.active_environment() == active_mock_environment
-@pytest.mark.regression('25805')
+@pytest.mark.regression("25805")
def test_bootstrap_disables_modulefile_generation(mutable_config):
# Be sure to enable both lmod and tcl in modules.yaml
- spack.config.set('modules:default:enable', ['tcl', 'lmod'])
+ spack.config.set("modules:default:enable", ["tcl", "lmod"])
- assert 'tcl' in spack.config.get('modules:default:enable')
- assert 'lmod' in spack.config.get('modules:default:enable')
+ assert "tcl" in spack.config.get("modules:default:enable")
+ assert "lmod" in spack.config.get("modules:default:enable")
with spack.bootstrap.ensure_bootstrap_configuration():
- assert 'tcl' not in spack.config.get('modules:default:enable')
- assert 'lmod' not in spack.config.get('modules:default:enable')
- assert 'tcl' in spack.config.get('modules:default:enable')
- assert 'lmod' in spack.config.get('modules:default:enable')
+ assert "tcl" not in spack.config.get("modules:default:enable")
+ assert "lmod" not in spack.config.get("modules:default:enable")
+ assert "tcl" in spack.config.get("modules:default:enable")
+ assert "lmod" in spack.config.get("modules:default:enable")
-@pytest.mark.regression('25992')
-@pytest.mark.requires_executables('gcc')
+@pytest.mark.regression("25992")
+@pytest.mark.requires_executables("gcc")
def test_bootstrap_search_for_compilers_with_no_environment(no_compilers_yaml):
assert not spack.compilers.all_compiler_specs(init_config=False)
with spack.bootstrap.ensure_bootstrap_configuration():
@@ -109,10 +112,10 @@ def test_bootstrap_search_for_compilers_with_no_environment(no_compilers_yaml):
assert not spack.compilers.all_compiler_specs(init_config=False)
-@pytest.mark.regression('25992')
-@pytest.mark.requires_executables('gcc')
+@pytest.mark.regression("25992")
+@pytest.mark.requires_executables("gcc")
def test_bootstrap_search_for_compilers_with_environment_active(
- no_compilers_yaml, active_mock_environment
+ no_compilers_yaml, active_mock_environment
):
assert not spack.compilers.all_compiler_specs(init_config=False)
with spack.bootstrap.ensure_bootstrap_configuration():
@@ -120,37 +123,39 @@ def test_bootstrap_search_for_compilers_with_environment_active(
assert not spack.compilers.all_compiler_specs(init_config=False)
-@pytest.mark.regression('26189')
+@pytest.mark.regression("26189")
def test_config_yaml_is_preserved_during_bootstrap(mutable_config):
- expected_dir = '/tmp/test'
+ expected_dir = "/tmp/test"
spack.config.set("config:test_stage", expected_dir, scope="command_line")
- assert spack.config.get('config:test_stage') == expected_dir
+ assert spack.config.get("config:test_stage") == expected_dir
with spack.bootstrap.ensure_bootstrap_configuration():
- assert spack.config.get('config:test_stage') == expected_dir
- assert spack.config.get('config:test_stage') == expected_dir
+ assert spack.config.get("config:test_stage") == expected_dir
+ assert spack.config.get("config:test_stage") == expected_dir
-@pytest.mark.regression('26548')
+@pytest.mark.regression("26548")
def test_custom_store_in_environment(mutable_config, tmpdir):
# Test that the custom store in an environment is taken into account
# during bootstrapping
- spack_yaml = tmpdir.join('spack.yaml')
- spack_yaml.write("""
+ spack_yaml = tmpdir.join("spack.yaml")
+ spack_yaml.write(
+ """
spack:
specs:
- libelf
config:
install_tree:
root: /tmp/store
-""")
+"""
+ )
with spack.environment.Environment(str(tmpdir)):
assert spack.environment.active_environment()
- assert spack.config.get('config:install_tree:root') == '/tmp/store'
+ assert spack.config.get("config:install_tree:root") == "/tmp/store"
# Don't trigger evaluation here
with spack.bootstrap.ensure_bootstrap_configuration():
pass
- assert str(spack.store.root) == os.sep + os.path.join('tmp', 'store')
+ assert str(spack.store.root) == os.sep + os.path.join("tmp", "store")
def test_nested_use_of_context_manager(mutable_config):
@@ -163,26 +168,26 @@ def test_nested_use_of_context_manager(mutable_config):
assert spack.config.config == user_config
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize('expected_missing', [False, True])
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.parametrize("expected_missing", [False, True])
def test_status_function_find_files(
- mutable_config, mock_executable, tmpdir, monkeypatch, expected_missing
+ mutable_config, mock_executable, tmpdir, monkeypatch, expected_missing
):
if not expected_missing:
- mock_executable('foo', 'echo Hello WWorld!')
+ mock_executable("foo", "echo Hello WWorld!")
monkeypatch.setattr(
- spack.bootstrap, '_optional_requirements',
- lambda: [spack.bootstrap._required_system_executable('foo', 'NOT FOUND')]
+ spack.bootstrap,
+ "_optional_requirements",
+ lambda: [spack.bootstrap._required_system_executable("foo", "NOT FOUND")],
)
- monkeypatch.setenv('PATH', str(tmpdir.join('bin')))
+ monkeypatch.setenv("PATH", str(tmpdir.join("bin")))
- _, missing = spack.bootstrap.status_message('optional')
+ _, missing = spack.bootstrap.status_message("optional")
assert missing is expected_missing
-@pytest.mark.regression('31042')
+@pytest.mark.regression("31042")
def test_source_is_disabled(mutable_config):
# Get the configuration dictionary of the current bootstrapping source
conf = next(iter(spack.bootstrap.bootstrapping_sources()))
@@ -194,6 +199,6 @@ def test_source_is_disabled(mutable_config):
# Try to explicitly disable the source and verify that the behavior
# is the same as above
- spack.config.add('bootstrap:trusted:{0}:{1}'.format(conf['name'], False))
+ spack.config.add("bootstrap:trusted:{0}:{1}".format(conf["name"], False))
with pytest.raises(ValueError):
spack.bootstrap.source_is_enabled_or_raise(conf)
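A condensed sketch of the store swap these bootstrap tests assert; the temporary root below is a hypothetical path, and the check inside the context manager assumes the bootstrap store location is the one reported by spack.bootstrap.store_path():

import spack.bootstrap
import spack.config
import spack.store

user_root = "/tmp/user-store"  # hypothetical user install tree
spack.store.reinitialize()
spack.config.set("config:install_tree:root", user_root)

with spack.bootstrap.ensure_bootstrap_configuration():
    # inside the context manager Spack points at its own bootstrap store
    assert spack.store.root == spack.bootstrap.store_path()
# on exit the user-configured root is restored
assert spack.store.root == user_root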
diff --git a/lib/spack/spack/test/build_distribution.py b/lib/spack/spack/test/build_distribution.py
index c5132a9558..2d3024ab06 100644
--- a/lib/spack/spack/test/build_distribution.py
+++ b/lib/spack/spack/test/build_distribution.py
@@ -12,10 +12,9 @@ import pytest
import spack.binary_distribution
import spack.spec
-install = spack.main.SpackCommand('install')
+install = spack.main.SpackCommand("install")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def _validate_url(url):
@@ -24,30 +23,32 @@ def _validate_url(url):
@pytest.fixture(autouse=True)
def url_check(monkeypatch):
- monkeypatch.setattr(spack.util.url, 'require_url_format', _validate_url)
+ monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url)
-def test_build_tarball_overwrite(
- install_mockery, mock_fetch, monkeypatch, tmpdir):
+def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdir):
with tmpdir.as_cwd():
- spec = spack.spec.Spec('trivial-install-test-package').concretized()
+ spec = spack.spec.Spec("trivial-install-test-package").concretized()
install(str(spec))
# Runs fine the first time, throws the second time
- spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
+ spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
with pytest.raises(spack.binary_distribution.NoOverwriteException):
- spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
+ spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
# Should work fine with force=True
- spack.binary_distribution._build_tarball(spec, '.', force=True, unsigned=True)
+ spack.binary_distribution._build_tarball(spec, ".", force=True, unsigned=True)
# Remove the tarball and try again.
# This must *also* throw, because of the existing .spec.json file
- os.remove(os.path.join(
- spack.binary_distribution.build_cache_prefix('.'),
- spack.binary_distribution.tarball_directory_name(spec),
- spack.binary_distribution.tarball_name(spec, '.spack')))
+ os.remove(
+ os.path.join(
+ spack.binary_distribution.build_cache_prefix("."),
+ spack.binary_distribution.tarball_directory_name(spec),
+ spack.binary_distribution.tarball_name(spec, ".spack"),
+ )
+ )
with pytest.raises(spack.binary_distribution.NoOverwriteException):
- spack.binary_distribution._build_tarball(spec, '.', unsigned=True)
+ spack.binary_distribution._build_tarball(spec, ".", unsigned=True)
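For reference, the overwrite contract exercised by test_build_tarball_overwrite, condensed into a sketch that assumes an already installed concrete spec:

import pytest
import spack.binary_distribution as bindist
import spack.spec

spec = spack.spec.Spec("trivial-install-test-package").concretized()
bindist._build_tarball(spec, ".", unsigned=True)  # first push into ./build_cache succeeds
with pytest.raises(bindist.NoOverwriteException):
    bindist._build_tarball(spec, ".", unsigned=True)  # a second push for the same spec refuses to overwrite
bindist._build_tarball(spec, ".", force=True, unsigned=True)  # force=True allows the overwrite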
diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py
index e69b23815b..41a5475c89 100644
--- a/lib/spack/spack/test/build_environment.py
+++ b/lib/spack/spack/test/build_environment.py
@@ -47,39 +47,47 @@ def build_environment(working_env):
realcc = "/bin/mycc"
prefix = "/spack-test-prefix"
- os.environ['SPACK_CC'] = realcc
- os.environ['SPACK_CXX'] = realcc
- os.environ['SPACK_FC'] = realcc
-
- os.environ['SPACK_PREFIX'] = prefix
- os.environ['SPACK_ENV_PATH'] = "test"
- os.environ['SPACK_DEBUG_LOG_DIR'] = "."
- os.environ['SPACK_DEBUG_LOG_ID'] = "foo-hashabc"
- os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7"
- os.environ['SPACK_SHORT_SPEC'] = (
- "foo@1.2 arch=linux-rhel6-x86_64 /hashabc")
-
- os.environ['SPACK_CC_RPATH_ARG'] = "-Wl,-rpath,"
- os.environ['SPACK_CXX_RPATH_ARG'] = "-Wl,-rpath,"
- os.environ['SPACK_F77_RPATH_ARG'] = "-Wl,-rpath,"
- os.environ['SPACK_FC_RPATH_ARG'] = "-Wl,-rpath,"
- os.environ['SPACK_LINKER_ARG'] = '-Wl,'
- os.environ['SPACK_DTAGS_TO_ADD'] = '--disable-new-dtags'
- os.environ['SPACK_DTAGS_TO_STRIP'] = '--enable-new-dtags'
- os.environ['SPACK_SYSTEM_DIRS'] = '/usr/include /usr/lib'
- os.environ['SPACK_TARGET_ARGS'] = ''
-
- if 'SPACK_DEPENDENCIES' in os.environ:
- del os.environ['SPACK_DEPENDENCIES']
-
- yield {'cc': cc, 'cxx': cxx, 'fc': fc}
-
- for name in ('SPACK_CC', 'SPACK_CXX', 'SPACK_FC', 'SPACK_PREFIX',
- 'SPACK_ENV_PATH', 'SPACK_DEBUG_LOG_DIR',
- 'SPACK_COMPILER_SPEC', 'SPACK_SHORT_SPEC',
- 'SPACK_CC_RPATH_ARG', 'SPACK_CXX_RPATH_ARG',
- 'SPACK_F77_RPATH_ARG', 'SPACK_FC_RPATH_ARG',
- 'SPACK_TARGET_ARGS'):
+ os.environ["SPACK_CC"] = realcc
+ os.environ["SPACK_CXX"] = realcc
+ os.environ["SPACK_FC"] = realcc
+
+ os.environ["SPACK_PREFIX"] = prefix
+ os.environ["SPACK_ENV_PATH"] = "test"
+ os.environ["SPACK_DEBUG_LOG_DIR"] = "."
+ os.environ["SPACK_DEBUG_LOG_ID"] = "foo-hashabc"
+ os.environ["SPACK_COMPILER_SPEC"] = "gcc@4.4.7"
+ os.environ["SPACK_SHORT_SPEC"] = "foo@1.2 arch=linux-rhel6-x86_64 /hashabc"
+
+ os.environ["SPACK_CC_RPATH_ARG"] = "-Wl,-rpath,"
+ os.environ["SPACK_CXX_RPATH_ARG"] = "-Wl,-rpath,"
+ os.environ["SPACK_F77_RPATH_ARG"] = "-Wl,-rpath,"
+ os.environ["SPACK_FC_RPATH_ARG"] = "-Wl,-rpath,"
+ os.environ["SPACK_LINKER_ARG"] = "-Wl,"
+ os.environ["SPACK_DTAGS_TO_ADD"] = "--disable-new-dtags"
+ os.environ["SPACK_DTAGS_TO_STRIP"] = "--enable-new-dtags"
+ os.environ["SPACK_SYSTEM_DIRS"] = "/usr/include /usr/lib"
+ os.environ["SPACK_TARGET_ARGS"] = ""
+
+ if "SPACK_DEPENDENCIES" in os.environ:
+ del os.environ["SPACK_DEPENDENCIES"]
+
+ yield {"cc": cc, "cxx": cxx, "fc": fc}
+
+ for name in (
+ "SPACK_CC",
+ "SPACK_CXX",
+ "SPACK_FC",
+ "SPACK_PREFIX",
+ "SPACK_ENV_PATH",
+ "SPACK_DEBUG_LOG_DIR",
+ "SPACK_COMPILER_SPEC",
+ "SPACK_SHORT_SPEC",
+ "SPACK_CC_RPATH_ARG",
+ "SPACK_CXX_RPATH_ARG",
+ "SPACK_F77_RPATH_ARG",
+ "SPACK_FC_RPATH_ARG",
+ "SPACK_TARGET_ARGS",
+ ):
del os.environ[name]
@@ -88,6 +96,7 @@ def ensure_env_variables(config, mock_packages, monkeypatch, working_env):
"""Returns a function that takes a dictionary and updates os.environ
for the test lifetime accordingly. Plugs-in mock config and repo.
"""
+
def _ensure(env_mods):
for name, value in env_mods.items():
monkeypatch.setenv(name, value)
@@ -97,7 +106,6 @@ def ensure_env_variables(config, mock_packages, monkeypatch, working_env):
@pytest.fixture
def mock_module_cmd(monkeypatch):
-
class Logger(object):
def __init__(self, fn=None):
self.fn = fn
@@ -109,122 +117,157 @@ def mock_module_cmd(monkeypatch):
return self.fn(*args, **kwargs)
mock_module_cmd = Logger()
- monkeypatch.setattr(spack.build_environment, 'module', mock_module_cmd)
- monkeypatch.setattr(spack.build_environment, '_on_cray', lambda: (True, None))
+ monkeypatch.setattr(spack.build_environment, "module", mock_module_cmd)
+ monkeypatch.setattr(spack.build_environment, "_on_cray", lambda: (True, None))
return mock_module_cmd
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Static to Shared not supported on Win (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Static to Shared not supported on Win (yet)")
def test_static_to_shared_library(build_environment):
- os.environ['SPACK_TEST_COMMAND'] = 'dump-args'
+ os.environ["SPACK_TEST_COMMAND"] = "dump-args"
expected = {
- 'linux': ('/bin/mycc -shared'
- ' -Wl,--disable-new-dtags'
- ' -Wl,-soname,{2} -Wl,--whole-archive {0}'
- ' -Wl,--no-whole-archive -o {1}'),
- 'darwin': ('/bin/mycc -dynamiclib'
- ' -Wl,--disable-new-dtags'
- ' -install_name {1} -Wl,-force_load,{0} -o {1}')
+ "linux": (
+ "/bin/mycc -shared"
+ " -Wl,--disable-new-dtags"
+ " -Wl,-soname,{2} -Wl,--whole-archive {0}"
+ " -Wl,--no-whole-archive -o {1}"
+ ),
+ "darwin": (
+ "/bin/mycc -dynamiclib"
+ " -Wl,--disable-new-dtags"
+ " -install_name {1} -Wl,-force_load,{0} -o {1}"
+ ),
}
- static_lib = '/spack/libfoo.a'
+ static_lib = "/spack/libfoo.a"
- for arch in ('linux', 'darwin'):
- for shared_lib in (None, '/spack/libbar.so'):
- output = _static_to_shared_library(arch, build_environment['cc'],
- static_lib, shared_lib,
- compiler_output=str).strip()
+ for arch in ("linux", "darwin"):
+ for shared_lib in (None, "/spack/libbar.so"):
+ output = _static_to_shared_library(
+ arch, build_environment["cc"], static_lib, shared_lib, compiler_output=str
+ ).strip()
if not shared_lib:
- shared_lib = '{0}.{1}'.format(
- os.path.splitext(static_lib)[0], dso_suffix)
+ shared_lib = "{0}.{1}".format(os.path.splitext(static_lib)[0], dso_suffix)
- assert set(output.split()) == set(expected[arch].format(
- static_lib, shared_lib, os.path.basename(shared_lib)).split())
+ assert set(output.split()) == set(
+ expected[arch].format(static_lib, shared_lib, os.path.basename(shared_lib)).split()
+ )
-@pytest.mark.regression('8345')
-@pytest.mark.usefixtures('config', 'mock_packages')
+@pytest.mark.regression("8345")
+@pytest.mark.usefixtures("config", "mock_packages")
def test_cc_not_changed_by_modules(monkeypatch, working_env):
- s = spack.spec.Spec('cmake')
+ s = spack.spec.Spec("cmake")
s.concretize()
pkg = s.package
def _set_wrong_cc(x):
- os.environ['CC'] = 'NOT_THIS_PLEASE'
- os.environ['ANOTHER_VAR'] = 'THIS_IS_SET'
+ os.environ["CC"] = "NOT_THIS_PLEASE"
+ os.environ["ANOTHER_VAR"] = "THIS_IS_SET"
- monkeypatch.setattr(
- spack.build_environment, 'load_module', _set_wrong_cc
- )
- monkeypatch.setattr(
- pkg.compiler, 'modules', ['some_module']
- )
+ monkeypatch.setattr(spack.build_environment, "load_module", _set_wrong_cc)
+ monkeypatch.setattr(pkg.compiler, "modules", ["some_module"])
spack.build_environment.setup_package(pkg, False)
- assert os.environ['CC'] != 'NOT_THIS_PLEASE'
- assert os.environ['ANOTHER_VAR'] == 'THIS_IS_SET'
-
-
-@pytest.mark.parametrize('initial,modifications,expected', [
- # Set and unset variables
- ({'SOME_VAR_STR': '', 'SOME_VAR_NUM': '0'},
- {'set': {'SOME_VAR_STR': 'SOME_STR', 'SOME_VAR_NUM': 1}},
- {'SOME_VAR_STR': 'SOME_STR', 'SOME_VAR_NUM': '1'}),
- ({'SOME_VAR_STR': ''},
- {'unset': ['SOME_VAR_STR']},
- {'SOME_VAR_STR': None}),
- ({}, # Set a variable that was not defined already
- {'set': {'SOME_VAR_STR': 'SOME_STR'}},
- {'SOME_VAR_STR': 'SOME_STR'}),
- # Append and prepend to the same variable
- ({'EMPTY_PATH_LIST': prep_and_join('path', 'middle')},
- {'prepend_path': {'EMPTY_PATH_LIST': prep_and_join('path', 'first')},
- 'append_path': {'EMPTY_PATH_LIST': prep_and_join('path', 'last')}},
- {'EMPTY_PATH_LIST': os_pathsep_join(prep_and_join('path', 'first'),
- prep_and_join('path', 'middle'),
- prep_and_join('path', 'last'))}),
- # Append and prepend from empty variables
- ({'EMPTY_PATH_LIST': '', 'SOME_VAR_STR': ''},
- {'prepend_path': {'EMPTY_PATH_LIST': prep_and_join('path', 'first')},
- 'append_path': {'SOME_VAR_STR': prep_and_join('path', 'last')}},
- {'EMPTY_PATH_LIST': prep_and_join('path', 'first'),
- 'SOME_VAR_STR': prep_and_join('path', 'last')}),
- ({}, # Same as before but on variables that were not defined
- {'prepend_path': {'EMPTY_PATH_LIST': prep_and_join('path', 'first')},
- 'append_path': {'SOME_VAR_STR': prep_and_join('path', 'last')}},
- {'EMPTY_PATH_LIST': prep_and_join('path', 'first'),
- 'SOME_VAR_STR': prep_and_join('path', 'last')}),
- # Remove a path from a list
- ({'EMPTY_PATH_LIST': os_pathsep_join(prep_and_join('path', 'first'),
- prep_and_join('path', 'middle'),
- prep_and_join('path', 'last'))},
- {'remove_path': {'EMPTY_PATH_LIST': prep_and_join('path', 'middle')}},
- {'EMPTY_PATH_LIST': os_pathsep_join(prep_and_join('path', 'first'),
- prep_and_join('path', 'last'))}),
- ({'EMPTY_PATH_LIST': prep_and_join('only', 'path')},
- {'remove_path': {'EMPTY_PATH_LIST': prep_and_join('only', 'path')}},
- {'EMPTY_PATH_LIST': ''}),
-])
+ assert os.environ["CC"] != "NOT_THIS_PLEASE"
+ assert os.environ["ANOTHER_VAR"] == "THIS_IS_SET"
+
+
+@pytest.mark.parametrize(
+ "initial,modifications,expected",
+ [
+ # Set and unset variables
+ (
+ {"SOME_VAR_STR": "", "SOME_VAR_NUM": "0"},
+ {"set": {"SOME_VAR_STR": "SOME_STR", "SOME_VAR_NUM": 1}},
+ {"SOME_VAR_STR": "SOME_STR", "SOME_VAR_NUM": "1"},
+ ),
+ ({"SOME_VAR_STR": ""}, {"unset": ["SOME_VAR_STR"]}, {"SOME_VAR_STR": None}),
+ (
+ {}, # Set a variable that was not defined already
+ {"set": {"SOME_VAR_STR": "SOME_STR"}},
+ {"SOME_VAR_STR": "SOME_STR"},
+ ),
+ # Append and prepend to the same variable
+ (
+ {"EMPTY_PATH_LIST": prep_and_join("path", "middle")},
+ {
+ "prepend_path": {"EMPTY_PATH_LIST": prep_and_join("path", "first")},
+ "append_path": {"EMPTY_PATH_LIST": prep_and_join("path", "last")},
+ },
+ {
+ "EMPTY_PATH_LIST": os_pathsep_join(
+ prep_and_join("path", "first"),
+ prep_and_join("path", "middle"),
+ prep_and_join("path", "last"),
+ )
+ },
+ ),
+ # Append and prepend from empty variables
+ (
+ {"EMPTY_PATH_LIST": "", "SOME_VAR_STR": ""},
+ {
+ "prepend_path": {"EMPTY_PATH_LIST": prep_and_join("path", "first")},
+ "append_path": {"SOME_VAR_STR": prep_and_join("path", "last")},
+ },
+ {
+ "EMPTY_PATH_LIST": prep_and_join("path", "first"),
+ "SOME_VAR_STR": prep_and_join("path", "last"),
+ },
+ ),
+ (
+ {}, # Same as before but on variables that were not defined
+ {
+ "prepend_path": {"EMPTY_PATH_LIST": prep_and_join("path", "first")},
+ "append_path": {"SOME_VAR_STR": prep_and_join("path", "last")},
+ },
+ {
+ "EMPTY_PATH_LIST": prep_and_join("path", "first"),
+ "SOME_VAR_STR": prep_and_join("path", "last"),
+ },
+ ),
+ # Remove a path from a list
+ (
+ {
+ "EMPTY_PATH_LIST": os_pathsep_join(
+ prep_and_join("path", "first"),
+ prep_and_join("path", "middle"),
+ prep_and_join("path", "last"),
+ )
+ },
+ {"remove_path": {"EMPTY_PATH_LIST": prep_and_join("path", "middle")}},
+ {
+ "EMPTY_PATH_LIST": os_pathsep_join(
+ prep_and_join("path", "first"), prep_and_join("path", "last")
+ )
+ },
+ ),
+ (
+ {"EMPTY_PATH_LIST": prep_and_join("only", "path")},
+ {"remove_path": {"EMPTY_PATH_LIST": prep_and_join("only", "path")}},
+ {"EMPTY_PATH_LIST": ""},
+ ),
+ ],
+)
def test_compiler_config_modifications(
- initial, modifications, expected, ensure_env_variables, monkeypatch
+ initial, modifications, expected, ensure_env_variables, monkeypatch
):
# Set the environment as per prerequisites
ensure_env_variables(initial)
def platform_pathsep(pathlist):
if Path.platform_path == Path.windows:
- pathlist = pathlist.replace(':', ';')
+ pathlist = pathlist.replace(":", ";")
return convert_to_platform_path(pathlist)
# Monkeypatch a pkg.compiler.environment with the required modifications
- pkg = spack.spec.Spec('cmake').concretized().package
- monkeypatch.setattr(pkg.compiler, 'environment', modifications)
+ pkg = spack.spec.Spec("cmake").concretized().package
+ monkeypatch.setattr(pkg.compiler, "environment", modifications)
# Trigger the modifications
spack.build_environment.setup_package(pkg, False)
@@ -237,50 +280,44 @@ def test_compiler_config_modifications(
assert name not in os.environ
-@pytest.mark.regression('9107')
-def test_spack_paths_before_module_paths(
- config, mock_packages, monkeypatch, working_env):
- s = spack.spec.Spec('cmake')
+@pytest.mark.regression("9107")
+def test_spack_paths_before_module_paths(config, mock_packages, monkeypatch, working_env):
+ s = spack.spec.Spec("cmake")
s.concretize()
pkg = s.package
- module_path = os.path.join('path', 'to', 'module')
+ module_path = os.path.join("path", "to", "module")
def _set_wrong_cc(x):
- os.environ['PATH'] = module_path + os.pathsep + os.environ['PATH']
+ os.environ["PATH"] = module_path + os.pathsep + os.environ["PATH"]
- monkeypatch.setattr(
- spack.build_environment, 'load_module', _set_wrong_cc
- )
- monkeypatch.setattr(
- pkg.compiler, 'modules', ['some_module']
- )
+ monkeypatch.setattr(spack.build_environment, "load_module", _set_wrong_cc)
+ monkeypatch.setattr(pkg.compiler, "modules", ["some_module"])
spack.build_environment.setup_package(pkg, False)
- spack_path = os.path.join(spack.paths.prefix, os.path.join('lib', 'spack', 'env'))
+ spack_path = os.path.join(spack.paths.prefix, os.path.join("lib", "spack", "env"))
- paths = os.environ['PATH'].split(os.pathsep)
+ paths = os.environ["PATH"].split(os.pathsep)
assert paths.index(spack_path) < paths.index(module_path)
def test_package_inheritance_module_setup(config, mock_packages, working_env):
- s = spack.spec.Spec('multimodule-inheritance')
+ s = spack.spec.Spec("multimodule-inheritance")
s.concretize()
pkg = s.package
spack.build_environment.setup_package(pkg, False)
- os.environ['TEST_MODULE_VAR'] = 'failed'
+ os.environ["TEST_MODULE_VAR"] = "failed"
- assert pkg.use_module_variable() == 'test_module_variable'
- assert os.environ['TEST_MODULE_VAR'] == 'test_module_variable'
+ assert pkg.use_module_variable() == "test_module_variable"
+ assert os.environ["TEST_MODULE_VAR"] == "test_module_variable"
def test_wrapper_variables(
- config, mock_packages, working_env, monkeypatch,
- installation_dir_with_headers
+ config, mock_packages, working_env, monkeypatch, installation_dir_with_headers
):
"""Check that build_environment supplies the needed library/include
directories via the SPACK_LINK_DIRS and SPACK_INCLUDE_DIRS environment
@@ -288,109 +325,106 @@ def test_wrapper_variables(
"""
# https://github.com/spack/spack/issues/13969
- cuda_headers = HeaderList([
- 'prefix/include/cuda_runtime.h',
- 'prefix/include/cuda/atomic',
- 'prefix/include/cuda/std/detail/libcxx/include/ctype.h'])
+ cuda_headers = HeaderList(
+ [
+ "prefix/include/cuda_runtime.h",
+ "prefix/include/cuda/atomic",
+ "prefix/include/cuda/std/detail/libcxx/include/ctype.h",
+ ]
+ )
cuda_include_dirs = cuda_headers.directories
- assert(posixpath.join('prefix', 'include')
- in cuda_include_dirs)
- assert(posixpath.join('prefix', 'include', 'cuda', 'std', 'detail',
- 'libcxx', 'include')
- not in cuda_include_dirs)
+ assert posixpath.join("prefix", "include") in cuda_include_dirs
+ assert (
+ posixpath.join("prefix", "include", "cuda", "std", "detail", "libcxx", "include")
+ not in cuda_include_dirs
+ )
- root = spack.spec.Spec('dt-diamond')
+ root = spack.spec.Spec("dt-diamond")
root.concretize()
for s in root.traverse():
- s.prefix = '/{0}-prefix/'.format(s.name)
+ s.prefix = "/{0}-prefix/".format(s.name)
- dep_pkg = root['dt-diamond-left'].package
- dep_lib_paths = ['/test/path/to/ex1.so', '/test/path/to/subdir/ex2.so']
- dep_lib_dirs = ['/test/path/to', '/test/path/to/subdir']
+ dep_pkg = root["dt-diamond-left"].package
+ dep_lib_paths = ["/test/path/to/ex1.so", "/test/path/to/subdir/ex2.so"]
+ dep_lib_dirs = ["/test/path/to", "/test/path/to/subdir"]
dep_libs = LibraryList(dep_lib_paths)
- dep2_pkg = root['dt-diamond-right'].package
+ dep2_pkg = root["dt-diamond-right"].package
dep2_pkg.spec.prefix = str(installation_dir_with_headers)
- setattr(dep_pkg, 'libs', dep_libs)
+ setattr(dep_pkg, "libs", dep_libs)
try:
pkg = root.package
env_mods = EnvironmentModifications()
- spack.build_environment.set_wrapper_variables(
- pkg, env_mods)
+ spack.build_environment.set_wrapper_variables(pkg, env_mods)
env_mods.apply_modifications()
def normpaths(paths):
return list(os.path.normpath(p) for p in paths)
- link_dir_var = os.environ['SPACK_LINK_DIRS']
- assert (
- normpaths(link_dir_var.split(':')) == normpaths(dep_lib_dirs))
+ link_dir_var = os.environ["SPACK_LINK_DIRS"]
+ assert normpaths(link_dir_var.split(":")) == normpaths(dep_lib_dirs)
- root_libdirs = ['/dt-diamond-prefix/lib', '/dt-diamond-prefix/lib64']
- rpath_dir_var = os.environ['SPACK_RPATH_DIRS']
+ root_libdirs = ["/dt-diamond-prefix/lib", "/dt-diamond-prefix/lib64"]
+ rpath_dir_var = os.environ["SPACK_RPATH_DIRS"]
# The 'lib' and 'lib64' subdirectories of the root package prefix
# should always be rpathed and should be the first rpaths
- assert (
- normpaths(rpath_dir_var.split(':')) ==
- normpaths(root_libdirs + dep_lib_dirs))
+ assert normpaths(rpath_dir_var.split(":")) == normpaths(root_libdirs + dep_lib_dirs)
- header_dir_var = os.environ['SPACK_INCLUDE_DIRS']
+ header_dir_var = os.environ["SPACK_INCLUDE_DIRS"]
# The default implementation looks for header files only
# in <prefix>/include and subdirectories
prefix = str(installation_dir_with_headers)
include_dirs = normpaths(header_dir_var.split(os.pathsep))
- assert os.path.join(prefix, 'include') in include_dirs
- assert os.path.join(prefix, 'include', 'boost') not in include_dirs
- assert os.path.join(prefix, 'path', 'to') not in include_dirs
- assert os.path.join(prefix, 'path', 'to', 'subdir') not in include_dirs
+ assert os.path.join(prefix, "include") in include_dirs
+ assert os.path.join(prefix, "include", "boost") not in include_dirs
+ assert os.path.join(prefix, "path", "to") not in include_dirs
+ assert os.path.join(prefix, "path", "to", "subdir") not in include_dirs
finally:
- delattr(dep_pkg, 'libs')
+ delattr(dep_pkg, "libs")
-def test_external_prefixes_last(mutable_config, mock_packages, working_env,
- monkeypatch):
+def test_external_prefixes_last(mutable_config, mock_packages, working_env, monkeypatch):
# Sanity check: under normal circumstances paths associated with
# dt-diamond-left would appear first. We'll mark it as external in
# the test to check if the associated paths are placed last.
- assert 'dt-diamond-left' < 'dt-diamond-right'
+ assert "dt-diamond-left" < "dt-diamond-right"
- cfg_data = syaml.load_config("""\
+ cfg_data = syaml.load_config(
+ """\
dt-diamond-left:
externals:
- spec: dt-diamond-left@1.0
prefix: /fake/path1
buildable: false
-""")
+"""
+ )
spack.config.set("packages", cfg_data)
- top = spack.spec.Spec('dt-diamond').concretized()
+ top = spack.spec.Spec("dt-diamond").concretized()
def _trust_me_its_a_dir(path):
return True
- monkeypatch.setattr(
- os.path, 'isdir', _trust_me_its_a_dir
- )
+
+ monkeypatch.setattr(os.path, "isdir", _trust_me_its_a_dir)
env_mods = EnvironmentModifications()
- spack.build_environment.set_wrapper_variables(
- top.package, env_mods)
+ spack.build_environment.set_wrapper_variables(top.package, env_mods)
env_mods.apply_modifications()
- link_dir_var = os.environ['SPACK_LINK_DIRS']
- link_dirs = link_dir_var.split(':')
- external_lib_paths = set([os.path.normpath('/fake/path1/lib'),
- os.path.normpath('/fake/path1/lib64')])
+ link_dir_var = os.environ["SPACK_LINK_DIRS"]
+ link_dirs = link_dir_var.split(":")
+ external_lib_paths = set(
+ [os.path.normpath("/fake/path1/lib"), os.path.normpath("/fake/path1/lib64")]
+ )
# The external lib paths should be the last two entries of the list and
# should not appear anywhere before the last two entries
- assert (set(os.path.normpath(x) for x in link_dirs[-2:]) ==
- external_lib_paths)
- assert not (set(os.path.normpath(x) for x in link_dirs[:-2]) &
- external_lib_paths)
+ assert set(os.path.normpath(x) for x in link_dirs[-2:]) == external_lib_paths
+ assert not (set(os.path.normpath(x) for x in link_dirs[:-2]) & external_lib_paths)
def test_parallel_false_is_not_propagating(config, mock_packages):
@@ -399,68 +433,75 @@ def test_parallel_false_is_not_propagating(config, mock_packages):
# Package A has parallel = False and depends on B which instead
# can be built in parallel
- s = spack.spec.Spec('a foobar=bar')
+ s = spack.spec.Spec("a foobar=bar")
s.concretize()
for spec in s.traverse():
- expected_jobs = spack.config.get('config:build_jobs') \
- if s.package.parallel else 1
+ expected_jobs = spack.config.get("config:build_jobs") if s.package.parallel else 1
m = AttributeHolder()
spack.build_environment._set_variables_for_single_module(s.package, m)
assert m.make_jobs == expected_jobs
-@pytest.mark.parametrize('config_setting,expected_flag', [
- ('runpath', '' if platform.system() == 'Darwin' else '--enable-new-dtags'),
- ('rpath', '' if platform.system() == 'Darwin' else '--disable-new-dtags'),
-])
-def test_setting_dtags_based_on_config(
- config_setting, expected_flag, config, mock_packages
-):
+@pytest.mark.parametrize(
+ "config_setting,expected_flag",
+ [
+ ("runpath", "" if platform.system() == "Darwin" else "--enable-new-dtags"),
+ ("rpath", "" if platform.system() == "Darwin" else "--disable-new-dtags"),
+ ],
+)
+def test_setting_dtags_based_on_config(config_setting, expected_flag, config, mock_packages):
# Pick a random package to be able to set compiler's variables
- s = spack.spec.Spec('cmake')
+ s = spack.spec.Spec("cmake")
s.concretize()
pkg = s.package
env = EnvironmentModifications()
- with spack.config.override('config:shared_linking', config_setting):
+ with spack.config.override("config:shared_linking", config_setting):
spack.build_environment.set_compiler_environment_variables(pkg, env)
modifications = env.group_by_name()
- assert 'SPACK_DTAGS_TO_STRIP' in modifications
- assert 'SPACK_DTAGS_TO_ADD' in modifications
- assert len(modifications['SPACK_DTAGS_TO_ADD']) == 1
- assert len(modifications['SPACK_DTAGS_TO_STRIP']) == 1
+ assert "SPACK_DTAGS_TO_STRIP" in modifications
+ assert "SPACK_DTAGS_TO_ADD" in modifications
+ assert len(modifications["SPACK_DTAGS_TO_ADD"]) == 1
+ assert len(modifications["SPACK_DTAGS_TO_STRIP"]) == 1
- dtags_to_add = modifications['SPACK_DTAGS_TO_ADD'][0]
+ dtags_to_add = modifications["SPACK_DTAGS_TO_ADD"][0]
assert dtags_to_add.value == expected_flag
def test_build_jobs_sequential_is_sequential():
- assert determine_number_of_jobs(
- parallel=False, command_line=8, config_default=8, max_cpus=8) == 1
+ assert (
+ determine_number_of_jobs(parallel=False, command_line=8, config_default=8, max_cpus=8) == 1
+ )
def test_build_jobs_command_line_overrides():
- assert determine_number_of_jobs(
- parallel=True, command_line=10, config_default=1, max_cpus=1) == 10
- assert determine_number_of_jobs(
- parallel=True, command_line=10, config_default=100, max_cpus=100) == 10
+ assert (
+ determine_number_of_jobs(parallel=True, command_line=10, config_default=1, max_cpus=1)
+ == 10
+ )
+ assert (
+ determine_number_of_jobs(parallel=True, command_line=10, config_default=100, max_cpus=100)
+ == 10
+ )
def test_build_jobs_defaults():
- assert determine_number_of_jobs(
- parallel=True, command_line=None, config_default=1, max_cpus=10) == 1
- assert determine_number_of_jobs(
- parallel=True, command_line=None, config_default=100, max_cpus=10) == 10
+ assert (
+ determine_number_of_jobs(parallel=True, command_line=None, config_default=1, max_cpus=10)
+ == 1
+ )
+ assert (
+ determine_number_of_jobs(parallel=True, command_line=None, config_default=100, max_cpus=10)
+ == 10
+ )
-def test_dirty_disable_module_unload(
- config, mock_packages, working_env, mock_module_cmd
-):
+def test_dirty_disable_module_unload(config, mock_packages, working_env, mock_module_cmd):
"""Test that on CRAY platform 'module unload' is not called if the 'dirty'
option is on.
"""
- s = spack.spec.Spec('a').concretized()
+ s = spack.spec.Spec("a").concretized()
# If called with "dirty" we don't unload modules, so no calls to the
# `module` function on Cray
@@ -470,5 +511,5 @@ def test_dirty_disable_module_unload(
# If called without "dirty" we unload modules on Cray
spack.build_environment.setup_package(s.package, dirty=False)
assert mock_module_cmd.calls
- assert any(('unload', 'cray-libsci') == item[0] for item in mock_module_cmd.calls)
- assert any(('unload', 'cray-mpich') == item[0] for item in mock_module_cmd.calls)
+ assert any(("unload", "cray-libsci") == item[0] for item in mock_module_cmd.calls)
+ assert any(("unload", "cray-mpich") == item[0] for item in mock_module_cmd.calls)
diff --git a/lib/spack/spack/test/build_system_guess.py b/lib/spack/spack/test/build_system_guess.py
index 7850d70fff..22ab96041d 100644
--- a/lib/spack/spack/test/build_system_guess.py
+++ b/lib/spack/spack/test/build_system_guess.py
@@ -11,47 +11,46 @@ import spack.cmd.create
import spack.stage
import spack.util.executable
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture(
- scope='function',
+ scope="function",
params=[
- ('configure', 'autotools'),
- ('CMakeLists.txt', 'cmake'),
- ('project.pro', 'qmake'),
- ('pom.xml', 'maven'),
- ('SConstruct', 'scons'),
- ('waf', 'waf'),
- ('argbah.rockspec', 'lua'),
- ('setup.py', 'python'),
- ('NAMESPACE', 'r'),
- ('WORKSPACE', 'bazel'),
- ('Makefile.PL', 'perlmake'),
- ('Build.PL', 'perlbuild'),
- ('foo.gemspec', 'ruby'),
- ('Rakefile', 'ruby'),
- ('setup.rb', 'ruby'),
- ('GNUmakefile', 'makefile'),
- ('makefile', 'makefile'),
- ('Makefile', 'makefile'),
- ('meson.build', 'meson'),
- ('configure.py', 'sip'),
- ('foobar', 'generic')
- ]
+ ("configure", "autotools"),
+ ("CMakeLists.txt", "cmake"),
+ ("project.pro", "qmake"),
+ ("pom.xml", "maven"),
+ ("SConstruct", "scons"),
+ ("waf", "waf"),
+ ("argbah.rockspec", "lua"),
+ ("setup.py", "python"),
+ ("NAMESPACE", "r"),
+ ("WORKSPACE", "bazel"),
+ ("Makefile.PL", "perlmake"),
+ ("Build.PL", "perlbuild"),
+ ("foo.gemspec", "ruby"),
+ ("Rakefile", "ruby"),
+ ("setup.rb", "ruby"),
+ ("GNUmakefile", "makefile"),
+ ("makefile", "makefile"),
+ ("Makefile", "makefile"),
+ ("meson.build", "meson"),
+ ("configure.py", "sip"),
+ ("foobar", "generic"),
+ ],
)
def url_and_build_system(request, tmpdir):
"""Sets up the resources to be pulled by the stage with
the appropriate file name and returns their url along with
the correct build-system guess
"""
- tar = spack.util.executable.which('tar')
+ tar = spack.util.executable.which("tar")
orig_dir = tmpdir.chdir()
filename, system = request.param
- tmpdir.ensure('archive', filename)
- tar('czf', 'archive.tar.gz', 'archive')
- url = 'file://' + str(tmpdir.join('archive.tar.gz'))
+ tmpdir.ensure("archive", filename)
+ tar("czf", "archive.tar.gz", "archive")
+ url = "file://" + str(tmpdir.join("archive.tar.gz"))
yield url, system
orig_dir.chdir()
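The fixture above drives the build-system guess with a one-file tarball; a small helper sketch of that setup (make_guess_archive is a hypothetical name, the calls mirror the fixture body):

import spack.util.executable

def make_guess_archive(tmpdir, filename):
    """Pack a single marker file (e.g. CMakeLists.txt) into archive.tar.gz
    and return a file:// URL pointing at it."""
    tar = spack.util.executable.which("tar")
    with tmpdir.as_cwd():
        tmpdir.ensure("archive", filename)  # create archive/<filename> inside tmpdir
        tar("czf", "archive.tar.gz", "archive")
    return "file://" + str(tmpdir.join("archive.tar.gz"))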
diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py
index 3158696666..f660596bb7 100644
--- a/lib/spack/spack/test/build_systems.py
+++ b/lib/spack/spack/test/build_systems.py
@@ -20,18 +20,18 @@ from spack.build_environment import ChildError, get_std_cmake_args, setup_packag
from spack.spec import Spec
from spack.util.executable import which
-DATA_PATH = os.path.join(spack.paths.test_path, 'data')
+DATA_PATH = os.path.join(spack.paths.test_path, "data")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture()
def concretize_and_setup():
def _func(spec_str):
- s = Spec('mpich').concretized()
+ s = Spec("mpich").concretized()
setup_package(s.package, False)
return s
+
return _func
@@ -40,126 +40,122 @@ def test_dir(tmpdir):
def _func(dir_str):
py.path.local(dir_str).copy(tmpdir)
return str(tmpdir)
+
return _func
-@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
+@pytest.mark.usefixtures("config", "mock_packages", "working_env")
class TestTargets(object):
@pytest.mark.parametrize(
- 'input_dir', glob.iglob(os.path.join(DATA_PATH, 'make', 'affirmative', '*'))
+ "input_dir", glob.iglob(os.path.join(DATA_PATH, "make", "affirmative", "*"))
)
def test_affirmative_make_check(self, input_dir, test_dir, concretize_and_setup):
"""Tests that Spack correctly detects targets in a Makefile."""
- s = concretize_and_setup('mpich')
+ s = concretize_and_setup("mpich")
with fs.working_dir(test_dir(input_dir)):
- assert s.package._has_make_target('check')
- s.package._if_make_target_execute('check')
+ assert s.package._has_make_target("check")
+ s.package._if_make_target_execute("check")
@pytest.mark.parametrize(
- 'input_dir', glob.iglob(os.path.join(DATA_PATH, 'make', 'negative', '*'))
+ "input_dir", glob.iglob(os.path.join(DATA_PATH, "make", "negative", "*"))
)
- @pytest.mark.regression('9067')
+ @pytest.mark.regression("9067")
def test_negative_make_check(self, input_dir, test_dir, concretize_and_setup):
"""Tests that Spack correctly ignores false positives in a Makefile."""
- s = concretize_and_setup('mpich')
+ s = concretize_and_setup("mpich")
with fs.working_dir(test_dir(input_dir)):
- assert not s.package._has_make_target('check')
- s.package._if_make_target_execute('check')
+ assert not s.package._has_make_target("check")
+ s.package._if_make_target_execute("check")
- @pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
+ @pytest.mark.skipif(not which("ninja"), reason="ninja is not installed")
@pytest.mark.parametrize(
- 'input_dir', glob.iglob(os.path.join(DATA_PATH, 'ninja', 'affirmative', '*'))
+ "input_dir", glob.iglob(os.path.join(DATA_PATH, "ninja", "affirmative", "*"))
)
def test_affirmative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
"""Tests that Spack correctly detects targets in a Ninja build script."""
- s = concretize_and_setup('mpich')
+ s = concretize_and_setup("mpich")
with fs.working_dir(test_dir(input_dir)):
- assert s.package._has_ninja_target('check')
- s.package._if_ninja_target_execute('check')
+ assert s.package._has_ninja_target("check")
+ s.package._if_ninja_target_execute("check")
- @pytest.mark.skipif(not which('ninja'), reason='ninja is not installed')
+ @pytest.mark.skipif(not which("ninja"), reason="ninja is not installed")
@pytest.mark.parametrize(
- 'input_dir', glob.iglob(os.path.join(DATA_PATH, 'ninja', 'negative', '*'))
+ "input_dir", glob.iglob(os.path.join(DATA_PATH, "ninja", "negative", "*"))
)
def test_negative_ninja_check(self, input_dir, test_dir, concretize_and_setup):
"""Tests that Spack correctly ignores false positives in a Ninja
build script.
"""
- s = concretize_and_setup('mpich')
+ s = concretize_and_setup("mpich")
with fs.working_dir(test_dir(input_dir)):
- assert not s.package._has_ninja_target('check')
- s.package._if_ninja_target_execute('check')
+ assert not s.package._has_ninja_target("check")
+ s.package._if_ninja_target_execute("check")
-@pytest.mark.usefixtures('config', 'mock_packages')
+@pytest.mark.usefixtures("config", "mock_packages")
class TestAutotoolsPackage(object):
-
def test_with_or_without(self):
- s = Spec('a').concretized()
- options = s.package.with_or_without('foo')
+ s = Spec("a").concretized()
+ options = s.package.with_or_without("foo")
# Ensure that values that are not representing a feature
# are not used by with_or_without
- assert '--without-none' not in options
- assert '--with-bar' in options
- assert '--without-baz' in options
- assert '--no-fee' in options
+ assert "--without-none" not in options
+ assert "--with-bar" in options
+ assert "--without-baz" in options
+ assert "--no-fee" in options
def activate(value):
- return 'something'
+ return "something"
- options = s.package.with_or_without('foo', activation_value=activate)
- assert '--without-none' not in options
- assert '--with-bar=something' in options
- assert '--without-baz' in options
- assert '--no-fee' in options
+ options = s.package.with_or_without("foo", activation_value=activate)
+ assert "--without-none" not in options
+ assert "--with-bar=something" in options
+ assert "--without-baz" in options
+ assert "--no-fee" in options
- options = s.package.enable_or_disable('foo')
- assert '--disable-none' not in options
- assert '--enable-bar' in options
- assert '--disable-baz' in options
- assert '--disable-fee' in options
+ options = s.package.enable_or_disable("foo")
+ assert "--disable-none" not in options
+ assert "--enable-bar" in options
+ assert "--disable-baz" in options
+ assert "--disable-fee" in options
- options = s.package.with_or_without('bvv')
- assert '--with-bvv' in options
+ options = s.package.with_or_without("bvv")
+ assert "--with-bvv" in options
- options = s.package.with_or_without('lorem-ipsum', variant='lorem_ipsum')
- assert '--without-lorem-ipsum' in options
+ options = s.package.with_or_without("lorem-ipsum", variant="lorem_ipsum")
+ assert "--without-lorem-ipsum" in options
def test_none_is_allowed(self):
- s = Spec('a foo=none').concretized()
- options = s.package.with_or_without('foo')
+ s = Spec("a foo=none").concretized()
+ options = s.package.with_or_without("foo")
# Ensure that values that are not representing a feature
# are not used by with_or_without
- assert '--with-none' not in options
- assert '--without-bar' in options
- assert '--without-baz' in options
- assert '--no-fee' in options
+ assert "--with-none" not in options
+ assert "--without-bar" in options
+ assert "--without-baz" in options
+ assert "--no-fee" in options
- def test_libtool_archive_files_are_deleted_by_default(
- self, mutable_database
- ):
+ def test_libtool_archive_files_are_deleted_by_default(self, mutable_database):
# Install a package that creates a mock libtool archive
- s = Spec('libtool-deletion').concretized()
+ s = Spec("libtool-deletion").concretized()
s.package.do_install(explicit=True)
# Assert the libtool archive is not there and we have
# a log of removed files
assert not os.path.exists(s.package.libtool_archive_file)
- search_directory = os.path.join(s.prefix, '.spack')
- libtool_deletion_log = fs.find(
- search_directory, 'removed_la_files.txt', recursive=True
- )
+ search_directory = os.path.join(s.prefix, ".spack")
+ libtool_deletion_log = fs.find(search_directory, "removed_la_files.txt", recursive=True)
assert libtool_deletion_log
def test_libtool_archive_files_might_be_installed_on_demand(
- self, mutable_database, monkeypatch
+ self, mutable_database, monkeypatch
):
# Install a package that creates a mock libtool archive,
# patch its package to preserve the installation
- s = Spec('libtool-deletion').concretized()
- monkeypatch.setattr(s.package, 'install_libtool_archives', True)
+ s = Spec("libtool-deletion").concretized()
+ monkeypatch.setattr(s.package, "install_libtool_archives", True)
s.package.do_install(explicit=True)
# Assert libtool archives are installed
@@ -170,40 +166,40 @@ class TestAutotoolsPackage(object):
Tests whether only broken config.sub and config.guess are replaced with
files from working alternatives from the gnuconfig package.
"""
- s = Spec('autotools-config-replacement +patch_config_files +gnuconfig')
+ s = Spec("autotools-config-replacement +patch_config_files +gnuconfig")
s.concretize()
s.package.do_install()
- with open(os.path.join(s.prefix.broken, 'config.sub')) as f:
+ with open(os.path.join(s.prefix.broken, "config.sub")) as f:
assert "gnuconfig version of config.sub" in f.read()
- with open(os.path.join(s.prefix.broken, 'config.guess')) as f:
+ with open(os.path.join(s.prefix.broken, "config.guess")) as f:
assert "gnuconfig version of config.guess" in f.read()
- with open(os.path.join(s.prefix.working, 'config.sub')) as f:
+ with open(os.path.join(s.prefix.working, "config.sub")) as f:
assert "gnuconfig version of config.sub" not in f.read()
- with open(os.path.join(s.prefix.working, 'config.guess')) as f:
+ with open(os.path.join(s.prefix.working, "config.guess")) as f:
assert "gnuconfig version of config.guess" not in f.read()
def test_autotools_gnuconfig_replacement_disabled(self, mutable_database):
"""
        Tests whether disabling patch_config_files skips the config.sub and config.guess replacement
"""
- s = Spec('autotools-config-replacement ~patch_config_files +gnuconfig')
+ s = Spec("autotools-config-replacement ~patch_config_files +gnuconfig")
s.concretize()
s.package.do_install()
- with open(os.path.join(s.prefix.broken, 'config.sub')) as f:
+ with open(os.path.join(s.prefix.broken, "config.sub")) as f:
assert "gnuconfig version of config.sub" not in f.read()
- with open(os.path.join(s.prefix.broken, 'config.guess')) as f:
+ with open(os.path.join(s.prefix.broken, "config.guess")) as f:
assert "gnuconfig version of config.guess" not in f.read()
- with open(os.path.join(s.prefix.working, 'config.sub')) as f:
+ with open(os.path.join(s.prefix.working, "config.sub")) as f:
assert "gnuconfig version of config.sub" not in f.read()
- with open(os.path.join(s.prefix.working, 'config.guess')) as f:
+ with open(os.path.join(s.prefix.working, "config.guess")) as f:
assert "gnuconfig version of config.guess" not in f.read()
@pytest.mark.disable_clean_stage_check
@@ -212,7 +208,7 @@ class TestAutotoolsPackage(object):
Tests whether a useful error message is shown when patch_config_files is
enabled, but gnuconfig is not listed as a direct build dependency.
"""
- s = Spec('autotools-config-replacement +patch_config_files ~gnuconfig')
+ s = Spec("autotools-config-replacement +patch_config_files ~gnuconfig")
s.concretize()
msg = "Cannot patch config files: missing dependencies: gnuconfig"
@@ -226,10 +222,11 @@ class TestAutotoolsPackage(object):
external, but the install prefix is misconfigured and no config.guess
and config.sub substitute files are found in the provided prefix.
"""
- env_dir = str(tmpdir.ensure('env', dir=True))
- gnuconfig_dir = str(tmpdir.ensure('gnuconfig', dir=True)) # empty dir
- with open(os.path.join(env_dir, 'spack.yaml'), 'w') as f:
- f.write("""\
+ env_dir = str(tmpdir.ensure("env", dir=True))
+ gnuconfig_dir = str(tmpdir.ensure("gnuconfig", dir=True)) # empty dir
+ with open(os.path.join(env_dir, "spack.yaml"), "w") as f:
+ f.write(
+ """\
spack:
specs:
- 'autotools-config-replacement +patch_config_files +gnuconfig'
@@ -239,100 +236,111 @@ spack:
externals:
- spec: gnuconfig@1.0.0
prefix: {0}
-""".format(gnuconfig_dir))
+""".format(
+ gnuconfig_dir
+ )
+ )
- msg = ("Spack could not find `config.guess`.*misconfigured as an "
- "external package")
+ msg = "Spack could not find `config.guess`.*misconfigured as an " "external package"
with spack.environment.Environment(env_dir) as e:
e.concretize()
with pytest.raises(ChildError, match=msg):
e.install_all()
-@pytest.mark.usefixtures('config', 'mock_packages')
+@pytest.mark.usefixtures("config", "mock_packages")
class TestCMakePackage(object):
def test_cmake_std_args(self):
# Call the function on a CMakePackage instance
- s = Spec('cmake-client').concretized()
+ s = Spec("cmake-client").concretized()
assert s.package.std_cmake_args == get_std_cmake_args(s.package)
# Call it on another kind of package
- s = Spec('mpich').concretized()
+ s = Spec("mpich").concretized()
assert get_std_cmake_args(s.package)
def test_cmake_bad_generator(self):
- s = Spec('cmake-client').concretized()
- s.package.generator = 'Yellow Sticky Notes'
+ s = Spec("cmake-client").concretized()
+ s.package.generator = "Yellow Sticky Notes"
with pytest.raises(spack.package_base.InstallError):
get_std_cmake_args(s.package)
def test_cmake_secondary_generator(config, mock_packages):
- s = Spec('cmake-client').concretized()
- s.package.generator = 'CodeBlocks - Unix Makefiles'
+ s = Spec("cmake-client").concretized()
+ s.package.generator = "CodeBlocks - Unix Makefiles"
assert get_std_cmake_args(s.package)
def test_define(self):
- s = Spec('cmake-client').concretized()
+ s = Spec("cmake-client").concretized()
define = s.package.define
for cls in (list, tuple):
- assert define('MULTI', cls(['right', 'up'])) == '-DMULTI:STRING=right;up'
+ assert define("MULTI", cls(["right", "up"])) == "-DMULTI:STRING=right;up"
- file_list = fs.FileList(['/foo', '/bar'])
- assert define('MULTI', file_list) == '-DMULTI:STRING=/foo;/bar'
+ file_list = fs.FileList(["/foo", "/bar"])
+ assert define("MULTI", file_list) == "-DMULTI:STRING=/foo;/bar"
- assert define('ENABLE_TRUTH', False) == '-DENABLE_TRUTH:BOOL=OFF'
- assert define('ENABLE_TRUTH', True) == '-DENABLE_TRUTH:BOOL=ON'
+ assert define("ENABLE_TRUTH", False) == "-DENABLE_TRUTH:BOOL=OFF"
+ assert define("ENABLE_TRUTH", True) == "-DENABLE_TRUTH:BOOL=ON"
- assert define('SINGLE', 'red') == '-DSINGLE:STRING=red'
+ assert define("SINGLE", "red") == "-DSINGLE:STRING=red"
def test_define_from_variant(self):
- s = Spec('cmake-client multi=up,right ~truthy single=red').concretized()
+ s = Spec("cmake-client multi=up,right ~truthy single=red").concretized()
- arg = s.package.define_from_variant('MULTI')
- assert arg == '-DMULTI:STRING=right;up'
+ arg = s.package.define_from_variant("MULTI")
+ assert arg == "-DMULTI:STRING=right;up"
- arg = s.package.define_from_variant('ENABLE_TRUTH', 'truthy')
- assert arg == '-DENABLE_TRUTH:BOOL=OFF'
+ arg = s.package.define_from_variant("ENABLE_TRUTH", "truthy")
+ assert arg == "-DENABLE_TRUTH:BOOL=OFF"
- arg = s.package.define_from_variant('SINGLE')
- assert arg == '-DSINGLE:STRING=red'
+ arg = s.package.define_from_variant("SINGLE")
+ assert arg == "-DSINGLE:STRING=red"
with pytest.raises(KeyError, match="not a variant"):
- s.package.define_from_variant('NONEXISTENT')
+ s.package.define_from_variant("NONEXISTENT")
-@pytest.mark.usefixtures('config', 'mock_packages')
+@pytest.mark.usefixtures("config", "mock_packages")
class TestDownloadMixins(object):
"""Test GnuMirrorPackage, SourceforgePackage, SourcewarePackage and XorgPackage."""
- @pytest.mark.parametrize('spec_str,expected_url', [
- # GnuMirrorPackage
- ('mirror-gnu', 'https://ftpmirror.gnu.org/make/make-4.2.1.tar.gz'),
- # SourceforgePackage
- ('mirror-sourceforge',
- 'https://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz'),
- # SourcewarePackage
- ('mirror-sourceware', 'https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz'),
- # XorgPackage
- ('mirror-xorg',
- 'https://www.x.org/archive/individual/util/util-macros-1.19.1.tar.bz2')
- ])
+
+ @pytest.mark.parametrize(
+ "spec_str,expected_url",
+ [
+ # GnuMirrorPackage
+ ("mirror-gnu", "https://ftpmirror.gnu.org/make/make-4.2.1.tar.gz"),
+ # SourceforgePackage
+ ("mirror-sourceforge", "https://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz"),
+ # SourcewarePackage
+ ("mirror-sourceware", "https://sourceware.org/pub/bzip2/bzip2-1.0.8.tar.gz"),
+ # XorgPackage
+ (
+ "mirror-xorg",
+ "https://www.x.org/archive/individual/util/util-macros-1.19.1.tar.bz2",
+ ),
+ ],
+ )
def test_attributes_defined(self, spec_str, expected_url):
s = Spec(spec_str).concretized()
assert s.package.urls[0] == expected_url
- @pytest.mark.parametrize('spec_str,error_fmt', [
- # GnuMirrorPackage
- ('mirror-gnu-broken', r'{0} must define a `gnu_mirror_path` attribute'),
- # SourceforgePackage
- ('mirror-sourceforge-broken',
- r'{0} must define a `sourceforge_mirror_path` attribute'),
- # SourcewarePackage
- ('mirror-sourceware-broken',
- r'{0} must define a `sourceware_mirror_path` attribute'),
- # XorgPackage
- ('mirror-xorg-broken', r'{0} must define a `xorg_mirror_path` attribute'),
- ])
+ @pytest.mark.parametrize(
+ "spec_str,error_fmt",
+ [
+ # GnuMirrorPackage
+ ("mirror-gnu-broken", r"{0} must define a `gnu_mirror_path` attribute"),
+ # SourceforgePackage
+ (
+ "mirror-sourceforge-broken",
+ r"{0} must define a `sourceforge_mirror_path` attribute",
+ ),
+ # SourcewarePackage
+ ("mirror-sourceware-broken", r"{0} must define a `sourceware_mirror_path` attribute"),
+ # XorgPackage
+ ("mirror-xorg-broken", r"{0} must define a `xorg_mirror_path` attribute"),
+ ],
+ )
def test_attributes_missing(self, spec_str, error_fmt):
s = Spec(spec_str).concretized()
error_msg = error_fmt.format(type(s.package).__name__)
@@ -343,30 +351,33 @@ class TestDownloadMixins(object):
def test_cmake_define_from_variant_conditional(config, mock_packages):
"""Test that define_from_variant returns empty string when a condition on a variant
is not met. When this is the case, the variant is not set in the spec."""
- s = Spec('cmake-conditional-variants-test').concretized()
- assert 'example' not in s.variants
- assert s.package.define_from_variant('EXAMPLE', 'example') == ''
+ s = Spec("cmake-conditional-variants-test").concretized()
+ assert "example" not in s.variants
+ assert s.package.define_from_variant("EXAMPLE", "example") == ""
def test_autotools_args_from_conditional_variant(config, mock_packages):
"""Test that _activate_or_not returns an empty string when a condition on a variant
is not met. When this is the case, the variant is not set in the spec."""
- s = Spec('autotools-conditional-variants-test').concretized()
- assert 'example' not in s.variants
- assert len(s.package._activate_or_not('example', 'enable', 'disable')) == 0
+ s = Spec("autotools-conditional-variants-test").concretized()
+ assert "example" not in s.variants
+ assert len(s.package._activate_or_not("example", "enable", "disable")) == 0
def test_autoreconf_search_path_args_multiple(config, mock_packages, tmpdir):
"""autoreconf should receive the right -I flags with search paths for m4 files
for build deps."""
- spec = Spec('dttop').concretized()
+ spec = Spec("dttop").concretized()
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
- build_dep_one, build_dep_two = spec.dependencies(deptype='build')
+ build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
- '-I', aclocal_fst, '-I', aclocal_snd
+ "-I",
+ aclocal_fst,
+ "-I",
+ aclocal_snd,
]
@@ -374,35 +385,36 @@ def test_autoreconf_search_path_args_skip_automake(config, mock_packages, tmpdir
"""automake's aclocal dir should not be added as -I flag as it is a default
3rd party dir search path, and if it's a system version it usually includes
m4 files shadowing spack deps."""
- spec = Spec('dttop').concretized()
+ spec = Spec("dttop").concretized()
tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal")
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
- build_dep_one, build_dep_two = spec.dependencies(deptype='build')
- build_dep_one.name = 'automake'
+ build_dep_one, build_dep_two = spec.dependencies(deptype="build")
+ build_dep_one.name = "automake"
build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.prefix = str(tmpdir.join("snd"))
- assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
- '-I', aclocal_snd
- ]
+ assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal_snd]
def test_autoreconf_search_path_args_external_order(config, mock_packages, tmpdir):
"""When a build dep is external, its -I flag should occur last"""
- spec = Spec('dttop').concretized()
+ spec = Spec("dttop").concretized()
aclocal_fst = str(tmpdir.mkdir("fst").mkdir("share").mkdir("aclocal"))
aclocal_snd = str(tmpdir.mkdir("snd").mkdir("share").mkdir("aclocal"))
- build_dep_one, build_dep_two = spec.dependencies(deptype='build')
+ build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.external_path = str(tmpdir.join("fst"))
build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
- '-I', aclocal_snd, '-I', aclocal_fst
+ "-I",
+ aclocal_snd,
+ "-I",
+ aclocal_fst,
]
def test_autoreconf_search_path_skip_nonexisting(config, mock_packages, tmpdir):
"""Skip -I flags for non-existing directories"""
- spec = Spec('dttop').concretized()
- build_dep_one, build_dep_two = spec.dependencies(deptype='build')
+ spec = Spec("dttop").concretized()
+ build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.prefix = str(tmpdir.join("fst"))
build_dep_two.prefix = str(tmpdir.join("snd"))
assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == []
@@ -410,11 +422,9 @@ def test_autoreconf_search_path_skip_nonexisting(config, mock_packages, tmpdir):
def test_autoreconf_search_path_dont_repeat(config, mock_packages, tmpdir):
"""Do not add the same -I flag twice to keep things readable for humans"""
- spec = Spec('dttop').concretized()
+ spec = Spec("dttop").concretized()
aclocal = str(tmpdir.mkdir("prefix").mkdir("share").mkdir("aclocal"))
- build_dep_one, build_dep_two = spec.dependencies(deptype='build')
+ build_dep_one, build_dep_two = spec.dependencies(deptype="build")
build_dep_one.external_path = str(tmpdir.join("prefix"))
build_dep_two.external_path = str(tmpdir.join("prefix"))
- assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == [
- '-I', aclocal
- ]
+ assert spack.build_systems.autotools._autoreconf_search_path_args(spec) == ["-I", aclocal]
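
The TestCMakePackage cases above pin down the exact strings that define() and define_from_variant() are expected to produce. A minimal sketch of that -D<name>:<type>=<value> formatting rule, reconstructed only from the values asserted in the tests (it is not Spack's implementation, which lives in spack.build_systems.cmake):

    def define(name, value):
        # Booleans become BOOL=ON/OFF; lists and tuples are joined with ";"
        # as a STRING; everything else is stringified as a STRING.
        if isinstance(value, bool):
            kind, text = "BOOL", "ON" if value else "OFF"
        elif isinstance(value, (list, tuple)):
            kind, text = "STRING", ";".join(str(v) for v in value)
        else:
            kind, text = "STRING", str(value)
        return "-D{0}:{1}={2}".format(name, kind, text)

    assert define("MULTI", ["right", "up"]) == "-DMULTI:STRING=right;up"
    assert define("ENABLE_TRUTH", False) == "-DENABLE_TRUTH:BOOL=OFF"
    assert define("SINGLE", "red") == "-DSINGLE:STRING=red"
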
diff --git a/lib/spack/spack/test/buildrequest.py b/lib/spack/spack/test/buildrequest.py
index f8b62c44d3..e656cfa99e 100644
--- a/lib/spack/spack/test/buildrequest.py
+++ b/lib/spack/spack/test/buildrequest.py
@@ -15,22 +15,21 @@ import spack.spec
# however, tests are currently failing because support
# for Spack on Windows has not been extended to this
# module yet.
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_build_request_errors(install_mockery):
- with pytest.raises(ValueError, match='must be a package'):
- inst.BuildRequest('abc', {})
+ with pytest.raises(ValueError, match="must be a package"):
+ inst.BuildRequest("abc", {})
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
- with pytest.raises(ValueError, match='must have a concrete spec'):
+ with pytest.raises(ValueError, match="must have a concrete spec"):
inst.BuildRequest(pkg_cls(spec), {})
def test_build_request_basics(install_mockery):
- spec = spack.spec.Spec('dependent-install')
+ spec = spack.spec.Spec("dependent-install")
spec.concretize()
assert spec.concrete
@@ -41,14 +40,14 @@ def test_build_request_basics(install_mockery):
assert request.spec == spec.package.spec
# Ensure key default install arguments are set
- assert 'install_package' in request.install_args
- assert 'install_deps' in request.install_args
+ assert "install_package" in request.install_args
+ assert "install_deps" in request.install_args
def test_build_request_strings(install_mockery):
"""Tests of BuildRequest repr and str for coverage purposes."""
# Using a package with one dependency
- spec = spack.spec.Spec('dependent-install')
+ spec = spack.spec.Spec("dependent-install")
spec.concretize()
assert spec.concrete
diff --git a/lib/spack/spack/test/buildtask.py b/lib/spack/spack/test/buildtask.py
index b3fe96c42b..d383579d36 100644
--- a/lib/spack/spack/test/buildtask.py
+++ b/lib/spack/spack/test/buildtask.py
@@ -11,34 +11,32 @@ import spack.spec
def test_build_task_errors(install_mockery):
- with pytest.raises(ValueError, match='must be a package'):
- inst.BuildTask('abc', None, False, 0, 0, 0, [])
+ with pytest.raises(ValueError, match="must be a package"):
+ inst.BuildTask("abc", None, False, 0, 0, 0, [])
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
- with pytest.raises(ValueError, match='must have a concrete spec'):
+ with pytest.raises(ValueError, match="must have a concrete spec"):
inst.BuildTask(pkg_cls(spec), None, False, 0, 0, 0, [])
spec.concretize()
assert spec.concrete
- with pytest.raises(ValueError, match='must have a build request'):
+ with pytest.raises(ValueError, match="must have a build request"):
inst.BuildTask(spec.package, None, False, 0, 0, 0, [])
request = inst.BuildRequest(spec.package, {})
- with pytest.raises(inst.InstallError, match='Cannot create a build task'):
- inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED,
- [])
+ with pytest.raises(inst.InstallError, match="Cannot create a build task"):
+ inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_REMOVED, [])
def test_build_task_basics(install_mockery):
- spec = spack.spec.Spec('dependent-install')
+ spec = spack.spec.Spec("dependent-install")
spec.concretize()
assert spec.concrete
# Ensure key properties match expectations
request = inst.BuildRequest(spec.package, {})
- task = inst.BuildTask(spec.package, request, False, 0, 0,
- inst.STATUS_ADDED, [])
+ task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
assert task.explicit # package was "explicitly" requested
assert task.priority == len(task.uninstalled_deps)
assert task.key == (task.priority, task.sequence)
@@ -54,14 +52,13 @@ def test_build_task_basics(install_mockery):
def test_build_task_strings(install_mockery):
"""Tests of build_task repr and str for coverage purposes."""
# Using a package with one dependency
- spec = spack.spec.Spec('dependent-install')
+ spec = spack.spec.Spec("dependent-install")
spec.concretize()
assert spec.concrete
# Ensure key properties match expectations
request = inst.BuildRequest(spec.package, {})
- task = inst.BuildTask(spec.package, request, False, 0, 0,
- inst.STATUS_ADDED, [])
+ task = inst.BuildTask(spec.package, request, False, 0, 0, inst.STATUS_ADDED, [])
# Cover __repr__
irep = task.__repr__()
diff --git a/lib/spack/spack/test/cache_fetch.py b/lib/spack/spack/test/cache_fetch.py
index 828dd81791..03b8e92ecf 100644
--- a/lib/spack/spack/test/cache_fetch.py
+++ b/lib/spack/spack/test/cache_fetch.py
@@ -14,34 +14,34 @@ import spack.config
from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
from spack.stage import Stage
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_fetch_missing_cache(tmpdir, _fetch_method):
"""Ensure raise a missing cache file."""
testpath = str(tmpdir)
- with spack.config.override('config:url_fetch_method', _fetch_method):
- abs_pref = '' if is_windows else '/'
- url = 'file://' + abs_pref + 'not-a-real-cache-file'
+ with spack.config.override("config:url_fetch_method", _fetch_method):
+ abs_pref = "" if is_windows else "/"
+ url = "file://" + abs_pref + "not-a-real-cache-file"
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=testpath):
- with pytest.raises(NoCacheError, match=r'No cache'):
+ with pytest.raises(NoCacheError, match=r"No cache"):
fetcher.fetch()
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_fetch(tmpdir, _fetch_method):
"""Ensure a fetch after expanding is effectively a no-op."""
testpath = str(tmpdir)
- cache = os.path.join(testpath, 'cache.tar.gz')
+ cache = os.path.join(testpath, "cache.tar.gz")
touch(cache)
if is_windows:
- url_stub = '{0}'
+ url_stub = "{0}"
else:
- url_stub = '/{0}'
- url = 'file://' + url_stub.format(cache)
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ url_stub = "/{0}"
+ url = "file://" + url_stub.format(cache)
+ with spack.config.override("config:url_fetch_method", _fetch_method):
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path
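
The platform branch in these cache-fetch tests only changes whether the file:// URL gets a leading slash, since POSIX absolute paths start with "/" while Windows drive paths do not. A small sketch of the construction used by test_fetch_missing_cache above, with the file name taken from that test:

    import sys

    is_windows = sys.platform == "win32"

    def missing_cache_url(name="not-a-real-cache-file"):
        # Sketch only: mirrors the abs_pref logic in the test above.
        abs_pref = "" if is_windows else "/"
        return "file://" + abs_pref + name

    # file:///not-a-real-cache-file on POSIX, file://not-a-real-cache-file on Windows
    print(missing_cache_url())
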
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index 36890c2eb2..2dcdc533c4 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -20,20 +20,38 @@ from spack.util.executable import Executable, ProcessError
# Complicated compiler test command
#
test_args = [
- '-I/test/include', '-L/test/lib', '-L/with space/lib', '-I/other/include',
- 'arg1',
- '-Wl,--start-group',
- 'arg2',
- '-Wl,-rpath,/first/rpath', 'arg3', '-Wl,-rpath', '-Wl,/second/rpath',
- '-llib1', '-llib2',
- 'arg4',
- '-Wl,--end-group',
- '-Xlinker', '-rpath', '-Xlinker', '/third/rpath',
- '-Xlinker', '-rpath', '-Xlinker', '/fourth/rpath',
- '-Wl,--rpath,/fifth/rpath', '-Wl,--rpath', '-Wl,/sixth/rpath',
- '-llib3', '-llib4',
- 'arg5', 'arg6',
- '"-DDOUBLE_QUOTED_ARG"', "'-DSINGLE_QUOTED_ARG'",
+ "-I/test/include",
+ "-L/test/lib",
+ "-L/with space/lib",
+ "-I/other/include",
+ "arg1",
+ "-Wl,--start-group",
+ "arg2",
+ "-Wl,-rpath,/first/rpath",
+ "arg3",
+ "-Wl,-rpath",
+ "-Wl,/second/rpath",
+ "-llib1",
+ "-llib2",
+ "arg4",
+ "-Wl,--end-group",
+ "-Xlinker",
+ "-rpath",
+ "-Xlinker",
+ "/third/rpath",
+ "-Xlinker",
+ "-rpath",
+ "-Xlinker",
+ "/fourth/rpath",
+ "-Wl,--rpath,/fifth/rpath",
+ "-Wl,--rpath",
+ "-Wl,/sixth/rpath",
+ "-llib3",
+ "-llib4",
+ "arg5",
+ "arg6",
+ '"-DDOUBLE_QUOTED_ARG"',
+ "'-DSINGLE_QUOTED_ARG'",
]
#
@@ -42,33 +60,53 @@ test_args = [
# `_wl_rpaths` are for the compiler (with -Wl,), and `_rpaths` are raw
# -rpath arguments for the linker.
#
-test_include_paths = [
- '-I/test/include', '-I/other/include']
+test_include_paths = ["-I/test/include", "-I/other/include"]
-test_library_paths = [
- '-L/test/lib', '-L/with space/lib']
+test_library_paths = ["-L/test/lib", "-L/with space/lib"]
test_wl_rpaths = [
- '-Wl,-rpath,/first/rpath', '-Wl,-rpath,/second/rpath',
- '-Wl,-rpath,/third/rpath', '-Wl,-rpath,/fourth/rpath',
- '-Wl,-rpath,/fifth/rpath', '-Wl,-rpath,/sixth/rpath']
+ "-Wl,-rpath,/first/rpath",
+ "-Wl,-rpath,/second/rpath",
+ "-Wl,-rpath,/third/rpath",
+ "-Wl,-rpath,/fourth/rpath",
+ "-Wl,-rpath,/fifth/rpath",
+ "-Wl,-rpath,/sixth/rpath",
+]
test_rpaths = [
- '-rpath', '/first/rpath', '-rpath', '/second/rpath',
- '-rpath', '/third/rpath', '-rpath', '/fourth/rpath',
- '-rpath', '/fifth/rpath', '-rpath', '/sixth/rpath']
+ "-rpath",
+ "/first/rpath",
+ "-rpath",
+ "/second/rpath",
+ "-rpath",
+ "/third/rpath",
+ "-rpath",
+ "/fourth/rpath",
+ "-rpath",
+ "/fifth/rpath",
+ "-rpath",
+ "/sixth/rpath",
+]
test_args_without_paths = [
- 'arg1',
- '-Wl,--start-group',
- 'arg2', 'arg3', '-llib1', '-llib2', 'arg4',
- '-Wl,--end-group',
- '-llib3', '-llib4', 'arg5', 'arg6',
- '"-DDOUBLE_QUOTED_ARG"', "'-DSINGLE_QUOTED_ARG'",
+ "arg1",
+ "-Wl,--start-group",
+ "arg2",
+ "arg3",
+ "-llib1",
+ "-llib2",
+ "arg4",
+ "-Wl,--end-group",
+ "-llib3",
+ "-llib4",
+ "arg5",
+ "arg6",
+ '"-DDOUBLE_QUOTED_ARG"',
+ "'-DSINGLE_QUOTED_ARG'",
]
#: The prefix of the package being mock installed
-pkg_prefix = '/spack-test-prefix'
+pkg_prefix = "/spack-test-prefix"
# Compilers to use during tests
cc = Executable(os.path.join(build_env_path, "cc"))
@@ -78,70 +116,71 @@ cxx = Executable(os.path.join(build_env_path, "c++"))
fc = Executable(os.path.join(build_env_path, "fc"))
#: the "real" compiler the wrapper is expected to invoke
-real_cc = '/bin/mycc'
+real_cc = "/bin/mycc"
# mock flags to use in the wrapper environment
-spack_cppflags = ['-g', '-O1', '-DVAR=VALUE']
-spack_cflags = ['-Wall']
-spack_cxxflags = ['-Werror']
-spack_fflags = ['-w']
-spack_ldflags = ['-L', 'foo']
-spack_ldlibs = ['-lfoo']
+spack_cppflags = ["-g", "-O1", "-DVAR=VALUE"]
+spack_cflags = ["-Wall"]
+spack_cxxflags = ["-Werror"]
+spack_fflags = ["-w"]
+spack_ldflags = ["-L", "foo"]
+spack_ldlibs = ["-lfoo"]
-lheaderpad = ['-Wl,-headerpad_max_install_names']
-headerpad = ['-headerpad_max_install_names']
+lheaderpad = ["-Wl,-headerpad_max_install_names"]
+headerpad = ["-headerpad_max_install_names"]
target_args = ["-march=znver2", "-mtune=znver2"]
# common compile arguments: includes, libs, -Wl linker args, other args
common_compile_args = (
- test_include_paths +
- test_library_paths +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- test_args_without_paths
+ test_include_paths
+ + test_library_paths
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + test_args_without_paths
)
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def wrapper_environment():
with set_env(
- SPACK_CC=real_cc,
- SPACK_CXX=real_cc,
- SPACK_FC=real_cc,
- SPACK_PREFIX=pkg_prefix,
- SPACK_ENV_PATH='test',
- SPACK_DEBUG_LOG_DIR='.',
- SPACK_DEBUG_LOG_ID='foo-hashabc',
- SPACK_COMPILER_SPEC='gcc@4.4.7',
- SPACK_SHORT_SPEC='foo@1.2 arch=linux-rhel6-x86_64 /hashabc',
- SPACK_SYSTEM_DIRS=':'.join(system_dirs),
- SPACK_CC_RPATH_ARG='-Wl,-rpath,',
- SPACK_CXX_RPATH_ARG='-Wl,-rpath,',
- SPACK_F77_RPATH_ARG='-Wl,-rpath,',
- SPACK_FC_RPATH_ARG='-Wl,-rpath,',
- SPACK_LINK_DIRS=None,
- SPACK_INCLUDE_DIRS=None,
- SPACK_RPATH_DIRS=None,
- SPACK_TARGET_ARGS="-march=znver2 -mtune=znver2",
- SPACK_LINKER_ARG='-Wl,',
- SPACK_DTAGS_TO_ADD='--disable-new-dtags',
- SPACK_DTAGS_TO_STRIP='--enable-new-dtags'):
+ SPACK_CC=real_cc,
+ SPACK_CXX=real_cc,
+ SPACK_FC=real_cc,
+ SPACK_PREFIX=pkg_prefix,
+ SPACK_ENV_PATH="test",
+ SPACK_DEBUG_LOG_DIR=".",
+ SPACK_DEBUG_LOG_ID="foo-hashabc",
+ SPACK_COMPILER_SPEC="gcc@4.4.7",
+ SPACK_SHORT_SPEC="foo@1.2 arch=linux-rhel6-x86_64 /hashabc",
+ SPACK_SYSTEM_DIRS=":".join(system_dirs),
+ SPACK_CC_RPATH_ARG="-Wl,-rpath,",
+ SPACK_CXX_RPATH_ARG="-Wl,-rpath,",
+ SPACK_F77_RPATH_ARG="-Wl,-rpath,",
+ SPACK_FC_RPATH_ARG="-Wl,-rpath,",
+ SPACK_LINK_DIRS=None,
+ SPACK_INCLUDE_DIRS=None,
+ SPACK_RPATH_DIRS=None,
+ SPACK_TARGET_ARGS="-march=znver2 -mtune=znver2",
+ SPACK_LINKER_ARG="-Wl,",
+ SPACK_DTAGS_TO_ADD="--disable-new-dtags",
+ SPACK_DTAGS_TO_STRIP="--enable-new-dtags",
+ ):
yield
@pytest.fixture()
def wrapper_flags():
with set_env(
- SPACK_CPPFLAGS=' '.join(spack_cppflags),
- SPACK_CFLAGS=' '.join(spack_cflags),
- SPACK_CXXFLAGS=' '.join(spack_cxxflags),
- SPACK_FFLAGS=' '.join(spack_fflags),
- SPACK_LDFLAGS=' '.join(spack_ldflags),
- SPACK_LDLIBS=' '.join(spack_ldlibs)):
+ SPACK_CPPFLAGS=" ".join(spack_cppflags),
+ SPACK_CFLAGS=" ".join(spack_cflags),
+ SPACK_CXXFLAGS=" ".join(spack_cxxflags),
+ SPACK_FFLAGS=" ".join(spack_fflags),
+ SPACK_LDFLAGS=" ".join(spack_ldflags),
+ SPACK_LDLIBS=" ".join(spack_ldlibs),
+ ):
yield
@@ -152,8 +191,8 @@ def check_args(cc, args, expected):
per line, so that we see whether arguments that should (or shouldn't)
contain spaces are parsed correctly.
"""
- with set_env(SPACK_TEST_COMMAND='dump-args'):
- cc_modified_args = cc(*args, output=str).strip().split('\n')
+ with set_env(SPACK_TEST_COMMAND="dump-args"):
+ cc_modified_args = cc(*args, output=str).strip().split("\n")
assert expected == cc_modified_args
@@ -163,14 +202,14 @@ def check_env_var(executable, var, expected):
    This assumes that cc will print debug output when its environment
contains SPACK_TEST_COMMAND=dump-env-<variable-to-debug>
"""
- with set_env(SPACK_TEST_COMMAND='dump-env-' + var):
+ with set_env(SPACK_TEST_COMMAND="dump-env-" + var):
output = executable(*test_args, output=str).strip()
- assert executable.path + ': ' + var + ': ' + expected == output
+ assert executable.path + ": " + var + ": " + expected == output
def dump_mode(cc, args):
"""Make cc dump the mode it detects, and return it."""
- with set_env(SPACK_TEST_COMMAND='dump-mode'):
+ with set_env(SPACK_TEST_COMMAND="dump-mode"):
return cc(*args, output=str).strip()
@@ -181,119 +220,125 @@ def test_no_wrapper_environment():
def test_vcheck_mode(wrapper_environment):
- assert dump_mode(cc, ['-I/include', '--version']) == 'vcheck'
- assert dump_mode(cc, ['-I/include', '-V']) == 'vcheck'
- assert dump_mode(cc, ['-I/include', '-v']) == 'vcheck'
- assert dump_mode(cc, ['-I/include', '-dumpversion']) == 'vcheck'
- assert dump_mode(cc, ['-I/include', '--version', '-c']) == 'vcheck'
- assert dump_mode(cc, ['-I/include', '-V', '-o', 'output']) == 'vcheck'
+ assert dump_mode(cc, ["-I/include", "--version"]) == "vcheck"
+ assert dump_mode(cc, ["-I/include", "-V"]) == "vcheck"
+ assert dump_mode(cc, ["-I/include", "-v"]) == "vcheck"
+ assert dump_mode(cc, ["-I/include", "-dumpversion"]) == "vcheck"
+ assert dump_mode(cc, ["-I/include", "--version", "-c"]) == "vcheck"
+ assert dump_mode(cc, ["-I/include", "-V", "-o", "output"]) == "vcheck"
def test_cpp_mode(wrapper_environment):
- assert dump_mode(cc, ['-E']) == 'cpp'
- assert dump_mode(cxx, ['-E']) == 'cpp'
- assert dump_mode(cpp, []) == 'cpp'
+ assert dump_mode(cc, ["-E"]) == "cpp"
+ assert dump_mode(cxx, ["-E"]) == "cpp"
+ assert dump_mode(cpp, []) == "cpp"
def test_as_mode(wrapper_environment):
- assert dump_mode(cc, ['-S']) == 'as'
+ assert dump_mode(cc, ["-S"]) == "as"
def test_ccld_mode(wrapper_environment):
- assert dump_mode(cc, []) == 'ccld'
- assert dump_mode(cc, ['foo.c', '-o', 'foo']) == 'ccld'
- assert dump_mode(cc, ['foo.c', '-o', 'foo', '-Wl,-rpath,foo']) == 'ccld'
- assert dump_mode(cc, [
- 'foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo']) == 'ccld'
+ assert dump_mode(cc, []) == "ccld"
+ assert dump_mode(cc, ["foo.c", "-o", "foo"]) == "ccld"
+ assert dump_mode(cc, ["foo.c", "-o", "foo", "-Wl,-rpath,foo"]) == "ccld"
+ assert dump_mode(cc, ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath,foo"]) == "ccld"
def test_ld_mode(wrapper_environment):
- assert dump_mode(ld, []) == 'ld'
- assert dump_mode(ld, [
- 'foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo']) == 'ld'
+ assert dump_mode(ld, []) == "ld"
+ assert dump_mode(ld, ["foo.o", "bar.o", "baz.o", "-o", "foo", "-Wl,-rpath,foo"]) == "ld"
def test_ld_flags(wrapper_environment, wrapper_flags):
check_args(
- ld, test_args,
- ['ld'] +
- spack_ldflags +
- test_include_paths +
- test_library_paths +
- ['--disable-new-dtags'] +
- test_rpaths +
- test_args_without_paths +
- spack_ldlibs)
+ ld,
+ test_args,
+ ["ld"]
+ + spack_ldflags
+ + test_include_paths
+ + test_library_paths
+ + ["--disable-new-dtags"]
+ + test_rpaths
+ + test_args_without_paths
+ + spack_ldlibs,
+ )
def test_cpp_flags(wrapper_environment, wrapper_flags):
check_args(
- cpp, test_args,
- ['cpp'] +
- spack_cppflags +
- test_include_paths +
- test_library_paths +
- test_args_without_paths)
+ cpp,
+ test_args,
+ ["cpp"]
+ + spack_cppflags
+ + test_include_paths
+ + test_library_paths
+ + test_args_without_paths,
+ )
def test_cc_flags(wrapper_environment, wrapper_flags):
check_args(
- cc, test_args,
- [real_cc] +
- target_args +
- spack_cppflags +
- spack_cflags +
- spack_ldflags +
- common_compile_args +
- spack_ldlibs)
+ cc,
+ test_args,
+ [real_cc]
+ + target_args
+ + spack_cppflags
+ + spack_cflags
+ + spack_ldflags
+ + common_compile_args
+ + spack_ldlibs,
+ )
def test_cxx_flags(wrapper_environment, wrapper_flags):
check_args(
- cxx, test_args,
- [real_cc] +
- target_args +
- spack_cppflags +
- spack_cxxflags +
- spack_ldflags +
- common_compile_args +
- spack_ldlibs)
+ cxx,
+ test_args,
+ [real_cc]
+ + target_args
+ + spack_cppflags
+ + spack_cxxflags
+ + spack_ldflags
+ + common_compile_args
+ + spack_ldlibs,
+ )
def test_fc_flags(wrapper_environment, wrapper_flags):
check_args(
- fc, test_args,
- [real_cc] +
- target_args +
- spack_fflags +
- spack_cppflags +
- spack_ldflags +
- common_compile_args +
- spack_ldlibs)
+ fc,
+ test_args,
+ [real_cc]
+ + target_args
+ + spack_fflags
+ + spack_cppflags
+ + spack_ldflags
+ + common_compile_args
+ + spack_ldlibs,
+ )
def test_dep_rpath(wrapper_environment):
"""Ensure RPATHs for root package are added."""
- check_args(
- cc, test_args,
- [real_cc] +
- target_args +
- common_compile_args)
+ check_args(cc, test_args, [real_cc] + target_args + common_compile_args)
def test_dep_include(wrapper_environment):
"""Ensure a single dependency include directory is added."""
- with set_env(SPACK_INCLUDE_DIRS='x'):
+ with set_env(SPACK_INCLUDE_DIRS="x"):
check_args(
- cc, test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- ['-Ix'] +
- test_library_paths +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- test_args_without_paths)
+ cc,
+ test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + ["-Ix"]
+ + test_library_paths
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + test_args_without_paths,
+ )
def test_system_path_cleanup(wrapper_environment):
@@ -307,378 +352,381 @@ def test_system_path_cleanup(wrapper_environment):
a / to each element of PATH when adding AUTOTEST_PATH.
Thus, ensure that PATH cleanup works even with trailing /.
"""
- system_path = '/bin:/usr/bin:/usr/local/bin'
+ system_path = "/bin:/usr/bin:/usr/local/bin"
cc_dir = os.path.dirname(cc.path)
- with set_env(SPACK_ENV_PATH=cc_dir, SPACK_CC='true'):
- with set_env(PATH=cc_dir + ':' + system_path):
- check_env_var(cc, 'PATH', system_path)
- with set_env(PATH=cc_dir + '/:' + system_path):
- check_env_var(cc, 'PATH', system_path)
+ with set_env(SPACK_ENV_PATH=cc_dir, SPACK_CC="true"):
+ with set_env(PATH=cc_dir + ":" + system_path):
+ check_env_var(cc, "PATH", system_path)
+ with set_env(PATH=cc_dir + "/:" + system_path):
+ check_env_var(cc, "PATH", system_path)
def test_dep_lib(wrapper_environment):
"""Ensure a single dependency RPATH is added."""
- with set_env(SPACK_LINK_DIRS='x',
- SPACK_RPATH_DIRS='x'):
+ with set_env(SPACK_LINK_DIRS="x", SPACK_RPATH_DIRS="x"):
check_args(
- cc, test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- test_library_paths +
- ['-Lx'] +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- ['-Wl,-rpath,x'] +
- test_args_without_paths)
+ cc,
+ test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + test_library_paths
+ + ["-Lx"]
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + ["-Wl,-rpath,x"]
+ + test_args_without_paths,
+ )
def test_dep_lib_no_rpath(wrapper_environment):
"""Ensure a single dependency link flag is added with no dep RPATH."""
- with set_env(SPACK_LINK_DIRS='x'):
+ with set_env(SPACK_LINK_DIRS="x"):
check_args(
- cc, test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- test_library_paths +
- ['-Lx'] +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- test_args_without_paths)
+ cc,
+ test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + test_library_paths
+ + ["-Lx"]
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + test_args_without_paths,
+ )
def test_dep_lib_no_lib(wrapper_environment):
"""Ensure a single dependency RPATH is added with no -L."""
- with set_env(SPACK_RPATH_DIRS='x'):
+ with set_env(SPACK_RPATH_DIRS="x"):
check_args(
- cc, test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- test_library_paths +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- ['-Wl,-rpath,x'] +
- test_args_without_paths)
+ cc,
+ test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + test_library_paths
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + ["-Wl,-rpath,x"]
+ + test_args_without_paths,
+ )
def test_ccld_deps(wrapper_environment):
"""Ensure all flags are added in ccld mode."""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_RPATH_DIRS='xlib:ylib:zlib',
- SPACK_LINK_DIRS='xlib:ylib:zlib'):
+ with set_env(
+ SPACK_INCLUDE_DIRS="xinc:yinc:zinc",
+ SPACK_RPATH_DIRS="xlib:ylib:zlib",
+ SPACK_LINK_DIRS="xlib:ylib:zlib",
+ ):
check_args(
- cc, test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- ['-Ixinc',
- '-Iyinc',
- '-Izinc'] +
- test_library_paths +
- ['-Lxlib',
- '-Lylib',
- '-Lzlib'] +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- ['-Wl,-rpath,xlib',
- '-Wl,-rpath,ylib',
- '-Wl,-rpath,zlib'] +
- test_args_without_paths)
+ cc,
+ test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + ["-Ixinc", "-Iyinc", "-Izinc"]
+ + test_library_paths
+ + ["-Lxlib", "-Lylib", "-Lzlib"]
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + ["-Wl,-rpath,xlib", "-Wl,-rpath,ylib", "-Wl,-rpath,zlib"]
+ + test_args_without_paths,
+ )
def test_ccld_deps_isystem(wrapper_environment):
"""Ensure all flags are added in ccld mode.
- When a build uses -isystem, Spack should inject it's
- include paths using -isystem. Spack will insert these
- after any provided -isystem includes, but before any
- system directories included using -isystem"""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_RPATH_DIRS='xlib:ylib:zlib',
- SPACK_LINK_DIRS='xlib:ylib:zlib'):
- mytest_args = test_args + ['-isystem', 'fooinc']
+        When a build uses -isystem, Spack should inject its
+ include paths using -isystem. Spack will insert these
+ after any provided -isystem includes, but before any
+ system directories included using -isystem"""
+ with set_env(
+ SPACK_INCLUDE_DIRS="xinc:yinc:zinc",
+ SPACK_RPATH_DIRS="xlib:ylib:zlib",
+ SPACK_LINK_DIRS="xlib:ylib:zlib",
+ ):
+ mytest_args = test_args + ["-isystem", "fooinc"]
check_args(
- cc, mytest_args,
- [real_cc] +
- target_args +
- test_include_paths +
- ['-isystem', 'fooinc',
- '-isystem', 'xinc',
- '-isystem', 'yinc',
- '-isystem', 'zinc'] +
- test_library_paths +
- ['-Lxlib',
- '-Lylib',
- '-Lzlib'] +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- ['-Wl,-rpath,xlib',
- '-Wl,-rpath,ylib',
- '-Wl,-rpath,zlib'] +
- test_args_without_paths)
+ cc,
+ mytest_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + ["-isystem", "fooinc", "-isystem", "xinc", "-isystem", "yinc", "-isystem", "zinc"]
+ + test_library_paths
+ + ["-Lxlib", "-Lylib", "-Lzlib"]
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + ["-Wl,-rpath,xlib", "-Wl,-rpath,ylib", "-Wl,-rpath,zlib"]
+ + test_args_without_paths,
+ )
def test_cc_deps(wrapper_environment):
"""Ensure -L and RPATHs are not added in cc mode."""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_RPATH_DIRS='xlib:ylib:zlib',
- SPACK_LINK_DIRS='xlib:ylib:zlib'):
+ with set_env(
+ SPACK_INCLUDE_DIRS="xinc:yinc:zinc",
+ SPACK_RPATH_DIRS="xlib:ylib:zlib",
+ SPACK_LINK_DIRS="xlib:ylib:zlib",
+ ):
check_args(
- cc, ['-c'] + test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- ['-Ixinc',
- '-Iyinc',
- '-Izinc'] +
- test_library_paths +
- ['-c'] +
- test_args_without_paths)
+ cc,
+ ["-c"] + test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + ["-Ixinc", "-Iyinc", "-Izinc"]
+ + test_library_paths
+ + ["-c"]
+ + test_args_without_paths,
+ )
def test_ccld_with_system_dirs(wrapper_environment):
"""Ensure all flags are added in ccld mode."""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_RPATH_DIRS='xlib:ylib:zlib',
- SPACK_LINK_DIRS='xlib:ylib:zlib'):
-
- sys_path_args = ['-I/usr/include',
- '-L/usr/local/lib',
- '-Wl,-rpath,/usr/lib64',
- '-I/usr/local/include',
- '-L/lib64/']
+ with set_env(
+ SPACK_INCLUDE_DIRS="xinc:yinc:zinc",
+ SPACK_RPATH_DIRS="xlib:ylib:zlib",
+ SPACK_LINK_DIRS="xlib:ylib:zlib",
+ ):
+
+ sys_path_args = [
+ "-I/usr/include",
+ "-L/usr/local/lib",
+ "-Wl,-rpath,/usr/lib64",
+ "-I/usr/local/include",
+ "-L/lib64/",
+ ]
check_args(
- cc, sys_path_args + test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- ['-Ixinc',
- '-Iyinc',
- '-Izinc'] +
- ['-I/usr/include',
- '-I/usr/local/include'] +
- test_library_paths +
- ['-Lxlib',
- '-Lylib',
- '-Lzlib'] +
- ['-L/usr/local/lib',
- '-L/lib64/'] +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- ['-Wl,-rpath,xlib',
- '-Wl,-rpath,ylib',
- '-Wl,-rpath,zlib'] +
- ['-Wl,-rpath,/usr/lib64'] +
- test_args_without_paths)
+ cc,
+ sys_path_args + test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + ["-Ixinc", "-Iyinc", "-Izinc"]
+ + ["-I/usr/include", "-I/usr/local/include"]
+ + test_library_paths
+ + ["-Lxlib", "-Lylib", "-Lzlib"]
+ + ["-L/usr/local/lib", "-L/lib64/"]
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + ["-Wl,-rpath,xlib", "-Wl,-rpath,ylib", "-Wl,-rpath,zlib"]
+ + ["-Wl,-rpath,/usr/lib64"]
+ + test_args_without_paths,
+ )
def test_ccld_with_system_dirs_isystem(wrapper_environment):
"""Ensure all flags are added in ccld mode.
- Ensure that includes are in the proper
- place when a build uses -isystem, and uses
- system directories in the include paths"""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_RPATH_DIRS='xlib:ylib:zlib',
- SPACK_LINK_DIRS='xlib:ylib:zlib'):
-
- sys_path_args = ['-isystem', '/usr/include',
- '-L/usr/local/lib',
- '-Wl,-rpath,/usr/lib64',
- '-isystem', '/usr/local/include',
- '-L/lib64/']
+ Ensure that includes are in the proper
+ place when a build uses -isystem, and uses
+ system directories in the include paths"""
+ with set_env(
+ SPACK_INCLUDE_DIRS="xinc:yinc:zinc",
+ SPACK_RPATH_DIRS="xlib:ylib:zlib",
+ SPACK_LINK_DIRS="xlib:ylib:zlib",
+ ):
+
+ sys_path_args = [
+ "-isystem",
+ "/usr/include",
+ "-L/usr/local/lib",
+ "-Wl,-rpath,/usr/lib64",
+ "-isystem",
+ "/usr/local/include",
+ "-L/lib64/",
+ ]
check_args(
- cc, sys_path_args + test_args,
- [real_cc] +
- target_args +
- test_include_paths +
- ['-isystem', 'xinc',
- '-isystem', 'yinc',
- '-isystem', 'zinc'] +
- ['-isystem', '/usr/include',
- '-isystem', '/usr/local/include'] +
- test_library_paths +
- ['-Lxlib',
- '-Lylib',
- '-Lzlib'] +
- ['-L/usr/local/lib',
- '-L/lib64/'] +
- ['-Wl,--disable-new-dtags'] +
- test_wl_rpaths +
- ['-Wl,-rpath,xlib',
- '-Wl,-rpath,ylib',
- '-Wl,-rpath,zlib'] +
- ['-Wl,-rpath,/usr/lib64'] +
- test_args_without_paths)
+ cc,
+ sys_path_args + test_args,
+ [real_cc]
+ + target_args
+ + test_include_paths
+ + ["-isystem", "xinc", "-isystem", "yinc", "-isystem", "zinc"]
+ + ["-isystem", "/usr/include", "-isystem", "/usr/local/include"]
+ + test_library_paths
+ + ["-Lxlib", "-Lylib", "-Lzlib"]
+ + ["-L/usr/local/lib", "-L/lib64/"]
+ + ["-Wl,--disable-new-dtags"]
+ + test_wl_rpaths
+ + ["-Wl,-rpath,xlib", "-Wl,-rpath,ylib", "-Wl,-rpath,zlib"]
+ + ["-Wl,-rpath,/usr/lib64"]
+ + test_args_without_paths,
+ )
def test_ld_deps(wrapper_environment):
"""Ensure no (extra) -I args or -Wl, are passed in ld mode."""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_RPATH_DIRS='xlib:ylib:zlib',
- SPACK_LINK_DIRS='xlib:ylib:zlib'):
+ with set_env(
+ SPACK_INCLUDE_DIRS="xinc:yinc:zinc",
+ SPACK_RPATH_DIRS="xlib:ylib:zlib",
+ SPACK_LINK_DIRS="xlib:ylib:zlib",
+ ):
check_args(
- ld, test_args,
- ['ld'] +
- test_include_paths +
- test_library_paths +
- ['-Lxlib',
- '-Lylib',
- '-Lzlib'] +
- ['--disable-new-dtags'] +
- test_rpaths +
- ['-rpath', 'xlib',
- '-rpath', 'ylib',
- '-rpath', 'zlib'] +
- test_args_without_paths)
+ ld,
+ test_args,
+ ["ld"]
+ + test_include_paths
+ + test_library_paths
+ + ["-Lxlib", "-Lylib", "-Lzlib"]
+ + ["--disable-new-dtags"]
+ + test_rpaths
+ + ["-rpath", "xlib", "-rpath", "ylib", "-rpath", "zlib"]
+ + test_args_without_paths,
+ )
def test_ld_deps_no_rpath(wrapper_environment):
"""Ensure SPACK_LINK_DEPS controls -L for ld."""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_LINK_DIRS='xlib:ylib:zlib'):
+ with set_env(SPACK_INCLUDE_DIRS="xinc:yinc:zinc", SPACK_LINK_DIRS="xlib:ylib:zlib"):
check_args(
- ld, test_args,
- ['ld'] +
- test_include_paths +
- test_library_paths +
- ['-Lxlib',
- '-Lylib',
- '-Lzlib'] +
- ['--disable-new-dtags'] +
- test_rpaths +
- test_args_without_paths)
+ ld,
+ test_args,
+ ["ld"]
+ + test_include_paths
+ + test_library_paths
+ + ["-Lxlib", "-Lylib", "-Lzlib"]
+ + ["--disable-new-dtags"]
+ + test_rpaths
+ + test_args_without_paths,
+ )
def test_ld_deps_no_link(wrapper_environment):
"""Ensure SPACK_RPATH_DEPS controls -rpath for ld."""
- with set_env(SPACK_INCLUDE_DIRS='xinc:yinc:zinc',
- SPACK_RPATH_DIRS='xlib:ylib:zlib'):
+ with set_env(SPACK_INCLUDE_DIRS="xinc:yinc:zinc", SPACK_RPATH_DIRS="xlib:ylib:zlib"):
check_args(
- ld, test_args,
- ['ld'] +
- test_include_paths +
- test_library_paths +
- ['--disable-new-dtags'] +
- test_rpaths +
- ['-rpath', 'xlib',
- '-rpath', 'ylib',
- '-rpath', 'zlib'] +
- test_args_without_paths)
+ ld,
+ test_args,
+ ["ld"]
+ + test_include_paths
+ + test_library_paths
+ + ["--disable-new-dtags"]
+ + test_rpaths
+ + ["-rpath", "xlib", "-rpath", "ylib", "-rpath", "zlib"]
+ + test_args_without_paths,
+ )
def test_ld_deps_partial(wrapper_environment):
"""Make sure ld -r (partial link) is handled correctly on OS's where it
- doesn't accept rpaths.
+ doesn't accept rpaths.
"""
- with set_env(SPACK_INCLUDE_DIRS='xinc',
- SPACK_RPATH_DIRS='xlib',
- SPACK_LINK_DIRS='xlib'):
+ with set_env(SPACK_INCLUDE_DIRS="xinc", SPACK_RPATH_DIRS="xlib", SPACK_LINK_DIRS="xlib"):
# TODO: do we need to add RPATHs on other platforms like Linux?
# TODO: Can't we treat them the same?
- os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
+ os.environ["SPACK_SHORT_SPEC"] = "foo@1.2=linux-x86_64"
check_args(
- ld, ['-r'] + test_args,
- ['ld'] +
- test_include_paths +
- test_library_paths +
- ['-Lxlib'] +
- ['--disable-new-dtags'] +
- test_rpaths +
- ['-rpath', 'xlib'] +
- ['-r'] +
- test_args_without_paths)
+ ld,
+ ["-r"] + test_args,
+ ["ld"]
+ + test_include_paths
+ + test_library_paths
+ + ["-Lxlib"]
+ + ["--disable-new-dtags"]
+ + test_rpaths
+ + ["-rpath", "xlib"]
+ + ["-r"]
+ + test_args_without_paths,
+ )
# rpaths from the underlying command will still appear
# Spack will not add its own rpaths.
- os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=darwin-x86_64"
+ os.environ["SPACK_SHORT_SPEC"] = "foo@1.2=darwin-x86_64"
check_args(
- ld, ['-r'] + test_args,
- ['ld'] +
- headerpad +
- test_include_paths +
- test_library_paths +
- ['-Lxlib'] +
- ['--disable-new-dtags'] +
- test_rpaths +
- ['-r'] +
- test_args_without_paths)
+ ld,
+ ["-r"] + test_args,
+ ["ld"]
+ + headerpad
+ + test_include_paths
+ + test_library_paths
+ + ["-Lxlib"]
+ + ["--disable-new-dtags"]
+ + test_rpaths
+ + ["-r"]
+ + test_args_without_paths,
+ )
def test_ccache_prepend_for_cc(wrapper_environment):
- with set_env(SPACK_CCACHE_BINARY='ccache'):
- os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
+ with set_env(SPACK_CCACHE_BINARY="ccache"):
+ os.environ["SPACK_SHORT_SPEC"] = "foo@1.2=linux-x86_64"
check_args(
- cc, test_args,
- ['ccache'] + # ccache prepended in cc mode
- [real_cc] +
- target_args +
- common_compile_args)
- os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=darwin-x86_64"
+ cc,
+ test_args,
+ ["ccache"]
+ + [real_cc] # ccache prepended in cc mode
+ + target_args
+ + common_compile_args,
+ )
+ os.environ["SPACK_SHORT_SPEC"] = "foo@1.2=darwin-x86_64"
check_args(
- cc, test_args,
- ['ccache'] + # ccache prepended in cc mode
- [real_cc] +
- target_args +
- lheaderpad +
- common_compile_args)
+ cc,
+ test_args,
+ ["ccache"]
+ + [real_cc] # ccache prepended in cc mode
+ + target_args
+ + lheaderpad
+ + common_compile_args,
+ )
def test_no_ccache_prepend_for_fc(wrapper_environment):
- os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
+ os.environ["SPACK_SHORT_SPEC"] = "foo@1.2=linux-x86_64"
check_args(
- fc, test_args,
+ fc,
+ test_args,
# no ccache for Fortran
- [real_cc] +
- target_args +
- common_compile_args)
- os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=darwin-x86_64"
+ [real_cc] + target_args + common_compile_args,
+ )
+ os.environ["SPACK_SHORT_SPEC"] = "foo@1.2=darwin-x86_64"
check_args(
- fc, test_args,
+ fc,
+ test_args,
# no ccache for Fortran
- [real_cc] +
- target_args +
- lheaderpad +
- common_compile_args)
+ [real_cc] + target_args + lheaderpad + common_compile_args,
+ )
-@pytest.mark.regression('9160')
+@pytest.mark.regression("9160")
def test_disable_new_dtags(wrapper_environment, wrapper_flags):
- with set_env(SPACK_TEST_COMMAND='dump-args'):
- result = ld(*test_args, output=str).strip().split('\n')
- assert '--disable-new-dtags' in result
- result = cc(*test_args, output=str).strip().split('\n')
- assert '-Wl,--disable-new-dtags' in result
+ with set_env(SPACK_TEST_COMMAND="dump-args"):
+ result = ld(*test_args, output=str).strip().split("\n")
+ assert "--disable-new-dtags" in result
+ result = cc(*test_args, output=str).strip().split("\n")
+ assert "-Wl,--disable-new-dtags" in result
-@pytest.mark.regression('9160')
+@pytest.mark.regression("9160")
def test_filter_enable_new_dtags(wrapper_environment, wrapper_flags):
- with set_env(SPACK_TEST_COMMAND='dump-args'):
- result = ld(*(test_args + ['--enable-new-dtags']), output=str)
- result = result.strip().split('\n')
- assert '--enable-new-dtags' not in result
+ with set_env(SPACK_TEST_COMMAND="dump-args"):
+ result = ld(*(test_args + ["--enable-new-dtags"]), output=str)
+ result = result.strip().split("\n")
+ assert "--enable-new-dtags" not in result
- result = cc(*(test_args + ['-Wl,--enable-new-dtags']), output=str)
- result = result.strip().split('\n')
- assert '-Wl,--enable-new-dtags' not in result
+ result = cc(*(test_args + ["-Wl,--enable-new-dtags"]), output=str)
+ result = result.strip().split("\n")
+ assert "-Wl,--enable-new-dtags" not in result
-@pytest.mark.regression('22643')
+@pytest.mark.regression("22643")
def test_linker_strips_loopopt(wrapper_environment, wrapper_flags):
- with set_env(SPACK_TEST_COMMAND='dump-args'):
+ with set_env(SPACK_TEST_COMMAND="dump-args"):
# ensure that -loopopt=0 is not present in ld mode
result = ld(*(test_args + ["-loopopt=0"]), output=str)
- result = result.strip().split('\n')
- assert '-loopopt=0' not in result
+ result = result.strip().split("\n")
+ assert "-loopopt=0" not in result
# ensure that -loopopt=0 is not present in ccld mode
result = cc(*(test_args + ["-loopopt=0"]), output=str)
- result = result.strip().split('\n')
- assert '-loopopt=0' not in result
+ result = result.strip().split("\n")
+ assert "-loopopt=0" not in result
# ensure that -loopopt=0 *is* present in cc mode
# The "-c" argument is needed for cc to be detected
# as compile only (cc) mode.
result = cc(*(test_args + ["-loopopt=0", "-c", "x.c"]), output=str)
- result = result.strip().split('\n')
- assert '-loopopt=0' in result
+ result = result.strip().split("\n")
+ assert "-loopopt=0" in result
diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py
index 3279fd4125..0964a1ba1a 100644
--- a/lib/spack/spack/test/ci.py
+++ b/lib/spack/spack/test/ci.py
@@ -26,15 +26,13 @@ import spack.util.spack_yaml as syaml
@pytest.fixture
def tmp_scope():
"""Creates a temporary configuration scope"""
- base_name = 'internal-testing-scope'
- current_overrides = set(
- x.name for x in
- cfg.config.matching_scopes(r'^{0}'.format(base_name)))
+ base_name = "internal-testing-scope"
+ current_overrides = set(x.name for x in cfg.config.matching_scopes(r"^{0}".format(base_name)))
num_overrides = 0
scope_name = base_name
while scope_name in current_overrides:
- scope_name = '{0}{1}'.format(base_name, num_overrides)
+ scope_name = "{0}{1}".format(base_name, num_overrides)
num_overrides += 1
with cfg.override(cfg.InternalConfigScope(scope_name)):
@@ -42,18 +40,17 @@ def tmp_scope():
def test_urlencode_string():
- s = 'Spack Test Project'
+ s = "Spack Test Project"
s_enc = ci._url_encode_string(s)
- assert(s_enc == 'Spack+Test+Project')
+ assert s_enc == "Spack+Test+Project"
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_import_signing_key(mock_gnupghome):
signing_key_dir = spack_paths.mock_gpg_keys_path
- signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
+ signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
with open(signing_key_path) as fd:
signing_key = fd.read()
@@ -62,33 +59,34 @@ def test_import_signing_key(mock_gnupghome):
def test_configure_compilers(mutable_config):
-
def assert_missing(config):
- assert('install_missing_compilers' not in config or
- config['install_missing_compilers'] is False)
+ assert (
+ "install_missing_compilers" not in config
+ or config["install_missing_compilers"] is False
+ )
def assert_present(config):
- assert('install_missing_compilers' in config and
- config['install_missing_compilers'] is True)
+ assert (
+ "install_missing_compilers" in config and config["install_missing_compilers"] is True
+ )
- original_config = cfg.get('config')
+ original_config = cfg.get("config")
assert_missing(original_config)
- ci.configure_compilers('FIND_ANY', scope='site')
+ ci.configure_compilers("FIND_ANY", scope="site")
- second_config = cfg.get('config')
+ second_config = cfg.get("config")
assert_missing(second_config)
- ci.configure_compilers('INSTALL_MISSING')
- last_config = cfg.get('config')
+ ci.configure_compilers("INSTALL_MISSING")
+ last_config = cfg.get("config")
assert_present(last_config)
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages):
- e = ev.create('test1')
- e.add('dyninst')
+ e = ev.create("test1")
+ e.add("dyninst")
e.concretize()
dyninst_hash = None
@@ -97,25 +95,23 @@ def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages):
with e as active_env:
for s in active_env.all_specs():
hash_dict[s.name] = s.dag_hash()
- if s.name == 'dyninst':
+ if s.name == "dyninst":
dyninst_hash = s.dag_hash()
- assert(dyninst_hash)
+ assert dyninst_hash
- spec_map = ci.get_concrete_specs(
- active_env, dyninst_hash, 'dyninst', 'NONE')
- assert 'root' in spec_map
+ spec_map = ci.get_concrete_specs(active_env, dyninst_hash, "dyninst", "NONE")
+ assert "root" in spec_map
- concrete_root = spec_map['root']
- assert(concrete_root.dag_hash() == dyninst_hash)
+ concrete_root = spec_map["root"]
+ assert concrete_root.dag_hash() == dyninst_hash
- s = spec.Spec('dyninst')
- print('nonconc spec name: {0}'.format(s.name))
+ s = spec.Spec("dyninst")
+ print("nonconc spec name: {0}".format(s.name))
- spec_map = ci.get_concrete_specs(
- active_env, s.name, s.name, 'FIND_ANY')
+ spec_map = ci.get_concrete_specs(active_env, s.name, s.name, "FIND_ANY")
- assert 'root' in spec_map
+ assert "root" in spec_map
class FakeWebResponder(object):
@@ -150,58 +146,60 @@ class FakeWebResponder(object):
def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
- os.environ.update({
- 'GITLAB_PRIVATE_TOKEN': 'faketoken',
- })
+ os.environ.update(
+ {
+ "GITLAB_PRIVATE_TOKEN": "faketoken",
+ }
+ )
- url = 'https://www.nosuchurlexists.itsfake/artifacts.zip'
- working_dir = os.path.join(tmpdir.strpath, 'repro')
+ url = "https://www.nosuchurlexists.itsfake/artifacts.zip"
+ working_dir = os.path.join(tmpdir.strpath, "repro")
test_artifacts_path = os.path.join(
- spack_paths.test_path, 'data', 'ci', 'gitlab', 'artifacts.zip')
+ spack_paths.test_path, "data", "ci", "gitlab", "artifacts.zip"
+ )
- with open(test_artifacts_path, 'rb') as fd:
+ with open(test_artifacts_path, "rb") as fd:
fake_responder = FakeWebResponder(content_to_read=[fd.read()])
- monkeypatch.setattr(ci, 'build_opener', lambda handler: fake_responder)
+ monkeypatch.setattr(ci, "build_opener", lambda handler: fake_responder)
ci.download_and_extract_artifacts(url, working_dir)
- found_zip = fs.find(working_dir, 'artifacts.zip')
- assert(len(found_zip) == 0)
+ found_zip = fs.find(working_dir, "artifacts.zip")
+ assert len(found_zip) == 0
- found_install = fs.find(working_dir, 'install.sh')
- assert(len(found_install) == 1)
+ found_install = fs.find(working_dir, "install.sh")
+ assert len(found_install) == 1
fake_responder._resp_code = 400
with pytest.raises(spack.error.SpackError):
ci.download_and_extract_artifacts(url, working_dir)
-def test_setup_spack_repro_version(tmpdir, capfd, last_two_git_commits,
- monkeypatch):
+def test_setup_spack_repro_version(tmpdir, capfd, last_two_git_commits, monkeypatch):
c1, c2 = last_two_git_commits
- repro_dir = os.path.join(tmpdir.strpath, 'repro')
- spack_dir = os.path.join(repro_dir, 'spack')
+ repro_dir = os.path.join(tmpdir.strpath, "repro")
+ spack_dir = os.path.join(repro_dir, "spack")
os.makedirs(spack_dir)
prefix_save = spack.paths.prefix
- monkeypatch.setattr(spack.paths, 'prefix', '/garbage')
+ monkeypatch.setattr(spack.paths, "prefix", "/garbage")
ret = ci.setup_spack_repro_version(repro_dir, c2, c1)
out, err = capfd.readouterr()
- assert(not ret)
- assert('Unable to find the path' in err)
+ assert not ret
+ assert "Unable to find the path" in err
- monkeypatch.setattr(spack.paths, 'prefix', prefix_save)
+ monkeypatch.setattr(spack.paths, "prefix", prefix_save)
- monkeypatch.setattr(spack.util.executable, 'which', lambda cmd: None)
+ monkeypatch.setattr(spack.util.executable, "which", lambda cmd: None)
ret = ci.setup_spack_repro_version(repro_dir, c2, c1)
out, err = capfd.readouterr()
- assert(not ret)
- assert('requires git' in err)
+ assert not ret
+ assert "requires git" in err
class mock_git_cmd(object):
def __init__(self, *args, **kwargs):
@@ -216,42 +214,42 @@ def test_setup_spack_repro_version(tmpdir, capfd, last_two_git_commits,
git_cmd = mock_git_cmd()
- monkeypatch.setattr(spack.util.executable, 'which', lambda cmd: git_cmd)
+ monkeypatch.setattr(spack.util.executable, "which", lambda cmd: git_cmd)
git_cmd.check = lambda *a, **k: 1 if len(a) > 2 and a[2] == c2 else 0
ret = ci.setup_spack_repro_version(repro_dir, c2, c1)
out, err = capfd.readouterr()
- assert(not ret)
- assert('Missing commit: {0}'.format(c2) in err)
+ assert not ret
+ assert "Missing commit: {0}".format(c2) in err
git_cmd.check = lambda *a, **k: 1 if len(a) > 2 and a[2] == c1 else 0
ret = ci.setup_spack_repro_version(repro_dir, c2, c1)
out, err = capfd.readouterr()
- assert(not ret)
- assert('Missing commit: {0}'.format(c1) in err)
+ assert not ret
+ assert "Missing commit: {0}".format(c1) in err
- git_cmd.check = lambda *a, **k: 1 if a[0] == 'clone' else 0
+ git_cmd.check = lambda *a, **k: 1 if a[0] == "clone" else 0
ret = ci.setup_spack_repro_version(repro_dir, c2, c1)
out, err = capfd.readouterr()
- assert(not ret)
- assert('Unable to clone' in err)
+ assert not ret
+ assert "Unable to clone" in err
- git_cmd.check = lambda *a, **k: 1 if a[0] == 'checkout' else 0
+ git_cmd.check = lambda *a, **k: 1 if a[0] == "checkout" else 0
ret = ci.setup_spack_repro_version(repro_dir, c2, c1)
out, err = capfd.readouterr()
- assert(not ret)
- assert('Unable to checkout' in err)
+ assert not ret
+ assert "Unable to checkout" in err
- git_cmd.check = lambda *a, **k: 1 if 'merge' in a else 0
+ git_cmd.check = lambda *a, **k: 1 if "merge" in a else 0
ret = ci.setup_spack_repro_version(repro_dir, c2, c1)
out, err = capfd.readouterr()
- assert(not ret)
- assert('Unable to merge {0}'.format(c1) in err)
+ assert not ret
+ assert "Unable to merge {0}".format(c1) in err
@pytest.mark.parametrize(
@@ -269,7 +267,7 @@ def test_ci_opt_argument_checking(obj, proto):
@pytest.mark.parametrize(
"yaml",
[
- {'extends': 1},
+ {"extends": 1},
],
)
def test_ci_opt_add_extends_non_sequence(yaml):
@@ -280,112 +278,114 @@ def test_ci_opt_add_extends_non_sequence(yaml):
def test_ci_workarounds():
- fake_root_spec = 'x' * 544
- fake_spack_ref = 'x' * 40
+ fake_root_spec = "x" * 544
+ fake_spack_ref = "x" * 40
common_variables = {
- 'SPACK_COMPILER_ACTION': 'NONE',
- 'SPACK_IS_PR_PIPELINE': 'False',
+ "SPACK_COMPILER_ACTION": "NONE",
+ "SPACK_IS_PR_PIPELINE": "False",
}
common_before_script = [
'git clone "https://github.com/spack/spack"',
- ' && '.join((
- 'pushd ./spack',
- 'git checkout "{ref}"'.format(ref=fake_spack_ref),
- 'popd')),
- '. "./spack/share/spack/setup-env.sh"'
+ " && ".join(("pushd ./spack", 'git checkout "{ref}"'.format(ref=fake_spack_ref), "popd")),
+ '. "./spack/share/spack/setup-env.sh"',
]
- def make_build_job(name, deps, stage, use_artifact_buildcache, optimize,
- use_dependencies):
+ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize, use_dependencies):
variables = common_variables.copy()
- variables['SPACK_JOB_SPEC_PKG_NAME'] = name
+ variables["SPACK_JOB_SPEC_PKG_NAME"] = name
result = {
- 'stage': stage,
- 'tags': ['tag-0', 'tag-1'],
- 'artifacts': {
- 'paths': [
- 'jobs_scratch_dir',
- 'cdash_report',
- name + '.spec.json',
- name
- ],
- 'when': 'always'
+ "stage": stage,
+ "tags": ["tag-0", "tag-1"],
+ "artifacts": {
+ "paths": ["jobs_scratch_dir", "cdash_report", name + ".spec.json", name],
+ "when": "always",
},
- 'retry': {'max': 2, 'when': ['always']},
- 'after_script': ['rm -rf "./spack"'],
- 'script': ['spack ci rebuild'],
- 'image': {'name': 'spack/centos7', 'entrypoint': ['']}
+ "retry": {"max": 2, "when": ["always"]},
+ "after_script": ['rm -rf "./spack"'],
+ "script": ["spack ci rebuild"],
+ "image": {"name": "spack/centos7", "entrypoint": [""]},
}
if optimize:
- result['extends'] = ['.c0', '.c1']
+ result["extends"] = [".c0", ".c1"]
else:
- variables['SPACK_ROOT_SPEC'] = fake_root_spec
- result['before_script'] = common_before_script
+ variables["SPACK_ROOT_SPEC"] = fake_root_spec
+ result["before_script"] = common_before_script
- result['variables'] = variables
+ result["variables"] = variables
if use_dependencies:
- result['dependencies'] = (
- list(deps) if use_artifact_buildcache
- else [])
+ result["dependencies"] = list(deps) if use_artifact_buildcache else []
else:
- result['needs'] = [
- {'job': dep, 'artifacts': use_artifact_buildcache}
- for dep in deps]
+ result["needs"] = [{"job": dep, "artifacts": use_artifact_buildcache} for dep in deps]
return {name: result}
- def make_rebuild_index_job(
- use_artifact_buildcache, optimize, use_dependencies):
+ def make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies):
result = {
- 'stage': 'stage-rebuild-index',
- 'script': 'spack buildcache update-index -d s3://mirror',
- 'tags': ['tag-0', 'tag-1'],
- 'image': {'name': 'spack/centos7', 'entrypoint': ['']},
- 'after_script': ['rm -rf "./spack"'],
+ "stage": "stage-rebuild-index",
+ "script": "spack buildcache update-index -d s3://mirror",
+ "tags": ["tag-0", "tag-1"],
+ "image": {"name": "spack/centos7", "entrypoint": [""]},
+ "after_script": ['rm -rf "./spack"'],
}
if optimize:
- result['extends'] = '.c0'
+ result["extends"] = ".c0"
else:
- result['before_script'] = common_before_script
+ result["before_script"] = common_before_script
- return {'rebuild-index': result}
+ return {"rebuild-index": result}
def make_factored_jobs(optimize):
- return {
- '.c0': {'before_script': common_before_script},
- '.c1': {'variables': {'SPACK_ROOT_SPEC': fake_root_spec}}
- } if optimize else {}
+ return (
+ {
+ ".c0": {"before_script": common_before_script},
+ ".c1": {"variables": {"SPACK_ROOT_SPEC": fake_root_spec}},
+ }
+ if optimize
+ else {}
+ )
def make_stage_list(num_build_stages):
return {
- 'stages': (
- ['-'.join(('stage', str(i))) for i in range(num_build_stages)]
- + ['stage-rebuild-index'])}
+ "stages": (
+ ["-".join(("stage", str(i))) for i in range(num_build_stages)]
+ + ["stage-rebuild-index"]
+ )
+ }
def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
result = {}
- result.update(make_build_job(
- 'pkg-a', [], 'stage-0', use_artifact_buildcache, optimize,
- use_dependencies))
-
- result.update(make_build_job(
- 'pkg-b', ['pkg-a'], 'stage-1', use_artifact_buildcache, optimize,
- use_dependencies))
-
- result.update(make_build_job(
- 'pkg-c', ['pkg-a', 'pkg-b'], 'stage-2', use_artifact_buildcache,
- optimize, use_dependencies))
-
- result.update(make_rebuild_index_job(
- use_artifact_buildcache, optimize, use_dependencies))
+ result.update(
+ make_build_job(
+ "pkg-a", [], "stage-0", use_artifact_buildcache, optimize, use_dependencies
+ )
+ )
+
+ result.update(
+ make_build_job(
+ "pkg-b", ["pkg-a"], "stage-1", use_artifact_buildcache, optimize, use_dependencies
+ )
+ )
+
+ result.update(
+ make_build_job(
+ "pkg-c",
+ ["pkg-a", "pkg-b"],
+ "stage-2",
+ use_artifact_buildcache,
+ optimize,
+ use_dependencies,
+ )
+ )
+
+ result.update(make_rebuild_index_job(use_artifact_buildcache, optimize, use_dependencies))
result.update(make_factored_jobs(optimize))
@@ -399,9 +399,8 @@ def test_ci_workarounds():
# convert needs to dependencies: true or false
for use_ab in (False, True):
original = make_yaml_obj(
- use_artifact_buildcache=use_ab,
- optimize=False,
- use_dependencies=False)
+ use_artifact_buildcache=use_ab, optimize=False, use_dependencies=False
+ )
for opt, deps in it.product(*(((False, True),) * 2)):
# neither optimizing nor converting needs->dependencies
@@ -410,9 +409,8 @@ def test_ci_workarounds():
continue
predicted = make_yaml_obj(
- use_artifact_buildcache=use_ab,
- optimize=opt,
- use_dependencies=deps)
+ use_artifact_buildcache=use_ab, optimize=opt, use_dependencies=deps
+ )
actual = original.copy()
if opt:
@@ -420,20 +418,18 @@ def test_ci_workarounds():
if deps:
actual = cinw.needs_to_dependencies(actual)
- predicted = syaml.dump_config(
- ci_opt.sort_yaml_obj(predicted), default_flow_style=True)
- actual = syaml.dump_config(
- ci_opt.sort_yaml_obj(actual), default_flow_style=True)
+ predicted = syaml.dump_config(ci_opt.sort_yaml_obj(predicted), default_flow_style=True)
+ actual = syaml.dump_config(ci_opt.sort_yaml_obj(actual), default_flow_style=True)
- assert(predicted == actual)
+ assert predicted == actual
def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
"""Test that given an active environment and list of touched pkgs,
- we get the right list of possibly-changed env specs"""
- e1 = ev.create('test')
- e1.add('mpileaks')
- e1.add('hypre')
+ we get the right list of possibly-changed env specs"""
+ e1 = ev.create("test")
+ e1.add("mpileaks")
+ e1.add("hypre")
e1.concretize()
"""
@@ -447,7 +443,7 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
hypre -> openblas-with-lapack (provides lapack and blas virtual deps of hypre)
"""
- touched = ['libdwarf']
+ touched = ["libdwarf"]
# traversing both directions from libdwarf in the graphs depicted
# above results in the following possibly affected env specs:
@@ -456,22 +452,18 @@ def test_get_spec_filter_list(mutable_mock_env_path, config, mutable_mock_repo):
affected_specs = ci.get_spec_filter_list(e1, touched)
affected_pkg_names = set([s.name for s in affected_specs])
- expected_affected_pkg_names = set(['mpileaks',
- 'callpath',
- 'dyninst',
- 'libdwarf',
- 'libelf'])
+ expected_affected_pkg_names = set(["mpileaks", "callpath", "dyninst", "libdwarf", "libelf"])
assert affected_pkg_names == expected_affected_pkg_names
-@pytest.mark.regression('29947')
+@pytest.mark.regression("29947")
def test_affected_specs_on_first_concretization(mutable_mock_env_path, config):
- e = ev.create('first_concretization')
- e.add('hdf5~mpi~szip')
- e.add('hdf5~mpi+szip')
+ e = ev.create("first_concretization")
+ e.add("hdf5~mpi~szip")
+ e.add("hdf5~mpi+szip")
e.concretize()
- affected_specs = spack.ci.get_spec_filter_list(e, ['zlib'])
- hdf5_specs = [s for s in affected_specs if s.name == 'hdf5']
+ affected_specs = spack.ci.get_spec_filter_list(e, ["zlib"])
+ hdf5_specs = [s for s in affected_specs if s.name == "hdf5"]
assert len(hdf5_specs) == 2
diff --git a/lib/spack/spack/test/cmd/activate.py b/lib/spack/spack/test/cmd/activate.py
index edb126be83..9ba35cc988 100644
--- a/lib/spack/spack/test/cmd/activate.py
+++ b/lib/spack/spack/test/cmd/activate.py
@@ -8,41 +8,34 @@ import pytest
from spack.main import SpackCommand
-activate = SpackCommand('activate')
-deactivate = SpackCommand('deactivate')
-install = SpackCommand('install')
-extensions = SpackCommand('extensions')
-
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
-
-
-def test_activate(
- mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extension1')
- activate('extension1')
- output = extensions('--show', 'activated', 'extendee')
- assert 'extension1' in output
-
-
-def test_deactivate(
- mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extension1')
- activate('extension1')
- deactivate('extension1')
- output = extensions('--show', 'activated', 'extendee')
- assert 'extension1' not in output
-
-
-def test_deactivate_all(
- mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extension1')
- install('extension2')
- activate('extension1')
- activate('extension2')
- deactivate('--all', 'extendee')
- output = extensions('--show', 'activated', 'extendee')
- assert 'extension1' not in output
+activate = SpackCommand("activate")
+deactivate = SpackCommand("deactivate")
+install = SpackCommand("install")
+extensions = SpackCommand("extensions")
+
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
+
+
+def test_activate(mock_packages, mock_archive, mock_fetch, config, install_mockery):
+ install("extension1")
+ activate("extension1")
+ output = extensions("--show", "activated", "extendee")
+ assert "extension1" in output
+
+
+def test_deactivate(mock_packages, mock_archive, mock_fetch, config, install_mockery):
+ install("extension1")
+ activate("extension1")
+ deactivate("extension1")
+ output = extensions("--show", "activated", "extendee")
+ assert "extension1" not in output
+
+
+def test_deactivate_all(mock_packages, mock_archive, mock_fetch, config, install_mockery):
+ install("extension1")
+ install("extension2")
+ activate("extension1")
+ activate("extension2")
+ deactivate("--all", "extendee")
+ output = extensions("--show", "activated", "extendee")
+ assert "extension1" not in output
diff --git a/lib/spack/spack/test/cmd/arch.py b/lib/spack/spack/test/cmd/arch.py
index e7043672bc..7373b666ad 100644
--- a/lib/spack/spack/test/cmd/arch.py
+++ b/lib/spack/spack/test/cmd/arch.py
@@ -5,45 +5,45 @@
from spack.main import SpackCommand
-arch = SpackCommand('arch')
+arch = SpackCommand("arch")
def test_arch():
"""Sanity check ``spack arch`` to make sure it works."""
arch()
- arch('-f')
- arch('--frontend')
- arch('-b')
- arch('--backend')
+ arch("-f")
+ arch("--frontend")
+ arch("-b")
+ arch("--backend")
def test_arch_platform():
"""Sanity check ``spack arch --platform`` to make sure it works."""
- arch('-p')
- arch('--platform')
- arch('-f', '-p')
- arch('-b', '-p')
+ arch("-p")
+ arch("--platform")
+ arch("-f", "-p")
+ arch("-b", "-p")
def test_arch_operating_system():
"""Sanity check ``spack arch --operating-system`` to make sure it works."""
- arch('-o')
- arch('--operating-system')
- arch('-f', '-o')
- arch('-b', '-o')
+ arch("-o")
+ arch("--operating-system")
+ arch("-f", "-o")
+ arch("-b", "-o")
def test_arch_target():
"""Sanity check ``spack arch --target`` to make sure it works."""
- arch('-t')
- arch('--target')
- arch('-f', '-t')
- arch('-b', '-t')
+ arch("-t")
+ arch("--target")
+ arch("-f", "-t")
+ arch("-b", "-t")
def test_display_targets():
- arch('--known-targets')
+ arch("--known-targets")
diff --git a/lib/spack/spack/test/cmd/audit.py b/lib/spack/spack/test/cmd/audit.py
index 722572234b..33b428c5ed 100644
--- a/lib/spack/spack/test/cmd/audit.py
+++ b/lib/spack/spack/test/cmd/audit.py
@@ -6,29 +6,30 @@ import pytest
from spack.main import SpackCommand
-audit = SpackCommand('audit')
-
-
-@pytest.mark.parametrize('pkgs,expected_returncode', [
- # A single package with issues, should exit 1
- (['wrong-variant-in-conflicts'], 1),
- # A "sane" package should exit 0
- (['mpileaks'], 0),
- # A package with issues and a package without should exit 1
- (['wrong-variant-in-conflicts', 'mpileaks'], 1),
- (['mpileaks', 'wrong-variant-in-conflicts'], 1),
-])
-def test_audit_packages(
- pkgs, expected_returncode, mutable_config, mock_packages
-):
+audit = SpackCommand("audit")
+
+
+@pytest.mark.parametrize(
+ "pkgs,expected_returncode",
+ [
+ # A single package with issues, should exit 1
+ (["wrong-variant-in-conflicts"], 1),
+ # A "sane" package should exit 0
+ (["mpileaks"], 0),
+ # A package with issues and a package without should exit 1
+ (["wrong-variant-in-conflicts", "mpileaks"], 1),
+ (["mpileaks", "wrong-variant-in-conflicts"], 1),
+ ],
+)
+def test_audit_packages(pkgs, expected_returncode, mutable_config, mock_packages):
"""Sanity check ``spack audit packages`` to make sure it works."""
- audit('packages', *pkgs, fail_on_error=False)
+ audit("packages", *pkgs, fail_on_error=False)
assert audit.returncode == expected_returncode
def test_audit_configs(mutable_config, mock_packages):
"""Sanity check ``spack audit packages`` to make sure it works."""
- audit('configs', fail_on_error=False)
+ audit("configs", fail_on_error=False)
# The mock configuration has duplicate definitions of some compilers
assert audit.returncode == 1
@@ -36,18 +37,18 @@ def test_audit_configs(mutable_config, mock_packages):
def test_audit_packages_https(mutable_config, mock_packages):
# Without providing --all should fail
- audit('packages-https', fail_on_error=False)
+ audit("packages-https", fail_on_error=False)
    # Not providing --all or package names results in an error
assert audit.returncode == 1
# This uses http and should fail
- audit('packages-https', "test-dependency", fail_on_error=False)
+ audit("packages-https", "test-dependency", fail_on_error=False)
assert audit.returncode == 1
# providing one or more package names with https should work
- audit('packages-https', "cmake", fail_on_error=True)
+ audit("packages-https", "cmake", fail_on_error=True)
assert audit.returncode == 0
# providing one or more package names with https should work
- audit('packages-https', "cmake", "conflict", fail_on_error=True)
+ audit("packages-https", "cmake", "conflict", fail_on_error=True)
assert audit.returncode == 0
diff --git a/lib/spack/spack/test/cmd/blame.py b/lib/spack/spack/test/cmd/blame.py
index f6ca34b4b1..d33abbf6ae 100644
--- a/lib/spack/spack/test/cmd/blame.py
+++ b/lib/spack/spack/test/cmd/blame.py
@@ -16,43 +16,42 @@ from spack.main import SpackCommand
from spack.util.executable import which
pytestmark = pytest.mark.skipif(
- not which('git') or not spack.cmd.spack_is_git_repo(),
- reason="needs git")
+ not which("git") or not spack.cmd.spack_is_git_repo(), reason="needs git"
+)
-blame = SpackCommand('blame')
+blame = SpackCommand("blame")
def test_blame_by_modtime(mock_packages):
"""Sanity check the blame command to make sure it works."""
- out = blame('--time', 'mpich')
- assert 'LAST_COMMIT' in out
- assert 'AUTHOR' in out
- assert 'EMAIL' in out
+ out = blame("--time", "mpich")
+ assert "LAST_COMMIT" in out
+ assert "AUTHOR" in out
+ assert "EMAIL" in out
def test_blame_by_percent(mock_packages):
"""Sanity check the blame command to make sure it works."""
- out = blame('--percent', 'mpich')
- assert 'LAST_COMMIT' in out
- assert 'AUTHOR' in out
- assert 'EMAIL' in out
+ out = blame("--percent", "mpich")
+ assert "LAST_COMMIT" in out
+ assert "AUTHOR" in out
+ assert "EMAIL" in out
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_blame_file(mock_packages):
"""Sanity check the blame command to make sure it works."""
with working_dir(spack.paths.prefix):
- out = blame('bin/spack')
- assert 'LAST_COMMIT' in out
- assert 'AUTHOR' in out
- assert 'EMAIL' in out
+ out = blame("bin/spack")
+ assert "LAST_COMMIT" in out
+ assert "AUTHOR" in out
+ assert "EMAIL" in out
def test_blame_json(mock_packages):
"""Ensure that we can output json as a blame."""
with working_dir(spack.paths.prefix):
- out = blame('--json', 'mpich')
+ out = blame("--json", "mpich")
# Test loading the json, and top level keys
loaded = sjson.load(out)
@@ -60,22 +59,22 @@ def test_blame_json(mock_packages):
assert "totals" in out
# Authors should be a list
- assert len(loaded['authors']) > 0
+ assert len(loaded["authors"]) > 0
# Each of authors and totals has these shared keys
keys = ["last_commit", "lines", "percentage"]
for key in keys:
- assert key in loaded['totals']
+ assert key in loaded["totals"]
# But authors is a list of multiple
for key in keys + ["author", "email"]:
- assert key in loaded['authors'][0]
+ assert key in loaded["authors"][0]
-@pytest.mark.skipif(sys.platform == 'win32', reason="git hangs")
+@pytest.mark.skipif(sys.platform == "win32", reason="git hangs")
def test_blame_by_git(mock_packages, capfd):
"""Sanity check the blame command to make sure it works."""
with capfd.disabled():
- out = blame('--git', 'mpich')
- assert 'class Mpich' in out
+ out = blame("--git", "mpich")
+ assert "class Mpich" in out
assert ' homepage = "http://www.mpich.org"' in out
diff --git a/lib/spack/spack/test/cmd/bootstrap.py b/lib/spack/spack/test/cmd/bootstrap.py
index e4cace7d89..2ff66ae35b 100644
--- a/lib/spack/spack/test/cmd/bootstrap.py
+++ b/lib/spack/spack/test/cmd/bootstrap.py
@@ -13,95 +13,86 @@ import spack.main
import spack.mirror
from spack.util.path import convert_to_posix_path
-_bootstrap = spack.main.SpackCommand('bootstrap')
+_bootstrap = spack.main.SpackCommand("bootstrap")
-@pytest.mark.parametrize('scope', [
- None, 'site', 'system', 'user'
-])
+@pytest.mark.parametrize("scope", [None, "site", "system", "user"])
def test_enable_and_disable(mutable_config, scope):
scope_args = []
if scope:
- scope_args = ['--scope={0}'.format(scope)]
+ scope_args = ["--scope={0}".format(scope)]
- _bootstrap('enable', *scope_args)
- assert spack.config.get('bootstrap:enable', scope=scope) is True
+ _bootstrap("enable", *scope_args)
+ assert spack.config.get("bootstrap:enable", scope=scope) is True
- _bootstrap('disable', *scope_args)
- assert spack.config.get('bootstrap:enable', scope=scope) is False
+ _bootstrap("disable", *scope_args)
+ assert spack.config.get("bootstrap:enable", scope=scope) is False
-@pytest.mark.parametrize('scope', [
- None, 'site', 'system', 'user'
-])
+@pytest.mark.parametrize("scope", [None, "site", "system", "user"])
def test_root_get_and_set(mutable_config, scope):
- scope_args, path = [], '/scratch/spack/bootstrap'
+ scope_args, path = [], "/scratch/spack/bootstrap"
if scope:
- scope_args = ['--scope={0}'.format(scope)]
+ scope_args = ["--scope={0}".format(scope)]
- _bootstrap('root', path, *scope_args)
- out = _bootstrap('root', *scope_args, output=str)
- if sys.platform == 'win32':
+ _bootstrap("root", path, *scope_args)
+ out = _bootstrap("root", *scope_args, output=str)
+ if sys.platform == "win32":
out = convert_to_posix_path(out)
assert out.strip() == path
-@pytest.mark.parametrize('scopes', [
- ('site',),
- ('system', 'user')
-])
+@pytest.mark.parametrize("scopes", [("site",), ("system", "user")])
def test_reset_in_file_scopes(mutable_config, scopes):
# Assert files are created in the right scopes
bootstrap_yaml_files = []
for s in scopes:
- _bootstrap('disable', '--scope={0}'.format(s))
+ _bootstrap("disable", "--scope={0}".format(s))
scope_path = spack.config.config.scopes[s].path
- bootstrap_yaml = os.path.join(
- scope_path, 'bootstrap.yaml'
- )
+ bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
assert os.path.exists(bootstrap_yaml)
bootstrap_yaml_files.append(bootstrap_yaml)
- _bootstrap('reset', '-y')
+ _bootstrap("reset", "-y")
for bootstrap_yaml in bootstrap_yaml_files:
assert not os.path.exists(bootstrap_yaml)
def test_reset_in_environment(mutable_mock_env_path, mutable_config):
- env = spack.main.SpackCommand('env')
- env('create', 'bootstrap-test')
- current_environment = ev.read('bootstrap-test')
+ env = spack.main.SpackCommand("env")
+ env("create", "bootstrap-test")
+ current_environment = ev.read("bootstrap-test")
with current_environment:
- _bootstrap('disable')
- assert spack.config.get('bootstrap:enable') is False
- _bootstrap('reset', '-y')
+ _bootstrap("disable")
+ assert spack.config.get("bootstrap:enable") is False
+ _bootstrap("reset", "-y")
# We have no default settings in tests
- assert spack.config.get('bootstrap:enable') is None
+ assert spack.config.get("bootstrap:enable") is None
# Check that reset didn't delete the entire file
- spack_yaml = os.path.join(current_environment.path, 'spack.yaml')
+ spack_yaml = os.path.join(current_environment.path, "spack.yaml")
assert os.path.exists(spack_yaml)
def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
# Create a bootstrap.yaml with some config
- _bootstrap('disable', '--scope=site')
- scope_path = spack.config.config.scopes['site'].path
- bootstrap_yaml = os.path.join(scope_path, 'bootstrap.yaml')
+ _bootstrap("disable", "--scope=site")
+ scope_path = spack.config.config.scopes["site"].path
+ bootstrap_yaml = os.path.join(scope_path, "bootstrap.yaml")
assert os.path.exists(bootstrap_yaml)
# Reset the bootstrap configuration
- _bootstrap('reset', '-y')
- backup_file = bootstrap_yaml + '.bkp'
+ _bootstrap("reset", "-y")
+ backup_file = bootstrap_yaml + ".bkp"
assert not os.path.exists(bootstrap_yaml)
assert os.path.exists(backup_file)
# Iterate another time
- _bootstrap('disable', '--scope=site')
+ _bootstrap("disable", "--scope=site")
assert os.path.exists(bootstrap_yaml)
assert os.path.exists(backup_file)
- _bootstrap('reset', '-y')
+ _bootstrap("reset", "-y")
assert not os.path.exists(bootstrap_yaml)
assert os.path.exists(backup_file)
@@ -109,62 +100,59 @@ def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
def test_list_sources(capsys):
# Get the merged list and ensure we get our defaults
with capsys.disabled():
- output = _bootstrap('list')
+ output = _bootstrap("list")
assert "github-actions" in output
# Ask for a specific scope and check that the list of sources is empty
with capsys.disabled():
- output = _bootstrap('list', '--scope', 'user')
+ output = _bootstrap("list", "--scope", "user")
assert "No method available" in output
-@pytest.mark.parametrize('command,value', [
- ('trust', True),
- ('untrust', False)
-])
+@pytest.mark.parametrize("command,value", [("trust", True), ("untrust", False)])
def test_trust_or_untrust_sources(mutable_config, command, value):
- key = 'bootstrap:trusted:github-actions'
+ key = "bootstrap:trusted:github-actions"
trusted = spack.config.get(key, default=None)
assert trusted is None
- _bootstrap(command, 'github-actions')
+ _bootstrap(command, "github-actions")
trusted = spack.config.get(key, default=None)
assert trusted is value
def test_trust_or_untrust_fails_with_no_method(mutable_config):
- with pytest.raises(RuntimeError, match='no bootstrapping method'):
- _bootstrap('trust', 'foo')
+ with pytest.raises(RuntimeError, match="no bootstrapping method"):
+ _bootstrap("trust", "foo")
def test_trust_or_untrust_fails_with_more_than_one_method(mutable_config):
- wrong_config = {'sources': [
- {'name': 'github-actions',
- 'metadata': '$spack/share/spack/bootstrap/github-actions'},
- {'name': 'github-actions',
- 'metadata': '$spack/share/spack/bootstrap/github-actions'}],
- 'trusted': {}
+ wrong_config = {
+ "sources": [
+ {"name": "github-actions", "metadata": "$spack/share/spack/bootstrap/github-actions"},
+ {"name": "github-actions", "metadata": "$spack/share/spack/bootstrap/github-actions"},
+ ],
+ "trusted": {},
}
- with spack.config.override('bootstrap', wrong_config):
- with pytest.raises(RuntimeError, match='more than one'):
- _bootstrap('trust', 'github-actions')
+ with spack.config.override("bootstrap", wrong_config):
+ with pytest.raises(RuntimeError, match="more than one"):
+ _bootstrap("trust", "github-actions")
-@pytest.mark.parametrize('use_existing_dir', [True, False])
+@pytest.mark.parametrize("use_existing_dir", [True, False])
def test_add_failures_for_non_existing_files(mutable_config, tmpdir, use_existing_dir):
- metadata_dir = str(tmpdir) if use_existing_dir else '/foo/doesnotexist'
- with pytest.raises(RuntimeError, match='does not exist'):
- _bootstrap('add', 'mock-mirror', metadata_dir)
+ metadata_dir = str(tmpdir) if use_existing_dir else "/foo/doesnotexist"
+ with pytest.raises(RuntimeError, match="does not exist"):
+ _bootstrap("add", "mock-mirror", metadata_dir)
def test_add_failures_for_already_existing_name(mutable_config):
- with pytest.raises(RuntimeError, match='already exist'):
- _bootstrap('add', 'github-actions', 'some-place')
+ with pytest.raises(RuntimeError, match="already exist"):
+ _bootstrap("add", "github-actions", "some-place")
def test_remove_failure_for_non_existing_names(mutable_config):
- with pytest.raises(RuntimeError, match='cannot find'):
- _bootstrap('remove', 'mock-mirror')
+ with pytest.raises(RuntimeError, match="cannot find"):
+ _bootstrap("remove", "mock-mirror")
def test_remove_and_add_a_source(mutable_config):
@@ -173,49 +161,48 @@ def test_remove_and_add_a_source(mutable_config):
assert len(sources) == 1
# Remove it and check the result
- _bootstrap('remove', 'github-actions')
+ _bootstrap("remove", "github-actions")
sources = spack.bootstrap.bootstrapping_sources()
assert not sources
# Add it back and check we restored the initial state
- _bootstrap(
- 'add', 'github-actions', '$spack/share/spack/bootstrap/github-actions-v0.2'
- )
+ _bootstrap("add", "github-actions", "$spack/share/spack/bootstrap/github-actions-v0.2")
sources = spack.bootstrap.bootstrapping_sources()
assert len(sources) == 1
@pytest.mark.maybeslow
-@pytest.mark.skipif(sys.platform == 'win32', reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_bootstrap_mirror_metadata(mutable_config, linux_os, monkeypatch, tmpdir):
"""Test that `spack bootstrap mirror` creates a folder that can be ingested by
`spack bootstrap add`. Here we don't download data, since that would be an
expensive operation for a unit test.
"""
old_create = spack.mirror.create
- monkeypatch.setattr(spack.mirror, 'create', lambda p, s: old_create(p, []))
+ monkeypatch.setattr(spack.mirror, "create", lambda p, s: old_create(p, []))
# Create the mirror in a temporary folder
- compilers = [{
- 'compiler': {
- 'spec': 'gcc@12.0.1',
- 'operating_system': '{0.name}{0.version}'.format(linux_os),
- 'modules': [],
- 'paths': {
- 'cc': '/usr/bin',
- 'cxx': '/usr/bin',
- 'fc': '/usr/bin',
- 'f77': '/usr/bin'
+ compilers = [
+ {
+ "compiler": {
+ "spec": "gcc@12.0.1",
+ "operating_system": "{0.name}{0.version}".format(linux_os),
+ "modules": [],
+ "paths": {
+ "cc": "/usr/bin",
+ "cxx": "/usr/bin",
+ "fc": "/usr/bin",
+ "f77": "/usr/bin",
+ },
}
}
- }]
- with spack.config.override('compilers', compilers):
- _bootstrap('mirror', str(tmpdir))
+ ]
+ with spack.config.override("compilers", compilers):
+ _bootstrap("mirror", str(tmpdir))
# Register the mirror
- metadata_dir = tmpdir.join('metadata', 'sources')
- _bootstrap('add', '--trust', 'test-mirror', str(metadata_dir))
+ metadata_dir = tmpdir.join("metadata", "sources")
+ _bootstrap("add", "--trust", "test-mirror", str(metadata_dir))
assert _bootstrap.returncode == 0
- assert any(m['name'] == 'test-mirror'
- for m in spack.bootstrap.bootstrapping_sources())
+ assert any(m["name"] == "test-mirror" for m in spack.bootstrap.bootstrapping_sources())
diff --git a/lib/spack/spack/test/cmd/build_env.py b/lib/spack/spack/test/cmd/build_env.py
index 954598cfb5..16d363eff4 100644
--- a/lib/spack/spack/test/cmd/build_env.py
+++ b/lib/spack/spack/test/cmd/build_env.py
@@ -8,50 +8,50 @@ from six.moves import cPickle
from spack.main import SpackCommand
-build_env = SpackCommand('build-env')
+build_env = SpackCommand("build-env")
-@pytest.mark.parametrize('pkg', [
- ('zlib',),
- ('zlib', '--')
-])
-@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
+@pytest.mark.parametrize("pkg", [("zlib",), ("zlib", "--")])
+@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_it_just_runs(pkg):
build_env(*pkg)
-@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
+@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_error_when_multiple_specs_are_given():
- output = build_env('libelf libdwarf', fail_on_error=False)
- assert 'only takes one spec' in output
-
-
-@pytest.mark.parametrize('args', [
- ('--', '/bin/bash', '-c', 'echo test'),
- ('--',),
- (),
-])
-@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
+ output = build_env("libelf libdwarf", fail_on_error=False)
+ assert "only takes one spec" in output
+
+
+@pytest.mark.parametrize(
+ "args",
+ [
+ ("--", "/bin/bash", "-c", "echo test"),
+ ("--",),
+ (),
+ ],
+)
+@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_build_env_requires_a_spec(args):
output = build_env(*args, fail_on_error=False)
- assert 'requires a spec' in output
+ assert "requires a spec" in output
-_out_file = 'env.out'
+_out_file = "env.out"
-@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
+@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_dump(tmpdir):
with tmpdir.as_cwd():
- build_env('--dump', _out_file, 'zlib')
+ build_env("--dump", _out_file, "zlib")
with open(_out_file) as f:
- assert(any(line.startswith('PATH=') for line in f.readlines()))
+ assert any(line.startswith("PATH=") for line in f.readlines())
-@pytest.mark.usefixtures('config', 'mock_packages', 'working_env')
+@pytest.mark.usefixtures("config", "mock_packages", "working_env")
def test_pickle(tmpdir):
with tmpdir.as_cwd():
- build_env('--pickle', _out_file, 'zlib')
- environment = cPickle.load(open(_out_file, 'rb'))
- assert(type(environment) == dict)
- assert('PATH' in environment)
+ build_env("--pickle", _out_file, "zlib")
+ environment = cPickle.load(open(_out_file, "rb"))
+ assert type(environment) == dict
+ assert "PATH" in environment
diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py
index 958f5c3777..638ad9a883 100644
--- a/lib/spack/spack/test/cmd/buildcache.py
+++ b/lib/spack/spack/test/cmd/buildcache.py
@@ -17,24 +17,21 @@ import spack.main
import spack.spec
from spack.spec import Spec
-buildcache = spack.main.SpackCommand('buildcache')
-install = spack.main.SpackCommand('install')
-env = spack.main.SpackCommand('env')
-add = spack.main.SpackCommand('add')
-gpg = spack.main.SpackCommand('gpg')
-mirror = spack.main.SpackCommand('mirror')
-uninstall = spack.main.SpackCommand('uninstall')
+buildcache = spack.main.SpackCommand("buildcache")
+install = spack.main.SpackCommand("install")
+env = spack.main.SpackCommand("env")
+add = spack.main.SpackCommand("add")
+gpg = spack.main.SpackCommand("gpg")
+mirror = spack.main.SpackCommand("mirror")
+uninstall = spack.main.SpackCommand("uninstall")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture()
def mock_get_specs(database, monkeypatch):
specs = database.query_local()
- monkeypatch.setattr(
- spack.binary_distribution, 'update_cache_and_get_specs', lambda: specs
- )
+ monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", lambda: specs)
@pytest.fixture()
@@ -44,119 +41,115 @@ def mock_get_specs_multiarch(database, monkeypatch):
# make one spec that is NOT the test architecture
for spec in specs:
if spec.name == "mpileaks":
- spec.architecture = spack.spec.ArchSpec('linux-rhel7-x86_64')
+ spec.architecture = spack.spec.ArchSpec("linux-rhel7-x86_64")
break
- monkeypatch.setattr(
- spack.binary_distribution, 'update_cache_and_get_specs', lambda: specs
- )
+ monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", lambda: specs)
@pytest.mark.skipif(
- platform.system().lower() != 'linux',
- reason='implementation for MacOS still missing'
+ platform.system().lower() != "linux", reason="implementation for MacOS still missing"
)
@pytest.mark.db
def test_buildcache_preview_just_runs(database):
- buildcache('preview', 'mpileaks')
+ buildcache("preview", "mpileaks")
@pytest.mark.db
-@pytest.mark.regression('13757')
+@pytest.mark.regression("13757")
def test_buildcache_list_duplicates(mock_get_specs, capsys):
with capsys.disabled():
- output = buildcache('list', 'mpileaks', '@2.3')
+ output = buildcache("list", "mpileaks", "@2.3")
- assert output.count('mpileaks') == 3
+ assert output.count("mpileaks") == 3
@pytest.mark.db
-@pytest.mark.regression('17827')
+@pytest.mark.regression("17827")
def test_buildcache_list_allarch(database, mock_get_specs_multiarch, capsys):
with capsys.disabled():
- output = buildcache('list', '--allarch')
+ output = buildcache("list", "--allarch")
- assert output.count('mpileaks') == 3
+ assert output.count("mpileaks") == 3
with capsys.disabled():
- output = buildcache('list')
+ output = buildcache("list")
- assert output.count('mpileaks') == 2
+ assert output.count("mpileaks") == 2
-def tests_buildcache_create(
- install_mockery, mock_fetch, monkeypatch, tmpdir):
- """"Ensure that buildcache create creates output files"""
- pkg = 'trivial-install-test-package'
+def tests_buildcache_create(install_mockery, mock_fetch, monkeypatch, tmpdir):
+ """ "Ensure that buildcache create creates output files"""
+ pkg = "trivial-install-test-package"
install(pkg)
- buildcache('create', '-d', str(tmpdir), '--unsigned', pkg)
+ buildcache("create", "-d", str(tmpdir), "--unsigned", pkg)
spec = Spec(pkg).concretized()
- tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
- tarball = spack.binary_distribution.tarball_name(spec, '.spec.json')
- assert os.path.exists(
- os.path.join(str(tmpdir), 'build_cache', tarball_path))
- assert os.path.exists(
- os.path.join(str(tmpdir), 'build_cache', tarball))
+ tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
+ tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
+ assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
+ assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
def tests_buildcache_create_env(
- install_mockery, mock_fetch, monkeypatch,
- tmpdir, mutable_mock_env_path):
- """"Ensure that buildcache create creates output files from env"""
- pkg = 'trivial-install-test-package'
+ install_mockery, mock_fetch, monkeypatch, tmpdir, mutable_mock_env_path
+):
+ """ "Ensure that buildcache create creates output files from env"""
+ pkg = "trivial-install-test-package"
- env('create', 'test')
- with ev.read('test'):
+ env("create", "test")
+ with ev.read("test"):
add(pkg)
install()
- buildcache('create', '-d', str(tmpdir), '--unsigned')
+ buildcache("create", "-d", str(tmpdir), "--unsigned")
spec = Spec(pkg).concretized()
- tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
- tarball = spack.binary_distribution.tarball_name(spec, '.spec.json')
- assert os.path.exists(
- os.path.join(str(tmpdir), 'build_cache', tarball_path))
- assert os.path.exists(
- os.path.join(str(tmpdir), 'build_cache', tarball))
+ tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
+ tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
+ assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
+ assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
def test_buildcache_create_fails_on_noargs(tmpdir):
"""Ensure that buildcache create fails when given no args or
environment."""
with pytest.raises(spack.main.SpackCommandError):
- buildcache('create', '-d', str(tmpdir), '--unsigned')
+ buildcache("create", "-d", str(tmpdir), "--unsigned")
-def test_buildcache_create_fail_on_perm_denied(
- install_mockery, mock_fetch, monkeypatch, tmpdir):
+def test_buildcache_create_fail_on_perm_denied(install_mockery, mock_fetch, monkeypatch, tmpdir):
"""Ensure that buildcache create fails on permission denied error."""
- install('trivial-install-test-package')
+ install("trivial-install-test-package")
tmpdir.chmod(0)
with pytest.raises(OSError) as error:
- buildcache('create', '-d', str(tmpdir),
- '--unsigned', 'trivial-install-test-package')
+ buildcache("create", "-d", str(tmpdir), "--unsigned", "trivial-install-test-package")
assert error.value.errno == errno.EACCES
tmpdir.chmod(0o700)
-def test_update_key_index(tmpdir, mutable_mock_env_path,
- install_mockery, mock_packages, mock_fetch,
- mock_stage, mock_gnupghome):
+def test_update_key_index(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ mock_fetch,
+ mock_stage,
+ mock_gnupghome,
+):
"""Test the update-index command with the --keys option"""
- working_dir = tmpdir.join('working_dir')
+ working_dir = tmpdir.join("working_dir")
- mirror_dir = working_dir.join('mirror')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = working_dir.join("mirror")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
- mirror('add', 'test-mirror', mirror_url)
+ mirror("add", "test-mirror", mirror_url)
- gpg('create', 'Test Signing Key', 'nobody@nowhere.com')
+ gpg("create", "Test Signing Key", "nobody@nowhere.com")
- s = Spec('libdwarf').concretized()
+ s = Spec("libdwarf").concretized()
# Install a package
install(s.name)
@@ -164,111 +157,111 @@ def test_update_key_index(tmpdir, mutable_mock_env_path,
# Put installed package in the buildcache, which, because we're signing
# it, should result in the public key getting pushed to the buildcache
# as well.
- buildcache('create', '-a', '-d', mirror_dir.strpath, s.name)
+ buildcache("create", "-a", "-d", mirror_dir.strpath, s.name)
# Now make sure that when we pass the "--keys" argument to update-index
    # it causes the index to get updated.
- buildcache('update-index', '--keys', '-d', mirror_dir.strpath)
+ buildcache("update-index", "--keys", "-d", mirror_dir.strpath)
- key_dir_list = os.listdir(os.path.join(
- mirror_dir.strpath, 'build_cache', '_pgp'))
+ key_dir_list = os.listdir(os.path.join(mirror_dir.strpath, "build_cache", "_pgp"))
- uninstall('-y', s.name)
- mirror('rm', 'test-mirror')
+ uninstall("-y", s.name)
+ mirror("rm", "test-mirror")
- assert 'index.json' in key_dir_list
+ assert "index.json" in key_dir_list
-def test_buildcache_sync(mutable_mock_env_path, install_mockery_mutable_config,
- mock_packages, mock_fetch, mock_stage, tmpdir):
+def test_buildcache_sync(
+ mutable_mock_env_path,
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_stage,
+ tmpdir,
+):
"""
Make sure buildcache sync works in an environment-aware manner, ignoring
any specs that may be in the mirror but not in the environment.
"""
- working_dir = tmpdir.join('working_dir')
+ working_dir = tmpdir.join("working_dir")
- src_mirror_dir = working_dir.join('src_mirror').strpath
- src_mirror_url = 'file://{0}'.format(src_mirror_dir)
+ src_mirror_dir = working_dir.join("src_mirror").strpath
+ src_mirror_url = "file://{0}".format(src_mirror_dir)
- dest_mirror_dir = working_dir.join('dest_mirror').strpath
- dest_mirror_url = 'file://{0}'.format(dest_mirror_dir)
+ dest_mirror_dir = working_dir.join("dest_mirror").strpath
+ dest_mirror_url = "file://{0}".format(dest_mirror_dir)
- in_env_pkg = 'trivial-install-test-package'
- out_env_pkg = 'libdwarf'
+ in_env_pkg = "trivial-install-test-package"
+ out_env_pkg = "libdwarf"
def verify_mirror_contents():
- dest_list = os.listdir(
- os.path.join(dest_mirror_dir, 'build_cache'))
+ dest_list = os.listdir(os.path.join(dest_mirror_dir, "build_cache"))
found_pkg = False
for p in dest_list:
- assert(out_env_pkg not in p)
+ assert out_env_pkg not in p
if in_env_pkg in p:
found_pkg = True
if not found_pkg:
- print('Expected to find {0} in {1}'.format(
- in_env_pkg, dest_mirror_dir))
- assert(False)
+ print("Expected to find {0} in {1}".format(in_env_pkg, dest_mirror_dir))
+ assert False
# Install a package and put it in the buildcache
s = Spec(out_env_pkg).concretized()
install(s.name)
- buildcache(
- 'create', '-u', '-f', '-a', '--mirror-url', src_mirror_url, s.name)
+ buildcache("create", "-u", "-f", "-a", "--mirror-url", src_mirror_url, s.name)
- env('create', 'test')
- with ev.read('test'):
+ env("create", "test")
+ with ev.read("test"):
add(in_env_pkg)
install()
- buildcache(
- 'create', '-u', '-f', '-a', '--mirror-url', src_mirror_url, in_env_pkg)
+ buildcache("create", "-u", "-f", "-a", "--mirror-url", src_mirror_url, in_env_pkg)
# Now run the spack buildcache sync command with all the various options
# for specifying mirrors
# Use urls to specify mirrors
- buildcache('sync',
- '--src-mirror-url', src_mirror_url,
- '--dest-mirror-url', dest_mirror_url)
+ buildcache(
+ "sync", "--src-mirror-url", src_mirror_url, "--dest-mirror-url", dest_mirror_url
+ )
verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
# Use local directory paths to specify fs locations
- buildcache('sync',
- '--src-directory', src_mirror_dir,
- '--dest-directory', dest_mirror_dir)
+ buildcache("sync", "--src-directory", src_mirror_dir, "--dest-directory", dest_mirror_dir)
verify_mirror_contents()
shutil.rmtree(dest_mirror_dir)
# Use mirror names to specify mirrors
- mirror('add', 'src', src_mirror_url)
- mirror('add', 'dest', dest_mirror_url)
+ mirror("add", "src", src_mirror_url)
+ mirror("add", "dest", dest_mirror_url)
- buildcache('sync',
- '--src-mirror-name', 'src',
- '--dest-mirror-name', 'dest')
+ buildcache("sync", "--src-mirror-name", "src", "--dest-mirror-name", "dest")
verify_mirror_contents()
-def test_buildcache_create_install(mutable_mock_env_path,
- install_mockery_mutable_config,
- mock_packages, mock_fetch, mock_stage,
- monkeypatch, tmpdir):
- """"Ensure that buildcache create creates output files"""
- pkg = 'trivial-install-test-package'
+def test_buildcache_create_install(
+ mutable_mock_env_path,
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_stage,
+ monkeypatch,
+ tmpdir,
+):
+ """ "Ensure that buildcache create creates output files"""
+ pkg = "trivial-install-test-package"
install(pkg)
- buildcache('create', '-d', str(tmpdir), '--unsigned', pkg)
+ buildcache("create", "-d", str(tmpdir), "--unsigned", pkg)
spec = Spec(pkg).concretized()
- tarball_path = spack.binary_distribution.tarball_path_name(spec, '.spack')
- tarball = spack.binary_distribution.tarball_name(spec, '.spec.json')
- assert os.path.exists(
- os.path.join(str(tmpdir), 'build_cache', tarball_path))
- assert os.path.exists(
- os.path.join(str(tmpdir), 'build_cache', tarball))
+ tarball_path = spack.binary_distribution.tarball_path_name(spec, ".spack")
+ tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
+ assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
+ assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
diff --git a/lib/spack/spack/test/cmd/cd.py b/lib/spack/spack/test/cmd/cd.py
index 346b9a6185..48c8e4adfc 100644
--- a/lib/spack/spack/test/cmd/cd.py
+++ b/lib/spack/spack/test/cmd/cd.py
@@ -5,7 +5,7 @@
from spack.main import SpackCommand
-cd = SpackCommand('cd')
+cd = SpackCommand("cd")
def test_cd():
diff --git a/lib/spack/spack/test/cmd/checksum.py b/lib/spack/spack/test/cmd/checksum.py
index d22636a3d2..b0fffbea08 100644
--- a/lib/spack/spack/test/cmd/checksum.py
+++ b/lib/spack/spack/test/cmd/checksum.py
@@ -14,14 +14,17 @@ import spack.cmd.checksum
import spack.repo
from spack.main import SpackCommand
-spack_checksum = SpackCommand('checksum')
+spack_checksum = SpackCommand("checksum")
-@pytest.mark.parametrize('arguments,expected', [
- (['--batch', 'patch'], (True, False, False)),
- (['--latest', 'patch'], (False, True, False)),
- (['--preferred', 'patch'], (False, False, True)),
-])
+@pytest.mark.parametrize(
+ "arguments,expected",
+ [
+ (["--batch", "patch"], (True, False, False)),
+ (["--latest", "patch"], (False, True, False)),
+ (["--preferred", "patch"], (False, False, True)),
+ ],
+)
def test_checksum_args(arguments, expected):
parser = argparse.ArgumentParser()
spack.cmd.checksum.setup_parser(parser)
@@ -30,35 +33,36 @@ def test_checksum_args(arguments, expected):
assert check == expected
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize('arguments,expected', [
- (['--batch', 'preferred-test'], 'version of preferred-test'),
- (['--latest', 'preferred-test'], 'Found 1 version'),
- (['--preferred', 'preferred-test'], 'Found 1 version'),
-])
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.parametrize(
+ "arguments,expected",
+ [
+ (["--batch", "preferred-test"], "version of preferred-test"),
+ (["--latest", "preferred-test"], "Found 1 version"),
+ (["--preferred", "preferred-test"], "Found 1 version"),
+ ],
+)
def test_checksum(arguments, expected, mock_packages, mock_stage):
output = spack_checksum(*arguments)
assert expected in output
- assert 'version(' in output
+ assert "version(" in output
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-def test_checksum_interactive(
- mock_packages, mock_fetch, mock_stage, monkeypatch):
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+def test_checksum_interactive(mock_packages, mock_fetch, mock_stage, monkeypatch):
def _get_number(*args, **kwargs):
return 1
- monkeypatch.setattr(tty, 'get_number', _get_number)
- output = spack_checksum('preferred-test')
- assert 'version of preferred-test' in output
- assert 'version(' in output
+ monkeypatch.setattr(tty, "get_number", _get_number)
+
+ output = spack_checksum("preferred-test")
+ assert "version of preferred-test" in output
+ assert "version(" in output
def test_checksum_versions(mock_packages, mock_fetch, mock_stage):
- pkg_cls = spack.repo.path.get_pkg_class('preferred-test')
+ pkg_cls = spack.repo.path.get_pkg_class("preferred-test")
versions = [str(v) for v in pkg_cls.versions if not v.isdevelop()]
- output = spack_checksum('preferred-test', versions[0])
- assert 'Found 1 version' in output
- assert 'version(' in output
+ output = spack_checksum("preferred-test", versions[0])
+ assert "Found 1 version" in output
+ assert "version(" in output
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
index f6b6bb8c19..2435f8d92a 100644
--- a/lib/spack/spack/test/cmd/ci.py
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -34,58 +34,59 @@ from spack.spec import CompilerSpec, Spec
from spack.util.executable import which
from spack.util.mock_package import MockPackageMultiRepo
-ci_cmd = spack.main.SpackCommand('ci')
-env_cmd = spack.main.SpackCommand('env')
-mirror_cmd = spack.main.SpackCommand('mirror')
-gpg_cmd = spack.main.SpackCommand('gpg')
-install_cmd = spack.main.SpackCommand('install')
-uninstall_cmd = spack.main.SpackCommand('uninstall')
-buildcache_cmd = spack.main.SpackCommand('buildcache')
+ci_cmd = spack.main.SpackCommand("ci")
+env_cmd = spack.main.SpackCommand("env")
+mirror_cmd = spack.main.SpackCommand("mirror")
+gpg_cmd = spack.main.SpackCommand("gpg")
+install_cmd = spack.main.SpackCommand("install")
+uninstall_cmd = spack.main.SpackCommand("uninstall")
+buildcache_cmd = spack.main.SpackCommand("buildcache")
-pytestmark = [pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows"),
- pytest.mark.maybeslow]
+pytestmark = [
+ pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
+ pytest.mark.maybeslow,
+]
@pytest.fixture()
def ci_base_environment(working_env, tmpdir):
- os.environ['CI_PROJECT_DIR'] = tmpdir.strpath
+ os.environ["CI_PROJECT_DIR"] = tmpdir.strpath
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def mock_git_repo(tmpdir):
"""Create a mock git repo with two commits, the last one creating
a .gitlab-ci.yml"""
- repo_path = tmpdir.join('mockspackrepo').strpath
+ repo_path = tmpdir.join("mockspackrepo").strpath
mkdirp(repo_path)
- git = which('git', required=True)
+ git = which("git", required=True)
with working_dir(repo_path):
- git('init')
+ git("init")
- with open('README.md', 'w') as f:
- f.write('# Introduction')
+ with open("README.md", "w") as f:
+ f.write("# Introduction")
- with open('.gitlab-ci.yml', 'w') as f:
- f.write("""
+ with open(".gitlab-ci.yml", "w") as f:
+ f.write(
+ """
testjob:
script:
- echo "success"
- """)
+ """
+ )
- git('config', '--local', 'user.email', 'testing@spack.io')
- git('config', '--local', 'user.name', 'Spack Testing')
+ git("config", "--local", "user.email", "testing@spack.io")
+ git("config", "--local", "user.name", "Spack Testing")
# initial commit with README
- git('add', 'README.md')
- git('-c', 'commit.gpgsign=false', 'commit',
- '-m', 'initial commit')
+ git("add", "README.md")
+ git("-c", "commit.gpgsign=false", "commit", "-m", "initial commit")
# second commit, adding a .gitlab-ci.yml
- git('add', '.gitlab-ci.yml')
- git('-c', 'commit.gpgsign=false', 'commit',
- '-m', 'add a .gitlab-ci.yml')
+ git("add", ".gitlab-ci.yml")
+ git("-c", "commit.gpgsign=false", "commit", "-m", "add a .gitlab-ci.yml")
yield repo_path
@@ -106,58 +107,64 @@ In this case, we would expect 'c', 'e', 'f', and 'g' to be in the first stage,
and then 'd', 'b', and 'a' to be put in the next three stages, respectively.
"""
- default = ('build', 'link')
+ default = ("build", "link")
mock_repo = MockPackageMultiRepo()
- g = mock_repo.add_package('g', [], [])
- f = mock_repo.add_package('f', [], [])
- e = mock_repo.add_package('e', [], [])
- d = mock_repo.add_package('d', [f, g], [default, default])
- c = mock_repo.add_package('c', [], [])
- b = mock_repo.add_package('b', [d, e], [default, default])
- mock_repo.add_package('a', [b, c], [default, default])
+ g = mock_repo.add_package("g", [], [])
+ f = mock_repo.add_package("f", [], [])
+ e = mock_repo.add_package("e", [], [])
+ d = mock_repo.add_package("d", [f, g], [default, default])
+ c = mock_repo.add_package("c", [], [])
+ b = mock_repo.add_package("b", [d, e], [default, default])
+ mock_repo.add_package("a", [b, c], [default, default])
with repo.use_repositories(mock_repo):
- spec_a = Spec('a')
+ spec_a = Spec("a")
spec_a.concretize()
spec_a_label = ci._spec_deps_key(spec_a)
- spec_b_label = ci._spec_deps_key(spec_a['b'])
- spec_c_label = ci._spec_deps_key(spec_a['c'])
- spec_d_label = ci._spec_deps_key(spec_a['d'])
- spec_e_label = ci._spec_deps_key(spec_a['e'])
- spec_f_label = ci._spec_deps_key(spec_a['f'])
- spec_g_label = ci._spec_deps_key(spec_a['g'])
+ spec_b_label = ci._spec_deps_key(spec_a["b"])
+ spec_c_label = ci._spec_deps_key(spec_a["c"])
+ spec_d_label = ci._spec_deps_key(spec_a["d"])
+ spec_e_label = ci._spec_deps_key(spec_a["e"])
+ spec_f_label = ci._spec_deps_key(spec_a["f"])
+ spec_g_label = ci._spec_deps_key(spec_a["g"])
spec_labels, dependencies, stages = ci.stage_spec_jobs([spec_a])
- assert (len(stages) == 4)
+ assert len(stages) == 4
- assert (len(stages[0]) == 4)
- assert (spec_c_label in stages[0])
- assert (spec_e_label in stages[0])
- assert (spec_f_label in stages[0])
- assert (spec_g_label in stages[0])
+ assert len(stages[0]) == 4
+ assert spec_c_label in stages[0]
+ assert spec_e_label in stages[0]
+ assert spec_f_label in stages[0]
+ assert spec_g_label in stages[0]
- assert (len(stages[1]) == 1)
- assert (spec_d_label in stages[1])
+ assert len(stages[1]) == 1
+ assert spec_d_label in stages[1]
- assert (len(stages[2]) == 1)
- assert (spec_b_label in stages[2])
+ assert len(stages[2]) == 1
+ assert spec_b_label in stages[2]
- assert (len(stages[3]) == 1)
- assert (spec_a_label in stages[3])
+ assert len(stages[3]) == 1
+ assert spec_a_label in stages[3]
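# --- Illustrative sketch (added for clarity; not part of this change) ---
# The staging behavior exercised above amounts to grouping specs by their depth
# in the dependency DAG: specs with no dependencies land in stage 0, and every
# other spec lands one stage after its deepest dependency. A minimal,
# self-contained sketch of that idea (hypothetical helper, not Spack API):
def _stage_by_depth_sketch(deps):
    """deps maps each label to the labels it depends on."""
    level = {}

    def depth(label):
        if label not in level:
            level[label] = 1 + max((depth(d) for d in deps.get(label, [])), default=-1)
        return level[label]

    for label in deps:
        depth(label)
    return level

# With the DAG described in the docstring, 'c', 'e', 'f' and 'g' end up at depth 0,
# 'd' at 1, 'b' at 2 and 'a' at 3, matching the four stages asserted above:
# _stage_by_depth_sketch({"a": ["b", "c"], "b": ["d", "e"], "d": ["f", "g"],
#                         "c": [], "e": [], "f": [], "g": []})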
-def test_ci_generate_with_env(tmpdir, mutable_mock_env_path,
- install_mockery, mock_packages,
- ci_base_environment, mock_binary_index):
+def test_ci_generate_with_env(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ ci_base_environment,
+ mock_binary_index,
+):
"""Make sure we can get a .gitlab-ci.yml from an environment file
- which has the gitlab-ci, cdash, and mirrors sections."""
- mirror_url = 'https://my.fake.mirror'
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ which has the gitlab-ci, cdash, and mirrors sections."""
+ mirror_url = "https://my.fake.mirror"
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
definitions:
- bootstrap:
@@ -194,38 +201,40 @@ spack:
url: https://my.fake.cdash
project: Not used
site: Nothing
-""".format(mirror_url))
+""".format(
+ mirror_url
+ )
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
found_spec = False
for ci_key in yaml_contents.keys():
- if '(bootstrap)' in ci_key:
+ if "(bootstrap)" in ci_key:
found_spec = True
- assert('cmake' in ci_key)
- assert(found_spec)
- assert('stages' in yaml_contents)
- assert(len(yaml_contents['stages']) == 6)
- assert(yaml_contents['stages'][0] == 'stage-0')
- assert(yaml_contents['stages'][5] == 'stage-rebuild-index')
+ assert "cmake" in ci_key
+ assert found_spec
+ assert "stages" in yaml_contents
+ assert len(yaml_contents["stages"]) == 6
+ assert yaml_contents["stages"][0] == "stage-0"
+ assert yaml_contents["stages"][5] == "stage-rebuild-index"
- assert('rebuild-index' in yaml_contents)
- rebuild_job = yaml_contents['rebuild-index']
- expected = 'spack buildcache update-index --keys -d {0}'.format(
- mirror_url)
- assert(rebuild_job['script'][0] == expected)
+ assert "rebuild-index" in yaml_contents
+ rebuild_job = yaml_contents["rebuild-index"]
+ expected = "spack buildcache update-index --keys -d {0}".format(mirror_url)
+ assert rebuild_job["script"][0] == expected
- assert('variables' in yaml_contents)
- assert('SPACK_ARTIFACTS_ROOT' in yaml_contents['variables'])
- artifacts_root = yaml_contents['variables']['SPACK_ARTIFACTS_ROOT']
- assert(artifacts_root == 'jobs_scratch_dir')
+ assert "variables" in yaml_contents
+ assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"]
+ artifacts_root = yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"]
+ assert artifacts_root == "jobs_scratch_dir"
def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
@@ -233,25 +242,29 @@ def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
for needs_def_name, needs_list in needs_graph.items():
if job_name.startswith(needs_def_name):
# check job needs against the expected needs definition
- j_needs = job_def['needs']
- assert all([job_needs['job'][:job_needs['job'].index('/')]
- in needs_list for job_needs in j_needs])
- assert(all([nl in
- [n['job'][:n['job'].index('/')] for n in j_needs]
- for nl in needs_list]))
- assert all([job_needs['artifacts'] == artifacts
- for job_needs in j_needs])
+ j_needs = job_def["needs"]
+ assert all(
+ [
+ job_needs["job"][: job_needs["job"].index("/")] in needs_list
+ for job_needs in j_needs
+ ]
+ )
+ assert all(
+ [nl in [n["job"][: n["job"].index("/")] for n in j_needs] for nl in needs_list]
+ )
+ assert all([job_needs["artifacts"] == artifacts for job_needs in j_needs])
break
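# --- Illustrative sketch (added for clarity; not part of this change) ---
# Shape of a single generated job entry that the comprehensions above accept.
# Job names carry a "/<hash>" suffix that is cut at the first "/" before being
# compared with the expected needs list; the hash values here are made up.
_job_def_sketch = {
    "needs": [
        {"job": "(bootstrap) gcc/abc1234", "artifacts": False},
        {"job": "(specs) libelf/def5678", "artifacts": False},
    ]
}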
-def test_ci_generate_bootstrap_gcc(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, ci_base_environment):
+def test_ci_generate_bootstrap_gcc(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
+):
"""Test that we can bootstrap a compiler and use it as the
compiler for a spec in the environment"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
definitions:
- bootstrap:
@@ -271,33 +284,34 @@ spack:
runner-attributes:
tags:
- donotcare
-""")
+"""
+ )
needs_graph = {
- '(bootstrap) conflict': [],
- '(bootstrap) gcc': [
- '(bootstrap) conflict',
+ "(bootstrap) conflict": [],
+ "(bootstrap) gcc": [
+ "(bootstrap) conflict",
],
- '(specs) libelf': [
- '(bootstrap) gcc',
+ "(specs) libelf": [
+ "(bootstrap) gcc",
],
- '(specs) libdwarf': [
- '(bootstrap) gcc',
- '(specs) libelf',
+ "(specs) libdwarf": [
+ "(bootstrap) gcc",
+ "(specs) libelf",
],
- '(specs) dyninst': [
- '(bootstrap) gcc',
- '(specs) libelf',
- '(specs) libdwarf',
+ "(specs) dyninst": [
+ "(bootstrap) gcc",
+ "(specs) libelf",
+ "(specs) libdwarf",
],
}
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
@@ -305,16 +319,15 @@ spack:
_validate_needs_graph(yaml_contents, needs_graph, False)
-def test_ci_generate_bootstrap_artifacts_buildcache(tmpdir,
- mutable_mock_env_path,
- install_mockery,
- mock_packages,
- ci_base_environment):
+def test_ci_generate_bootstrap_artifacts_buildcache(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
+):
"""Test that we can bootstrap a compiler when artifacts buildcache
is turned on"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
definitions:
- bootstrap:
@@ -334,36 +347,37 @@ spack:
tags:
- donotcare
enable-artifacts-buildcache: True
-""")
+"""
+ )
needs_graph = {
- '(bootstrap) conflict': [],
- '(bootstrap) gcc': [
- '(bootstrap) conflict',
+ "(bootstrap) conflict": [],
+ "(bootstrap) gcc": [
+ "(bootstrap) conflict",
],
- '(specs) libelf': [
- '(bootstrap) gcc',
- '(bootstrap) conflict',
+ "(specs) libelf": [
+ "(bootstrap) gcc",
+ "(bootstrap) conflict",
],
- '(specs) libdwarf': [
- '(bootstrap) gcc',
- '(bootstrap) conflict',
- '(specs) libelf',
+ "(specs) libdwarf": [
+ "(bootstrap) gcc",
+ "(bootstrap) conflict",
+ "(specs) libelf",
],
- '(specs) dyninst': [
- '(bootstrap) gcc',
- '(bootstrap) conflict',
- '(specs) libelf',
- '(specs) libdwarf',
+ "(specs) dyninst": [
+ "(bootstrap) gcc",
+ "(bootstrap) conflict",
+ "(specs) libelf",
+ "(specs) libdwarf",
],
}
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
@@ -371,42 +385,55 @@ spack:
_validate_needs_graph(yaml_contents, needs_graph, True)
-def test_ci_generate_with_env_missing_section(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, ci_base_environment,
- mock_binary_index):
+def test_ci_generate_with_env_missing_section(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ ci_base_environment,
+ mock_binary_index,
+):
"""Make sure we get a reasonable message if we omit gitlab-ci section"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
mirrors:
some-mirror: https://my.fake.mirror
-""")
+"""
+ )
expect_out = 'Error: Environment yaml does not have "gitlab-ci" section'
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
+ env_cmd("create", "test", "./spack.yaml")
- with ev.read('test'):
- output = ci_cmd('generate', fail_on_error=False, output=str)
- assert(expect_out in output)
+ with ev.read("test"):
+ output = ci_cmd("generate", fail_on_error=False, output=str)
+ assert expect_out in output
-def test_ci_generate_with_cdash_token(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, ci_base_environment,
- mock_binary_index):
+def test_ci_generate_with_cdash_token(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ ci_base_environment,
+ mock_binary_index,
+):
"""Make sure we it doesn't break if we configure cdash"""
- os.environ.update({
- 'SPACK_CDASH_AUTH_TOKEN': 'notreallyatokenbutshouldnotmatter',
- })
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ os.environ.update(
+ {
+ "SPACK_CDASH_AUTH_TOKEN": "notreallyatokenbutshouldnotmatter",
+ }
+ )
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
@@ -426,37 +453,44 @@ spack:
url: https://my.fake.cdash
project: Not used
site: Nothing
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
+ env_cmd("create", "test", "./spack.yaml")
- with ev.read('test'):
- copy_to_file = str(tmpdir.join('backup-ci.yml'))
- output = ci_cmd('generate', '--copy-to', copy_to_file, output=str)
+ with ev.read("test"):
+ copy_to_file = str(tmpdir.join("backup-ci.yml"))
+ output = ci_cmd("generate", "--copy-to", copy_to_file, output=str)
# That fake token should still have resulted in being unable to
# register build group with cdash, but the workload should
# still have been generated.
- expect = 'Problem populating buildgroup'
- assert(expect in output)
+ expect = "Problem populating buildgroup"
+ assert expect in output
dir_contents = os.listdir(tmpdir.strpath)
- assert('backup-ci.yml' in dir_contents)
+ assert "backup-ci.yml" in dir_contents
- orig_file = str(tmpdir.join('.gitlab-ci.yml'))
+ orig_file = str(tmpdir.join(".gitlab-ci.yml"))
- assert(filecmp.cmp(orig_file, copy_to_file) is True)
+ assert filecmp.cmp(orig_file, copy_to_file) is True
-def test_ci_generate_with_custom_scripts(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment, mock_binary_index):
+def test_ci_generate_with_custom_scripts(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ monkeypatch,
+ ci_base_environment,
+ mock_binary_index,
+):
"""Test use of user-provided scripts"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
@@ -483,15 +517,16 @@ spack:
- spack -d ci rebuild
after_script:
- rm -rf /some/path/spack
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- monkeypatch.setattr(spack.main, 'get_version', lambda: '0.15.3')
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ monkeypatch.setattr(spack.main, "get_version", lambda: "0.15.3")
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
@@ -499,53 +534,54 @@ spack:
found_it = False
- assert('variables' in yaml_contents)
- global_vars = yaml_contents['variables']
- assert('SPACK_VERSION' in global_vars)
- assert(global_vars['SPACK_VERSION'] == '0.15.3')
- assert('SPACK_CHECKOUT_VERSION' in global_vars)
- assert(global_vars['SPACK_CHECKOUT_VERSION'] == 'v0.15.3')
+ assert "variables" in yaml_contents
+ global_vars = yaml_contents["variables"]
+ assert "SPACK_VERSION" in global_vars
+ assert global_vars["SPACK_VERSION"] == "0.15.3"
+ assert "SPACK_CHECKOUT_VERSION" in global_vars
+ assert global_vars["SPACK_CHECKOUT_VERSION"] == "v0.15.3"
for ci_key in yaml_contents.keys():
ci_obj = yaml_contents[ci_key]
- if 'archive-files' in ci_key:
+ if "archive-files" in ci_key:
# Ensure we have variables, possibly interpolated
- assert('variables' in ci_obj)
- var_d = ci_obj['variables']
- assert('ONE' in var_d)
- assert(var_d['ONE'] == 'plain-string-value')
- assert('TWO' in var_d)
- assert(var_d['TWO'] == '${INTERP_ON_BUILD}')
+ assert "variables" in ci_obj
+ var_d = ci_obj["variables"]
+ assert "ONE" in var_d
+ assert var_d["ONE"] == "plain-string-value"
+ assert "TWO" in var_d
+ assert var_d["TWO"] == "${INTERP_ON_BUILD}"
# Ensure we have scripts verbatim
- assert('before_script' in ci_obj)
- before_script = ci_obj['before_script']
- assert(before_script[0] == 'mkdir /some/path')
- assert(before_script[1] == 'pushd /some/path')
- assert(before_script[2] == 'git clone ${SPACK_REPO}')
- assert(before_script[3] == 'cd spack')
- assert(before_script[4] == 'git checkout ${SPACK_REF}')
- assert(before_script[5] == 'popd')
-
- assert('script' in ci_obj)
- assert(ci_obj['script'][0] == 'spack -d ci rebuild')
-
- assert('after_script' in ci_obj)
- after_script = ci_obj['after_script'][0]
- assert(after_script == 'rm -rf /some/path/spack')
+ assert "before_script" in ci_obj
+ before_script = ci_obj["before_script"]
+ assert before_script[0] == "mkdir /some/path"
+ assert before_script[1] == "pushd /some/path"
+ assert before_script[2] == "git clone ${SPACK_REPO}"
+ assert before_script[3] == "cd spack"
+ assert before_script[4] == "git checkout ${SPACK_REF}"
+ assert before_script[5] == "popd"
+
+ assert "script" in ci_obj
+ assert ci_obj["script"][0] == "spack -d ci rebuild"
+
+ assert "after_script" in ci_obj
+ after_script = ci_obj["after_script"][0]
+ assert after_script == "rm -rf /some/path/spack"
found_it = True
- assert(found_it)
+ assert found_it
-def test_ci_generate_pkg_with_deps(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, ci_base_environment):
+def test_ci_generate_pkg_with_deps(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
+):
"""Test pipeline generation for a package w/ dependencies"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- flatten-deps
@@ -564,14 +600,15 @@ spack:
runner-attributes:
tags:
- donotcare
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
@@ -579,33 +616,35 @@ spack:
found = []
for ci_key in yaml_contents.keys():
ci_obj = yaml_contents[ci_key]
- if 'dependency-install' in ci_key:
- assert('stage' in ci_obj)
- assert(ci_obj['stage'] == 'stage-0')
- found.append('dependency-install')
- if 'flatten-deps' in ci_key:
- assert('stage' in ci_obj)
- assert(ci_obj['stage'] == 'stage-1')
- found.append('flatten-deps')
-
- assert('flatten-deps' in found)
- assert('dependency-install' in found)
-
-
-def test_ci_generate_for_pr_pipeline(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment):
+ if "dependency-install" in ci_key:
+ assert "stage" in ci_obj
+ assert ci_obj["stage"] == "stage-0"
+ found.append("dependency-install")
+ if "flatten-deps" in ci_key:
+ assert "stage" in ci_obj
+ assert ci_obj["stage"] == "stage-1"
+ found.append("flatten-deps")
+
+ assert "flatten-deps" in found
+ assert "dependency-install" in found
+
+
+def test_ci_generate_for_pr_pipeline(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+):
"""Test that PR pipelines do not include a final stage job for
rebuilding the mirror index, even if that job is specifically
configured"""
- os.environ.update({
- 'SPACK_PIPELINE_TYPE': 'spack_pull_request',
- 'SPACK_PR_BRANCH': 'fake-test-branch',
- })
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ os.environ.update(
+ {
+ "SPACK_PIPELINE_TYPE": "spack_pull_request",
+ "SPACK_PR_BRANCH": "fake-test-branch",
+ }
+ )
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- flatten-deps
@@ -628,35 +667,36 @@ spack:
image: donotcare
tags: [donotcare]
rebuild-index: False
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
- assert('rebuild-index' not in yaml_contents)
+ assert "rebuild-index" not in yaml_contents
- assert('variables' in yaml_contents)
- pipeline_vars = yaml_contents['variables']
- assert('SPACK_PIPELINE_TYPE' in pipeline_vars)
- assert(pipeline_vars['SPACK_PIPELINE_TYPE'] == 'spack_pull_request')
+ assert "variables" in yaml_contents
+ pipeline_vars = yaml_contents["variables"]
+ assert "SPACK_PIPELINE_TYPE" in pipeline_vars
+ assert pipeline_vars["SPACK_PIPELINE_TYPE"] == "spack_pull_request"
-def test_ci_generate_with_external_pkg(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment):
+def test_ci_generate_with_external_pkg(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+):
"""Make sure we do not generate jobs for external pkgs"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
@@ -672,45 +712,53 @@ spack:
tags:
- donotcare
image: donotcare
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
yaml_contents = syaml.load(f)
# Check that the "externaltool" package was not erroneously staged
- assert not any('externaltool' in key for key in yaml_contents)
+ assert not any("externaltool" in key for key in yaml_contents)
-@pytest.mark.xfail(reason='fails intermittently and covered by gitlab ci')
-def test_ci_rebuild(tmpdir, mutable_mock_env_path,
- install_mockery, mock_packages, monkeypatch,
- mock_gnupghome, mock_fetch, ci_base_environment,
- mock_binary_index):
- working_dir = tmpdir.join('working_dir')
+@pytest.mark.xfail(reason="fails intermittently and covered by gitlab ci")
+def test_ci_rebuild(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ monkeypatch,
+ mock_gnupghome,
+ mock_fetch,
+ ci_base_environment,
+ mock_binary_index,
+):
+ working_dir = tmpdir.join("working_dir")
- log_dir = os.path.join(working_dir.strpath, 'logs')
- repro_dir = os.path.join(working_dir.strpath, 'repro')
- env_dir = working_dir.join('concrete_env')
+ log_dir = os.path.join(working_dir.strpath, "logs")
+ repro_dir = os.path.join(working_dir.strpath, "repro")
+ env_dir = working_dir.join("concrete_env")
- mirror_dir = working_dir.join('mirror')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = working_dir.join("mirror")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
- broken_specs_path = os.path.join(working_dir.strpath, 'naughty-list')
- broken_specs_url = url_util.join('file://', broken_specs_path)
- temp_storage_url = 'file:///path/to/per/pipeline/storage'
+ broken_specs_path = os.path.join(working_dir.strpath, "naughty-list")
+ broken_specs_url = url_util.join("file://", broken_specs_path)
+ temp_storage_url = "file:///path/to/per/pipeline/storage"
- ci_job_url = 'https://some.domain/group/project/-/jobs/42'
- ci_pipeline_url = 'https://some.domain/group/project/-/pipelines/7'
+ ci_job_url = "https://some.domain/group/project/-/jobs/42"
+ ci_pipeline_url = "https://some.domain/group/project/-/pipelines/7"
signing_key_dir = spack_paths.mock_gpg_keys_path
- signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
+ signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
with open(signing_key_path) as fd:
signing_key = fd.read()
@@ -737,15 +785,17 @@ spack:
url: https://my.fake.cdash
project: Not used
site: Nothing
-""".format(mirror_url, broken_specs_url, temp_storage_url)
+""".format(
+ mirror_url, broken_specs_url, temp_storage_url
+ )
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
f.write(spack_yaml_contents)
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- with ev.read('test') as env:
+ env_cmd("create", "test", "./spack.yaml")
+ with ev.read("test") as env:
with env.write_transaction():
env.concretize()
env.write()
@@ -753,99 +803,104 @@ spack:
if not os.path.exists(env_dir.strpath):
os.makedirs(env_dir.strpath)
- shutil.copyfile(env.manifest_path,
- os.path.join(env_dir.strpath, 'spack.yaml'))
- shutil.copyfile(env.lock_path,
- os.path.join(env_dir.strpath, 'spack.lock'))
+ shutil.copyfile(env.manifest_path, os.path.join(env_dir.strpath, "spack.yaml"))
+ shutil.copyfile(env.lock_path, os.path.join(env_dir.strpath, "spack.lock"))
root_spec_dag_hash = None
for h, s in env.specs_by_hash.items():
- if s.name == 'archive-files':
+ if s.name == "archive-files":
root_spec_dag_hash = h
assert root_spec_dag_hash
def fake_cdash_register(build_name, base_url, project, site, track):
- return ('fakebuildid', 'fakestamp')
+ return ("fakebuildid", "fakestamp")
- monkeypatch.setattr(spack.cmd.ci, 'CI_REBUILD_INSTALL_BASE_ARGS', [
- 'notcommand'
- ])
- monkeypatch.setattr(spack.cmd.ci, 'INSTALL_FAIL_CODE', 127)
+ monkeypatch.setattr(spack.cmd.ci, "CI_REBUILD_INSTALL_BASE_ARGS", ["notcommand"])
+ monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127)
with env_dir.as_cwd():
- env_cmd('activate', '--without-view', '--sh', '-d', '.')
+ env_cmd("activate", "--without-view", "--sh", "-d", ".")
# Create environment variables as gitlab would do it
- os.environ.update({
- 'SPACK_ARTIFACTS_ROOT': working_dir.strpath,
- 'SPACK_JOB_LOG_DIR': log_dir,
- 'SPACK_JOB_REPRO_DIR': repro_dir,
- 'SPACK_LOCAL_MIRROR_DIR': mirror_dir.strpath,
- 'SPACK_CONCRETE_ENV_DIR': env_dir.strpath,
- 'CI_PIPELINE_ID': '7192',
- 'SPACK_SIGNING_KEY': signing_key,
- 'SPACK_ROOT_SPEC': root_spec_dag_hash,
- 'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
- 'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
- 'SPACK_COMPILER_ACTION': 'NONE',
- 'SPACK_CDASH_BUILD_NAME': '(specs) archive-files',
- 'SPACK_REMOTE_MIRROR_URL': mirror_url,
- 'SPACK_PIPELINE_TYPE': 'spack_protected_branch',
- 'CI_JOB_URL': ci_job_url,
- 'CI_PIPELINE_URL': ci_pipeline_url,
- })
-
- ci_cmd('rebuild', fail_on_error=False)
+ os.environ.update(
+ {
+ "SPACK_ARTIFACTS_ROOT": working_dir.strpath,
+ "SPACK_JOB_LOG_DIR": log_dir,
+ "SPACK_JOB_REPRO_DIR": repro_dir,
+ "SPACK_LOCAL_MIRROR_DIR": mirror_dir.strpath,
+ "SPACK_CONCRETE_ENV_DIR": env_dir.strpath,
+ "CI_PIPELINE_ID": "7192",
+ "SPACK_SIGNING_KEY": signing_key,
+ "SPACK_ROOT_SPEC": root_spec_dag_hash,
+ "SPACK_JOB_SPEC_DAG_HASH": root_spec_dag_hash,
+ "SPACK_JOB_SPEC_PKG_NAME": "archive-files",
+ "SPACK_COMPILER_ACTION": "NONE",
+ "SPACK_CDASH_BUILD_NAME": "(specs) archive-files",
+ "SPACK_REMOTE_MIRROR_URL": mirror_url,
+ "SPACK_PIPELINE_TYPE": "spack_protected_branch",
+ "CI_JOB_URL": ci_job_url,
+ "CI_PIPELINE_URL": ci_pipeline_url,
+ }
+ )
+
+ ci_cmd("rebuild", fail_on_error=False)
expected_repro_files = [
- 'install.sh',
- 'root.json',
- 'archive-files.json',
- 'spack.yaml',
- 'spack.lock'
+ "install.sh",
+ "root.json",
+ "archive-files.json",
+ "spack.yaml",
+ "spack.lock",
]
repro_files = os.listdir(repro_dir)
- assert(all([f in repro_files for f in expected_repro_files]))
+ assert all([f in repro_files for f in expected_repro_files])
- install_script_path = os.path.join(repro_dir, 'install.sh')
+ install_script_path = os.path.join(repro_dir, "install.sh")
install_line = None
with open(install_script_path) as fd:
for line in fd:
if line.startswith('"notcommand"'):
install_line = line
- assert(install_line)
+ assert install_line
def mystrip(s):
- return s.strip('"').rstrip('\n').rstrip('"')
+ return s.strip('"').rstrip("\n").rstrip('"')
- install_parts = [mystrip(s) for s in install_line.split(' ')]
+ install_parts = [mystrip(s) for s in install_line.split(" ")]
- assert('--keep-stage' in install_parts)
- assert('--no-check-signature' not in install_parts)
- assert('--no-add' in install_parts)
- assert('-f' in install_parts)
- flag_index = install_parts.index('-f')
- assert('archive-files.json' in install_parts[flag_index + 1])
+ assert "--keep-stage" in install_parts
+ assert "--no-check-signature" not in install_parts
+ assert "--no-add" in install_parts
+ assert "-f" in install_parts
+ flag_index = install_parts.index("-f")
+ assert "archive-files.json" in install_parts[flag_index + 1]
broken_spec_file = os.path.join(broken_specs_path, root_spec_dag_hash)
with open(broken_spec_file) as fd:
broken_spec_content = fd.read()
- assert(ci_job_url in broken_spec_content)
- assert(ci_pipeline_url) in broken_spec_content
+ assert ci_job_url in broken_spec_content
+        assert ci_pipeline_url in broken_spec_content
- env_cmd('deactivate')
+ env_cmd("deactivate")
-def test_ci_nothing_to_rebuild(tmpdir, mutable_mock_env_path,
- install_mockery, mock_packages, monkeypatch,
- mock_fetch, ci_base_environment, mock_binary_index):
- working_dir = tmpdir.join('working_dir')
+def test_ci_nothing_to_rebuild(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ monkeypatch,
+ mock_fetch,
+ ci_base_environment,
+ mock_binary_index,
+):
+ working_dir = tmpdir.join("working_dir")
- mirror_dir = working_dir.join('mirror')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = working_dir.join("mirror")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
spack_yaml_contents = """
spack:
@@ -864,69 +919,79 @@ spack:
tags:
- donotcare
image: donotcare
-""".format(mirror_url)
+""".format(
+ mirror_url
+ )
- install_cmd('archive-files')
- buildcache_cmd('create', '-a', '-f', '-u', '--mirror-url',
- mirror_url, 'archive-files')
+ install_cmd("archive-files")
+ buildcache_cmd("create", "-a", "-f", "-u", "--mirror-url", mirror_url, "archive-files")
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
f.write(spack_yaml_contents)
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- with ev.read('test') as env:
+ env_cmd("create", "test", "./spack.yaml")
+ with ev.read("test") as env:
env.concretize()
root_spec_dag_hash = None
for h, s in env.specs_by_hash.items():
- if s.name == 'archive-files':
+ if s.name == "archive-files":
root_spec_dag_hash = h
# Create environment variables as gitlab would do it
- os.environ.update({
- 'SPACK_ARTIFACTS_ROOT': working_dir.strpath,
- 'SPACK_JOB_LOG_DIR': 'log_dir',
- 'SPACK_JOB_REPRO_DIR': 'repro_dir',
- 'SPACK_LOCAL_MIRROR_DIR': mirror_dir.strpath,
- 'SPACK_CONCRETE_ENV_DIR': tmpdir.strpath,
- 'SPACK_ROOT_SPEC': root_spec_dag_hash,
- 'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
- 'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
- 'SPACK_COMPILER_ACTION': 'NONE',
- 'SPACK_REMOTE_MIRROR_URL': mirror_url,
- })
+ os.environ.update(
+ {
+ "SPACK_ARTIFACTS_ROOT": working_dir.strpath,
+ "SPACK_JOB_LOG_DIR": "log_dir",
+ "SPACK_JOB_REPRO_DIR": "repro_dir",
+ "SPACK_LOCAL_MIRROR_DIR": mirror_dir.strpath,
+ "SPACK_CONCRETE_ENV_DIR": tmpdir.strpath,
+ "SPACK_ROOT_SPEC": root_spec_dag_hash,
+ "SPACK_JOB_SPEC_DAG_HASH": root_spec_dag_hash,
+ "SPACK_JOB_SPEC_PKG_NAME": "archive-files",
+ "SPACK_COMPILER_ACTION": "NONE",
+ "SPACK_REMOTE_MIRROR_URL": mirror_url,
+ }
+ )
def fake_dl_method(spec, *args, **kwargs):
- print('fake download buildcache {0}'.format(spec.name))
+ print("fake download buildcache {0}".format(spec.name))
- monkeypatch.setattr(
- spack.binary_distribution, 'download_single_spec', fake_dl_method)
+ monkeypatch.setattr(spack.binary_distribution, "download_single_spec", fake_dl_method)
- ci_out = ci_cmd('rebuild', output=str)
+ ci_out = ci_cmd("rebuild", output=str)
- assert('No need to rebuild archive-files' in ci_out)
- assert('fake download buildcache archive-files' in ci_out)
+ assert "No need to rebuild archive-files" in ci_out
+ assert "fake download buildcache archive-files" in ci_out
- env_cmd('deactivate')
+ env_cmd("deactivate")
-def test_ci_generate_mirror_override(tmpdir, mutable_mock_env_path,
- install_mockery_mutable_config, mock_packages,
- mock_fetch, mock_stage, mock_binary_index,
- ci_base_environment):
+def test_ci_generate_mirror_override(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_stage,
+ mock_binary_index,
+ ci_base_environment,
+):
"""Ensure that protected pipelines using --buildcache-destination do not
skip building specs that are not in the override mirror when they are
found in the main mirror."""
- os.environ.update({
- 'SPACK_PIPELINE_TYPE': 'spack_protected_branch',
- })
+ os.environ.update(
+ {
+ "SPACK_PIPELINE_TYPE": "spack_protected_branch",
+ }
+ )
- working_dir = tmpdir.join('working_dir')
+ working_dir = tmpdir.join("working_dir")
- mirror_dir = working_dir.join('mirror')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = working_dir.join("mirror")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
spack_yaml_contents = """
spack:
@@ -948,53 +1013,68 @@ spack:
tags:
- nonbuildtag
image: basicimage
-""".format(mirror_url)
+""".format(
+ mirror_url
+ )
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
f.write(spack_yaml_contents)
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- first_ci_yaml = str(tmpdir.join('.gitlab-ci-1.yml'))
- second_ci_yaml = str(tmpdir.join('.gitlab-ci-2.yml'))
- with ev.read('test'):
+ env_cmd("create", "test", "./spack.yaml")
+ first_ci_yaml = str(tmpdir.join(".gitlab-ci-1.yml"))
+ second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml"))
+ with ev.read("test"):
install_cmd()
- buildcache_cmd('create', '-u', '--mirror-url', mirror_url, 'patchelf')
- buildcache_cmd('update-index', '--mirror-url', mirror_url, output=str)
+ buildcache_cmd("create", "-u", "--mirror-url", mirror_url, "patchelf")
+ buildcache_cmd("update-index", "--mirror-url", mirror_url, output=str)
# This generate should not trigger a rebuild of patchelf, since it's in
# the main mirror referenced in the environment.
- ci_cmd('generate', '--check-index-only', '--output-file', first_ci_yaml)
+ ci_cmd("generate", "--check-index-only", "--output-file", first_ci_yaml)
# Because we used a mirror override (--buildcache-destination) on a
# spack protected pipeline, we expect to only look in the override
# mirror for the spec, and thus the patchelf job should be generated in
# this pipeline
- ci_cmd('generate', '--check-index-only', '--output-file', second_ci_yaml,
- '--buildcache-destination', 'file:///mirror/not/exist')
+ ci_cmd(
+ "generate",
+ "--check-index-only",
+ "--output-file",
+ second_ci_yaml,
+ "--buildcache-destination",
+ "file:///mirror/not/exist",
+ )
with open(first_ci_yaml) as fd1:
first_yaml = fd1.read()
- assert 'no-specs-to-rebuild' in first_yaml
+ assert "no-specs-to-rebuild" in first_yaml
with open(second_ci_yaml) as fd2:
second_yaml = fd2.read()
- assert 'no-specs-to-rebuild' not in second_yaml
+ assert "no-specs-to-rebuild" not in second_yaml
@pytest.mark.disable_clean_stage_check
-def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
- install_mockery_mutable_config, mock_packages,
- mock_fetch, mock_stage, mock_gnupghome,
- ci_base_environment, mock_binary_index):
- working_dir = tmpdir.join('working_dir')
-
- mirror_dir = working_dir.join('mirror')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+def test_push_mirror_contents(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_stage,
+ mock_gnupghome,
+ ci_base_environment,
+ mock_binary_index,
+):
+ working_dir = tmpdir.join("working_dir")
+
+ mirror_dir = working_dir.join("mirror")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
signing_key_dir = spack_paths.mock_gpg_keys_path
- signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
+ signing_key_path = os.path.join(signing_key_dir, "package-signing-key")
with open(signing_key_path) as fd:
signing_key = fd.read()
@@ -1021,51 +1101,51 @@ spack:
tags:
- nonbuildtag
image: basicimage
-""".format(mirror_url)
+""".format(
+ mirror_url
+ )
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
f.write(spack_yaml_contents)
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- with ev.read('test') as env:
- spec_map = ci.get_concrete_specs(
- env, 'patchelf', 'patchelf', 'FIND_ANY')
- concrete_spec = spec_map['patchelf']
+ env_cmd("create", "test", "./spack.yaml")
+ with ev.read("test") as env:
+ spec_map = ci.get_concrete_specs(env, "patchelf", "patchelf", "FIND_ANY")
+ concrete_spec = spec_map["patchelf"]
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
- json_path = str(tmpdir.join('spec.json'))
- with open(json_path, 'w') as ypfd:
+ json_path = str(tmpdir.join("spec.json"))
+ with open(json_path, "w") as ypfd:
ypfd.write(spec_json)
- install_cmd('--keep-stage', json_path)
+ install_cmd("--keep-stage", json_path)
# env, spec, json_path, mirror_url, build_id, sign_binaries
ci.push_mirror_contents(env, json_path, mirror_url, True)
- buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')
+ buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
# Now test the --prune-dag (default) option of spack ci generate
- mirror_cmd('add', 'test-ci', mirror_url)
+ mirror_cmd("add", "test-ci", mirror_url)
- outputfile_pruned = str(tmpdir.join('pruned_pipeline.yml'))
- ci_cmd('generate', '--output-file', outputfile_pruned)
+ outputfile_pruned = str(tmpdir.join("pruned_pipeline.yml"))
+ ci_cmd("generate", "--output-file", outputfile_pruned)
with open(outputfile_pruned) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
- assert('no-specs-to-rebuild' in yaml_contents)
+ assert "no-specs-to-rebuild" in yaml_contents
# Make sure there are no other spec jobs or rebuild-index
- assert(len(yaml_contents.keys()) == 1)
- the_elt = yaml_contents['no-specs-to-rebuild']
- assert('tags' in the_elt)
- assert('nonbuildtag' in the_elt['tags'])
- assert('image' in the_elt)
- assert(the_elt['image'] == 'basicimage')
+ assert len(yaml_contents.keys()) == 1
+ the_elt = yaml_contents["no-specs-to-rebuild"]
+ assert "tags" in the_elt
+ assert "nonbuildtag" in the_elt["tags"]
+ assert "image" in the_elt
+ assert the_elt["image"] == "basicimage"
- outputfile_not_pruned = str(tmpdir.join('unpruned_pipeline.yml'))
- ci_cmd('generate', '--no-prune-dag', '--output-file',
- outputfile_not_pruned)
+ outputfile_not_pruned = str(tmpdir.join("unpruned_pipeline.yml"))
+ ci_cmd("generate", "--no-prune-dag", "--output-file", outputfile_not_pruned)
# Test the --no-prune-dag option of spack ci generate
with open(outputfile_not_pruned) as f:
@@ -1075,38 +1155,38 @@ spack:
found_spec_job = False
for ci_key in yaml_contents.keys():
- if '(specs) patchelf' in ci_key:
+ if "(specs) patchelf" in ci_key:
the_elt = yaml_contents[ci_key]
- assert('variables' in the_elt)
- job_vars = the_elt['variables']
- assert('SPACK_SPEC_NEEDS_REBUILD' in job_vars)
- assert(job_vars['SPACK_SPEC_NEEDS_REBUILD'] == 'False')
+ assert "variables" in the_elt
+ job_vars = the_elt["variables"]
+ assert "SPACK_SPEC_NEEDS_REBUILD" in job_vars
+ assert job_vars["SPACK_SPEC_NEEDS_REBUILD"] == "False"
found_spec_job = True
- assert(found_spec_job)
+ assert found_spec_job
- mirror_cmd('rm', 'test-ci')
+ mirror_cmd("rm", "test-ci")
# Test generating buildcache index while we have bin mirror
- buildcache_cmd('update-index', '--mirror-url', mirror_url)
- index_path = os.path.join(buildcache_path, 'index.json')
+ buildcache_cmd("update-index", "--mirror-url", mirror_url)
+ index_path = os.path.join(buildcache_path, "index.json")
with open(index_path) as idx_fd:
index_object = json.load(idx_fd)
validate(index_object, db_idx_schema)
# Now that index is regenerated, validate "buildcache list" output
- buildcache_list_output = buildcache_cmd('list', output=str)
- assert('patchelf' in buildcache_list_output)
+ buildcache_list_output = buildcache_cmd("list", output=str)
+ assert "patchelf" in buildcache_list_output
# Also test buildcache_spec schema
bc_files_list = os.listdir(buildcache_path)
for file_name in bc_files_list:
- if file_name.endswith('.spec.json.sig'):
+ if file_name.endswith(".spec.json.sig"):
spec_json_path = os.path.join(buildcache_path, file_name)
with open(spec_json_path) as json_fd:
json_object = Spec.extract_json_from_clearsig(json_fd.read())
validate(json_object, specfile_schema)
- logs_dir = working_dir.join('logs_dir')
+ logs_dir = working_dir.join("logs_dir")
if not os.path.exists(logs_dir.strpath):
os.makedirs(logs_dir.strpath)
@@ -1114,50 +1194,49 @@ spack:
logs_dir_list = os.listdir(logs_dir.strpath)
- assert('spack-build-out.txt' in logs_dir_list)
+ assert "spack-build-out.txt" in logs_dir_list
# Also just make sure that if something goes wrong with the
# stage logs copy, no exception is thrown
ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)
- dl_dir = working_dir.join('download_dir')
+ dl_dir = working_dir.join("download_dir")
if not os.path.exists(dl_dir.strpath):
os.makedirs(dl_dir.strpath)
- buildcache_cmd('download', '--spec-file', json_path, '--path',
- dl_dir.strpath)
+ buildcache_cmd("download", "--spec-file", json_path, "--path", dl_dir.strpath)
dl_dir_list = os.listdir(dl_dir.strpath)
- assert(len(dl_dir_list) == 2)
+ assert len(dl_dir_list) == 2
def test_push_mirror_contents_exceptions(monkeypatch, capsys):
def failing_access(*args, **kwargs):
- raise Exception('Error: Access Denied')
+ raise Exception("Error: Access Denied")
- monkeypatch.setattr(spack.ci, '_push_mirror_contents', failing_access)
+ monkeypatch.setattr(spack.ci, "_push_mirror_contents", failing_access)
    # Input doesn't matter, as we are faking exceptional output
- url = 'fakejunk'
+ url = "fakejunk"
ci.push_mirror_contents(None, None, url, None)
captured = capsys.readouterr()
std_out = captured[0]
- expect_msg = 'Permission problem writing to {0}'.format(url)
+ expect_msg = "Permission problem writing to {0}".format(url)
assert expect_msg in std_out
-def test_ci_generate_override_runner_attrs(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment):
+def test_ci_generate_override_runner_attrs(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+):
"""Test that we get the behavior we want with respect to the provision
- of runner attributes like tags, variables, and scripts, both when we
- inherit them from the top level, as well as when we override one or
- more at the runner level"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ of runner attributes like tags, variables, and scripts, both when we
+ inherit them from the top level, as well as when we override one or
+ more at the runner level"""
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- flatten-deps
@@ -1205,96 +1284,94 @@ spack:
service-job-attributes:
image: donotcare
tags: [donotcare]
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- monkeypatch.setattr(
- spack.main, 'get_version', lambda: '0.15.3-416-12ad69eb1')
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ monkeypatch.setattr(spack.main, "get_version", lambda: "0.15.3-416-12ad69eb1")
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
- assert('variables' in yaml_contents)
- global_vars = yaml_contents['variables']
- assert('SPACK_VERSION' in global_vars)
- assert(global_vars['SPACK_VERSION'] == '0.15.3-416-12ad69eb1')
- assert('SPACK_CHECKOUT_VERSION' in global_vars)
- assert(global_vars['SPACK_CHECKOUT_VERSION'] == '12ad69eb1')
+ assert "variables" in yaml_contents
+ global_vars = yaml_contents["variables"]
+ assert "SPACK_VERSION" in global_vars
+ assert global_vars["SPACK_VERSION"] == "0.15.3-416-12ad69eb1"
+ assert "SPACK_CHECKOUT_VERSION" in global_vars
+ assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"
for ci_key in yaml_contents.keys():
- if '(specs) b' in ci_key:
- assert(False)
- if '(specs) a' in ci_key:
+ if "(specs) b" in ci_key:
+ assert False
+ if "(specs) a" in ci_key:
# Make sure a's attributes override variables, and all the
# scripts. Also, make sure the 'toplevel' tag doesn't
# appear twice, but that a's specific extra tag does appear
the_elt = yaml_contents[ci_key]
- assert(the_elt['variables']['ONE'] == 'specificvarone')
- assert(the_elt['variables']['TWO'] == 'specificvartwo')
- assert('THREE' not in the_elt['variables'])
- assert(len(the_elt['tags']) == 2)
- assert('specific-a' in the_elt['tags'])
- assert('toplevel' in the_elt['tags'])
- assert(len(the_elt['before_script']) == 1)
- assert(the_elt['before_script'][0] ==
- 'custom pre step one')
- assert(len(the_elt['script']) == 1)
- assert(the_elt['script'][0] == 'custom main step')
- assert(len(the_elt['after_script']) == 1)
- assert(the_elt['after_script'][0] ==
- 'custom post step one')
- if '(specs) dependency-install' in ci_key:
+ assert the_elt["variables"]["ONE"] == "specificvarone"
+ assert the_elt["variables"]["TWO"] == "specificvartwo"
+ assert "THREE" not in the_elt["variables"]
+ assert len(the_elt["tags"]) == 2
+ assert "specific-a" in the_elt["tags"]
+ assert "toplevel" in the_elt["tags"]
+ assert len(the_elt["before_script"]) == 1
+ assert the_elt["before_script"][0] == "custom pre step one"
+ assert len(the_elt["script"]) == 1
+ assert the_elt["script"][0] == "custom main step"
+ assert len(the_elt["after_script"]) == 1
+ assert the_elt["after_script"][0] == "custom post step one"
+ if "(specs) dependency-install" in ci_key:
# Since the dependency-install match omits any
# runner-attributes, make sure it inherited all the
# top-level attributes.
the_elt = yaml_contents[ci_key]
- assert(the_elt['variables']['ONE'] == 'toplevelvarone')
- assert(the_elt['variables']['TWO'] == 'toplevelvartwo')
- assert('THREE' not in the_elt['variables'])
- assert(len(the_elt['tags']) == 1)
- assert(the_elt['tags'][0] == 'toplevel')
- assert(len(the_elt['before_script']) == 2)
- assert(the_elt['before_script'][0] == 'pre step one')
- assert(the_elt['before_script'][1] == 'pre step two')
- assert(len(the_elt['script']) == 1)
- assert(the_elt['script'][0] == 'main step')
- assert(len(the_elt['after_script']) == 1)
- assert(the_elt['after_script'][0] == 'post step one')
- if '(specs) flatten-deps' in ci_key:
+ assert the_elt["variables"]["ONE"] == "toplevelvarone"
+ assert the_elt["variables"]["TWO"] == "toplevelvartwo"
+ assert "THREE" not in the_elt["variables"]
+ assert len(the_elt["tags"]) == 1
+ assert the_elt["tags"][0] == "toplevel"
+ assert len(the_elt["before_script"]) == 2
+ assert the_elt["before_script"][0] == "pre step one"
+ assert the_elt["before_script"][1] == "pre step two"
+ assert len(the_elt["script"]) == 1
+ assert the_elt["script"][0] == "main step"
+ assert len(the_elt["after_script"]) == 1
+ assert the_elt["after_script"][0] == "post step one"
+ if "(specs) flatten-deps" in ci_key:
# The flatten-deps match specifies that we keep the two
            # top-level variables, but add a third specific one. It
# also adds a custom tag which should be combined with
# the top-level tag.
the_elt = yaml_contents[ci_key]
- assert(the_elt['variables']['ONE'] == 'toplevelvarone')
- assert(the_elt['variables']['TWO'] == 'toplevelvartwo')
- assert(the_elt['variables']['THREE'] == 'specificvarthree')
- assert(len(the_elt['tags']) == 2)
- assert('specific-one' in the_elt['tags'])
- assert('toplevel' in the_elt['tags'])
- assert(len(the_elt['before_script']) == 2)
- assert(the_elt['before_script'][0] == 'pre step one')
- assert(the_elt['before_script'][1] == 'pre step two')
- assert(len(the_elt['script']) == 1)
- assert(the_elt['script'][0] == 'main step')
- assert(len(the_elt['after_script']) == 1)
- assert(the_elt['after_script'][0] == 'post step one')
-
-
-def test_ci_generate_with_workarounds(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment):
+ assert the_elt["variables"]["ONE"] == "toplevelvarone"
+ assert the_elt["variables"]["TWO"] == "toplevelvartwo"
+ assert the_elt["variables"]["THREE"] == "specificvarthree"
+ assert len(the_elt["tags"]) == 2
+ assert "specific-one" in the_elt["tags"]
+ assert "toplevel" in the_elt["tags"]
+ assert len(the_elt["before_script"]) == 2
+ assert the_elt["before_script"][0] == "pre step one"
+ assert the_elt["before_script"][1] == "pre step two"
+ assert len(the_elt["script"]) == 1
+ assert the_elt["script"][0] == "main step"
+ assert len(the_elt["after_script"]) == 1
+ assert the_elt["after_script"][0] == "post step one"
+
+
+def test_ci_generate_with_workarounds(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+):
"""Make sure the post-processing cli workarounds do what they should"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- callpath%gcc@3.0
@@ -1308,14 +1385,15 @@ spack:
- donotcare
image: donotcare
enable-artifacts-buildcache: true
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile, '--dependencies')
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile, "--dependencies")
with open(outputfile) as f:
contents = f.read()
@@ -1324,23 +1402,23 @@ spack:
found_one = False
for ci_key in yaml_contents.keys():
- if ci_key.startswith('(specs) '):
+ if ci_key.startswith("(specs) "):
found_one = True
job_obj = yaml_contents[ci_key]
- assert('needs' not in job_obj)
- assert('dependencies' in job_obj)
+ assert "needs" not in job_obj
+ assert "dependencies" in job_obj
- assert(found_one is True)
+ assert found_one is True
@pytest.mark.disable_clean_stage_check
-def test_ci_rebuild_index(tmpdir, mutable_mock_env_path,
- install_mockery, mock_packages, mock_fetch,
- mock_stage):
- working_dir = tmpdir.join('working_dir')
+def test_ci_rebuild_index(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, mock_fetch, mock_stage
+):
+ working_dir = tmpdir.join("working_dir")
- mirror_dir = working_dir.join('mirror')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = working_dir.join("mirror")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
spack_yaml_contents = """
spack:
@@ -1356,78 +1434,83 @@ spack:
tags:
- donotcare
image: donotcare
-""".format(mirror_url)
+""".format(
+ mirror_url
+ )
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
f.write(spack_yaml_contents)
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- with ev.read('test') as env:
- spec_map = ci.get_concrete_specs(
- env, 'callpath', 'callpath', 'FIND_ANY')
- concrete_spec = spec_map['callpath']
+ env_cmd("create", "test", "./spack.yaml")
+ with ev.read("test") as env:
+ spec_map = ci.get_concrete_specs(env, "callpath", "callpath", "FIND_ANY")
+ concrete_spec = spec_map["callpath"]
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
- json_path = str(tmpdir.join('spec.json'))
- with open(json_path, 'w') as ypfd:
+ json_path = str(tmpdir.join("spec.json"))
+ with open(json_path, "w") as ypfd:
ypfd.write(spec_json)
- install_cmd('--keep-stage', '-f', json_path)
- buildcache_cmd('create', '-u', '-a', '-f', '--mirror-url',
- mirror_url, 'callpath')
- ci_cmd('rebuild-index')
+ install_cmd("--keep-stage", "-f", json_path)
+ buildcache_cmd("create", "-u", "-a", "-f", "--mirror-url", mirror_url, "callpath")
+ ci_cmd("rebuild-index")
- buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')
- index_path = os.path.join(buildcache_path, 'index.json')
+ buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
+ index_path = os.path.join(buildcache_path, "index.json")
with open(index_path) as idx_fd:
index_object = json.load(idx_fd)
validate(index_object, db_idx_schema)
def test_ci_generate_bootstrap_prune_dag(
- install_mockery_mutable_config, mock_packages, mock_fetch,
- mock_archive, mutable_config, monkeypatch, tmpdir,
- mutable_mock_env_path, ci_base_environment):
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_archive,
+ mutable_config,
+ monkeypatch,
+ tmpdir,
+ mutable_mock_env_path,
+ ci_base_environment,
+):
"""Test compiler bootstrapping with DAG pruning. Specifically, make
- sure that if we detect the bootstrapped compiler needs to be rebuilt,
- we ensure the spec we want to build with that compiler is scheduled
- for rebuild as well."""
+ sure that if we detect the bootstrapped compiler needs to be rebuilt,
+ we ensure the spec we want to build with that compiler is scheduled
+ for rebuild as well."""
# Create a temp mirror directory for buildcache usage
- mirror_dir = tmpdir.join('mirror_dir')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = tmpdir.join("mirror_dir")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
# Install a compiler, because we want to put it in a buildcache
- install_cmd('gcc@10.1.0%gcc@4.5.0')
+ install_cmd("gcc@10.1.0%gcc@4.5.0")
# Put installed compiler in the buildcache
- buildcache_cmd('create', '-u', '-a', '-f', '-d', mirror_dir.strpath,
- 'gcc@10.1.0%gcc@4.5.0')
+ buildcache_cmd("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.1.0%gcc@4.5.0")
# Now uninstall the compiler
- uninstall_cmd('-y', 'gcc@10.1.0%gcc@4.5.0')
+ uninstall_cmd("-y", "gcc@10.1.0%gcc@4.5.0")
- monkeypatch.setattr(spack.concretize.Concretizer,
- 'check_for_compiler_existence', False)
- spack.config.set('config:install_missing_compilers', True)
- assert CompilerSpec('gcc@10.1.0') not in compilers.all_compiler_specs()
+ monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
+ spack.config.set("config:install_missing_compilers", True)
+ assert CompilerSpec("gcc@10.1.0") not in compilers.all_compiler_specs()
# Configure the mirror where we put that buildcache w/ the compiler
- mirror_cmd('add', 'test-mirror', mirror_url)
+ mirror_cmd("add", "test-mirror", mirror_url)
- install_cmd('--no-check-signature', 'a%gcc@10.1.0')
+ install_cmd("--no-check-signature", "a%gcc@10.1.0")
# Put spec built with installed compiler in the buildcache
- buildcache_cmd('create', '-u', '-a', '-f', '-d', mirror_dir.strpath,
- 'a%gcc@10.1.0')
+ buildcache_cmd("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, "a%gcc@10.1.0")
# Now uninstall the spec
- uninstall_cmd('-y', 'a%gcc@10.1.0')
+ uninstall_cmd("-y", "a%gcc@10.1.0")
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
definitions:
- bootstrap:
@@ -1451,28 +1534,32 @@ spack:
runner-attributes:
tags:
- meh
-""".format(mirror_url))
+""".format(
+ mirror_url
+ )
+ )
# Without this monkeypatch, pipeline generation process would think that
# nothing in the environment needs rebuilding. With the monkeypatch, the
# process sees the compiler as needing a rebuild, which should then result
# in the specs built with that compiler needing a rebuild too.
- def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None,
- index_only=False):
- if spec.name == 'gcc':
+ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
+ if spec.name == "gcc":
return []
else:
- return [{
- 'spec': spec,
- 'mirror_url': mirror_url,
- }]
+ return [
+ {
+ "spec": spec,
+ "mirror_url": mirror_url,
+ }
+ ]
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as of:
yaml_contents = of.read()
@@ -1480,33 +1567,33 @@ spack:
# without the monkeypatch, everything appears up to date and no
# rebuild jobs are generated.
- assert(original_yaml_contents)
- assert('no-specs-to-rebuild' in original_yaml_contents)
+ assert original_yaml_contents
+ assert "no-specs-to-rebuild" in original_yaml_contents
- monkeypatch.setattr(spack.binary_distribution,
- 'get_mirrors_for_spec',
- fake_get_mirrors_for_spec)
+ monkeypatch.setattr(
+ spack.binary_distribution, "get_mirrors_for_spec", fake_get_mirrors_for_spec
+ )
- ci_cmd('generate', '--output-file', outputfile)
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as of:
yaml_contents = of.read()
new_yaml_contents = syaml.load(yaml_contents)
- assert(new_yaml_contents)
+ assert new_yaml_contents
# This 'needs' graph reflects that even though specs 'a' and 'b' do
# not otherwise need to be rebuilt (thanks to DAG pruning), they
# both end up in the generated pipeline because the compiler they
# depend on is bootstrapped, and *does* need to be rebuilt.
needs_graph = {
- '(bootstrap) gcc': [],
- '(specs) b': [
- '(bootstrap) gcc',
+ "(bootstrap) gcc": [],
+ "(specs) b": [
+ "(bootstrap) gcc",
],
- '(specs) a': [
- '(bootstrap) gcc',
- '(specs) b',
+ "(specs) a": [
+ "(bootstrap) gcc",
+ "(specs) b",
],
}
@@ -1516,22 +1603,25 @@ spack:
def test_ci_get_stack_changed(mock_git_repo, monkeypatch):
"""Test that we can detect the change to .gitlab-ci.yml in a
mock spack git repo."""
- monkeypatch.setattr(spack.paths, 'prefix', mock_git_repo)
- assert ci.get_stack_changed('/no/such/env/path') is True
+ monkeypatch.setattr(spack.paths, "prefix", mock_git_repo)
+ assert ci.get_stack_changed("/no/such/env/path") is True
-def test_ci_generate_prune_untouched(tmpdir, mutable_mock_env_path,
- install_mockery, mock_packages,
- ci_base_environment, monkeypatch):
+def test_ci_generate_prune_untouched(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment, monkeypatch
+):
"""Test pipeline generation with pruning works to eliminate
- specs that were not affected by a change"""
- os.environ.update({
- 'SPACK_PRUNE_UNTOUCHED': 'TRUE', # enables pruning of untouched specs
- })
- mirror_url = 'https://my.fake.mirror'
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ specs that were not affected by a change"""
+ os.environ.update(
+ {
+ "SPACK_PRUNE_UNTOUCHED": "TRUE", # enables pruning of untouched specs
+ }
+ )
+ mirror_url = "https://my.fake.mirror"
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
@@ -1546,42 +1636,48 @@ spack:
tags:
- donotcare
image: donotcare
-""".format(mirror_url))
+""".format(
+ mirror_url
+ )
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
def fake_compute_affected(r1=None, r2=None):
- return ['libdwarf']
+ return ["libdwarf"]
- def fake_stack_changed(env_path, rev1='HEAD^', rev2='HEAD'):
+ def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
return False
- with ev.read('test'):
- monkeypatch.setattr(
- ci, 'compute_affected_packages', fake_compute_affected)
- monkeypatch.setattr(
- ci, 'get_stack_changed', fake_stack_changed)
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected)
+ monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed)
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
for ci_key in yaml_contents.keys():
- if 'archive-files' in ci_key or 'mpich' in ci_key:
- print('Error: archive-files and mpich should have been pruned')
- assert(False)
-
-
-def test_ci_subcommands_without_mirror(tmpdir, mutable_mock_env_path,
- mock_packages,
- install_mockery, ci_base_environment,
- mock_binary_index):
+ if "archive-files" in ci_key or "mpich" in ci_key:
+ print("Error: archive-files and mpich should have been pruned")
+ assert False
+
+
+def test_ci_subcommands_without_mirror(
+ tmpdir,
+ mutable_mock_env_path,
+ mock_packages,
+ install_mockery,
+ ci_base_environment,
+ mock_binary_index,
+):
"""Make sure we catch if there is not a mirror and report an error"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
@@ -1593,23 +1689,25 @@ spack:
tags:
- donotcare
image: donotcare
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
+ with ev.read("test"):
# Check the 'generate' subcommand
- output = ci_cmd('generate', '--output-file', outputfile,
- output=str, fail_on_error=False)
- ex = 'spack ci generate requires an env containing a mirror'
- assert(ex in output)
+ output = ci_cmd(
+ "generate", "--output-file", outputfile, output=str, fail_on_error=False
+ )
+ ex = "spack ci generate requires an env containing a mirror"
+ assert ex in output
# Also check the 'rebuild-index' subcommand
- output = ci_cmd('rebuild-index', output=str, fail_on_error=False)
- ex = 'spack ci rebuild-index requires an env containing a mirror'
- assert(ex in output)
+ output = ci_cmd("rebuild-index", output=str, fail_on_error=False)
+ ex = "spack ci rebuild-index requires an env containing a mirror"
+ assert ex in output
def test_ensure_only_one_temporary_storage():
@@ -1626,12 +1724,14 @@ def test_ensure_only_one_temporary_storage():
- donotcare
"""
- enable_artifacts = 'enable-artifacts-buildcache: True'
- temp_storage = 'temporary-storage-url-prefix: file:///temp/mirror'
+ enable_artifacts = "enable-artifacts-buildcache: True"
+ temp_storage = "temporary-storage-url-prefix: file:///temp/mirror"
specify_both = """{0}
{1}
-""".format(enable_artifacts, temp_storage)
- specify_neither = ''
+""".format(
+ enable_artifacts, temp_storage
+ )
+ specify_neither = ""
# User can specify "enable-artifacts-buildcache" (boolean)
yaml_obj = syaml.load(gitlab_ci_template.format(enable_artifacts))
@@ -1652,14 +1752,20 @@ def test_ensure_only_one_temporary_storage():
validate(yaml_obj, gitlab_ci_schema)
-def test_ci_generate_temp_storage_url(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment, mock_binary_index):
+def test_ci_generate_temp_storage_url(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ monkeypatch,
+ ci_base_environment,
+ mock_binary_index,
+):
"""Verify correct behavior when using temporary-storage-url-prefix"""
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
@@ -1674,58 +1780,59 @@ spack:
tags:
- donotcare
image: donotcare
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as of:
pipeline_doc = syaml.load(of.read())
- assert('cleanup' in pipeline_doc)
- cleanup_job = pipeline_doc['cleanup']
+ assert "cleanup" in pipeline_doc
+ cleanup_job = pipeline_doc["cleanup"]
- assert('script' in cleanup_job)
- cleanup_task = cleanup_job['script'][0]
+ assert "script" in cleanup_job
+ cleanup_task = cleanup_job["script"][0]
- assert(cleanup_task.startswith('spack -d mirror destroy'))
+ assert cleanup_task.startswith("spack -d mirror destroy")
- assert('stages' in pipeline_doc)
- stages = pipeline_doc['stages']
+ assert "stages" in pipeline_doc
+ stages = pipeline_doc["stages"]
# Cleanup job should be 2nd to last, just before rebuild-index
- assert('stage' in cleanup_job)
- assert(cleanup_job['stage'] == stages[-2])
+ assert "stage" in cleanup_job
+ assert cleanup_job["stage"] == stages[-2]
-def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment):
+def test_ci_generate_read_broken_specs_url(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+):
"""Verify that `broken-specs-url` works as intended"""
- spec_a = Spec('a')
+ spec_a = Spec("a")
spec_a.concretize()
a_dag_hash = spec_a.dag_hash()
- spec_flattendeps = Spec('flatten-deps')
+ spec_flattendeps = Spec("flatten-deps")
spec_flattendeps.concretize()
flattendeps_dag_hash = spec_flattendeps.dag_hash()
# Mark 'a' as broken (but not 'flatten-deps')
broken_spec_a_path = str(tmpdir.join(a_dag_hash))
- with open(broken_spec_a_path, 'w') as bsf:
- bsf.write('')
+ with open(broken_spec_a_path, "w") as bsf:
+ bsf.write("")
- broken_specs_url = 'file://{0}'.format(tmpdir.strpath)
+ broken_specs_url = "file://{0}".format(tmpdir.strpath)
# Test that `spack ci generate` notices this broken spec and fails.
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- flatten-deps
@@ -1744,35 +1851,36 @@ spack:
tags:
- donotcare
image: donotcare
-""".format(broken_specs_url))
+""".format(
+ broken_specs_url
+ )
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env_cmd("create", "test", "./spack.yaml")
+ with ev.read("test"):
# Check output of the 'generate' subcommand
- output = ci_cmd('generate', output=str, fail_on_error=False)
- assert('known to be broken' in output)
+ output = ci_cmd("generate", output=str, fail_on_error=False)
+ assert "known to be broken" in output
- ex = '({0})'.format(a_dag_hash)
- assert(ex in output)
+ ex = "({0})".format(a_dag_hash)
+ assert ex in output
- ex = '({0})'.format(flattendeps_dag_hash)
- assert(ex not in output)
+ ex = "({0})".format(flattendeps_dag_hash)
+ assert ex not in output
-def test_ci_generate_external_signing_job(tmpdir, mutable_mock_env_path,
- install_mockery,
- mock_packages, monkeypatch,
- ci_base_environment):
+def test_ci_generate_external_signing_job(
+ tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment
+):
"""Verify that in external signing mode: 1) each rebuild jobs includes
the location where the binary hash information is written and 2) we
properly generate a final signing job in the pipeline."""
- os.environ.update({
- 'SPACK_PIPELINE_TYPE': 'spack_protected_branch'
- })
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ os.environ.update({"SPACK_PIPELINE_TYPE": "spack_protected_branch"})
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
spack:
specs:
- archive-files
@@ -1798,31 +1906,39 @@ spack:
IMPORTANT_INFO: avalue
script:
- echo hello
-""")
+"""
+ )
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+ env_cmd("create", "test", "./spack.yaml")
+ outputfile = str(tmpdir.join(".gitlab-ci.yml"))
- with ev.read('test'):
- ci_cmd('generate', '--output-file', outputfile)
+ with ev.read("test"):
+ ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as of:
pipeline_doc = syaml.load(of.read())
- assert 'sign-pkgs' in pipeline_doc
- signing_job = pipeline_doc['sign-pkgs']
- assert 'tags' in signing_job
- signing_job_tags = signing_job['tags']
- for expected_tag in ['notary', 'protected', 'aws']:
+ assert "sign-pkgs" in pipeline_doc
+ signing_job = pipeline_doc["sign-pkgs"]
+ assert "tags" in signing_job
+ signing_job_tags = signing_job["tags"]
+ for expected_tag in ["notary", "protected", "aws"]:
assert expected_tag in signing_job_tags
-def test_ci_reproduce(tmpdir, mutable_mock_env_path,
- install_mockery, mock_packages, monkeypatch,
- last_two_git_commits, ci_base_environment, mock_binary_index):
- working_dir = tmpdir.join('repro_dir')
- image_name = 'org/image:tag'
+def test_ci_reproduce(
+ tmpdir,
+ mutable_mock_env_path,
+ install_mockery,
+ mock_packages,
+ monkeypatch,
+ last_two_git_commits,
+ ci_base_environment,
+ mock_binary_index,
+):
+ working_dir = tmpdir.join("repro_dir")
+ image_name = "org/image:tag"
spack_yaml_contents = """
spack:
@@ -1840,15 +1956,17 @@ spack:
tags:
- donotcare
image: {0}
-""".format(image_name)
+""".format(
+ image_name
+ )
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
f.write(spack_yaml_contents)
with tmpdir.as_cwd():
- env_cmd('create', 'test', './spack.yaml')
- with ev.read('test') as env:
+ env_cmd("create", "test", "./spack.yaml")
+ with ev.read("test") as env:
with env.write_transaction():
env.concretize()
env.write()
@@ -1856,69 +1974,65 @@ spack:
if not os.path.exists(working_dir.strpath):
os.makedirs(working_dir.strpath)
- shutil.copyfile(env.manifest_path,
- os.path.join(working_dir.strpath, 'spack.yaml'))
- shutil.copyfile(env.lock_path,
- os.path.join(working_dir.strpath, 'spack.lock'))
+ shutil.copyfile(env.manifest_path, os.path.join(working_dir.strpath, "spack.yaml"))
+ shutil.copyfile(env.lock_path, os.path.join(working_dir.strpath, "spack.lock"))
root_spec = None
job_spec = None
for h, s in env.specs_by_hash.items():
- if s.name == 'archive-files':
+ if s.name == "archive-files":
root_spec = s
job_spec = s
- job_spec_json_path = os.path.join(
- working_dir.strpath, 'archivefiles.json')
- with open(job_spec_json_path, 'w') as fd:
+ job_spec_json_path = os.path.join(working_dir.strpath, "archivefiles.json")
+ with open(job_spec_json_path, "w") as fd:
fd.write(job_spec.to_json(hash=ht.dag_hash))
- root_spec_json_path = os.path.join(
- working_dir.strpath, 'root.json')
- with open(root_spec_json_path, 'w') as fd:
+ root_spec_json_path = os.path.join(working_dir.strpath, "root.json")
+ with open(root_spec_json_path, "w") as fd:
fd.write(root_spec.to_json(hash=ht.dag_hash))
- artifacts_root = os.path.join(working_dir.strpath, 'scratch_dir')
- pipeline_path = os.path.join(artifacts_root, 'pipeline.yml')
+ artifacts_root = os.path.join(working_dir.strpath, "scratch_dir")
+ pipeline_path = os.path.join(artifacts_root, "pipeline.yml")
- ci_cmd('generate', '--output-file', pipeline_path,
- '--artifacts-root', artifacts_root)
+ ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root)
- job_name = ci.get_job_name(
- 'specs', False, job_spec, 'test-debian6-core2', None)
+ job_name = ci.get_job_name("specs", False, job_spec, "test-debian6-core2", None)
- repro_file = os.path.join(working_dir.strpath, 'repro.json')
+ repro_file = os.path.join(working_dir.strpath, "repro.json")
repro_details = {
- 'job_name': job_name,
- 'job_spec_json': 'archivefiles.json',
- 'root_spec_json': 'root.json',
- 'ci_project_dir': working_dir.strpath
+ "job_name": job_name,
+ "job_spec_json": "archivefiles.json",
+ "root_spec_json": "root.json",
+ "ci_project_dir": working_dir.strpath,
}
- with open(repro_file, 'w') as fd:
+ with open(repro_file, "w") as fd:
fd.write(json.dumps(repro_details))
- install_script = os.path.join(working_dir.strpath, 'install.sh')
- with open(install_script, 'w') as fd:
- fd.write('#!/bin/bash\n\n#fake install\nspack install blah\n')
+ install_script = os.path.join(working_dir.strpath, "install.sh")
+ with open(install_script, "w") as fd:
+ fd.write("#!/bin/bash\n\n#fake install\nspack install blah\n")
- spack_info_file = os.path.join(
- working_dir.strpath, 'spack_info.txt')
- with open(spack_info_file, 'w') as fd:
- fd.write('\nMerge {0} into {1}\n\n'.format(
- last_two_git_commits[1], last_two_git_commits[0]))
+ spack_info_file = os.path.join(working_dir.strpath, "spack_info.txt")
+ with open(spack_info_file, "w") as fd:
+ fd.write(
+ "\nMerge {0} into {1}\n\n".format(
+ last_two_git_commits[1], last_two_git_commits[0]
+ )
+ )
def fake_download_and_extract_artifacts(url, work_dir):
pass
- monkeypatch.setattr(ci, 'download_and_extract_artifacts',
- fake_download_and_extract_artifacts)
- rep_out = ci_cmd('reproduce-build',
- 'https://some.domain/api/v1/projects/1/jobs/2/artifacts',
- '--working-dir',
- working_dir.strpath,
- output=str)
- expect_out = 'docker run --rm -v {0}:{0} -ti {1}'.format(
- working_dir.strpath, image_name)
-
- assert(expect_out in rep_out)
+ monkeypatch.setattr(ci, "download_and_extract_artifacts", fake_download_and_extract_artifacts)
+ rep_out = ci_cmd(
+ "reproduce-build",
+ "https://some.domain/api/v1/projects/1/jobs/2/artifacts",
+ "--working-dir",
+ working_dir.strpath,
+ output=str,
+ )
+ expect_out = "docker run --rm -v {0}:{0} -ti {1}".format(working_dir.strpath, image_name)
+
+ assert expect_out in rep_out
diff --git a/lib/spack/spack/test/cmd/clean.py b/lib/spack/spack/test/cmd/clean.py
index 39b4dc3cd9..da7a8672f7 100644
--- a/lib/spack/spack/test/cmd/clean.py
+++ b/lib/spack/spack/test/cmd/clean.py
@@ -15,10 +15,9 @@ import spack.main
import spack.package_base
import spack.stage
-clean = spack.main.SpackCommand('clean')
+clean = spack.main.SpackCommand("clean")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture()
@@ -34,38 +33,33 @@ def mock_calls_for_clean(monkeypatch):
def __call__(self, *args, **kwargs):
counts[self.name] += 1
- monkeypatch.setattr(spack.package_base.PackageBase, 'do_clean',
- Counter('package'))
- monkeypatch.setattr(spack.stage, 'purge', Counter('stages'))
- monkeypatch.setattr(
- spack.caches.fetch_cache, 'destroy', Counter('downloads'),
- raising=False)
- monkeypatch.setattr(
- spack.caches.misc_cache, 'destroy', Counter('caches'))
- monkeypatch.setattr(
- spack.installer, 'clear_failures', Counter('failures'))
- monkeypatch.setattr(spack.cmd.clean, 'remove_python_cache',
- Counter('python_cache'))
+ monkeypatch.setattr(spack.package_base.PackageBase, "do_clean", Counter("package"))
+ monkeypatch.setattr(spack.stage, "purge", Counter("stages"))
+ monkeypatch.setattr(spack.caches.fetch_cache, "destroy", Counter("downloads"), raising=False)
+ monkeypatch.setattr(spack.caches.misc_cache, "destroy", Counter("caches"))
+ monkeypatch.setattr(spack.installer, "clear_failures", Counter("failures"))
+ monkeypatch.setattr(spack.cmd.clean, "remove_python_cache", Counter("python_cache"))
yield counts
-all_effects = ['stages', 'downloads', 'caches', 'failures', 'python_cache']
+all_effects = ["stages", "downloads", "caches", "failures", "python_cache"]
-@pytest.mark.usefixtures(
- 'mock_packages', 'config'
+@pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.parametrize(
+ "command_line,effects",
+ [
+ ("mpileaks", ["package"]),
+ ("-s", ["stages"]),
+ ("-sd", ["stages", "downloads"]),
+ ("-m", ["caches"]),
+ ("-f", ["failures"]),
+ ("-p", ["python_cache"]),
+ ("-a", all_effects),
+ ("", []),
+ ],
)
-@pytest.mark.parametrize('command_line,effects', [
- ('mpileaks', ['package']),
- ('-s', ['stages']),
- ('-sd', ['stages', 'downloads']),
- ('-m', ['caches']),
- ('-f', ['failures']),
- ('-p', ['python_cache']),
- ('-a', all_effects),
- ('', []),
-])
def test_function_calls(command_line, effects, mock_calls_for_clean):
# Call the command with the supplied command line
@@ -73,17 +67,17 @@ def test_function_calls(command_line, effects, mock_calls_for_clean):
# Assert that we called the expected functions the correct
# number of times
- for name in ['package'] + all_effects:
+ for name in ["package"] + all_effects:
assert mock_calls_for_clean[name] == (1 if name in effects else 0)
def test_remove_python_cache(tmpdir, monkeypatch):
- cache_files = ['file1.pyo', 'file2.pyc']
- source_file = 'file1.py'
+ cache_files = ["file1.pyo", "file2.pyc"]
+ source_file = "file1.py"
def _setup_files(directory):
# Create a python cache and source file.
- cache_dir = fs.join_path(directory, '__pycache__')
+ cache_dir = fs.join_path(directory, "__pycache__")
fs.mkdirp(cache_dir)
fs.touch(fs.join_path(directory, source_file))
fs.touch(fs.join_path(directory, cache_files[0]))
@@ -96,10 +90,10 @@ def test_remove_python_cache(tmpdir, monkeypatch):
# and the source file is not.
assert os.path.exists(fs.join_path(directory, source_file))
assert not os.path.exists(fs.join_path(directory, cache_files[0]))
- assert not os.path.exists(fs.join_path(directory, '__pycache__'))
+ assert not os.path.exists(fs.join_path(directory, "__pycache__"))
- source_dir = fs.join_path(tmpdir, 'lib', 'spack', 'spack')
- var_dir = fs.join_path(tmpdir, 'var', 'spack', 'stuff')
+ source_dir = fs.join_path(tmpdir, "lib", "spack", "spack")
+ var_dir = fs.join_path(tmpdir, "var", "spack", "stuff")
for d in [source_dir, var_dir]:
_setup_files(d)
diff --git a/lib/spack/spack/test/cmd/commands.py b/lib/spack/spack/test/cmd/commands.py
index bec6cba729..23794a6bb0 100644
--- a/lib/spack/spack/test/cmd/commands.py
+++ b/lib/spack/spack/test/cmd/commands.py
@@ -16,7 +16,7 @@ import spack.main
import spack.paths
from spack.cmd.commands import _positional_to_subroutine
-commands = spack.main.SpackCommand('commands')
+commands = spack.main.SpackCommand("commands")
parser = spack.main.make_argument_parser()
spack.main.add_all_commands(parser)
@@ -24,116 +24,118 @@ spack.main.add_all_commands(parser)
def test_names():
"""Test default output of spack commands."""
- out1 = commands().strip().split('\n')
+ out1 = commands().strip().split("\n")
assert out1 == spack.cmd.all_commands()
- assert 'rm' not in out1
+ assert "rm" not in out1
- out2 = commands('--aliases').strip().split('\n')
+ out2 = commands("--aliases").strip().split("\n")
assert out1 != out2
- assert 'rm' in out2
+ assert "rm" in out2
- out3 = commands('--format=names').strip().split('\n')
+ out3 = commands("--format=names").strip().split("\n")
assert out1 == out3
def test_subcommands():
"""Test subcommand traversal."""
- out1 = commands('--format=subcommands')
- assert 'spack mirror create' in out1
- assert 'spack buildcache list' in out1
- assert 'spack repo add' in out1
- assert 'spack pkg diff' in out1
- assert 'spack url parse' in out1
- assert 'spack view symlink' in out1
- assert 'spack rm' not in out1
- assert 'spack compiler add' not in out1
-
- out2 = commands('--aliases', '--format=subcommands')
- assert 'spack mirror create' in out2
- assert 'spack buildcache list' in out2
- assert 'spack repo add' in out2
- assert 'spack pkg diff' in out2
- assert 'spack url parse' in out2
- assert 'spack view symlink' in out2
- assert 'spack rm' in out2
- assert 'spack compiler add' in out2
+ out1 = commands("--format=subcommands")
+ assert "spack mirror create" in out1
+ assert "spack buildcache list" in out1
+ assert "spack repo add" in out1
+ assert "spack pkg diff" in out1
+ assert "spack url parse" in out1
+ assert "spack view symlink" in out1
+ assert "spack rm" not in out1
+ assert "spack compiler add" not in out1
+
+ out2 = commands("--aliases", "--format=subcommands")
+ assert "spack mirror create" in out2
+ assert "spack buildcache list" in out2
+ assert "spack repo add" in out2
+ assert "spack pkg diff" in out2
+ assert "spack url parse" in out2
+ assert "spack view symlink" in out2
+ assert "spack rm" in out2
+ assert "spack compiler add" in out2
def test_rst():
"""Do some simple sanity checks of the rst writer."""
- out1 = commands('--format=rst')
- assert 'spack mirror create' in out1
- assert 'spack buildcache list' in out1
- assert 'spack repo add' in out1
- assert 'spack pkg diff' in out1
- assert 'spack url parse' in out1
- assert 'spack view symlink' in out1
- assert 'spack rm' not in out1
- assert 'spack compiler add' not in out1
-
- out2 = commands('--aliases', '--format=rst')
- assert 'spack mirror create' in out2
- assert 'spack buildcache list' in out2
- assert 'spack repo add' in out2
- assert 'spack pkg diff' in out2
- assert 'spack url parse' in out2
- assert 'spack view symlink' in out2
- assert 'spack rm' in out2
- assert 'spack compiler add' in out2
+ out1 = commands("--format=rst")
+ assert "spack mirror create" in out1
+ assert "spack buildcache list" in out1
+ assert "spack repo add" in out1
+ assert "spack pkg diff" in out1
+ assert "spack url parse" in out1
+ assert "spack view symlink" in out1
+ assert "spack rm" not in out1
+ assert "spack compiler add" not in out1
+
+ out2 = commands("--aliases", "--format=rst")
+ assert "spack mirror create" in out2
+ assert "spack buildcache list" in out2
+ assert "spack repo add" in out2
+ assert "spack pkg diff" in out2
+ assert "spack url parse" in out2
+ assert "spack view symlink" in out2
+ assert "spack rm" in out2
+ assert "spack compiler add" in out2
def test_rst_with_input_files(tmpdir):
- filename = tmpdir.join('file.rst')
- with filename.open('w') as f:
- f.write('''
+ filename = tmpdir.join("file.rst")
+ with filename.open("w") as f:
+ f.write(
+ """
.. _cmd-spack-fetch:
cmd-spack-list:
.. _cmd-spack-stage:
_cmd-spack-install:
.. _cmd-spack-patch:
-''')
+"""
+ )
- out = commands('--format=rst', str(filename))
- for name in ['fetch', 'stage', 'patch']:
- assert (':ref:`More documentation <cmd-spack-%s>`' % name) in out
+ out = commands("--format=rst", str(filename))
+ for name in ["fetch", "stage", "patch"]:
+ assert (":ref:`More documentation <cmd-spack-%s>`" % name) in out
- for name in ['list', 'install']:
- assert (':ref:`More documentation <cmd-spack-%s>`' % name) not in out
+ for name in ["list", "install"]:
+ assert (":ref:`More documentation <cmd-spack-%s>`" % name) not in out
def test_rst_with_header(tmpdir):
- fake_header = 'this is a header!\n\n'
+ fake_header = "this is a header!\n\n"
- filename = tmpdir.join('header.txt')
- with filename.open('w') as f:
+ filename = tmpdir.join("header.txt")
+ with filename.open("w") as f:
f.write(fake_header)
- out = commands('--format=rst', '--header', str(filename))
+ out = commands("--format=rst", "--header", str(filename))
assert out.startswith(fake_header)
with pytest.raises(spack.main.SpackCommandError):
- commands('--format=rst', '--header', 'asdfjhkf')
+ commands("--format=rst", "--header", "asdfjhkf")
def test_rst_update(tmpdir):
- update_file = tmpdir.join('output')
+ update_file = tmpdir.join("output")
- commands('--update', str(update_file))
+ commands("--update", str(update_file))
assert update_file.exists()
def test_update_with_header(tmpdir):
- update_file = tmpdir.join('output')
+ update_file = tmpdir.join("output")
- commands('--update', str(update_file))
+ commands("--update", str(update_file))
assert update_file.exists()
- fake_header = 'this is a header!\n\n'
+ fake_header = "this is a header!\n\n"
- filename = tmpdir.join('header.txt')
- with filename.open('w') as f:
+ filename = tmpdir.join("header.txt")
+ with filename.open("w") as f:
f.write(fake_header)
- commands('--update', str(update_file), '--header', str(filename))
+ commands("--update", str(update_file), "--header", str(filename))
@pytest.mark.xfail
@@ -141,45 +143,45 @@ def test_no_pipe_error():
"""Make sure we don't see any pipe errors when piping output."""
proc = subprocess.Popen(
- ['spack', 'commands', '--format=rst'],
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ ["spack", "commands", "--format=rst"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
# Call close() on stdout to cause a broken pipe
proc.stdout.close()
proc.wait()
- stderr = proc.stderr.read().decode('utf-8')
+ stderr = proc.stderr.read().decode("utf-8")
- assert 'Broken pipe' not in stderr
+ assert "Broken pipe" not in stderr
def test_bash_completion():
"""Test the bash completion writer."""
- out1 = commands('--format=bash')
+ out1 = commands("--format=bash")
# Make sure header not included
- assert '_bash_completion_spack() {' not in out1
- assert '_all_packages() {' not in out1
+ assert "_bash_completion_spack() {" not in out1
+ assert "_all_packages() {" not in out1
# Make sure subcommands appear
- assert '_spack_remove() {' in out1
- assert '_spack_compiler_find() {' in out1
+ assert "_spack_remove() {" in out1
+ assert "_spack_compiler_find() {" in out1
# Make sure aliases don't appear
- assert '_spack_rm() {' not in out1
- assert '_spack_compiler_add() {' not in out1
+ assert "_spack_rm() {" not in out1
+ assert "_spack_compiler_add() {" not in out1
# Make sure options appear
- assert '-h --help' in out1
+ assert "-h --help" in out1
# Make sure subcommands are called
for function in _positional_to_subroutine.values():
assert function in out1
- out2 = commands('--aliases', '--format=bash')
+ out2 = commands("--aliases", "--format=bash")
# Make sure aliases appear
- assert '_spack_rm() {' in out2
- assert '_spack_compiler_add() {' in out2
+ assert "_spack_rm() {" in out2
+ assert "_spack_compiler_add() {" in out2
def test_update_completion_arg(tmpdir, monkeypatch):
@@ -187,7 +189,7 @@ def test_update_completion_arg(tmpdir, monkeypatch):
mock_bashfile = tmpdir.join("spack-completion.bash")
mock_args = {
- "bash": {
+ "bash": {
"aliases": True,
"format": "bash",
"header": str(mock_infile),
@@ -197,14 +199,13 @@ def test_update_completion_arg(tmpdir, monkeypatch):
# make a mock completion file missing the --update-completion argument
real_args = spack.cmd.commands.update_completion_args
- shutil.copy(real_args['bash']['header'], mock_args['bash']['header'])
- with open(real_args['bash']['update']) as old:
+ shutil.copy(real_args["bash"]["header"], mock_args["bash"]["header"])
+ with open(real_args["bash"]["update"]) as old:
old_file = old.read()
- with open(mock_args['bash']['update'], 'w') as mock:
+ with open(mock_args["bash"]["update"], "w") as mock:
mock.write(old_file.replace("--update-completion", ""))
- monkeypatch.setattr(
- spack.cmd.commands, 'update_completion_args', mock_args)
+ monkeypatch.setattr(spack.cmd.commands, "update_completion_args", mock_args)
# ensure things fail if --update-completion isn't specified alone
with pytest.raises(spack.main.SpackCommandError):
@@ -217,24 +218,25 @@ def test_update_completion_arg(tmpdir, monkeypatch):
# Note: this test is never expected to be supported on Windows
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="bash completion script generator fails on windows")
+@pytest.mark.skipif(
+ sys.platform == "win32", reason="bash completion script generator fails on windows"
+)
def test_updated_completion_scripts(tmpdir):
"""Make sure our shell tab completion scripts remain up-to-date."""
- msg = ("It looks like Spack's command-line interface has been modified. "
- "Please update Spack's shell tab completion scripts by running:\n\n"
- " spack commands --update-completion\n\n"
- "and adding the changed files to your pull request.")
+ msg = (
+ "It looks like Spack's command-line interface has been modified. "
+ "Please update Spack's shell tab completion scripts by running:\n\n"
+ " spack commands --update-completion\n\n"
+ "and adding the changed files to your pull request."
+ )
- for shell in ['bash']: # 'zsh', 'fish']:
- header = os.path.join(
- spack.paths.share_path, shell, 'spack-completion.in')
- script = 'spack-completion.{0}'.format(shell)
+ for shell in ["bash"]: # 'zsh', 'fish']:
+ header = os.path.join(spack.paths.share_path, shell, "spack-completion.in")
+ script = "spack-completion.{0}".format(shell)
old_script = os.path.join(spack.paths.share_path, script)
new_script = str(tmpdir.join(script))
- commands('--aliases', '--format', shell,
- '--header', header, '--update', new_script)
+ commands("--aliases", "--format", shell, "--header", header, "--update", new_script)
assert filecmp.cmp(old_script, new_script), msg
diff --git a/lib/spack/spack/test/cmd/common/arguments.py b/lib/spack/spack/test/cmd/common/arguments.py
index c527cc074a..58bf8e0ac5 100644
--- a/lib/spack/spack/test/cmd/common/arguments.py
+++ b/lib/spack/spack/test/cmd/common/arguments.py
@@ -19,51 +19,54 @@ def job_parser():
# --jobs needs to write to a command_line config scope, so this is the only
# scope we create.
p = argparse.ArgumentParser()
- arguments.add_common_arguments(p, ['jobs'])
- scopes = [spack.config.InternalConfigScope('command_line', {'config': {}})]
+ arguments.add_common_arguments(p, ["jobs"])
+ scopes = [spack.config.InternalConfigScope("command_line", {"config": {}})]
with spack.config.use_configuration(*scopes):
yield p
def test_setting_jobs_flag(job_parser):
- namespace = job_parser.parse_args(['-j', '24'])
+ namespace = job_parser.parse_args(["-j", "24"])
assert namespace.jobs == 24
- assert spack.config.get('config:build_jobs', scope='command_line') == 24
+ assert spack.config.get("config:build_jobs", scope="command_line") == 24
def test_omitted_job_flag(job_parser):
namespace = job_parser.parse_args([])
assert namespace.jobs is None
- assert spack.config.get('config:build_jobs') is None
+ assert spack.config.get("config:build_jobs") is None
def test_negative_integers_not_allowed_for_parallel_jobs(job_parser):
with pytest.raises(ValueError) as exc_info:
- job_parser.parse_args(['-j', '-2'])
+ job_parser.parse_args(["-j", "-2"])
- assert 'expected a positive integer' in str(exc_info.value)
+ assert "expected a positive integer" in str(exc_info.value)
-@pytest.mark.parametrize('specs,cflags,negated_variants', [
- (['coreutils cflags="-O3 -g"'], ['-O3', '-g'], []),
- (['coreutils', 'cflags=-O3 -g'], ['-O3'], ['g']),
- (['coreutils', 'cflags=-O3', '-g'], ['-O3'], ['g']),
-])
-@pytest.mark.regression('12951')
+@pytest.mark.parametrize(
+ "specs,cflags,negated_variants",
+ [
+ (['coreutils cflags="-O3 -g"'], ["-O3", "-g"], []),
+ (["coreutils", "cflags=-O3 -g"], ["-O3"], ["g"]),
+ (["coreutils", "cflags=-O3", "-g"], ["-O3"], ["g"]),
+ ],
+)
+@pytest.mark.regression("12951")
def test_parse_spec_flags_with_spaces(specs, cflags, negated_variants):
spec_list = spack.cmd.parse_specs(specs)
assert len(spec_list) == 1
s = spec_list.pop()
- assert s.compiler_flags['cflags'] == cflags
+ assert s.compiler_flags["cflags"] == cflags
assert list(s.variants.keys()) == negated_variants
for v in negated_variants:
- assert '~{0}'.format(v) in s
+ assert "~{0}".format(v) in s
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_match_spec_env(mock_packages, mutable_mock_env_path):
"""
Concretize a spec with non-default options in an environment. Make
@@ -72,47 +75,44 @@ def test_match_spec_env(mock_packages, mutable_mock_env_path):
"""
# Initial sanity check: we are planning on choosing a non-default
# value, so make sure that is in fact not the default.
- check_defaults = spack.cmd.parse_specs(['a'], concretize=True)[0]
- assert not check_defaults.satisfies('foobar=baz')
+ check_defaults = spack.cmd.parse_specs(["a"], concretize=True)[0]
+ assert not check_defaults.satisfies("foobar=baz")
- e = ev.create('test')
- e.add('a foobar=baz')
+ e = ev.create("test")
+ e.add("a foobar=baz")
e.concretize()
with e:
- env_spec = spack.cmd.matching_spec_from_env(
- spack.cmd.parse_specs(['a'])[0])
- assert env_spec.satisfies('foobar=baz')
+ env_spec = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])
+ assert env_spec.satisfies("foobar=baz")
assert env_spec.concrete
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
- e = ev.create('test')
- e.add('a foobar=baz')
- e.add('a foobar=fee')
+ e = ev.create("test")
+ e.add("a foobar=baz")
+ e.add("a foobar=fee")
e.concretize()
with e:
with pytest.raises(ev.SpackEnvironmentError) as exc_info:
- spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(['a'])[0])
+ spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["a"])[0])
- assert 'matches multiple specs' in exc_info.value.message
+ assert "matches multiple specs" in exc_info.value.message
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_root_and_dep_match_returns_root(mock_packages, mutable_mock_env_path):
- e = ev.create('test')
- e.add('b@0.9')
- e.add('a foobar=bar') # Depends on b, should choose b@1.0
+ e = ev.create("test")
+ e.add("b@0.9")
+ e.add("a foobar=bar") # Depends on b, should choose b@1.0
e.concretize()
with e:
# This query matches the root b and b as a dependency of a. In that
# case the root instance should be preferred.
- env_spec1 = spack.cmd.matching_spec_from_env(
- spack.cmd.parse_specs(['b'])[0])
- assert env_spec1.satisfies('@0.9')
+ env_spec1 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b"])[0])
+ assert env_spec1.satisfies("@0.9")
- env_spec2 = spack.cmd.matching_spec_from_env(
- spack.cmd.parse_specs(['b@1.0'])[0])
+ env_spec2 = spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(["b@1.0"])[0])
assert env_spec2
diff --git a/lib/spack/spack/test/cmd/compiler.py b/lib/spack/spack/test/cmd/compiler.py
index f799d96b0f..d12f2d9645 100644
--- a/lib/spack/spack/test/cmd/compiler.py
+++ b/lib/spack/spack/test/cmd/compiler.py
@@ -14,26 +14,27 @@ import spack.compilers
import spack.main
import spack.version
-compiler = spack.main.SpackCommand('compiler')
+compiler = spack.main.SpackCommand("compiler")
@pytest.fixture
def mock_compiler_version():
- return '4.5.3'
+ return "4.5.3"
@pytest.fixture()
def mock_compiler_dir(tmpdir, mock_compiler_version):
"""Return a directory containing a fake, but detectable compiler."""
- tmpdir.ensure('bin', dir=True)
- bin_dir = tmpdir.join('bin')
+ tmpdir.ensure("bin", dir=True)
+ bin_dir = tmpdir.join("bin")
- gcc_path = bin_dir.join('gcc')
- gxx_path = bin_dir.join('g++')
- gfortran_path = bin_dir.join('gfortran')
+ gcc_path = bin_dir.join("gcc")
+ gxx_path = bin_dir.join("g++")
+ gfortran_path = bin_dir.join("gfortran")
- gcc_path.write("""\
+ gcc_path.write(
+ """\
#!/bin/sh
for arg in "$@"; do
@@ -41,7 +42,9 @@ for arg in "$@"; do
echo '%s'
fi
done
-""" % mock_compiler_version)
+"""
+ % mock_compiler_version
+ )
# Create some mock compilers in the temporary directory
llnl.util.filesystem.set_executable(str(gcc_path))
@@ -51,30 +54,36 @@ done
return str(tmpdir)
-@pytest.mark.skipif(sys.platform == 'win32', reason="Cannot execute bash \
- script on Windows")
-@pytest.mark.regression('11678,13138')
+@pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="Cannot execute bash \
+ script on Windows",
+)
+@pytest.mark.regression("11678,13138")
def test_compiler_find_without_paths(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
- with open('gcc', 'w') as f:
- f.write("""\
+ with open("gcc", "w") as f:
+ f.write(
+ """\
#!/bin/sh
echo "0.0.0"
-""")
- os.chmod('gcc', 0o700)
+"""
+ )
+ os.chmod("gcc", 0o700)
- os.environ['PATH'] = str(tmpdir)
- output = compiler('find', '--scope=site')
+ os.environ["PATH"] = str(tmpdir)
+ output = compiler("find", "--scope=site")
- assert 'gcc' in output
+ assert "gcc" in output
-@pytest.mark.regression('17589')
+@pytest.mark.regression("17589")
def test_compiler_find_no_apple_gcc(no_compilers_yaml, working_env, tmpdir):
with tmpdir.as_cwd():
# make a script to emulate apple gcc's version args
- with open('gcc', 'w') as f:
- f.write("""\
+ with open("gcc", "w") as f:
+ f.write(
+ """\
#!/bin/sh
if [ "$1" = "-dumpversion" ]; then
echo "4.2.1"
@@ -87,45 +96,41 @@ elif [ "$1" = "--version" ]; then
else
echo "clang: error: no input files"
fi
-""")
- os.chmod('gcc', 0o700)
+"""
+ )
+ os.chmod("gcc", 0o700)
- os.environ['PATH'] = str(tmpdir)
- output = compiler('find', '--scope=site')
+ os.environ["PATH"] = str(tmpdir)
+ output = compiler("find", "--scope=site")
- assert 'gcc' not in output
+ assert "gcc" not in output
def test_compiler_remove(mutable_config, mock_packages):
- args = spack.util.pattern.Bunch(
- all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None
- )
+ args = spack.util.pattern.Bunch(all=True, compiler_spec="gcc@4.5.0", add_paths=[], scope=None)
spack.cmd.compiler.compiler_remove(args)
compilers = spack.compilers.all_compiler_specs()
assert spack.spec.CompilerSpec("gcc@4.5.0") not in compilers
-@pytest.mark.skipif(sys.platform == 'win32', reason="Cannot execute bash \
- script on Windows")
-def test_compiler_add(
- mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version
-):
+@pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="Cannot execute bash \
+ script on Windows",
+)
+def test_compiler_add(mutable_config, mock_packages, mock_compiler_dir, mock_compiler_version):
# Compilers available by default.
old_compilers = set(spack.compilers.all_compiler_specs())
args = spack.util.pattern.Bunch(
- all=None,
- compiler_spec=None,
- add_paths=[mock_compiler_dir],
- scope=None
+ all=None, compiler_spec=None, add_paths=[mock_compiler_dir], scope=None
)
spack.cmd.compiler.compiler_find(args)
# Ensure new compiler is in there
new_compilers = set(spack.compilers.all_compiler_specs())
new_compiler = new_compilers - old_compilers
- assert any(c.version == spack.version.Version(mock_compiler_version)
- for c in new_compiler)
+ assert any(c.version == spack.version.Version(mock_compiler_version) for c in new_compiler)
@pytest.fixture
@@ -141,8 +146,9 @@ def clangdir(tmpdir):
"""
with tmpdir.as_cwd():
- with open('clang', 'w') as f:
- f.write("""\
+ with open("clang", "w") as f:
+ f.write(
+ """\
#!/bin/sh
if [ "$1" = "--version" ]; then
echo "clang version 11.0.0 (clang-1100.0.33.16)"
@@ -153,8 +159,9 @@ else
echo "clang: error: no input files"
exit 1
fi
-""")
- shutil.copy('clang', 'clang++')
+"""
+ )
+ shutil.copy("clang", "clang++")
gcc_script = """\
#!/bin/sh
@@ -171,116 +178,115 @@ else
exit 1
fi
"""
- with open('gcc-8', 'w') as f:
- f.write(gcc_script.format('gcc', 'gcc-8'))
- with open('g++-8', 'w') as f:
- f.write(gcc_script.format('g++', 'g++-8'))
- with open('gfortran-8', 'w') as f:
- f.write(gcc_script.format('GNU Fortran', 'gfortran-8'))
- os.chmod('clang', 0o700)
- os.chmod('clang++', 0o700)
- os.chmod('gcc-8', 0o700)
- os.chmod('g++-8', 0o700)
- os.chmod('gfortran-8', 0o700)
+ with open("gcc-8", "w") as f:
+ f.write(gcc_script.format("gcc", "gcc-8"))
+ with open("g++-8", "w") as f:
+ f.write(gcc_script.format("g++", "g++-8"))
+ with open("gfortran-8", "w") as f:
+ f.write(gcc_script.format("GNU Fortran", "gfortran-8"))
+ os.chmod("clang", 0o700)
+ os.chmod("clang++", 0o700)
+ os.chmod("gcc-8", 0o700)
+ os.chmod("g++-8", 0o700)
+ os.chmod("gfortran-8", 0o700)
yield tmpdir
-@pytest.mark.skipif(sys.platform == 'win32', reason="Cannot execute bash \
- script on Windows")
-@pytest.mark.regression('17590')
-def test_compiler_find_mixed_suffixes(
- no_compilers_yaml, working_env, clangdir):
- """Ensure that we'll mix compilers with different suffixes when necessary.
- """
- os.environ['PATH'] = str(clangdir)
- output = compiler('find', '--scope=site')
+@pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="Cannot execute bash \
+ script on Windows",
+)
+@pytest.mark.regression("17590")
+def test_compiler_find_mixed_suffixes(no_compilers_yaml, working_env, clangdir):
+ """Ensure that we'll mix compilers with different suffixes when necessary."""
+ os.environ["PATH"] = str(clangdir)
+ output = compiler("find", "--scope=site")
- assert 'clang@11.0.0' in output
- assert 'gcc@8.4.0' in output
+ assert "clang@11.0.0" in output
+ assert "gcc@8.4.0" in output
- config = spack.compilers.get_compiler_config('site', False)
- clang = next(c['compiler'] for c in config
- if c['compiler']['spec'] == 'clang@11.0.0')
- gcc = next(c['compiler'] for c in config
- if c['compiler']['spec'] == 'gcc@8.4.0')
+ config = spack.compilers.get_compiler_config("site", False)
+ clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@11.0.0")
+ gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@8.4.0")
- gfortran_path = str(clangdir.join('gfortran-8'))
+ gfortran_path = str(clangdir.join("gfortran-8"))
- assert clang['paths'] == {
- 'cc': str(clangdir.join('clang')),
- 'cxx': str(clangdir.join('clang++')),
+ assert clang["paths"] == {
+ "cc": str(clangdir.join("clang")),
+ "cxx": str(clangdir.join("clang++")),
# we only auto-detect mixed clang on macos
- 'f77': gfortran_path if sys.platform == 'darwin' else None,
- 'fc': gfortran_path if sys.platform == 'darwin' else None,
+ "f77": gfortran_path if sys.platform == "darwin" else None,
+ "fc": gfortran_path if sys.platform == "darwin" else None,
}
- assert gcc['paths'] == {
- 'cc': str(clangdir.join('gcc-8')),
- 'cxx': str(clangdir.join('g++-8')),
- 'f77': gfortran_path,
- 'fc': gfortran_path,
+ assert gcc["paths"] == {
+ "cc": str(clangdir.join("gcc-8")),
+ "cxx": str(clangdir.join("g++-8")),
+ "f77": gfortran_path,
+ "fc": gfortran_path,
}
-@pytest.mark.skipif(sys.platform == 'win32', reason="Cannot execute bash \
- script on Windows")
-@pytest.mark.regression('17590')
-def test_compiler_find_prefer_no_suffix(
- no_compilers_yaml, working_env, clangdir):
- """Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice.
- """
+@pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="Cannot execute bash \
+ script on Windows",
+)
+@pytest.mark.regression("17590")
+def test_compiler_find_prefer_no_suffix(no_compilers_yaml, working_env, clangdir):
+ """Ensure that we'll pick 'clang' over 'clang-gpu' when there is a choice."""
with clangdir.as_cwd():
- shutil.copy('clang', 'clang-gpu')
- shutil.copy('clang++', 'clang++-gpu')
- os.chmod('clang-gpu', 0o700)
- os.chmod('clang++-gpu', 0o700)
+ shutil.copy("clang", "clang-gpu")
+ shutil.copy("clang++", "clang++-gpu")
+ os.chmod("clang-gpu", 0o700)
+ os.chmod("clang++-gpu", 0o700)
- os.environ['PATH'] = str(clangdir)
- output = compiler('find', '--scope=site')
+ os.environ["PATH"] = str(clangdir)
+ output = compiler("find", "--scope=site")
- assert 'clang@11.0.0' in output
- assert 'gcc@8.4.0' in output
+ assert "clang@11.0.0" in output
+ assert "gcc@8.4.0" in output
- config = spack.compilers.get_compiler_config('site', False)
- clang = next(c['compiler'] for c in config
- if c['compiler']['spec'] == 'clang@11.0.0')
+ config = spack.compilers.get_compiler_config("site", False)
+ clang = next(c["compiler"] for c in config if c["compiler"]["spec"] == "clang@11.0.0")
- assert clang['paths']['cc'] == str(clangdir.join('clang'))
- assert clang['paths']['cxx'] == str(clangdir.join('clang++'))
+ assert clang["paths"]["cc"] == str(clangdir.join("clang"))
+ assert clang["paths"]["cxx"] == str(clangdir.join("clang++"))
-@pytest.mark.skipif(sys.platform == 'win32', reason="Cannot execute bash \
- script on Windows")
-def test_compiler_find_path_order(
- no_compilers_yaml, working_env, clangdir):
- """Ensure that we find compilers that come first in the PATH first
- """
+@pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="Cannot execute bash \
+ script on Windows",
+)
+def test_compiler_find_path_order(no_compilers_yaml, working_env, clangdir):
+ """Ensure that we find compilers that come first in the PATH first"""
with clangdir.as_cwd():
- os.mkdir('first_in_path')
- shutil.copy('gcc-8', 'first_in_path/gcc-8')
- shutil.copy('g++-8', 'first_in_path/g++-8')
- shutil.copy('gfortran-8', 'first_in_path/gfortran-8')
+ os.mkdir("first_in_path")
+ shutil.copy("gcc-8", "first_in_path/gcc-8")
+ shutil.copy("g++-8", "first_in_path/g++-8")
+ shutil.copy("gfortran-8", "first_in_path/gfortran-8")
# the first_in_path folder should be searched first
- os.environ['PATH'] = '{0}:{1}'.format(
+ os.environ["PATH"] = "{0}:{1}".format(
str(clangdir.join("first_in_path")),
str(clangdir),
)
- compiler('find', '--scope=site')
+ compiler("find", "--scope=site")
- config = spack.compilers.get_compiler_config('site', False)
+ config = spack.compilers.get_compiler_config("site", False)
- gcc = next(c['compiler'] for c in config
- if c['compiler']['spec'] == 'gcc@8.4.0')
+ gcc = next(c["compiler"] for c in config if c["compiler"]["spec"] == "gcc@8.4.0")
- assert gcc['paths'] == {
- 'cc': str(clangdir.join('first_in_path', 'gcc-8')),
- 'cxx': str(clangdir.join('first_in_path', 'g++-8')),
- 'f77': str(clangdir.join('first_in_path', 'gfortran-8')),
- 'fc': str(clangdir.join('first_in_path', 'gfortran-8')),
+ assert gcc["paths"] == {
+ "cc": str(clangdir.join("first_in_path", "gcc-8")),
+ "cxx": str(clangdir.join("first_in_path", "g++-8")),
+ "f77": str(clangdir.join("first_in_path", "gfortran-8")),
+ "fc": str(clangdir.join("first_in_path", "gfortran-8")),
}
@@ -288,7 +294,7 @@ def test_compiler_list_empty(no_compilers_yaml, working_env, clangdir):
# Spack should not automatically search for compilers when listing them and none
# are available. And when stdout is not a tty like in tests, there should be no
# output and no error exit code.
- os.environ['PATH'] = str(clangdir)
- out = compiler('list')
+ os.environ["PATH"] = str(clangdir)
+ out = compiler("list")
assert not out
assert compiler.returncode == 0
diff --git a/lib/spack/spack/test/cmd/concretize.py b/lib/spack/spack/test/cmd/concretize.py
index a92e059464..e5d2162423 100644
--- a/lib/spack/spack/test/cmd/concretize.py
+++ b/lib/spack/spack/test/cmd/concretize.py
@@ -10,49 +10,48 @@ import spack.environment as ev
from spack.main import SpackCommand
# everything here uses the mock_env_path
-pytestmark = pytest.mark.usefixtures(
- 'mutable_mock_env_path', 'config', 'mutable_mock_repo')
+pytestmark = pytest.mark.usefixtures("mutable_mock_env_path", "config", "mutable_mock_repo")
-env = SpackCommand('env')
-add = SpackCommand('add')
-concretize = SpackCommand('concretize')
+env = SpackCommand("env")
+add = SpackCommand("add")
+concretize = SpackCommand("concretize")
-unification_strategies = [False, True, 'when_possible']
+unification_strategies = [False, True, "when_possible"]
-@pytest.mark.parametrize('unify', unification_strategies)
+@pytest.mark.parametrize("unify", unification_strategies)
def test_concretize_all_test_dependencies(unify):
"""Check all test dependencies are concretized."""
- env('create', 'test')
+ env("create", "test")
- with ev.read('test') as e:
+ with ev.read("test") as e:
e.unify = unify
- add('depb')
- concretize('--test', 'all')
- assert e.matching_spec('test-dependency')
+ add("depb")
+ concretize("--test", "all")
+ assert e.matching_spec("test-dependency")
-@pytest.mark.parametrize('unify', unification_strategies)
+@pytest.mark.parametrize("unify", unification_strategies)
def test_concretize_root_test_dependencies_not_recursive(unify):
"""Check that test dependencies are not concretized recursively."""
- env('create', 'test')
+ env("create", "test")
- with ev.read('test') as e:
+ with ev.read("test") as e:
e.unify = unify
- add('depb')
- concretize('--test', 'root')
- assert e.matching_spec('test-dependency') is None
+ add("depb")
+ concretize("--test", "root")
+ assert e.matching_spec("test-dependency") is None
-@pytest.mark.parametrize('unify', unification_strategies)
+@pytest.mark.parametrize("unify", unification_strategies)
def test_concretize_root_test_dependencies_are_concretized(unify):
"""Check that root test dependencies are concretized."""
- env('create', 'test')
+ env("create", "test")
- with ev.read('test') as e:
+ with ev.read("test") as e:
e.unify = unify
- add('a')
- add('b')
- concretize('--test', 'root')
- assert e.matching_spec('test-dependency')
+ add("a")
+ add("b")
+ concretize("--test", "root")
+ assert e.matching_spec("test-dependency")
diff --git a/lib/spack/spack/test/cmd/config.py b/lib/spack/spack/test/cmd/config.py
index 2ecf4bd178..42152ca3a5 100644
--- a/lib/spack/spack/test/cmd/config.py
+++ b/lib/spack/spack/test/cmd/config.py
@@ -17,14 +17,14 @@ import spack.spec
import spack.store
import spack.util.spack_yaml as syaml
-config = spack.main.SpackCommand('config')
-env = spack.main.SpackCommand('env')
+config = spack.main.SpackCommand("config")
+env = spack.main.SpackCommand("env")
-def _create_config(scope=None, data={}, section='packages'):
+def _create_config(scope=None, data={}, section="packages"):
scope = scope or spack.config.default_modify_scope()
cfg_file = spack.config.config.get_config_filename(scope, section)
- with open(cfg_file, 'w') as f:
+ with open(cfg_file, "w") as f:
syaml.dump(data, stream=f)
return cfg_file
@@ -33,208 +33,234 @@ def _create_config(scope=None, data={}, section='packages'):
def config_yaml_v015(mutable_config):
"""Create a packages.yaml in the old format"""
old_data = {
- 'config': {
- 'install_tree': '/fake/path',
- 'install_path_scheme': '{name}-{version}',
+ "config": {
+ "install_tree": "/fake/path",
+ "install_path_scheme": "{name}-{version}",
}
}
- return functools.partial(_create_config, data=old_data, section='config')
+ return functools.partial(_create_config, data=old_data, section="config")
def test_get_config_scope(mock_low_high_config):
- assert config('get', 'compilers').strip() == 'compilers: {}'
+ assert config("get", "compilers").strip() == "compilers: {}"
def test_get_config_scope_merged(mock_low_high_config):
- low_path = mock_low_high_config.scopes['low'].path
- high_path = mock_low_high_config.scopes['high'].path
+ low_path = mock_low_high_config.scopes["low"].path
+ high_path = mock_low_high_config.scopes["high"].path
fs.mkdirp(low_path)
fs.mkdirp(high_path)
- with open(os.path.join(low_path, 'repos.yaml'), 'w') as f:
- f.write('''\
+ with open(os.path.join(low_path, "repos.yaml"), "w") as f:
+ f.write(
+ """\
repos:
- repo3
-''')
+"""
+ )
- with open(os.path.join(high_path, 'repos.yaml'), 'w') as f:
- f.write('''\
+ with open(os.path.join(high_path, "repos.yaml"), "w") as f:
+ f.write(
+ """\
repos:
- repo1
- repo2
-''')
+"""
+ )
- assert config('get', 'repos').strip() == '''repos:
+ assert (
+ config("get", "repos").strip()
+ == """repos:
- repo1
- repo2
-- repo3'''
+- repo3"""
+ )
def test_config_edit():
"""Ensure `spack config edit` edits the right paths."""
- dms = spack.config.default_modify_scope('compilers')
+ dms = spack.config.default_modify_scope("compilers")
dms_path = spack.config.config.scopes[dms].path
- user_path = spack.config.config.scopes['user'].path
+ user_path = spack.config.config.scopes["user"].path
- comp_path = os.path.join(dms_path, 'compilers.yaml')
- repos_path = os.path.join(user_path, 'repos.yaml')
+ comp_path = os.path.join(dms_path, "compilers.yaml")
+ repos_path = os.path.join(user_path, "repos.yaml")
- assert config('edit', '--print-file', 'compilers').strip() == comp_path
- assert config('edit', '--print-file', 'repos').strip() == repos_path
+ assert config("edit", "--print-file", "compilers").strip() == comp_path
+ assert config("edit", "--print-file", "repos").strip() == repos_path
def test_config_get_gets_spack_yaml(mutable_mock_env_path):
- env = ev.create('test')
+ env = ev.create("test")
- config('get', fail_on_error=False)
+ config("get", fail_on_error=False)
assert config.returncode == 1
with env:
- config('get', fail_on_error=False)
+ config("get", fail_on_error=False)
assert config.returncode == 1
env.write()
- assert 'mpileaks' not in config('get')
+ assert "mpileaks" not in config("get")
- env.add('mpileaks')
+ env.add("mpileaks")
env.write()
- assert 'mpileaks' in config('get')
+ assert "mpileaks" in config("get")
def test_config_edit_edits_spack_yaml(mutable_mock_env_path):
- env = ev.create('test')
+ env = ev.create("test")
with env:
- assert config('edit', '--print-file').strip() == env.manifest_path
+ assert config("edit", "--print-file").strip() == env.manifest_path
def test_config_edit_fails_correctly_with_no_env(mutable_mock_env_path):
- output = config('edit', '--print-file', fail_on_error=False)
+ output = config("edit", "--print-file", fail_on_error=False)
assert "requires a section argument or an active environment" in output
def test_config_get_fails_correctly_with_no_env(mutable_mock_env_path):
- output = config('get', fail_on_error=False)
+ output = config("get", fail_on_error=False)
assert "requires a section argument or an active environment" in output
def test_config_list():
- output = config('list')
- assert 'compilers' in output
- assert 'packages' in output
+ output = config("list")
+ assert "compilers" in output
+ assert "packages" in output
def test_config_add(mutable_empty_config):
- config('add', 'config:dirty:true')
- output = config('get', 'config')
+ config("add", "config:dirty:true")
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
dirty: true
"""
+ )
def test_config_add_list(mutable_empty_config):
- config('add', 'config:template_dirs:test1')
- config('add', 'config:template_dirs:[test2]')
- config('add', 'config:template_dirs:test3')
- output = config('get', 'config')
-
- assert output == """config:
+ config("add", "config:template_dirs:test1")
+ config("add", "config:template_dirs:[test2]")
+ config("add", "config:template_dirs:test3")
+ output = config("get", "config")
+
+ assert (
+ output
+ == """config:
template_dirs:
- test3
- test2
- test1
"""
+ )
def test_config_add_override(mutable_empty_config):
- config('--scope', 'site', 'add', 'config:template_dirs:test1')
- config('add', 'config:template_dirs:[test2]')
- output = config('get', 'config')
+ config("--scope", "site", "add", "config:template_dirs:test1")
+ config("add", "config:template_dirs:[test2]")
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
template_dirs:
- test2
- test1
"""
+ )
- config('add', 'config::template_dirs:[test2]')
- output = config('get', 'config')
+ config("add", "config::template_dirs:[test2]")
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
template_dirs:
- test2
"""
+ )
def test_config_add_override_leaf(mutable_empty_config):
- config('--scope', 'site', 'add', 'config:template_dirs:test1')
- config('add', 'config:template_dirs:[test2]')
- output = config('get', 'config')
+ config("--scope", "site", "add", "config:template_dirs:test1")
+ config("add", "config:template_dirs:[test2]")
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
template_dirs:
- test2
- test1
"""
+ )
- config('add', 'config:template_dirs::[test2]')
- output = config('get', 'config')
+ config("add", "config:template_dirs::[test2]")
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
'template_dirs:':
- test2
"""
+ )
def test_config_add_update_dict(mutable_empty_config):
- config('add', 'packages:all:version:[1.0.0]')
- output = config('get', 'packages')
+ config("add", "packages:all:version:[1.0.0]")
+ output = config("get", "packages")
- expected = 'packages:\n all:\n version: [1.0.0]\n'
+ expected = "packages:\n all:\n version: [1.0.0]\n"
assert output == expected
def test_config_with_c_argument(mutable_empty_config):
# I don't know how to add a spack argument to a Spack Command, so we test this way
- config_file = 'config:install_root:root:/path/to/config.yaml'
+ config_file = "config:install_root:root:/path/to/config.yaml"
parser = spack.main.make_argument_parser()
- args = parser.parse_args(['-c', config_file])
+ args = parser.parse_args(["-c", config_file])
assert config_file in args.config_vars
# Add the path to the config
- config("add", args.config_vars[0], scope='command_line')
- output = config("get", 'config')
+ config("add", args.config_vars[0], scope="command_line")
+ output = config("get", "config")
assert "config:\n install_root:\n root: /path/to/config.yaml" in output
def test_config_add_ordered_dict(mutable_empty_config):
- config('add', 'mirrors:first:/path/to/first')
- config('add', 'mirrors:second:/path/to/second')
- output = config('get', 'mirrors')
+ config("add", "mirrors:first:/path/to/first")
+ config("add", "mirrors:second:/path/to/second")
+ output = config("get", "mirrors")
- assert output == """mirrors:
+ assert (
+ output
+ == """mirrors:
first: /path/to/first
second: /path/to/second
"""
+ )
def test_config_add_interpret_oneof(mutable_empty_config):
# Regression test for a bug that would raise a validation error
- config('add', 'packages:all:target:[x86_64]')
- config('add', 'packages:all:variants:~shared')
+ config("add", "packages:all:target:[x86_64]")
+ config("add", "packages:all:variants:~shared")
def test_config_add_invalid_fails(mutable_empty_config):
- config('add', 'packages:all:variants:+debug')
- with pytest.raises(
- (spack.config.ConfigFormatError, AttributeError)
- ):
- config('add', 'packages:all:True')
+ config("add", "packages:all:variants:+debug")
+ with pytest.raises((spack.config.ConfigFormatError, AttributeError)):
+ config("add", "packages:all:True")
def test_config_add_from_file(mutable_empty_config, tmpdir):
@@ -243,15 +269,18 @@ def test_config_add_from_file(mutable_empty_config, tmpdir):
dirty: true
"""
- file = str(tmpdir.join('spack.yaml'))
- with open(file, 'w') as f:
+ file = str(tmpdir.join("spack.yaml"))
+ with open(file, "w") as f:
f.write(contents)
- config('add', '-f', file)
- output = config('get', 'config')
+ config("add", "-f", file)
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
dirty: true
"""
+ )
def test_config_add_from_file_multiple(mutable_empty_config, tmpdir):
@@ -261,56 +290,65 @@ def test_config_add_from_file_multiple(mutable_empty_config, tmpdir):
template_dirs: [test1]
"""
- file = str(tmpdir.join('spack.yaml'))
- with open(file, 'w') as f:
+ file = str(tmpdir.join("spack.yaml"))
+ with open(file, "w") as f:
f.write(contents)
- config('add', '-f', file)
- output = config('get', 'config')
+ config("add", "-f", file)
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
dirty: true
template_dirs: [test1]
"""
+ )
def test_config_add_override_from_file(mutable_empty_config, tmpdir):
- config('--scope', 'site', 'add', 'config:template_dirs:test1')
+ config("--scope", "site", "add", "config:template_dirs:test1")
contents = """spack:
config::
template_dirs: [test2]
"""
- file = str(tmpdir.join('spack.yaml'))
- with open(file, 'w') as f:
+ file = str(tmpdir.join("spack.yaml"))
+ with open(file, "w") as f:
f.write(contents)
- config('add', '-f', file)
- output = config('get', 'config')
+ config("add", "-f", file)
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
template_dirs: [test2]
"""
+ )
def test_config_add_override_leaf_from_file(mutable_empty_config, tmpdir):
- config('--scope', 'site', 'add', 'config:template_dirs:test1')
+ config("--scope", "site", "add", "config:template_dirs:test1")
contents = """spack:
config:
template_dirs:: [test2]
"""
- file = str(tmpdir.join('spack.yaml'))
- with open(file, 'w') as f:
+ file = str(tmpdir.join("spack.yaml"))
+ with open(file, "w") as f:
f.write(contents)
- config('add', '-f', file)
- output = config('get', 'config')
+ config("add", "-f", file)
+ output = config("get", "config")
- assert output == """config:
+ assert (
+ output
+ == """config:
'template_dirs:': [test2]
"""
+ )
def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
- config('add', 'packages:all:compiler:[gcc]')
+ config("add", "packages:all:compiler:[gcc]")
# contents to add to file
contents = """spack:
@@ -321,13 +359,13 @@ def test_config_add_update_dict_from_file(mutable_empty_config, tmpdir):
"""
# create temp file and add it to config
- file = str(tmpdir.join('spack.yaml'))
- with open(file, 'w') as f:
+ file = str(tmpdir.join("spack.yaml"))
+ with open(file, "w") as f:
f.write(contents)
- config('add', '-f', file)
+ config("add", "-f", file)
# get results
- output = config('get', 'packages')
+ output = config("get", "packages")
# added config comes before prior config
expected = """packages:
@@ -350,76 +388,89 @@ def test_config_add_invalid_file_fails(tmpdir):
"""
# create temp file and add it to config
- file = str(tmpdir.join('spack.yaml'))
- with open(file, 'w') as f:
+ file = str(tmpdir.join("spack.yaml"))
+ with open(file, "w") as f:
f.write(contents)
- with pytest.raises(
- (spack.config.ConfigFormatError)
- ):
- config('add', '-f', file)
+ with pytest.raises((spack.config.ConfigFormatError)):
+ config("add", "-f", file)
def test_config_remove_value(mutable_empty_config):
- config('add', 'config:dirty:true')
- config('remove', 'config:dirty:true')
- output = config('get', 'config')
+ config("add", "config:dirty:true")
+ config("remove", "config:dirty:true")
+ output = config("get", "config")
- assert output == """config: {}
+ assert (
+ output
+ == """config: {}
"""
+ )
def test_config_remove_alias_rm(mutable_empty_config):
- config('add', 'config:dirty:true')
- config('rm', 'config:dirty:true')
- output = config('get', 'config')
+ config("add", "config:dirty:true")
+ config("rm", "config:dirty:true")
+ output = config("get", "config")
- assert output == """config: {}
+ assert (
+ output
+ == """config: {}
"""
+ )
def test_config_remove_dict(mutable_empty_config):
- config('add', 'config:dirty:true')
- config('rm', 'config:dirty')
- output = config('get', 'config')
+ config("add", "config:dirty:true")
+ config("rm", "config:dirty")
+ output = config("get", "config")
- assert output == """config: {}
+ assert (
+ output
+ == """config: {}
"""
+ )
def test_remove_from_list(mutable_empty_config):
- config('add', 'config:template_dirs:test1')
- config('add', 'config:template_dirs:[test2]')
- config('add', 'config:template_dirs:test3')
- config('remove', 'config:template_dirs:test2')
- output = config('get', 'config')
-
- assert output == """config:
+ config("add", "config:template_dirs:test1")
+ config("add", "config:template_dirs:[test2]")
+ config("add", "config:template_dirs:test3")
+ config("remove", "config:template_dirs:test2")
+ output = config("get", "config")
+
+ assert (
+ output
+ == """config:
template_dirs:
- test3
- test1
"""
+ )
def test_remove_list(mutable_empty_config):
- config('add', 'config:template_dirs:test1')
- config('add', 'config:template_dirs:[test2]')
- config('add', 'config:template_dirs:test3')
- config('remove', 'config:template_dirs:[test2]')
- output = config('get', 'config')
-
- assert output == """config:
+ config("add", "config:template_dirs:test1")
+ config("add", "config:template_dirs:[test2]")
+ config("add", "config:template_dirs:test3")
+ config("remove", "config:template_dirs:[test2]")
+ output = config("get", "config")
+
+ assert (
+ output
+ == """config:
template_dirs:
- test3
- test1
"""
+ )
def test_config_add_to_env(mutable_empty_config, mutable_mock_env_path):
- env('create', 'test')
- with ev.read('test'):
- config('add', 'config:dirty:true')
- output = config('get')
+ env("create", "test")
+ with ev.read("test"):
+ config("add", "config:dirty:true")
+ output = config("get")
expected = """ config:
dirty: true
@@ -428,10 +479,8 @@ def test_config_add_to_env(mutable_empty_config, mutable_mock_env_path):
assert expected in output
-def test_config_add_to_env_preserve_comments(mutable_empty_config,
- mutable_mock_env_path,
- tmpdir):
- filepath = str(tmpdir.join('spack.yaml'))
+def test_config_add_to_env_preserve_comments(mutable_empty_config, mutable_mock_env_path, tmpdir):
+ filepath = str(tmpdir.join("spack.yaml"))
manifest = """# comment
spack: # comment
# comment
@@ -445,12 +494,12 @@ spack: # comment
# comment
compiler: [gcc] # comment
"""
- with open(filepath, 'w') as f:
+ with open(filepath, "w") as f:
f.write(manifest)
env = ev.Environment(str(tmpdir))
with env:
- config('add', 'config:dirty:true')
- output = config('get')
+ config("add", "config:dirty:true")
+ output = config("get")
expected = manifest
expected += """ config:
@@ -461,14 +510,14 @@ spack: # comment
def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
- env('create', 'test')
+ env("create", "test")
- with ev.read('test'):
- config('add', 'config:dirty:true')
+ with ev.read("test"):
+ config("add", "config:dirty:true")
- with ev.read('test'):
- config('rm', 'config:dirty')
- output = config('get')
+ with ev.read("test"):
+ config("rm", "config:dirty")
+ output = config("get")
expected = ev.default_manifest_yaml()
expected += """ config: {}
@@ -479,114 +528,111 @@ def test_config_remove_from_env(mutable_empty_config, mutable_mock_env_path):
def test_config_update_config(config_yaml_v015):
config_yaml_v015()
- config('update', '-y', 'config')
+ config("update", "-y", "config")
    # Check the entries have been transformed
- data = spack.config.get('config')
+ data = spack.config.get("config")
check_config_updated(data)
def test_config_update_not_needed(mutable_config):
- data_before = spack.config.get('repos')
- config('update', '-y', 'repos')
- data_after = spack.config.get('repos')
+ data_before = spack.config.get("repos")
+ config("update", "-y", "repos")
+ data_after = spack.config.get("repos")
assert data_before == data_after
-@pytest.mark.regression('18031')
+@pytest.mark.regression("18031")
def test_config_update_can_handle_comments(mutable_config):
# Create an outdated config file with comments
scope = spack.config.default_modify_scope()
- cfg_file = spack.config.config.get_config_filename(scope, 'config')
- with open(cfg_file, mode='w') as f:
- f.write("""
+ cfg_file = spack.config.config.get_config_filename(scope, "config")
+ with open(cfg_file, mode="w") as f:
+ f.write(
+ """
config:
# system cmake in /usr
install_tree: './foo'
# Another comment after the outdated section
install_hash_length: 7
-""")
+"""
+ )
# Try to update it, it should not raise errors
- config('update', '-y', 'config')
+ config("update", "-y", "config")
# Check data
- data = spack.config.get('config', scope=scope)
- assert 'root' in data['install_tree']
+ data = spack.config.get("config", scope=scope)
+ assert "root" in data["install_tree"]
# Check the comment is there
with open(cfg_file) as f:
- text = ''.join(f.readlines())
+ text = "".join(f.readlines())
- assert '# system cmake in /usr' in text
- assert '# Another comment after the outdated section' in text
+ assert "# system cmake in /usr" in text
+ assert "# Another comment after the outdated section" in text
-@pytest.mark.regression('18050')
+@pytest.mark.regression("18050")
def test_config_update_works_for_empty_paths(mutable_config):
scope = spack.config.default_modify_scope()
- cfg_file = spack.config.config.get_config_filename(scope, 'config')
- with open(cfg_file, mode='w') as f:
- f.write("""
+ cfg_file = spack.config.config.get_config_filename(scope, "config")
+ with open(cfg_file, mode="w") as f:
+ f.write(
+ """
config:
install_tree: ''
-""")
+"""
+ )
# Try to update it, it should not raise errors
- output = config('update', '-y', 'config')
+ output = config("update", "-y", "config")
# This ensures that we updated the configuration
- assert '[backup=' in output
+ assert "[backup=" in output
def check_config_updated(data):
- assert isinstance(data['install_tree'], dict)
- assert data['install_tree']['root'] == '/fake/path'
- assert data['install_tree']['projections'] == {'all': '{name}-{version}'}
+ assert isinstance(data["install_tree"], dict)
+ assert data["install_tree"]["root"] == "/fake/path"
+ assert data["install_tree"]["projections"] == {"all": "{name}-{version}"}
-def test_config_prefer_upstream(tmpdir_factory, install_mockery, mock_fetch,
- mutable_config, gen_mock_layout, monkeypatch):
+def test_config_prefer_upstream(
+ tmpdir_factory, install_mockery, mock_fetch, mutable_config, gen_mock_layout, monkeypatch
+):
"""Check that when a dependency package is recorded as installed in
- an upstream database that it is not reinstalled.
+ an upstream database that it is not reinstalled.
"""
- mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))
+ mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
prepared_db = spack.database.Database(mock_db_root)
- upstream_layout = gen_mock_layout('/a/')
+ upstream_layout = gen_mock_layout("/a/")
- for spec in [
- 'hdf5 +mpi',
- 'hdf5 ~mpi',
- 'boost+debug~icu+graph',
- 'dependency-install',
- 'patch']:
+ for spec in ["hdf5 +mpi", "hdf5 ~mpi", "boost+debug~icu+graph", "dependency-install", "patch"]:
dep = spack.spec.Spec(spec)
dep.concretize()
prepared_db.add(dep, upstream_layout)
- downstream_db_root = str(
- tmpdir_factory.mktemp('mock_downstream_db_root'))
- db_for_test = spack.database.Database(
- downstream_db_root, upstream_dbs=[prepared_db])
- monkeypatch.setattr(spack.store, 'db', db_for_test)
+ downstream_db_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
+ db_for_test = spack.database.Database(downstream_db_root, upstream_dbs=[prepared_db])
+ monkeypatch.setattr(spack.store, "db", db_for_test)
- output = config('prefer-upstream')
- scope = spack.config.default_modify_scope('packages')
- cfg_file = spack.config.config.get_config_filename(scope, 'packages')
- packages = syaml.load(open(cfg_file))['packages']
+ output = config("prefer-upstream")
+ scope = spack.config.default_modify_scope("packages")
+ cfg_file = spack.config.config.get_config_filename(scope, "packages")
+ packages = syaml.load(open(cfg_file))["packages"]
# Make sure only the non-default variants are set.
- assert packages['boost'] == {
- 'compiler': ['gcc@4.5.0'],
- 'variants': '+debug +graph',
- 'version': ['1.63.0']}
- assert packages['dependency-install'] == {
- 'compiler': ['gcc@4.5.0'], 'version': ['2.0']}
+ assert packages["boost"] == {
+ "compiler": ["gcc@4.5.0"],
+ "variants": "+debug +graph",
+ "version": ["1.63.0"],
+ }
+ assert packages["dependency-install"] == {"compiler": ["gcc@4.5.0"], "version": ["2.0"]}
# Ensure that neither variant gets listed for hdf5, since they conflict
- assert packages['hdf5'] == {
- 'compiler': ['gcc@4.5.0'], 'version': ['2.3']}
+ assert packages["hdf5"] == {"compiler": ["gcc@4.5.0"], "version": ["2.3"]}
# Make sure a message about the conflicting hdf5's was given.
- assert '- hdf5' in output
+ assert "- hdf5" in output
diff --git a/lib/spack/spack/test/cmd/create.py b/lib/spack/spack/test/cmd/create.py
index 20059b35d4..39d3ab7bd7 100644
--- a/lib/spack/spack/test/cmd/create.py
+++ b/lib/spack/spack/test/cmd/create.py
@@ -13,10 +13,10 @@ import spack.util.editor
from spack.main import SpackCommand
from spack.url import UndetectableNameError
-create = SpackCommand('create')
+create = SpackCommand("create")
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def parser():
"""Returns the parser for the module"""
prs = argparse.ArgumentParser()
@@ -24,81 +24,137 @@ def parser():
return prs
-@pytest.mark.parametrize('args,name,expected', [
- # Basic package cases
- (['/test-package'], 'test-package',
- [r'TestPackage(Package)', r'def install(self']),
- (['-n', 'test-named-package', 'file://example.tar.gz'],
- 'test-named-package',
- [r'TestNamedPackage(Package)', r'def install(self']),
- (['file://example.tar.gz'], 'example',
- [r'Example(Package)', r'def install(self']),
-
- # Template-specific cases
- (['-t', 'autoreconf', '/test-autoreconf'], 'test-autoreconf',
- [r'TestAutoreconf(AutotoolsPackage)', r"depends_on('autoconf",
- r'def autoreconf(self', r'def configure_args(self']),
- (['-t', 'autotools', '/test-autotools'], 'test-autotools',
- [r'TestAutotools(AutotoolsPackage)', r'def configure_args(self']),
- (['-t', 'bazel', '/test-bazel'], 'test-bazel',
- [r'TestBazel(Package)', r"depends_on('bazel", r'bazel()']),
- (['-t', 'bundle', '/test-bundle'], 'test-bundle',
- [r'TestBundle(BundlePackage)']),
- (['-t', 'cmake', '/test-cmake'], 'test-cmake',
- [r'TestCmake(CMakePackage)', r'def cmake_args(self']),
- (['-t', 'intel', '/test-intel'], 'test-intel',
- [r'TestIntel(IntelPackage)', r'setup_environment']),
- (['-t', 'makefile', '/test-makefile'], 'test-makefile',
- [r'TestMakefile(MakefilePackage)', r'def edit(self', r'makefile']),
- (['-t', 'meson', '/test-meson'], 'test-meson',
- [r'TestMeson(MesonPackage)', r'def meson_args(self']),
- (['-t', 'octave', '/test-octave'], 'octave-test-octave',
- [r'OctaveTestOctave(OctavePackage)', r"extends('octave",
- r"depends_on('octave"]),
- (['-t', 'perlbuild', '/test-perlbuild'], 'perl-test-perlbuild',
- [r'PerlTestPerlbuild(PerlPackage)', r"depends_on('perl-module-build",
- r'def configure_args(self']),
- (['-t', 'perlmake', '/test-perlmake'], 'perl-test-perlmake',
- [r'PerlTestPerlmake(PerlPackage)', r"depends_on('perl-",
- r'def configure_args(self']),
- (['-t', 'python', '/test-python'], 'py-test-python',
- [r'PyTestPython(PythonPackage)', r"depends_on('py-",
- r'def global_options(self', r'def install_options(self']),
- (['-t', 'qmake', '/test-qmake'], 'test-qmake',
- [r'TestQmake(QMakePackage)', r'def qmake_args(self']),
- (['-t', 'r', '/test-r'], 'r-test-r',
- [r'RTestR(RPackage)', r"depends_on('r-", r'def configure_args(self']),
- (['-t', 'scons', '/test-scons'], 'test-scons',
- [r'TestScons(SConsPackage)', r'def build_args(self']),
- (['-t', 'sip', '/test-sip'], 'py-test-sip',
- [r'PyTestSip(SIPPackage)', r'def configure_args(self']),
- (['-t', 'waf', '/test-waf'], 'test-waf',
- [r'TestWaf(WafPackage)', r'configure_args()'])
-])
+@pytest.mark.parametrize(
+ "args,name,expected",
+ [
+ # Basic package cases
+ (["/test-package"], "test-package", [r"TestPackage(Package)", r"def install(self"]),
+ (
+ ["-n", "test-named-package", "file://example.tar.gz"],
+ "test-named-package",
+ [r"TestNamedPackage(Package)", r"def install(self"],
+ ),
+ (["file://example.tar.gz"], "example", [r"Example(Package)", r"def install(self"]),
+ # Template-specific cases
+ (
+ ["-t", "autoreconf", "/test-autoreconf"],
+ "test-autoreconf",
+ [
+ r"TestAutoreconf(AutotoolsPackage)",
+ r"depends_on('autoconf",
+ r"def autoreconf(self",
+ r"def configure_args(self",
+ ],
+ ),
+ (
+ ["-t", "autotools", "/test-autotools"],
+ "test-autotools",
+ [r"TestAutotools(AutotoolsPackage)", r"def configure_args(self"],
+ ),
+ (
+ ["-t", "bazel", "/test-bazel"],
+ "test-bazel",
+ [r"TestBazel(Package)", r"depends_on('bazel", r"bazel()"],
+ ),
+ (["-t", "bundle", "/test-bundle"], "test-bundle", [r"TestBundle(BundlePackage)"]),
+ (
+ ["-t", "cmake", "/test-cmake"],
+ "test-cmake",
+ [r"TestCmake(CMakePackage)", r"def cmake_args(self"],
+ ),
+ (
+ ["-t", "intel", "/test-intel"],
+ "test-intel",
+ [r"TestIntel(IntelPackage)", r"setup_environment"],
+ ),
+ (
+ ["-t", "makefile", "/test-makefile"],
+ "test-makefile",
+ [r"TestMakefile(MakefilePackage)", r"def edit(self", r"makefile"],
+ ),
+ (
+ ["-t", "meson", "/test-meson"],
+ "test-meson",
+ [r"TestMeson(MesonPackage)", r"def meson_args(self"],
+ ),
+ (
+ ["-t", "octave", "/test-octave"],
+ "octave-test-octave",
+ [r"OctaveTestOctave(OctavePackage)", r"extends('octave", r"depends_on('octave"],
+ ),
+ (
+ ["-t", "perlbuild", "/test-perlbuild"],
+ "perl-test-perlbuild",
+ [
+ r"PerlTestPerlbuild(PerlPackage)",
+ r"depends_on('perl-module-build",
+ r"def configure_args(self",
+ ],
+ ),
+ (
+ ["-t", "perlmake", "/test-perlmake"],
+ "perl-test-perlmake",
+ [r"PerlTestPerlmake(PerlPackage)", r"depends_on('perl-", r"def configure_args(self"],
+ ),
+ (
+ ["-t", "python", "/test-python"],
+ "py-test-python",
+ [
+ r"PyTestPython(PythonPackage)",
+ r"depends_on('py-",
+ r"def global_options(self",
+ r"def install_options(self",
+ ],
+ ),
+ (
+ ["-t", "qmake", "/test-qmake"],
+ "test-qmake",
+ [r"TestQmake(QMakePackage)", r"def qmake_args(self"],
+ ),
+ (
+ ["-t", "r", "/test-r"],
+ "r-test-r",
+ [r"RTestR(RPackage)", r"depends_on('r-", r"def configure_args(self"],
+ ),
+ (
+ ["-t", "scons", "/test-scons"],
+ "test-scons",
+ [r"TestScons(SConsPackage)", r"def build_args(self"],
+ ),
+ (
+ ["-t", "sip", "/test-sip"],
+ "py-test-sip",
+ [r"PyTestSip(SIPPackage)", r"def configure_args(self"],
+ ),
+ (["-t", "waf", "/test-waf"], "test-waf", [r"TestWaf(WafPackage)", r"configure_args()"]),
+ ],
+)
def test_create_template(parser, mock_test_repo, args, name, expected):
"""Test template creation."""
repo, repodir = mock_test_repo
- constr_args = parser.parse_args(['--skip-editor'] + args)
+ constr_args = parser.parse_args(["--skip-editor"] + args)
spack.cmd.create.create(parser, constr_args)
filename = repo.filename_for_package_name(name)
assert os.path.exists(filename)
- with open(filename, 'r') as package_file:
- content = ' '.join(package_file.readlines())
+ with open(filename, "r") as package_file:
+ content = " ".join(package_file.readlines())
for entry in expected:
assert entry in content
-@pytest.mark.parametrize('name,expected', [
- (' ', 'name must be provided'),
- ('bad#name', 'name can only contain'),
-])
-def test_create_template_bad_name(
- parser, mock_test_repo, name, expected, capsys):
+@pytest.mark.parametrize(
+ "name,expected",
+ [
+ (" ", "name must be provided"),
+ ("bad#name", "name can only contain"),
+ ],
+)
+def test_create_template_bad_name(parser, mock_test_repo, name, expected, capsys):
"""Test template creation with bad name options."""
- constr_args = parser.parse_args(['--skip-editor', '-n', name])
+ constr_args = parser.parse_args(["--skip-editor", "-n", name])
with pytest.raises(SystemExit):
spack.cmd.create.create(parser, constr_args)
@@ -111,9 +167,8 @@ def test_build_system_guesser_no_stage(parser):
guesser = spack.cmd.create.BuildSystemGuesser()
    # Ensure we get the expected build system
- with pytest.raises(AttributeError,
- match="'NoneType' object has no attribute"):
- guesser(None, '/the/url/does/not/matter')
+ with pytest.raises(AttributeError, match="'NoneType' object has no attribute"):
+ guesser(None, "/the/url/does/not/matter")
def test_build_system_guesser_octave(parser):
@@ -122,7 +177,7 @@ def test_build_system_guesser_octave(parser):
identifies the build system rather than guessing the build system from
files contained in the archive.
"""
- url, expected = 'downloads.sourceforge.net/octave/', 'octave'
+ url, expected = "downloads.sourceforge.net/octave/", "octave"
guesser = spack.cmd.create.BuildSystemGuesser()
    # Ensure we get the expected build system
@@ -135,10 +190,13 @@ def test_build_system_guesser_octave(parser):
assert bs == expected
-@pytest.mark.parametrize('url,expected', [
- ('testname', 'testname'),
- ('file://example.com/archive.tar.gz', 'archive'),
-])
+@pytest.mark.parametrize(
+ "url,expected",
+ [
+ ("testname", "testname"),
+ ("file://example.com/archive.tar.gz", "archive"),
+ ],
+)
def test_get_name_urls(parser, url, expected):
"""Test get_name with different URLs."""
args = parser.parse_args([url])
@@ -148,12 +206,13 @@ def test_get_name_urls(parser, url, expected):
def test_get_name_error(parser, monkeypatch, capsys):
"""Test get_name UndetectableNameError exception path."""
+
def _parse_name_offset(path, v):
raise UndetectableNameError(path)
- monkeypatch.setattr(spack.url, 'parse_name_offset', _parse_name_offset)
+ monkeypatch.setattr(spack.url, "parse_name_offset", _parse_name_offset)
- url = 'downloads.sourceforge.net/noapp/'
+ url = "downloads.sourceforge.net/noapp/"
args = parser.parse_args([url])
with pytest.raises(SystemExit):
@@ -164,5 +223,5 @@ def test_get_name_error(parser, monkeypatch, capsys):
def test_no_url(parser):
"""Test creation of package without a URL."""
- args = parser.parse_args(['--skip-editor', '-n', 'create-new-package'])
+ args = parser.parse_args(["--skip-editor", "-n", "create-new-package"])
spack.cmd.create.create(parser, args)
diff --git a/lib/spack/spack/test/cmd/debug.py b/lib/spack/spack/test/cmd/debug.py
index ef00e28f48..ec52234517 100644
--- a/lib/spack/spack/test/cmd/debug.py
+++ b/lib/spack/spack/test/cmd/debug.py
@@ -15,30 +15,29 @@ import spack.platforms
from spack.main import SpackCommand, get_version
from spack.util.executable import which
-debug = SpackCommand('debug')
+debug = SpackCommand("debug")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.mark.db
def test_create_db_tarball(tmpdir, database):
with tmpdir.as_cwd():
- debug('create-db-tarball')
+ debug("create-db-tarball")
# get the first non-dotfile to avoid coverage files in the directory
files = os.listdir(os.getcwd())
- tarball_name = next(f for f in files if not f.startswith('.'))
+ tarball_name = next(f for f in files if not f.startswith("."))
# debug command made an archive
assert os.path.exists(tarball_name)
# print contents of archive
- tar = which('tar')
- contents = tar('tzf', tarball_name, output=str)
+ tar = which("tar")
+ contents = tar("tzf", tarball_name, output=str)
# DB file is included
- assert 'index.json' in contents
+ assert "index.json" in contents
# specfiles from all installs are included
for spec in database.query():
@@ -46,20 +45,18 @@ def test_create_db_tarball(tmpdir, database):
if spec.external:
continue
- spec_suffix = '%s/.spack/spec.json' % spec.dag_hash()
+ spec_suffix = "%s/.spack/spec.json" % spec.dag_hash()
assert spec_suffix in contents
def test_report():
- out = debug('report')
+ out = debug("report")
host_platform = spack.platforms.host()
- host_os = host_platform.operating_system('frontend')
- host_target = host_platform.target('frontend')
- architecture = spack.spec.ArchSpec(
- (str(host_platform), str(host_os), str(host_target))
- )
+ host_os = host_platform.operating_system("frontend")
+ host_target = host_platform.target("frontend")
+ architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target)))
assert get_version() in out
assert platform.python_version() in out
assert str(architecture) in out
- assert spack.config.get('config:concretizer') in out
+ assert spack.config.get("config:concretizer") in out
diff --git a/lib/spack/spack/test/cmd/dependencies.py b/lib/spack/spack/test/cmd/dependencies.py
index 50ce7b8dde..647c72f5f8 100644
--- a/lib/spack/spack/test/cmd/dependencies.py
+++ b/lib/spack/spack/test/cmd/dependencies.py
@@ -13,59 +13,51 @@ from llnl.util.tty.color import color_when
import spack.store
from spack.main import SpackCommand
-dependencies = SpackCommand('dependencies')
+dependencies = SpackCommand("dependencies")
-mpis = [
- 'low-priority-provider', 'mpich', 'mpich2', 'multi-provider-mpi', 'zmpi'
-]
-mpi_deps = ['fake']
+mpis = ["low-priority-provider", "mpich", "mpich2", "multi-provider-mpi", "zmpi"]
+mpi_deps = ["fake"]
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_direct_dependencies(mock_packages):
- out = dependencies('mpileaks')
- actual = set(re.split(r'\s+', out.strip()))
- expected = set(['callpath'] + mpis)
+ out = dependencies("mpileaks")
+ actual = set(re.split(r"\s+", out.strip()))
+ expected = set(["callpath"] + mpis)
assert expected == actual
def test_transitive_dependencies(mock_packages):
- out = dependencies('--transitive', 'mpileaks')
- actual = set(re.split(r'\s+', out.strip()))
- expected = set(
- ['callpath', 'dyninst', 'libdwarf', 'libelf'] + mpis + mpi_deps)
+ out = dependencies("--transitive", "mpileaks")
+ actual = set(re.split(r"\s+", out.strip()))
+ expected = set(["callpath", "dyninst", "libdwarf", "libelf"] + mpis + mpi_deps)
assert expected == actual
def test_transitive_dependencies_with_deptypes(mock_packages):
- out = dependencies('--transitive', '--deptype=link,run', 'dtbuild1')
- deps = set(re.split(r'\s+', out.strip()))
- assert set(['dtlink2', 'dtrun2']) == deps
+ out = dependencies("--transitive", "--deptype=link,run", "dtbuild1")
+ deps = set(re.split(r"\s+", out.strip()))
+ assert set(["dtlink2", "dtrun2"]) == deps
- out = dependencies('--transitive', '--deptype=build', 'dtbuild1')
- deps = set(re.split(r'\s+', out.strip()))
- assert set(['dtbuild2', 'dtlink2']) == deps
+ out = dependencies("--transitive", "--deptype=build", "dtbuild1")
+ deps = set(re.split(r"\s+", out.strip()))
+ assert set(["dtbuild2", "dtlink2"]) == deps
- out = dependencies('--transitive', '--deptype=link', 'dtbuild1')
- deps = set(re.split(r'\s+', out.strip()))
- assert set(['dtlink2']) == deps
+ out = dependencies("--transitive", "--deptype=link", "dtbuild1")
+ deps = set(re.split(r"\s+", out.strip()))
+ assert set(["dtlink2"]) == deps
@pytest.mark.db
def test_direct_installed_dependencies(mock_packages, database):
with color_when(False):
- out = dependencies('--installed', 'mpileaks^mpich')
+ out = dependencies("--installed", "mpileaks^mpich")
- lines = [
- line for line in out.strip().split('\n')
- if not line.startswith('--')
- ]
- hashes = set([re.split(r'\s+', line)[0] for line in lines])
+ lines = [line for line in out.strip().split("\n") if not line.startswith("--")]
+ hashes = set([re.split(r"\s+", line)[0] for line in lines])
- expected = set([spack.store.db.query_one(s).dag_hash(7)
- for s in ['mpich', 'callpath^mpich']])
+ expected = set([spack.store.db.query_one(s).dag_hash(7) for s in ["mpich", "callpath^mpich"]])
assert expected == hashes
@@ -73,16 +65,16 @@ def test_direct_installed_dependencies(mock_packages, database):
@pytest.mark.db
def test_transitive_installed_dependencies(mock_packages, database):
with color_when(False):
- out = dependencies('--installed', '--transitive', 'mpileaks^zmpi')
+ out = dependencies("--installed", "--transitive", "mpileaks^zmpi")
- lines = [
- line for line in out.strip().split('\n')
- if not line.startswith('--')
- ]
- hashes = set([re.split(r'\s+', line)[0] for line in lines])
+ lines = [line for line in out.strip().split("\n") if not line.startswith("--")]
+ hashes = set([re.split(r"\s+", line)[0] for line in lines])
- expected = set([spack.store.db.query_one(s).dag_hash(7)
- for s in ['zmpi', 'callpath^zmpi', 'fake',
- 'dyninst', 'libdwarf', 'libelf']])
+ expected = set(
+ [
+ spack.store.db.query_one(s).dag_hash(7)
+ for s in ["zmpi", "callpath^zmpi", "fake", "dyninst", "libdwarf", "libelf"]
+ ]
+ )
assert expected == hashes
diff --git a/lib/spack/spack/test/cmd/dependents.py b/lib/spack/spack/test/cmd/dependents.py
index d9b6d8a2f6..99052d7995 100644
--- a/lib/spack/spack/test/cmd/dependents.py
+++ b/lib/spack/spack/test/cmd/dependents.py
@@ -13,53 +13,56 @@ from llnl.util.tty.color import color_when
import spack.store
from spack.main import SpackCommand
-dependents = SpackCommand('dependents')
+dependents = SpackCommand("dependents")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_immediate_dependents(mock_packages):
- out = dependents('libelf')
- actual = set(re.split(r'\s+', out.strip()))
- assert actual == set([
- 'dyninst',
- 'libdwarf',
- 'patch-a-dependency',
- 'patch-several-dependencies',
- 'quantum-espresso',
- 'conditionally-patch-dependency'
- ])
+ out = dependents("libelf")
+ actual = set(re.split(r"\s+", out.strip()))
+ assert actual == set(
+ [
+ "dyninst",
+ "libdwarf",
+ "patch-a-dependency",
+ "patch-several-dependencies",
+ "quantum-espresso",
+ "conditionally-patch-dependency",
+ ]
+ )
def test_transitive_dependents(mock_packages):
- out = dependents('--transitive', 'libelf')
- actual = set(re.split(r'\s+', out.strip()))
- assert actual == set([
- 'callpath',
- 'dyninst',
- 'libdwarf',
- 'mpileaks',
- 'multivalue-variant',
- 'singlevalue-variant-dependent',
- 'patch-a-dependency', 'patch-several-dependencies',
- 'quantum-espresso',
- 'conditionally-patch-dependency'
- ])
+ out = dependents("--transitive", "libelf")
+ actual = set(re.split(r"\s+", out.strip()))
+ assert actual == set(
+ [
+ "callpath",
+ "dyninst",
+ "libdwarf",
+ "mpileaks",
+ "multivalue-variant",
+ "singlevalue-variant-dependent",
+ "patch-a-dependency",
+ "patch-several-dependencies",
+ "quantum-espresso",
+ "conditionally-patch-dependency",
+ ]
+ )
@pytest.mark.db
def test_immediate_installed_dependents(mock_packages, database):
with color_when(False):
- out = dependents('--installed', 'libelf')
+ out = dependents("--installed", "libelf")
- lines = [li for li in out.strip().split('\n') if not li.startswith('--')]
- hashes = set([re.split(r'\s+', li)[0] for li in lines])
+ lines = [li for li in out.strip().split("\n") if not li.startswith("--")]
+ hashes = set([re.split(r"\s+", li)[0] for li in lines])
- expected = set([spack.store.db.query_one(s).dag_hash(7)
- for s in ['dyninst', 'libdwarf']])
+ expected = set([spack.store.db.query_one(s).dag_hash(7) for s in ["dyninst", "libdwarf"]])
- libelf = spack.store.db.query_one('libelf')
+ libelf = spack.store.db.query_one("libelf")
expected = set([d.dag_hash(7) for d in libelf.dependents()])
assert expected == hashes
@@ -68,12 +71,16 @@ def test_immediate_installed_dependents(mock_packages, database):
@pytest.mark.db
def test_transitive_installed_dependents(mock_packages, database):
with color_when(False):
- out = dependents('--installed', '--transitive', 'fake')
+ out = dependents("--installed", "--transitive", "fake")
- lines = [li for li in out.strip().split('\n') if not li.startswith('--')]
- hashes = set([re.split(r'\s+', li)[0] for li in lines])
+ lines = [li for li in out.strip().split("\n") if not li.startswith("--")]
+ hashes = set([re.split(r"\s+", li)[0] for li in lines])
- expected = set([spack.store.db.query_one(s).dag_hash(7)
- for s in ['zmpi', 'callpath^zmpi', 'mpileaks^zmpi']])
+ expected = set(
+ [
+ spack.store.db.query_one(s).dag_hash(7)
+ for s in ["zmpi", "callpath^zmpi", "mpileaks^zmpi"]
+ ]
+ )
assert expected == hashes
diff --git a/lib/spack/spack/test/cmd/deprecate.py b/lib/spack/spack/test/cmd/deprecate.py
index 0abe0024d7..b1fe8ca059 100644
--- a/lib/spack/spack/test/cmd/deprecate.py
+++ b/lib/spack/spack/test/cmd/deprecate.py
@@ -11,78 +11,72 @@ import spack.store
from spack.database import InstallStatuses
from spack.main import SpackCommand
-install = SpackCommand('install')
-uninstall = SpackCommand('uninstall')
-deprecate = SpackCommand('deprecate')
-find = SpackCommand('find')
-activate = SpackCommand('activate')
+install = SpackCommand("install")
+uninstall = SpackCommand("uninstall")
+deprecate = SpackCommand("deprecate")
+find = SpackCommand("find")
+activate = SpackCommand("activate")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_deprecate(mock_packages, mock_archive, mock_fetch, install_mockery):
- install('libelf@0.8.13')
- install('libelf@0.8.10')
+ install("libelf@0.8.13")
+ install("libelf@0.8.10")
all_installed = spack.store.db.query()
assert len(all_installed) == 2
- deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.13')
+ deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
assert all_available == all_installed
- assert non_deprecated == spack.store.db.query('libelf@0.8.13')
+ assert non_deprecated == spack.store.db.query("libelf@0.8.13")
-def test_deprecate_fails_no_such_package(mock_packages, mock_archive,
- mock_fetch, install_mockery):
+def test_deprecate_fails_no_such_package(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that deprecating a spec that is not installed fails.
Tests that deprecating without the ``-i`` option in favor of a spec that
is not installed fails."""
- output = deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.13',
- fail_on_error=False)
+ output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.10' matches no installed packages" in output
- install('libelf@0.8.10')
+ install("libelf@0.8.10")
- output = deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.13',
- fail_on_error=False)
+ output = deprecate("-y", "libelf@0.8.10", "libelf@0.8.13", fail_on_error=False)
assert "Spec 'libelf@0.8.13' matches no installed packages" in output
-def test_deprecate_install(mock_packages, mock_archive, mock_fetch,
- install_mockery):
+def test_deprecate_install(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that the ```-i`` option allows us to deprecate in favor of a spec
that is not yet installed."""
- install('libelf@0.8.10')
+ install("libelf@0.8.10")
to_deprecate = spack.store.db.query()
assert len(to_deprecate) == 1
- deprecate('-y', '-i', 'libelf@0.8.10', 'libelf@0.8.13')
+ deprecate("-y", "-i", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
deprecated = spack.store.db.query(installed=InstallStatuses.DEPRECATED)
assert deprecated == to_deprecate
assert len(non_deprecated) == 1
- assert non_deprecated[0].satisfies('libelf@0.8.13')
+ assert non_deprecated[0].satisfies("libelf@0.8.13")
-def test_deprecate_deps(mock_packages, mock_archive, mock_fetch,
- install_mockery):
+def test_deprecate_deps(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Test that the deprecate command deprecates all dependencies properly."""
- install('libdwarf@20130729 ^libelf@0.8.13')
- install('libdwarf@20130207 ^libelf@0.8.10')
+ install("libdwarf@20130729 ^libelf@0.8.13")
+ install("libdwarf@20130207 ^libelf@0.8.10")
- new_spec = spack.spec.Spec('libdwarf@20130729^libelf@0.8.13').concretized()
- old_spec = spack.spec.Spec('libdwarf@20130207^libelf@0.8.10').concretized()
+ new_spec = spack.spec.Spec("libdwarf@20130729^libelf@0.8.13").concretized()
+ old_spec = spack.spec.Spec("libdwarf@20130207^libelf@0.8.10").concretized()
all_installed = spack.store.db.query()
- deprecate('-y', '-d', 'libdwarf@20130207', 'libdwarf@20130729')
+ deprecate("-y", "-d", "libdwarf@20130207", "libdwarf@20130729")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
@@ -95,56 +89,53 @@ def test_deprecate_deps(mock_packages, mock_archive, mock_fetch,
assert sorted(deprecated) == sorted(list(old_spec.traverse()))
-def test_deprecate_fails_active_extensions(mock_packages, mock_archive,
- mock_fetch, install_mockery):
+def test_deprecate_fails_active_extensions(
+ mock_packages, mock_archive, mock_fetch, install_mockery
+):
"""Tests that active extensions and their extendees cannot be
deprecated."""
- install('extendee')
- install('extension1')
- activate('extension1')
+ install("extendee")
+ install("extension1")
+ activate("extension1")
- output = deprecate('-yi', 'extendee', 'extendee@nonexistent',
- fail_on_error=False)
- assert 'extension1' in output
+ output = deprecate("-yi", "extendee", "extendee@nonexistent", fail_on_error=False)
+ assert "extension1" in output
assert "Deactivate extensions before deprecating" in output
- output = deprecate('-yiD', 'extension1', 'extension1@notaversion',
- fail_on_error=False)
- assert 'extendee' in output
- assert 'is an active extension of' in output
+ output = deprecate("-yiD", "extension1", "extension1@notaversion", fail_on_error=False)
+ assert "extendee" in output
+ assert "is an active extension of" in output
-def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch,
- install_mockery):
+def test_uninstall_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can still uninstall deprecated packages."""
- install('libelf@0.8.13')
- install('libelf@0.8.10')
+ install("libelf@0.8.13")
+ install("libelf@0.8.10")
- deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.13')
+ deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
- uninstall('-y', 'libelf@0.8.10')
+ uninstall("-y", "libelf@0.8.10")
assert spack.store.db.query() == spack.store.db.query(installed=any)
assert spack.store.db.query() == non_deprecated
-def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch,
- install_mockery):
+def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that we can re-deprecate a spec to change its deprecator."""
- install('libelf@0.8.13')
- install('libelf@0.8.12')
- install('libelf@0.8.10')
+ install("libelf@0.8.13")
+ install("libelf@0.8.12")
+ install("libelf@0.8.10")
- deprecated_spec = spack.spec.Spec('libelf@0.8.10').concretized()
+ deprecated_spec = spack.spec.Spec("libelf@0.8.10").concretized()
- deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.12')
+ deprecate("-y", "libelf@0.8.10", "libelf@0.8.12")
deprecator = spack.store.db.deprecator(deprecated_spec)
- assert deprecator == spack.spec.Spec('libelf@0.8.12').concretized()
+ assert deprecator == spack.spec.Spec("libelf@0.8.12").concretized()
- deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.13')
+ deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
@@ -152,27 +143,26 @@ def test_deprecate_already_deprecated(mock_packages, mock_archive, mock_fetch,
assert len(all_available) == 3
deprecator = spack.store.db.deprecator(deprecated_spec)
- assert deprecator == spack.spec.Spec('libelf@0.8.13').concretized()
+ assert deprecator == spack.spec.Spec("libelf@0.8.13").concretized()
-def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch,
- install_mockery):
+def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that when a deprecator spec is deprecated, its deprecatee specs
are updated to point to the new deprecator."""
- install('libelf@0.8.13')
- install('libelf@0.8.12')
- install('libelf@0.8.10')
+ install("libelf@0.8.13")
+ install("libelf@0.8.12")
+ install("libelf@0.8.10")
- first_deprecated_spec = spack.spec.Spec('libelf@0.8.10').concretized()
- second_deprecated_spec = spack.spec.Spec('libelf@0.8.12').concretized()
- final_deprecator = spack.spec.Spec('libelf@0.8.13').concretized()
+ first_deprecated_spec = spack.spec.Spec("libelf@0.8.10").concretized()
+ second_deprecated_spec = spack.spec.Spec("libelf@0.8.12").concretized()
+ final_deprecator = spack.spec.Spec("libelf@0.8.13").concretized()
- deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.12')
+ deprecate("-y", "libelf@0.8.10", "libelf@0.8.12")
deprecator = spack.store.db.deprecator(first_deprecated_spec)
assert deprecator == second_deprecated_spec
- deprecate('-y', 'libelf@0.8.12', 'libelf@0.8.13')
+ deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")
non_deprecated = spack.store.db.query()
all_available = spack.store.db.query(installed=any)
@@ -185,15 +175,14 @@ def test_deprecate_deprecator(mock_packages, mock_archive, mock_fetch,
assert second_deprecator == final_deprecator
-def test_concretize_deprecated(mock_packages, mock_archive, mock_fetch,
- install_mockery):
+def test_concretize_deprecated(mock_packages, mock_archive, mock_fetch, install_mockery):
"""Tests that the concretizer throws an error if we concretize to a
deprecated spec"""
- install('libelf@0.8.13')
- install('libelf@0.8.10')
+ install("libelf@0.8.13")
+ install("libelf@0.8.10")
- deprecate('-y', 'libelf@0.8.10', 'libelf@0.8.13')
+ deprecate("-y", "libelf@0.8.10", "libelf@0.8.13")
- spec = spack.spec.Spec('libelf@0.8.10')
+ spec = spack.spec.Spec("libelf@0.8.10")
with pytest.raises(spack.spec.SpecDeprecatedError):
spec.concretize()
diff --git a/lib/spack/spack/test/cmd/dev_build.py b/lib/spack/spack/test/cmd/dev_build.py
index bd1abdf2d6..cad706e648 100644
--- a/lib/spack/spack/test/cmd/dev_build.py
+++ b/lib/spack/spack/test/cmd/dev_build.py
@@ -14,62 +14,61 @@ import spack.environment as ev
import spack.spec
from spack.main import SpackCommand
-dev_build = SpackCommand('dev-build')
-install = SpackCommand('install')
-env = SpackCommand('env')
+dev_build = SpackCommand("dev-build")
+install = SpackCommand("install")
+env = SpackCommand("env")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_dev_build_basics(tmpdir, mock_packages, install_mockery):
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
spec.concretize()
- assert 'dev_path' in spec.variants
+ assert "dev_path" in spec.variants
with tmpdir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
- dev_build('dev-build-test-install@0.0.0')
+ dev_build("dev-build-test-install@0.0.0")
assert spec.package.filename in os.listdir(spec.prefix)
- with open(os.path.join(spec.prefix, spec.package.filename), 'r') as f:
+ with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
assert f.read() == spec.package.replacement_string
assert os.path.exists(str(tmpdir))
def test_dev_build_before(tmpdir, mock_packages, install_mockery):
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
spec.concretize()
with tmpdir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
- dev_build('-b', 'edit', 'dev-build-test-install@0.0.0')
+ dev_build("-b", "edit", "dev-build-test-install@0.0.0")
assert spec.package.filename in os.listdir(os.getcwd())
- with open(spec.package.filename, 'r') as f:
+ with open(spec.package.filename, "r") as f:
assert f.read() == spec.package.original_string
assert not os.path.exists(spec.prefix)
def test_dev_build_until(tmpdir, mock_packages, install_mockery):
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
spec.concretize()
with tmpdir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
- dev_build('-u', 'edit', 'dev-build-test-install@0.0.0')
+ dev_build("-u", "edit", "dev-build-test-install@0.0.0")
assert spec.package.filename in os.listdir(os.getcwd())
- with open(spec.package.filename, 'r') as f:
+ with open(spec.package.filename, "r") as f:
assert f.read() == spec.package.replacement_string
assert not os.path.exists(spec.prefix)
@@ -78,17 +77,17 @@ def test_dev_build_until(tmpdir, mock_packages, install_mockery):
def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
# Test that we ignore the last_phase argument if it is already last
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
spec.concretize()
with tmpdir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
- dev_build('-u', 'install', 'dev-build-test-install@0.0.0')
+ dev_build("-u", "install", "dev-build-test-install@0.0.0")
assert spec.package.filename in os.listdir(os.getcwd())
- with open(spec.package.filename, 'r') as f:
+ with open(spec.package.filename, "r") as f:
assert f.read() == spec.package.replacement_string
assert os.path.exists(spec.prefix)
@@ -97,28 +96,25 @@ def test_dev_build_until_last_phase(tmpdir, mock_packages, install_mockery):
def test_dev_build_before_until(tmpdir, mock_packages, install_mockery, capsys):
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
spec.concretize()
with tmpdir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
with pytest.raises(SystemExit):
- dev_build('-u', 'edit', '-b', 'edit',
- 'dev-build-test-install@0.0.0')
-
- bad_phase = 'phase_that_does_not_exist'
- not_allowed = 'is not a valid phase'
- not_installed = 'was not installed'
- out = dev_build('-u', bad_phase, 'dev-build-test-install@0.0.0',
- fail_on_error=False)
+ dev_build("-u", "edit", "-b", "edit", "dev-build-test-install@0.0.0")
+
+ bad_phase = "phase_that_does_not_exist"
+ not_allowed = "is not a valid phase"
+ not_installed = "was not installed"
+ out = dev_build("-u", bad_phase, "dev-build-test-install@0.0.0", fail_on_error=False)
assert bad_phase in out
assert not_allowed in out
assert not_installed in out
- out = dev_build('-b', bad_phase, 'dev-build-test-install@0.0.0',
- fail_on_error=False)
+ out = dev_build("-b", bad_phase, "dev-build-test-install@0.0.0", fail_on_error=False)
assert bad_phase in out
assert not_allowed in out
assert not_installed in out
@@ -126,7 +122,7 @@ def test_dev_build_before_until(tmpdir, mock_packages, install_mockery, capsys):
def print_spack_cc(*args):
# Eat arguments and print environment variable to test
- print(os.environ.get('CC', ''))
+ print(os.environ.get("CC", ""))
# `module unload cray-libsci` in test environment causes failure
@@ -137,70 +133,66 @@ def mock_module_noop(*args):
pass
-def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch,
- install_mockery, working_env):
- monkeypatch.setattr(os, 'execvp', print_spack_cc)
+def test_dev_build_drop_in(tmpdir, mock_packages, monkeypatch, install_mockery, working_env):
+ monkeypatch.setattr(os, "execvp", print_spack_cc)
- monkeypatch.setattr(spack.build_environment, 'module', mock_module_noop)
+ monkeypatch.setattr(spack.build_environment, "module", mock_module_noop)
with tmpdir.as_cwd():
- output = dev_build('-b', 'edit', '--drop-in', 'sh',
- 'dev-build-test-install@0.0.0')
+ output = dev_build("-b", "edit", "--drop-in", "sh", "dev-build-test-install@0.0.0")
assert "lib/spack/env" in output
-def test_dev_build_fails_already_installed(tmpdir, mock_packages,
- install_mockery):
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
+def test_dev_build_fails_already_installed(tmpdir, mock_packages, install_mockery):
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
spec.concretize()
with tmpdir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
- dev_build('dev-build-test-install@0.0.0')
- output = dev_build('dev-build-test-install@0.0.0', fail_on_error=False)
- assert 'Already installed in %s' % spec.prefix in output
+ dev_build("dev-build-test-install@0.0.0")
+ output = dev_build("dev-build-test-install@0.0.0", fail_on_error=False)
+ assert "Already installed in %s" % spec.prefix in output
def test_dev_build_fails_no_spec():
output = dev_build(fail_on_error=False)
- assert 'requires a package spec argument' in output
+ assert "requires a package spec argument" in output
def test_dev_build_fails_multiple_specs(mock_packages):
- output = dev_build('libelf', 'libdwarf', fail_on_error=False)
- assert 'only takes one spec' in output
+ output = dev_build("libelf", "libdwarf", fail_on_error=False)
+ assert "only takes one spec" in output
def test_dev_build_fails_nonexistent_package_name(mock_packages):
- output = dev_build('no_such_package', fail_on_error=False)
+ output = dev_build("no_such_package", fail_on_error=False)
assert "No package for 'no_such_package' was found" in output
def test_dev_build_fails_no_version(mock_packages):
- output = dev_build('dev-build-test-install', fail_on_error=False)
- assert 'dev-build spec must have a single, concrete version' in output
+ output = dev_build("dev-build-test-install", fail_on_error=False)
+ assert "dev-build spec must have a single, concrete version" in output
-def test_dev_build_env(tmpdir, mock_packages, install_mockery,
- mutable_mock_env_path):
+def test_dev_build_env(tmpdir, mock_packages, install_mockery, mutable_mock_env_path):
"""Test Spack does dev builds for packages in develop section of env."""
# setup dev-build-test-install package for dev build
- build_dir = tmpdir.mkdir('build')
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' %
- build_dir)
+ build_dir = tmpdir.mkdir("build")
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % build_dir)
spec.concretize()
with build_dir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
# setup environment
- envdir = tmpdir.mkdir('env')
+ envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
- with open('spack.yaml', 'w') as f:
- f.write("""\
+ with open("spack.yaml", "w") as f:
+ f.write(
+ """\
env:
specs:
- dev-build-test-install@0.0.0
@@ -209,34 +201,38 @@ env:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: %s
-""" % os.path.relpath(str(build_dir), start=str(envdir)))
+"""
+ % os.path.relpath(str(build_dir), start=str(envdir))
+ )
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
assert spec.package.filename in os.listdir(spec.prefix)
- with open(os.path.join(spec.prefix, spec.package.filename), 'r') as f:
+ with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
assert f.read() == spec.package.replacement_string
-def test_dev_build_env_version_mismatch(tmpdir, mock_packages, install_mockery,
- mutable_mock_env_path):
+def test_dev_build_env_version_mismatch(
+ tmpdir, mock_packages, install_mockery, mutable_mock_env_path
+):
"""Test Spack constraints concretization by develop specs."""
# setup dev-build-test-install package for dev build
- build_dir = tmpdir.mkdir('build')
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' % tmpdir)
+ build_dir = tmpdir.mkdir("build")
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % tmpdir)
spec.concretize()
with build_dir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
# setup environment
- envdir = tmpdir.mkdir('env')
+ envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
- with open('spack.yaml', 'w') as f:
- f.write("""\
+ with open("spack.yaml", "w") as f:
+ f.write(
+ """\
env:
specs:
- dev-build-test-install@0.0.0
@@ -245,42 +241,46 @@ env:
dev-build-test-install:
spec: dev-build-test-install@1.1.1
path: %s
-""" % build_dir)
+"""
+ % build_dir
+ )
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
with pytest.raises(RuntimeError):
install()
-def test_dev_build_multiple(tmpdir, mock_packages, install_mockery,
- mutable_mock_env_path, mock_fetch):
+def test_dev_build_multiple(
+ tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch
+):
"""Test spack install with multiple developer builds"""
# setup dev-build-test-install package for dev build
# Wait to concretize inside the environment to set dev_path on the specs;
# without the environment, the user would need to set dev_path for both the
# root and dependency if they wanted a dev build for both.
- leaf_dir = tmpdir.mkdir('leaf')
- leaf_spec = spack.spec.Spec('dev-build-test-install@0.0.0')
+ leaf_dir = tmpdir.mkdir("leaf")
+ leaf_spec = spack.spec.Spec("dev-build-test-install@0.0.0")
leaf_pkg_cls = spack.repo.path.get_pkg_class(leaf_spec.name)
with leaf_dir.as_cwd():
- with open(leaf_pkg_cls.filename, 'w') as f:
+ with open(leaf_pkg_cls.filename, "w") as f:
f.write(leaf_pkg_cls.original_string)
# setup dev-build-test-dependent package for dev build
# don't concretize outside environment -- dev info will be wrong
- root_dir = tmpdir.mkdir('root')
- root_spec = spack.spec.Spec('dev-build-test-dependent@0.0.0')
+ root_dir = tmpdir.mkdir("root")
+ root_spec = spack.spec.Spec("dev-build-test-dependent@0.0.0")
root_pkg_cls = spack.repo.path.get_pkg_class(root_spec.name)
with root_dir.as_cwd():
- with open(root_pkg_cls.filename, 'w') as f:
+ with open(root_pkg_cls.filename, "w") as f:
f.write(root_pkg_cls.original_string)
# setup environment
- envdir = tmpdir.mkdir('env')
+ envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
- with open('spack.yaml', 'w') as f:
- f.write("""\
+ with open("spack.yaml", "w") as f:
+ f.write(
+ """\
env:
specs:
- dev-build-test-install@0.0.0
@@ -293,10 +293,12 @@ env:
dev-build-test-dependent:
spec: dev-build-test-dependent@0.0.0
path: %s
-""" % (leaf_dir, root_dir))
+"""
+ % (leaf_dir, root_dir)
+ )
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
# Do concretization inside environment for dev info
leaf_spec.concretize()
root_spec.concretize()
@@ -306,30 +308,32 @@ env:
for spec in (leaf_spec, root_spec):
assert spec.package.filename in os.listdir(spec.prefix)
- with open(os.path.join(spec.prefix, spec.package.filename), 'r') as f:
+ with open(os.path.join(spec.prefix, spec.package.filename), "r") as f:
assert f.read() == spec.package.replacement_string
-def test_dev_build_env_dependency(tmpdir, mock_packages, install_mockery,
- mock_fetch, mutable_mock_env_path):
+def test_dev_build_env_dependency(
+ tmpdir, mock_packages, install_mockery, mock_fetch, mutable_mock_env_path
+):
"""
Test non-root specs in an environment are properly marked for dev builds.
"""
# setup dev-build-test-install package for dev build
- build_dir = tmpdir.mkdir('build')
- spec = spack.spec.Spec('dependent-of-dev-build@0.0.0')
- dep_spec = spack.spec.Spec('dev-build-test-install')
+ build_dir = tmpdir.mkdir("build")
+ spec = spack.spec.Spec("dependent-of-dev-build@0.0.0")
+ dep_spec = spack.spec.Spec("dev-build-test-install")
with build_dir.as_cwd():
dep_pkg_cls = spack.repo.path.get_pkg_class(dep_spec.name)
- with open(dep_pkg_cls.filename, 'w') as f:
+ with open(dep_pkg_cls.filename, "w") as f:
f.write(dep_pkg_cls.original_string)
# setup environment
- envdir = tmpdir.mkdir('env')
+ envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
- with open('spack.yaml', 'w') as f:
- f.write("""\
+ with open("spack.yaml", "w") as f:
+ f.write(
+ """\
env:
specs:
- dependent-of-dev-build@0.0.0
@@ -338,10 +342,12 @@ env:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: %s
-""" % os.path.relpath(str(build_dir), start=str(envdir)))
+"""
+ % os.path.relpath(str(build_dir), start=str(envdir))
+ )
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
# concretize in the environment to get the dev build info
# equivalent to setting dev_build and dev_path variants
# on all specs above
@@ -354,39 +360,38 @@ env:
assert os.path.exists(spec.prefix)
# Ensure variants set properly; ensure build_dir is absolute and normalized
- for dep in (dep_spec, spec['dev-build-test-install']):
- assert dep.satisfies('dev_path=%s' % build_dir)
- assert spec.satisfies('^dev_path=*')
+ for dep in (dep_spec, spec["dev-build-test-install"]):
+ assert dep.satisfies("dev_path=%s" % build_dir)
+ assert spec.satisfies("^dev_path=*")
-@pytest.mark.parametrize('test_spec', ['dev-build-test-install',
- 'dependent-of-dev-build'])
+@pytest.mark.parametrize("test_spec", ["dev-build-test-install", "dependent-of-dev-build"])
def test_dev_build_rebuild_on_source_changes(
- test_spec, tmpdir, mock_packages, install_mockery,
- mutable_mock_env_path, mock_fetch):
+ test_spec, tmpdir, mock_packages, install_mockery, mutable_mock_env_path, mock_fetch
+):
"""Test dev builds rebuild on changes to source code.
``test_spec = dev-build-test-install`` tests rebuild for changes to package
``test_spec = dependent-of-dev-build`` tests rebuild for changes to dep
"""
# setup dev-build-test-install package for dev build
- build_dir = tmpdir.mkdir('build')
- spec = spack.spec.Spec('dev-build-test-install@0.0.0 dev_path=%s' %
- build_dir)
+ build_dir = tmpdir.mkdir("build")
+ spec = spack.spec.Spec("dev-build-test-install@0.0.0 dev_path=%s" % build_dir)
spec.concretize()
def reset_string():
with build_dir.as_cwd():
- with open(spec.package.filename, 'w') as f:
+ with open(spec.package.filename, "w") as f:
f.write(spec.package.original_string)
reset_string()
# setup environment
- envdir = tmpdir.mkdir('env')
+ envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
- with open('spack.yaml', 'w') as f:
- f.write("""\
+ with open("spack.yaml", "w") as f:
+ f.write(
+ """\
env:
specs:
- %s@0.0.0
@@ -395,15 +400,17 @@ env:
dev-build-test-install:
spec: dev-build-test-install@0.0.0
path: %s
-""" % (test_spec, build_dir))
+"""
+ % (test_spec, build_dir)
+ )
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
reset_string() # so the package will accept rebuilds
- fs.touch(os.path.join(str(build_dir), 'test'))
+ fs.touch(os.path.join(str(build_dir), "test"))
output = install()
- assert 'Installing %s' % test_spec in output
+ assert "Installing %s" % test_spec in output
diff --git a/lib/spack/spack/test/cmd/develop.py b/lib/spack/spack/test/cmd/develop.py
index 325815451b..2c2faaf467 100644
--- a/lib/spack/spack/test/cmd/develop.py
+++ b/lib/spack/spack/test/cmd/develop.py
@@ -14,16 +14,13 @@ import spack.environment as ev
import spack.spec
from spack.main import SpackCommand
-develop = SpackCommand('develop')
-env = SpackCommand('env')
+develop = SpackCommand("develop")
+env = SpackCommand("env")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-@pytest.mark.usefixtures(
- 'mutable_mock_env_path', 'mock_packages', 'mock_fetch', 'config'
-)
+@pytest.mark.usefixtures("mutable_mock_env_path", "mock_packages", "mock_fetch", "config")
class TestDevelop(object):
def check_develop(self, env, spec, path=None):
path = path or spec.name
@@ -31,77 +28,77 @@ class TestDevelop(object):
# check in memory representation
assert spec.name in env.dev_specs
dev_specs_entry = env.dev_specs[spec.name]
- assert dev_specs_entry['path'] == path
- assert dev_specs_entry['spec'] == str(spec)
+ assert dev_specs_entry["path"] == path
+ assert dev_specs_entry["spec"] == str(spec)
# check yaml representation
yaml = ev.config_dict(env.yaml)
- assert spec.name in yaml['develop']
- yaml_entry = yaml['develop'][spec.name]
- assert yaml_entry['spec'] == str(spec)
+ assert spec.name in yaml["develop"]
+ yaml_entry = yaml["develop"][spec.name]
+ assert yaml_entry["spec"] == str(spec)
if path == spec.name:
# default paths aren't written out
- assert 'path' not in yaml_entry
+ assert "path" not in yaml_entry
else:
- assert yaml_entry['path'] == path
+ assert yaml_entry["path"] == path
def test_develop_no_path_no_clone(self):
- env('create', 'test')
- with ev.read('test') as e:
+ env("create", "test")
+ with ev.read("test") as e:
# develop checks that the path exists
- fs.mkdirp(os.path.join(e.path, 'mpich'))
- develop('--no-clone', 'mpich@1.0')
- self.check_develop(e, spack.spec.Spec('mpich@1.0'))
+ fs.mkdirp(os.path.join(e.path, "mpich"))
+ develop("--no-clone", "mpich@1.0")
+ self.check_develop(e, spack.spec.Spec("mpich@1.0"))
def test_develop_no_clone(self, tmpdir):
- env('create', 'test')
- with ev.read('test') as e:
- develop('--no-clone', '-p', str(tmpdir), 'mpich@1.0')
- self.check_develop(e, spack.spec.Spec('mpich@1.0'), str(tmpdir))
+ env("create", "test")
+ with ev.read("test") as e:
+ develop("--no-clone", "-p", str(tmpdir), "mpich@1.0")
+ self.check_develop(e, spack.spec.Spec("mpich@1.0"), str(tmpdir))
def test_develop(self):
- env('create', 'test')
- with ev.read('test') as e:
- develop('mpich@1.0')
- self.check_develop(e, spack.spec.Spec('mpich@1.0'))
+ env("create", "test")
+ with ev.read("test") as e:
+ develop("mpich@1.0")
+ self.check_develop(e, spack.spec.Spec("mpich@1.0"))
def test_develop_no_args(self):
- env('create', 'test')
- with ev.read('test') as e:
+ env("create", "test")
+ with ev.read("test") as e:
# develop and remove it
- develop('mpich@1.0')
- shutil.rmtree(os.path.join(e.path, 'mpich'))
+ develop("mpich@1.0")
+ shutil.rmtree(os.path.join(e.path, "mpich"))
# test develop with no args
develop()
- self.check_develop(e, spack.spec.Spec('mpich@1.0'))
+ self.check_develop(e, spack.spec.Spec("mpich@1.0"))
def test_develop_twice(self):
- env('create', 'test')
- with ev.read('test') as e:
- develop('mpich@1.0')
- self.check_develop(e, spack.spec.Spec('mpich@1.0'))
+ env("create", "test")
+ with ev.read("test") as e:
+ develop("mpich@1.0")
+ self.check_develop(e, spack.spec.Spec("mpich@1.0"))
- develop('mpich@1.0')
+ develop("mpich@1.0")
# disk representation isn't updated unless we write
# second develop command doesn't change it, so we don't write
# but we check disk representation
e.write()
- self.check_develop(e, spack.spec.Spec('mpich@1.0'))
+ self.check_develop(e, spack.spec.Spec("mpich@1.0"))
assert len(e.dev_specs) == 1
def test_develop_update_path(self, tmpdir):
- env('create', 'test')
- with ev.read('test') as e:
- develop('mpich@1.0')
- develop('-p', str(tmpdir), 'mpich@1.0')
- self.check_develop(e, spack.spec.Spec('mpich@1.0'), str(tmpdir))
+ env("create", "test")
+ with ev.read("test") as e:
+ develop("mpich@1.0")
+ develop("-p", str(tmpdir), "mpich@1.0")
+ self.check_develop(e, spack.spec.Spec("mpich@1.0"), str(tmpdir))
assert len(e.dev_specs) == 1
def test_develop_update_spec(self):
- env('create', 'test')
- with ev.read('test') as e:
- develop('mpich@1.0')
- develop('mpich@2.0')
- self.check_develop(e, spack.spec.Spec('mpich@2.0'))
+ env("create", "test")
+ with ev.read("test") as e:
+ develop("mpich@1.0")
+ develop("mpich@2.0")
+ self.check_develop(e, spack.spec.Spec("mpich@2.0"))
assert len(e.dev_specs) == 1
diff --git a/lib/spack/spack/test/cmd/diff.py b/lib/spack/spack/test/cmd/diff.py
index 1568f391c4..9189c45ede 100644
--- a/lib/spack/spack/test/cmd/diff.py
+++ b/lib/spack/spack/test/cmd/diff.py
@@ -13,69 +13,69 @@ import spack.main
import spack.store
import spack.util.spack_json as sjson
-install_cmd = spack.main.SpackCommand('install')
-diff_cmd = spack.main.SpackCommand('diff')
-find_cmd = spack.main.SpackCommand('find')
+install_cmd = spack.main.SpackCommand("install")
+diff_cmd = spack.main.SpackCommand("diff")
+find_cmd = spack.main.SpackCommand("find")
def test_diff_cmd(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test that we can install two packages and diff them"""
- specA = spack.spec.Spec('mpileaks').concretized()
- specB = spack.spec.Spec('mpileaks+debug').concretized()
+ specA = spack.spec.Spec("mpileaks").concretized()
+ specB = spack.spec.Spec("mpileaks+debug").concretized()
# Specs should be the same as themselves
c = spack.cmd.diff.compare_specs(specA, specA, to_string=True)
- assert len(c['a_not_b']) == 0
- assert len(c['b_not_a']) == 0
+ assert len(c["a_not_b"]) == 0
+ assert len(c["b_not_a"]) == 0
# Calculate the comparison (c)
c = spack.cmd.diff.compare_specs(specA, specB, to_string=True)
    # these particular diffs should have the same length b/c there aren't
# any node differences -- just value differences.
- assert len(c['a_not_b']) == len(c['b_not_a'])
+ assert len(c["a_not_b"]) == len(c["b_not_a"])
    # ensure that variant diffs are here in the result
- assert ['variant_value', 'mpileaks debug False'] in c['a_not_b']
- assert ['variant_value', 'mpileaks debug True'] in c['b_not_a']
+ assert ["variant_value", "mpileaks debug False"] in c["a_not_b"]
+ assert ["variant_value", "mpileaks debug True"] in c["b_not_a"]
    # ensure that hash diffs are here in the result
- assert ['hash', 'mpileaks %s' % specA.dag_hash()] in c['a_not_b']
- assert ['hash', 'mpileaks %s' % specB.dag_hash()] in c['b_not_a']
+ assert ["hash", "mpileaks %s" % specA.dag_hash()] in c["a_not_b"]
+ assert ["hash", "mpileaks %s" % specB.dag_hash()] in c["b_not_a"]
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test with and without the --first option"""
- install_cmd('mpileaks')
+ install_cmd("mpileaks")
# Only one version of mpileaks will work
- diff_cmd('mpileaks', 'mpileaks')
+ diff_cmd("mpileaks", "mpileaks")
# 2 specs are required for a diff
with pytest.raises(spack.main.SpackCommandError):
- diff_cmd('mpileaks')
+ diff_cmd("mpileaks")
with pytest.raises(spack.main.SpackCommandError):
- diff_cmd('mpileaks', 'mpileaks', 'mpileaks')
+ diff_cmd("mpileaks", "mpileaks", "mpileaks")
# Ensure they are the same
- assert "No differences" in diff_cmd('mpileaks', 'mpileaks')
- output = diff_cmd('--json', 'mpileaks', 'mpileaks')
+ assert "No differences" in diff_cmd("mpileaks", "mpileaks")
+ output = diff_cmd("--json", "mpileaks", "mpileaks")
result = sjson.load(output)
- assert not result['a_not_b']
- assert not result['b_not_a']
+ assert not result["a_not_b"]
+ assert not result["b_not_a"]
- assert 'mpileaks' in result['a_name']
- assert 'mpileaks' in result['b_name']
+ assert "mpileaks" in result["a_name"]
+ assert "mpileaks" in result["b_name"]
# spot check attributes in the intersection to ensure they describe the spec
assert "intersect" in result
- assert all(["node", dep] in result["intersect"] for dep in (
- "mpileaks", "callpath", "dyninst", "libelf", "libdwarf", "mpich"
- ))
+ assert all(
+ ["node", dep] in result["intersect"]
+ for dep in ("mpileaks", "callpath", "dyninst", "libelf", "libdwarf", "mpich")
+ )
assert all(
len([diff for diff in result["intersect"] if diff[0] == attr]) == 6
for attr in (
@@ -91,26 +91,26 @@ def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
)
# After we install another version, it should ask us to disambiguate
- install_cmd('mpileaks+debug')
+ install_cmd("mpileaks+debug")
# There are two versions of mpileaks
with pytest.raises(spack.main.SpackCommandError):
- diff_cmd('mpileaks', 'mpileaks+debug')
+ diff_cmd("mpileaks", "mpileaks+debug")
# But if we tell it to use the first, it won't try to disambiguate
- assert "variant" in diff_cmd('--first', 'mpileaks', 'mpileaks+debug')
+ assert "variant" in diff_cmd("--first", "mpileaks", "mpileaks+debug")
# This matches them exactly
- debug_hash = find_cmd('--format', '{hash}', 'mpileaks+debug').strip()
- no_debug_hashes = find_cmd('--format', '{hash}', 'mpileaks~debug')
+ debug_hash = find_cmd("--format", "{hash}", "mpileaks+debug").strip()
+ no_debug_hashes = find_cmd("--format", "{hash}", "mpileaks~debug")
no_debug_hash = no_debug_hashes.split()[0]
- output = diff_cmd("--json",
- "mpileaks/{0}".format(debug_hash),
- "mpileaks/{0}".format(no_debug_hash))
+ output = diff_cmd(
+ "--json", "mpileaks/{0}".format(debug_hash), "mpileaks/{0}".format(no_debug_hash)
+ )
result = sjson.load(output)
- assert ['hash', 'mpileaks %s' % debug_hash] in result['a_not_b']
- assert ['variant_value', 'mpileaks debug True'] in result['a_not_b']
+ assert ["hash", "mpileaks %s" % debug_hash] in result["a_not_b"]
+ assert ["variant_value", "mpileaks debug True"] in result["a_not_b"]
- assert ['hash', 'mpileaks %s' % no_debug_hash] in result['b_not_a']
- assert ['variant_value', 'mpileaks debug False'] in result['b_not_a']
+ assert ["hash", "mpileaks %s" % no_debug_hash] in result["b_not_a"]
+ assert ["variant_value", "mpileaks debug False"] in result["b_not_a"]
diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py
index 0aa3ae24f7..978ff987da 100644
--- a/lib/spack/spack/test/cmd/env.py
+++ b/lib/spack/spack/test/cmd/env.py
@@ -34,172 +34,173 @@ from spack.version import Version
# TODO-27021
# everything here uses the mock_env_path
pytestmark = [
- pytest.mark.usefixtures('mutable_mock_env_path', 'config', 'mutable_mock_repo'),
+ pytest.mark.usefixtures("mutable_mock_env_path", "config", "mutable_mock_repo"),
pytest.mark.maybeslow,
- pytest.mark.skipif(sys.platform == 'win32', reason='Envs unsupported on Window')
+    pytest.mark.skipif(sys.platform == "win32", reason="Envs unsupported on Windows"),
]
-env = SpackCommand('env')
-install = SpackCommand('install')
-add = SpackCommand('add')
-remove = SpackCommand('remove')
-concretize = SpackCommand('concretize')
-stage = SpackCommand('stage')
-uninstall = SpackCommand('uninstall')
-find = SpackCommand('find')
+env = SpackCommand("env")
+install = SpackCommand("install")
+add = SpackCommand("add")
+remove = SpackCommand("remove")
+concretize = SpackCommand("concretize")
+stage = SpackCommand("stage")
+uninstall = SpackCommand("uninstall")
+find = SpackCommand("find")
sep = os.sep
def check_mpileaks_and_deps_in_view(viewdir):
"""Check that the expected install directories exist."""
- assert os.path.exists(str(viewdir.join('.spack', 'mpileaks')))
- assert os.path.exists(str(viewdir.join('.spack', 'libdwarf')))
+ assert os.path.exists(str(viewdir.join(".spack", "mpileaks")))
+ assert os.path.exists(str(viewdir.join(".spack", "libdwarf")))
def check_viewdir_removal(viewdir):
"""Check that the uninstall/removal worked."""
- assert (not os.path.exists(str(viewdir.join('.spack'))) or
- os.listdir(str(viewdir.join('.spack'))) == ['projections.yaml'])
+ assert not os.path.exists(str(viewdir.join(".spack"))) or os.listdir(
+ str(viewdir.join(".spack"))
+ ) == ["projections.yaml"]
def test_add():
- e = ev.create('test')
- e.add('mpileaks')
- assert Spec('mpileaks') in e.user_specs
+ e = ev.create("test")
+ e.add("mpileaks")
+ assert Spec("mpileaks") in e.user_specs
def test_env_add_virtual():
- env('create', 'test')
+ env("create", "test")
- e = ev.read('test')
- e.add('mpi')
+ e = ev.read("test")
+ e.add("mpi")
e.concretize()
hashes = e.concretized_order
assert len(hashes) == 1
spec = e.specs_by_hash[hashes[0]]
- assert spec.satisfies('mpi')
+ assert spec.satisfies("mpi")
def test_env_add_nonexistant_fails():
- env('create', 'test')
+ env("create", "test")
- e = ev.read('test')
- with pytest.raises(ev.SpackEnvironmentError, match=r'no such package'):
- e.add('thispackagedoesnotexist')
+ e = ev.read("test")
+ with pytest.raises(ev.SpackEnvironmentError, match=r"no such package"):
+ e.add("thispackagedoesnotexist")
def test_env_list(mutable_mock_env_path):
- env('create', 'foo')
- env('create', 'bar')
- env('create', 'baz')
+ env("create", "foo")
+ env("create", "bar")
+ env("create", "baz")
- out = env('list')
+ out = env("list")
- assert 'foo' in out
- assert 'bar' in out
- assert 'baz' in out
+ assert "foo" in out
+ assert "bar" in out
+ assert "baz" in out
# make sure `spack env list` skips invalid things in var/spack/env
- mutable_mock_env_path.join('.DS_Store').ensure(file=True)
- out = env('list')
+ mutable_mock_env_path.join(".DS_Store").ensure(file=True)
+ out = env("list")
- assert 'foo' in out
- assert 'bar' in out
- assert 'baz' in out
- assert '.DS_Store' not in out
+ assert "foo" in out
+ assert "bar" in out
+ assert "baz" in out
+ assert ".DS_Store" not in out
def test_env_remove(capfd):
- env('create', 'foo')
- env('create', 'bar')
+ env("create", "foo")
+ env("create", "bar")
- out = env('list')
- assert 'foo' in out
- assert 'bar' in out
+ out = env("list")
+ assert "foo" in out
+ assert "bar" in out
- foo = ev.read('foo')
+ foo = ev.read("foo")
with foo:
with pytest.raises(spack.main.SpackCommandError):
with capfd.disabled():
- env('remove', '-y', 'foo')
- assert 'foo' in env('list')
+ env("remove", "-y", "foo")
+ assert "foo" in env("list")
- env('remove', '-y', 'foo')
- out = env('list')
- assert 'foo' not in out
- assert 'bar' in out
+ env("remove", "-y", "foo")
+ out = env("list")
+ assert "foo" not in out
+ assert "bar" in out
- env('remove', '-y', 'bar')
- out = env('list')
- assert 'foo' not in out
- assert 'bar' not in out
+ env("remove", "-y", "bar")
+ out = env("list")
+ assert "foo" not in out
+ assert "bar" not in out
def test_concretize():
- e = ev.create('test')
- e.add('mpileaks')
+ e = ev.create("test")
+ e.add("mpileaks")
e.concretize()
env_specs = e._get_environment_specs()
- assert any(x.name == 'mpileaks' for x in env_specs)
+ assert any(x.name == "mpileaks" for x in env_specs)
def test_env_specs_partition(install_mockery, mock_fetch):
- e = ev.create('test')
- e.add('cmake-client')
+ e = ev.create("test")
+ e.add("cmake-client")
e.concretize()
# Single not installed root spec.
roots_already_installed, roots_to_install = e._partition_roots_by_install_status()
assert len(roots_already_installed) == 0
assert len(roots_to_install) == 1
- assert roots_to_install[0].name == 'cmake-client'
+ assert roots_to_install[0].name == "cmake-client"
# Single installed root.
e.install_all()
roots_already_installed, roots_to_install = e._partition_roots_by_install_status()
assert len(roots_already_installed) == 1
- assert roots_already_installed[0].name == 'cmake-client'
+ assert roots_already_installed[0].name == "cmake-client"
assert len(roots_to_install) == 0
# One installed root, one not installed root.
- e.add('mpileaks')
+ e.add("mpileaks")
e.concretize()
roots_already_installed, roots_to_install = e._partition_roots_by_install_status()
assert len(roots_already_installed) == 1
assert len(roots_to_install) == 1
- assert roots_already_installed[0].name == 'cmake-client'
- assert roots_to_install[0].name == 'mpileaks'
+ assert roots_already_installed[0].name == "cmake-client"
+ assert roots_to_install[0].name == "mpileaks"
def test_env_install_all(install_mockery, mock_fetch):
- e = ev.create('test')
- e.add('cmake-client')
+ e = ev.create("test")
+ e.add("cmake-client")
e.concretize()
e.install_all()
env_specs = e._get_environment_specs()
- spec = next(x for x in env_specs if x.name == 'cmake-client')
+ spec = next(x for x in env_specs if x.name == "cmake-client")
assert spec.installed
def test_env_install_single_spec(install_mockery, mock_fetch):
- env('create', 'test')
- install = SpackCommand('install')
+ env("create", "test")
+ install = SpackCommand("install")
- e = ev.read('test')
+ e = ev.read("test")
with e:
- install('cmake-client')
+ install("cmake-client")
- e = ev.read('test')
- assert e.user_specs[0].name == 'cmake-client'
- assert e.concretized_user_specs[0].name == 'cmake-client'
- assert e.specs_by_hash[e.concretized_order[0]].name == 'cmake-client'
+ e = ev.read("test")
+ assert e.user_specs[0].name == "cmake-client"
+ assert e.concretized_user_specs[0].name == "cmake-client"
+ assert e.specs_by_hash[e.concretized_order[0]].name == "cmake-client"
def test_env_roots_marked_explicit(install_mockery, mock_fetch):
- install = SpackCommand('install')
- install('dependent-install')
+ install = SpackCommand("install")
+ install("dependent-install")
# Check one explicit, one implicit install
dependent = spack.store.db.query(explicit=True)
@@ -207,8 +208,8 @@ def test_env_roots_marked_explicit(install_mockery, mock_fetch):
assert len(dependent) == 1
assert len(dependency) == 1
- env('create', 'test')
- with ev.read('test') as e:
+ env("create", "test")
+ with ev.read("test") as e:
# make implicit install a root of the env
e.add(dependency[0].name)
e.concretize()
@@ -218,14 +219,13 @@ def test_env_roots_marked_explicit(install_mockery, mock_fetch):
assert len(explicit) == 2
-def test_env_modifications_error_on_activate(
- install_mockery, mock_fetch, monkeypatch, capfd):
- env('create', 'test')
- install = SpackCommand('install')
+def test_env_modifications_error_on_activate(install_mockery, mock_fetch, monkeypatch, capfd):
+ env("create", "test")
+ install = SpackCommand("install")
- e = ev.read('test')
+ e = ev.read("test")
with e:
- install('cmake-client')
+ install("cmake-client")
def setup_error(pkg, env):
raise RuntimeError("cmake-client had issues!")
@@ -240,41 +240,40 @@ def test_env_modifications_error_on_activate(
assert "Warning: couldn't get environment settings" in err
-def test_activate_adds_transitive_run_deps_to_path(
- install_mockery, mock_fetch, monkeypatch):
- env('create', 'test')
- install = SpackCommand('install')
+def test_activate_adds_transitive_run_deps_to_path(install_mockery, mock_fetch, monkeypatch):
+ env("create", "test")
+ install = SpackCommand("install")
- e = ev.read('test')
+ e = ev.read("test")
with e:
- install('depends-on-run-env')
+ install("depends-on-run-env")
env_variables = {}
spack.environment.shell.activate(e).apply_modifications(env_variables)
- assert env_variables['DEPENDENCY_ENV_VAR'] == '1'
+ assert env_variables["DEPENDENCY_ENV_VAR"] == "1"
def test_env_install_same_spec_twice(install_mockery, mock_fetch):
- env('create', 'test')
+ env("create", "test")
- e = ev.read('test')
+ e = ev.read("test")
with e:
# The first installation outputs the package prefix, updates the view
- out = install('cmake-client')
- assert 'Updating view at' in out
+ out = install("cmake-client")
+ assert "Updating view at" in out
# The second installation reports all packages already installed
- out = install('cmake-client')
- assert 'already installed' in out
+ out = install("cmake-client")
+ assert "already installed" in out
def test_env_definition_symlink(install_mockery, mock_fetch, tmpdir):
- filepath = str(tmpdir.join('spack.yaml'))
- filepath_mid = str(tmpdir.join('spack_mid.yaml'))
+ filepath = str(tmpdir.join("spack.yaml"))
+ filepath_mid = str(tmpdir.join("spack_mid.yaml"))
- env('create', 'test')
- e = ev.read('test')
- e.add('mpileaks')
+ env("create", "test")
+ e = ev.read("test")
+ e.add("mpileaks")
os.rename(e.manifest_path, filepath)
os.symlink(filepath, filepath_mid)
@@ -287,235 +286,240 @@ def test_env_definition_symlink(install_mockery, mock_fetch, tmpdir):
assert os.path.islink(filepath_mid)
-def test_env_install_two_specs_same_dep(
- install_mockery, mock_fetch, tmpdir, capsys):
+def test_env_install_two_specs_same_dep(install_mockery, mock_fetch, tmpdir, capsys):
"""Test installation of two packages that share a dependency with no
connection and the second specifying the dependency as a 'build'
dependency.
"""
- path = tmpdir.join('spack.yaml')
+ path = tmpdir.join("spack.yaml")
with tmpdir.as_cwd():
- with open(str(path), 'w') as f:
- f.write("""\
+ with open(str(path), "w") as f:
+ f.write(
+ """\
env:
specs:
- a
- depb
-""")
+"""
+ )
- env('create', 'test', 'spack.yaml')
+ env("create", "test", "spack.yaml")
- with ev.read('test'):
+ with ev.read("test"):
with capsys.disabled():
out = install()
# Ensure both packages reach install phase processing and are installed
out = str(out)
- assert 'depb: Executing phase:' in out
- assert 'a: Executing phase:' in out
+ assert "depb: Executing phase:" in out
+ assert "a: Executing phase:" in out
- depb = spack.store.db.query_one('depb', installed=True)
- assert depb, 'Expected depb to be installed'
+ depb = spack.store.db.query_one("depb", installed=True)
+ assert depb, "Expected depb to be installed"
- a = spack.store.db.query_one('a', installed=True)
- assert a, 'Expected a to be installed'
+ a = spack.store.db.query_one("a", installed=True)
+ assert a, "Expected a to be installed"
def test_remove_after_concretize():
- e = ev.create('test')
+ e = ev.create("test")
- e.add('mpileaks')
+ e.add("mpileaks")
e.concretize()
- e.add('python')
+ e.add("python")
e.concretize()
- e.remove('mpileaks')
- assert Spec('mpileaks') not in e.user_specs
+ e.remove("mpileaks")
+ assert Spec("mpileaks") not in e.user_specs
env_specs = e._get_environment_specs()
- assert any(s.name == 'mpileaks' for s in env_specs)
+ assert any(s.name == "mpileaks" for s in env_specs)
- e.add('mpileaks')
- assert any(s.name == 'mpileaks' for s in e.user_specs)
+ e.add("mpileaks")
+ assert any(s.name == "mpileaks" for s in e.user_specs)
- e.remove('mpileaks', force=True)
- assert Spec('mpileaks') not in e.user_specs
+ e.remove("mpileaks", force=True)
+ assert Spec("mpileaks") not in e.user_specs
env_specs = e._get_environment_specs()
- assert not any(s.name == 'mpileaks' for s in env_specs)
+ assert not any(s.name == "mpileaks" for s in env_specs)
def test_remove_command():
- env('create', 'test')
- assert 'test' in env('list')
-
- with ev.read('test'):
- add('mpileaks')
- assert 'mpileaks' in find()
- assert 'mpileaks@' not in find()
- assert 'mpileaks@' not in find('--show-concretized')
-
- with ev.read('test'):
- remove('mpileaks')
- assert 'mpileaks' not in find()
- assert 'mpileaks@' not in find()
- assert 'mpileaks@' not in find('--show-concretized')
-
- with ev.read('test'):
- add('mpileaks')
- assert 'mpileaks' in find()
- assert 'mpileaks@' not in find()
- assert 'mpileaks@' not in find('--show-concretized')
-
- with ev.read('test'):
+ env("create", "test")
+ assert "test" in env("list")
+
+ with ev.read("test"):
+ add("mpileaks")
+ assert "mpileaks" in find()
+ assert "mpileaks@" not in find()
+ assert "mpileaks@" not in find("--show-concretized")
+
+ with ev.read("test"):
+ remove("mpileaks")
+ assert "mpileaks" not in find()
+ assert "mpileaks@" not in find()
+ assert "mpileaks@" not in find("--show-concretized")
+
+ with ev.read("test"):
+ add("mpileaks")
+ assert "mpileaks" in find()
+ assert "mpileaks@" not in find()
+ assert "mpileaks@" not in find("--show-concretized")
+
+ with ev.read("test"):
concretize()
- assert 'mpileaks' in find()
- assert 'mpileaks@' not in find()
- assert 'mpileaks@' in find('--show-concretized')
+ assert "mpileaks" in find()
+ assert "mpileaks@" not in find()
+ assert "mpileaks@" in find("--show-concretized")
- with ev.read('test'):
- remove('mpileaks')
- assert 'mpileaks' not in find()
+ with ev.read("test"):
+ remove("mpileaks")
+ assert "mpileaks" not in find()
# removed but still in last concretized specs
- assert 'mpileaks@' in find('--show-concretized')
+ assert "mpileaks@" in find("--show-concretized")
- with ev.read('test'):
+ with ev.read("test"):
concretize()
- assert 'mpileaks' not in find()
- assert 'mpileaks@' not in find()
+ assert "mpileaks" not in find()
+ assert "mpileaks@" not in find()
# now the lockfile is regenerated and it's gone.
- assert 'mpileaks@' not in find('--show-concretized')
+ assert "mpileaks@" not in find("--show-concretized")
def test_environment_status(capsys, tmpdir):
with tmpdir.as_cwd():
with capsys.disabled():
- assert 'No active environment' in env('status')
+ assert "No active environment" in env("status")
- with ev.create('test'):
+ with ev.create("test"):
with capsys.disabled():
- assert 'In environment test' in env('status')
+ assert "In environment test" in env("status")
- with ev.Environment('local_dir'):
+ with ev.Environment("local_dir"):
with capsys.disabled():
- assert os.path.join(os.getcwd(), 'local_dir') in env('status')
+ assert os.path.join(os.getcwd(), "local_dir") in env("status")
- e = ev.Environment('myproject')
+ e = ev.Environment("myproject")
e.write()
- with tmpdir.join('myproject').as_cwd():
+ with tmpdir.join("myproject").as_cwd():
with e:
with capsys.disabled():
- assert 'in current directory' in env('status')
+ assert "in current directory" in env("status")
def test_env_status_broken_view(
- mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
- install_mockery, tmpdir
+ mutable_mock_env_path, mock_archive, mock_fetch, mock_packages, install_mockery, tmpdir
):
env_dir = str(tmpdir)
with ev.Environment(env_dir):
- install('trivial-install-test-package')
+ install("trivial-install-test-package")
# switch to a new repo that doesn't include the installed package
# test that Spack detects the missing package and warns the user
with spack.repo.use_repositories(MockPackageMultiRepo()):
with ev.Environment(env_dir):
- output = env('status')
- assert 'includes out of date packages or repos' in output
+ output = env("status")
+ assert "includes out of date packages or repos" in output
# Test that the warning goes away when it's fixed
with ev.Environment(env_dir):
- output = env('status')
- assert 'includes out of date packages or repos' not in output
+ output = env("status")
+ assert "includes out of date packages or repos" not in output
def test_env_activate_broken_view(
- mutable_mock_env_path, mock_archive, mock_fetch, mock_packages,
- install_mockery
+ mutable_mock_env_path, mock_archive, mock_fetch, mock_packages, install_mockery
):
- with ev.create('test'):
- install('trivial-install-test-package')
+ with ev.create("test"):
+ install("trivial-install-test-package")
# switch to a new repo that doesn't include the installed package
# test that Spack detects the missing package and fails gracefully
new_repo = MockPackageMultiRepo()
with spack.repo.use_repositories(new_repo):
with pytest.raises(SpackCommandError):
- env('activate', '--sh', 'test')
+ env("activate", "--sh", "test")
# test replacing repo fixes it
- env('activate', '--sh', 'test')
+ env("activate", "--sh", "test")
def test_to_lockfile_dict():
- e = ev.create('test')
- e.add('mpileaks')
+ e = ev.create("test")
+ e.add("mpileaks")
e.concretize()
context_dict = e._to_lockfile_dict()
- e_copy = ev.create('test_copy')
+ e_copy = ev.create("test_copy")
e_copy._read_lockfile_dict(context_dict)
assert e.specs_by_hash == e_copy.specs_by_hash
def test_env_repo():
- e = ev.create('test')
- e.add('mpileaks')
+ e = ev.create("test")
+ e.add("mpileaks")
e.write()
- with ev.read('test'):
+ with ev.read("test"):
concretize()
- pkg_cls = e.repo.get_pkg_class('mpileaks')
- assert pkg_cls.name == 'mpileaks'
- assert pkg_cls.namespace == 'builtin.mock'
+ pkg_cls = e.repo.get_pkg_class("mpileaks")
+ assert pkg_cls.name == "mpileaks"
+ assert pkg_cls.namespace == "builtin.mock"
def test_user_removed_spec():
"""Ensure a user can remove from any position in the spack.yaml file."""
- initial_yaml = StringIO("""\
+ initial_yaml = StringIO(
+ """\
env:
specs:
- mpileaks
- hypre
- libelf
-""")
+"""
+ )
- before = ev.create('test', initial_yaml)
+ before = ev.create("test", initial_yaml)
before.concretize()
before.write()
# user modifies yaml externally to spack and removes hypre
- with open(before.manifest_path, 'w') as f:
- f.write("""\
+ with open(before.manifest_path, "w") as f:
+ f.write(
+ """\
env:
specs:
- mpileaks
- libelf
-""")
+"""
+ )
- after = ev.read('test')
+ after = ev.read("test")
after.concretize()
after.write()
env_specs = after._get_environment_specs()
- read = ev.read('test')
+ read = ev.read("test")
env_specs = read._get_environment_specs()
- assert not any(x.name == 'hypre' for x in env_specs)
+ assert not any(x.name == "hypre" for x in env_specs)
def test_init_from_lockfile(tmpdir):
"""Test that an environment can be instantiated from a lockfile."""
- initial_yaml = StringIO("""\
+ initial_yaml = StringIO(
+ """\
env:
specs:
- mpileaks
- hypre
- libelf
-""")
- e1 = ev.create('test', initial_yaml)
+"""
+ )
+ e1 = ev.create("test", initial_yaml)
e1.concretize()
e1.write()
@@ -534,14 +538,16 @@ env:
def test_init_from_yaml(tmpdir):
"""Test that an environment can be instantiated from a lockfile."""
- initial_yaml = StringIO("""\
+ initial_yaml = StringIO(
+ """\
env:
specs:
- mpileaks
- hypre
- libelf
-""")
- e1 = ev.create('test', initial_yaml)
+"""
+ )
+ e1 = ev.create("test", initial_yaml)
e1.concretize()
e1.write()
@@ -555,36 +561,39 @@ env:
assert not e2.specs_by_hash
-@pytest.mark.usefixtures('config')
-def test_env_view_external_prefix(
- tmpdir_factory, mutable_database, mock_packages
-):
- fake_prefix = tmpdir_factory.mktemp('a-prefix')
- fake_bin = fake_prefix.join('bin')
+@pytest.mark.usefixtures("config")
+def test_env_view_external_prefix(tmpdir_factory, mutable_database, mock_packages):
+ fake_prefix = tmpdir_factory.mktemp("a-prefix")
+ fake_bin = fake_prefix.join("bin")
fake_bin.ensure(dir=True)
- initial_yaml = StringIO("""\
+ initial_yaml = StringIO(
+ """\
env:
specs:
- a
view: true
-""")
+"""
+ )
- external_config = StringIO("""\
+ external_config = StringIO(
+ """\
packages:
a:
externals:
- spec: a@2.0
prefix: {a_prefix}
buildable: false
-""".format(a_prefix=str(fake_prefix)))
+""".format(
+ a_prefix=str(fake_prefix)
+ )
+ )
external_config_dict = spack.util.spack_yaml.load_config(external_config)
- test_scope = spack.config.InternalConfigScope(
- 'env-external-test', data=external_config_dict)
+ test_scope = spack.config.InternalConfigScope("env-external-test", data=external_config_dict)
with spack.config.override(test_scope):
- e = ev.create('test', initial_yaml)
+ e = ev.create("test", initial_yaml)
e.concretize()
# Note: normally installing specs in a test environment requires doing
# a fake install, but not for external specs since no actions are
@@ -598,33 +607,35 @@ packages:
e.add_default_view_to_env(env_mod)
env_variables = {}
env_mod.apply_modifications(env_variables)
- assert str(fake_bin) in env_variables['PATH']
+ assert str(fake_bin) in env_variables["PATH"]
def test_init_with_file_and_remove(tmpdir):
"""Ensure a user can remove from any position in the spack.yaml file."""
- path = tmpdir.join('spack.yaml')
+ path = tmpdir.join("spack.yaml")
with tmpdir.as_cwd():
- with open(str(path), 'w') as f:
- f.write("""\
+ with open(str(path), "w") as f:
+ f.write(
+ """\
env:
specs:
- mpileaks
-""")
+"""
+ )
- env('create', 'test', 'spack.yaml')
+ env("create", "test", "spack.yaml")
- out = env('list')
- assert 'test' in out
+ out = env("list")
+ assert "test" in out
- with ev.read('test'):
- assert 'mpileaks' in find()
+ with ev.read("test"):
+ assert "mpileaks" in find()
- env('remove', '-y', 'test')
+ env("remove", "-y", "test")
- out = env('list')
- assert 'test' not in out
+ out = env("list")
+ assert "test" not in out
def test_env_with_config():
@@ -636,18 +647,17 @@ env:
mpileaks:
version: [2.2]
"""
- _env_create('test', StringIO(test_config))
+ _env_create("test", StringIO(test_config))
- e = ev.read('test')
+ e = ev.read("test")
with e:
e.concretize()
- assert any(x.satisfies('mpileaks@2.2')
- for x in e._get_environment_specs())
+ assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
def test_with_config_bad_include(capfd):
- env_name = 'test_bad_include'
+ env_name = "test_bad_include"
test_config = """\
spack:
include:
@@ -663,9 +673,9 @@ spack:
out, err = capfd.readouterr()
- assert 'missing include' in err
- assert '/no/such/directory' in err
- assert os.path.join('no', 'such', 'file.yaml') in err
+ assert "missing include" in err
+ assert "/no/such/directory" in err
+ assert os.path.join("no", "such", "file.yaml") in err
assert ev.active_environment() is None
@@ -679,36 +689,36 @@ def test_env_with_include_config_files_same_basename():
[libelf, mpileaks]
"""
- _env_create('test', StringIO(test_config))
- e = ev.read('test')
+ _env_create("test", StringIO(test_config))
+ e = ev.read("test")
- fs.mkdirp(os.path.join(e.path, 'path', 'to'))
- with open(os.path.join(
- e.path,
- './path/to/included-config.yaml'), 'w') as f:
- f.write("""\
+ fs.mkdirp(os.path.join(e.path, "path", "to"))
+ with open(os.path.join(e.path, "./path/to/included-config.yaml"), "w") as f:
+ f.write(
+ """\
packages:
libelf:
version: [0.8.10]
- """)
+ """
+ )
- fs.mkdirp(os.path.join(e.path, 'second', 'path', 'to'))
- with open(os.path.join(
- e.path,
- './second/path/to/include-config.yaml'), 'w') as f:
- f.write("""\
+ fs.mkdirp(os.path.join(e.path, "second", "path", "to"))
+ with open(os.path.join(e.path, "./second/path/to/include-config.yaml"), "w") as f:
+ f.write(
+ """\
packages:
mpileaks:
version: [2.2]
- """)
+ """
+ )
with e:
e.concretize()
environment_specs = e._get_environment_specs(False)
- assert(environment_specs[0].satisfies('libelf@0.8.10'))
- assert(environment_specs[1].satisfies('mpileaks@2.2'))
+ assert environment_specs[0].satisfies("libelf@0.8.10")
+ assert environment_specs[1].satisfies("mpileaks@2.2")
def test_env_with_included_config_file():
@@ -719,79 +729,88 @@ env:
specs:
- mpileaks
"""
- _env_create('test', StringIO(test_config))
- e = ev.read('test')
+ _env_create("test", StringIO(test_config))
+ e = ev.read("test")
- with open(os.path.join(e.path, 'included-config.yaml'), 'w') as f:
- f.write("""\
+ with open(os.path.join(e.path, "included-config.yaml"), "w") as f:
+ f.write(
+ """\
packages:
mpileaks:
version: [2.2]
-""")
+"""
+ )
with e:
e.concretize()
- assert any(x.satisfies('mpileaks@2.2')
- for x in e._get_environment_specs())
+ assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
def test_env_with_included_config_scope():
- config_scope_path = os.path.join(ev.root('test'), 'config')
- test_config = """\
+ config_scope_path = os.path.join(ev.root("test"), "config")
+ test_config = (
+ """\
env:
include:
- %s
specs:
- mpileaks
-""" % config_scope_path
+"""
+ % config_scope_path
+ )
- _env_create('test', StringIO(test_config))
+ _env_create("test", StringIO(test_config))
- e = ev.read('test')
+ e = ev.read("test")
fs.mkdirp(config_scope_path)
- with open(os.path.join(config_scope_path, 'packages.yaml'), 'w') as f:
- f.write("""\
+ with open(os.path.join(config_scope_path, "packages.yaml"), "w") as f:
+ f.write(
+ """\
packages:
mpileaks:
version: [2.2]
-""")
+"""
+ )
with e:
e.concretize()
- assert any(x.satisfies('mpileaks@2.2')
- for x in e._get_environment_specs())
+ assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
def test_env_with_included_config_var_path():
- config_var_path = os.path.join('$tempdir', 'included-config.yaml')
- test_config = """\
+ config_var_path = os.path.join("$tempdir", "included-config.yaml")
+ test_config = (
+ """\
env:
include:
- %s
specs:
- mpileaks
-""" % config_var_path
+"""
+ % config_var_path
+ )
- _env_create('test', StringIO(test_config))
- e = ev.read('test')
+ _env_create("test", StringIO(test_config))
+ e = ev.read("test")
config_real_path = substitute_path_variables(config_var_path)
fs.mkdirp(os.path.dirname(config_real_path))
- with open(config_real_path, 'w') as f:
- f.write("""\
+ with open(config_real_path, "w") as f:
+ f.write(
+ """\
packages:
mpileaks:
version: [2.2]
-""")
+"""
+ )
with e:
e.concretize()
- assert any(x.satisfies('mpileaks@2.2')
- for x in e._get_environment_specs())
+ assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
def test_env_config_precedence():
@@ -805,28 +824,28 @@ env:
specs:
- mpileaks
"""
- _env_create('test', StringIO(test_config))
- e = ev.read('test')
+ _env_create("test", StringIO(test_config))
+ e = ev.read("test")
- with open(os.path.join(e.path, 'included-config.yaml'), 'w') as f:
- f.write("""\
+ with open(os.path.join(e.path, "included-config.yaml"), "w") as f:
+ f.write(
+ """\
packages:
mpileaks:
version: [2.2]
libelf:
version: [0.8.11]
-""")
+"""
+ )
with e:
e.concretize()
# ensure included scope took effect
- assert any(
- x.satisfies('mpileaks@2.2') for x in e._get_environment_specs())
+ assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
# ensure env file takes precedence
- assert any(
- x.satisfies('libelf@0.8.12') for x in e._get_environment_specs())
+ assert any(x.satisfies("libelf@0.8.12") for x in e._get_environment_specs())
def test_included_config_precedence():
@@ -838,78 +857,82 @@ env:
specs:
- mpileaks
"""
- _env_create('test', StringIO(test_config))
- e = ev.read('test')
+ _env_create("test", StringIO(test_config))
+ e = ev.read("test")
- with open(os.path.join(e.path, 'high-config.yaml'), 'w') as f:
- f.write("""\
+ with open(os.path.join(e.path, "high-config.yaml"), "w") as f:
+ f.write(
+ """\
packages:
libelf:
version: [0.8.10] # this should override libelf version below
-""")
+"""
+ )
- with open(os.path.join(e.path, 'low-config.yaml'), 'w') as f:
- f.write("""\
+ with open(os.path.join(e.path, "low-config.yaml"), "w") as f:
+ f.write(
+ """\
packages:
mpileaks:
version: [2.2]
libelf:
version: [0.8.12]
-""")
+"""
+ )
with e:
e.concretize()
- assert any(
- x.satisfies('mpileaks@2.2') for x in e._get_environment_specs())
+ assert any(x.satisfies("mpileaks@2.2") for x in e._get_environment_specs())
- assert any(
- [x.satisfies('libelf@0.8.10') for x in e._get_environment_specs()])
+ assert any([x.satisfies("libelf@0.8.10") for x in e._get_environment_specs()])
def test_bad_env_yaml_format(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
spacks:
- mpileaks
-""")
+"""
+ )
with tmpdir.as_cwd():
with pytest.raises(spack.config.ConfigFormatError) as e:
- env('create', 'test', './spack.yaml')
- assert './spack.yaml:2' in str(e)
+ env("create", "test", "./spack.yaml")
+ assert "./spack.yaml:2" in str(e)
assert "'spacks' was unexpected" in str(e)
def test_env_loads(install_mockery, mock_fetch):
- env('create', 'test')
+ env("create", "test")
- with ev.read('test'):
- add('mpileaks')
+ with ev.read("test"):
+ add("mpileaks")
concretize()
- install('--fake')
+ install("--fake")
- with ev.read('test'):
- env('loads')
+ with ev.read("test"):
+ env("loads")
- e = ev.read('test')
+ e = ev.read("test")
- loads_file = os.path.join(e.path, 'loads')
+ loads_file = os.path.join(e.path, "loads")
assert os.path.exists(loads_file)
with open(loads_file) as f:
contents = f.read()
- assert 'module load mpileaks' in contents
+ assert "module load mpileaks" in contents
@pytest.mark.disable_clean_stage_check
def test_stage(mock_stage, mock_fetch, install_mockery):
- env('create', 'test')
- with ev.read('test'):
- add('mpileaks')
- add('zmpi')
+ env("create", "test")
+ with ev.read("test"):
+ add("mpileaks")
+ add("zmpi")
concretize()
stage()
@@ -918,108 +941,110 @@ def test_stage(mock_stage, mock_fetch, install_mockery):
def check_stage(spec):
spec = Spec(spec).concretized()
for dep in spec.traverse():
- stage_name = "{0}{1}-{2}-{3}".format(stage_prefix, dep.name,
- dep.version, dep.dag_hash())
+ stage_name = "{0}{1}-{2}-{3}".format(
+ stage_prefix, dep.name, dep.version, dep.dag_hash()
+ )
assert os.path.isdir(os.path.join(root, stage_name))
- check_stage('mpileaks')
- check_stage('zmpi')
+ check_stage("mpileaks")
+ check_stage("zmpi")
def test_env_commands_die_with_no_env_arg():
# these fail in argparse when given no arg
with pytest.raises(SystemExit):
- env('create')
+ env("create")
with pytest.raises(SystemExit):
- env('remove')
+ env("remove")
# these have an optional env arg and raise errors via tty.die
with pytest.raises(spack.main.SpackCommandError):
- env('loads')
+ env("loads")
# This should NOT raise an error with no environment
# it just tells the user there isn't an environment
- env('status')
+ env("status")
def test_env_blocks_uninstall(mock_stage, mock_fetch, install_mockery):
- env('create', 'test')
- with ev.read('test'):
- add('mpileaks')
- install('--fake')
+ env("create", "test")
+ with ev.read("test"):
+ add("mpileaks")
+ install("--fake")
- out = uninstall('mpileaks', fail_on_error=False)
+ out = uninstall("mpileaks", fail_on_error=False)
assert uninstall.returncode == 1
- assert 'used by the following environments' in out
+ assert "used by the following environments" in out
def test_roots_display_with_variants():
- env('create', 'test')
- with ev.read('test'):
- add('boost+shared')
+ env("create", "test")
+ with ev.read("test"):
+ add("boost+shared")
- with ev.read('test'):
+ with ev.read("test"):
out = find(output=str)
assert "boost +shared" in out
def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
- env('create', 'test')
- with ev.read('test'):
- add('mpileaks')
- add('libelf')
- install('--fake')
+ env("create", "test")
+ with ev.read("test"):
+ add("mpileaks")
+ add("libelf")
+ install("--fake")
- test = ev.read('test')
- assert any(s.name == 'mpileaks' for s in test.specs_by_hash.values())
- assert any(s.name == 'libelf' for s in test.specs_by_hash.values())
+ test = ev.read("test")
+ assert any(s.name == "mpileaks" for s in test.specs_by_hash.values())
+ assert any(s.name == "libelf" for s in test.specs_by_hash.values())
- with ev.read('test'):
- uninstall('-ya')
+ with ev.read("test"):
+ uninstall("-ya")
- test = ev.read('test')
+ test = ev.read("test")
assert not test.specs_by_hash
assert not test.concretized_order
assert not test.user_specs
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_indirect_build_dep():
"""Simple case of X->Y->Z where Y is a build/link dep and Z is a
build-only dep. Make sure this concrete DAG is preserved when writing the
environment out and reading it back.
"""
- default = ('build', 'link')
- build_only = ('build',)
+ default = ("build", "link")
+ build_only = ("build",)
mock_repo = MockPackageMultiRepo()
- z = mock_repo.add_package('z', [], [])
- y = mock_repo.add_package('y', [z], [build_only])
- mock_repo.add_package('x', [y], [default])
+ z = mock_repo.add_package("z", [], [])
+ y = mock_repo.add_package("y", [z], [build_only])
+ mock_repo.add_package("x", [y], [default])
def noop(*args):
pass
- setattr(mock_repo, 'dump_provenance', noop)
+
+ setattr(mock_repo, "dump_provenance", noop)
with spack.repo.use_repositories(mock_repo):
- x_spec = Spec('x')
+ x_spec = Spec("x")
x_concretized = x_spec.concretized()
- _env_create('test', with_view=False)
- e = ev.read('test')
+ _env_create("test", with_view=False)
+ e = ev.read("test")
e.add(x_spec)
e.concretize()
e.write()
- e_read = ev.read('test')
- x_env_hash, = e_read.concretized_order
+ e_read = ev.read("test")
+ (x_env_hash,) = e_read.concretized_order
x_env_spec = e_read.specs_by_hash[x_env_hash]
assert x_env_spec == x_concretized
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_store_different_build_deps():
r"""Ensure that an environment can store two instances of a build-only
dependency::
@@ -1031,39 +1056,40 @@ def test_store_different_build_deps():
z1
"""
- default = ('build', 'link')
- build_only = ('build',)
+ default = ("build", "link")
+ build_only = ("build",)
mock_repo = MockPackageMultiRepo()
- z = mock_repo.add_package('z', [], [])
- y = mock_repo.add_package('y', [z], [build_only])
- mock_repo.add_package('x', [y, z], [default, build_only])
+ z = mock_repo.add_package("z", [], [])
+ y = mock_repo.add_package("y", [z], [build_only])
+ mock_repo.add_package("x", [y, z], [default, build_only])
def noop(*args):
pass
- setattr(mock_repo, 'dump_provenance', noop)
+
+ setattr(mock_repo, "dump_provenance", noop)
with spack.repo.use_repositories(mock_repo):
- y_spec = Spec('y ^z@3')
+ y_spec = Spec("y ^z@3")
y_concretized = y_spec.concretized()
- x_spec = Spec('x ^z@2')
+ x_spec = Spec("x ^z@2")
x_concretized = x_spec.concretized()
# Even though x chose a different 'z', the y it chooses should be identical
# *aside* from the dependency on 'z'. The dag_hash() will show the difference
# in build dependencies.
- assert x_concretized['y'].eq_node(y_concretized)
- assert x_concretized['y'].dag_hash() != y_concretized.dag_hash()
+ assert x_concretized["y"].eq_node(y_concretized)
+ assert x_concretized["y"].dag_hash() != y_concretized.dag_hash()
- _env_create('test', with_view=False)
- e = ev.read('test')
+ _env_create("test", with_view=False)
+ e = ev.read("test")
e.add(y_spec)
e.add(x_spec)
e.concretize()
e.write()
- e_read = ev.read('test')
+ e_read = ev.read("test")
y_env_hash, x_env_hash = e_read.concretized_order
y_read = e_read.specs_by_hash[y_env_hash]
@@ -1071,227 +1097,222 @@ def test_store_different_build_deps():
# make sure the DAG hashes and build deps are preserved after
# a round trip to/from the lockfile
- assert x_read['z'] != y_read['z']
- assert x_read['z'].dag_hash() != y_read['z'].dag_hash()
+ assert x_read["z"] != y_read["z"]
+ assert x_read["z"].dag_hash() != y_read["z"].dag_hash()
- assert x_read['y'].eq_node(y_read)
- assert x_read['y'].dag_hash() != y_read.dag_hash()
+ assert x_read["y"].eq_node(y_read)
+ assert x_read["y"].dag_hash() != y_read.dag_hash()
-def test_env_updates_view_install(
- tmpdir, mock_stage, mock_fetch, install_mockery):
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- with ev.read('test'):
- add('mpileaks')
- install('--fake')
+def test_env_updates_view_install(tmpdir, mock_stage, mock_fetch, install_mockery):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ with ev.read("test"):
+ add("mpileaks")
+ install("--fake")
check_mpileaks_and_deps_in_view(view_dir)
-def test_env_view_fails(
- tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
+def test_env_view_fails(tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
# We currently ignore file-file conflicts for the prefix merge,
# so in principle there will be no errors in this test. But
# the .spack metadata dir is handled separately and is more strict.
# It also throws on file-file conflicts. That's what we're checking here
# by adding the same package twice to a view.
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- with ev.read('test'):
- add('libelf')
- add('libelf cflags=-g')
- with pytest.raises(llnl.util.link_tree.MergeConflictSummary,
- match=spack.store.layout.metadata_dir):
- install('--fake')
-
-
-def test_env_view_fails_dir_file(
- tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ with ev.read("test"):
+ add("libelf")
+ add("libelf cflags=-g")
+ with pytest.raises(
+ llnl.util.link_tree.MergeConflictSummary, match=spack.store.layout.metadata_dir
+ ):
+ install("--fake")
+
+
+def test_env_view_fails_dir_file(tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
# This environment view fails to be created because a file
# and a dir are in the same path. Test that it mentions the problematic path.
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- with ev.read('test'):
- add('view-dir-file')
- add('view-dir-dir')
- with pytest.raises(llnl.util.link_tree.MergeConflictSummary,
- match=os.path.join('bin', 'x')):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ with ev.read("test"):
+ add("view-dir-file")
+ add("view-dir-dir")
+ with pytest.raises(
+ llnl.util.link_tree.MergeConflictSummary, match=os.path.join("bin", "x")
+ ):
install()
def test_env_view_succeeds_symlinked_dir_file(
- tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery):
+ tmpdir, mock_packages, mock_stage, mock_fetch, install_mockery
+):
# A symlinked dir and an ordinary dir merge happily
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- with ev.read('test'):
- add('view-dir-symlinked-dir')
- add('view-dir-dir')
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ with ev.read("test"):
+ add("view-dir-symlinked-dir")
+ add("view-dir-dir")
install()
- x_dir = os.path.join(str(view_dir), 'bin', 'x')
- assert os.path.exists(os.path.join(x_dir, 'file_in_dir'))
- assert os.path.exists(os.path.join(x_dir, 'file_in_symlinked_dir'))
+ x_dir = os.path.join(str(view_dir), "bin", "x")
+ assert os.path.exists(os.path.join(x_dir, "file_in_dir"))
+ assert os.path.exists(os.path.join(x_dir, "file_in_symlinked_dir"))
-def test_env_without_view_install(
- tmpdir, mock_stage, mock_fetch, install_mockery):
+def test_env_without_view_install(tmpdir, mock_stage, mock_fetch, install_mockery):
# Test enabling a view after installing specs
- env('create', '--without-view', 'test')
+ env("create", "--without-view", "test")
- test_env = ev.read('test')
+ test_env = ev.read("test")
with pytest.raises(ev.SpackEnvironmentError):
test_env.default_view
- view_dir = tmpdir.join('view')
+ view_dir = tmpdir.join("view")
- with ev.read('test'):
- add('mpileaks')
- install('--fake')
+ with ev.read("test"):
+ add("mpileaks")
+ install("--fake")
- env('view', 'enable', str(view_dir))
+ env("view", "enable", str(view_dir))
# After enabling the view, the specs should be linked into the environment
# view dir
check_mpileaks_and_deps_in_view(view_dir)
-def test_env_config_view_default(
- tmpdir, mock_stage, mock_fetch, install_mockery):
+def test_env_config_view_default(tmpdir, mock_stage, mock_fetch, install_mockery):
# This config doesn't mention whether a view is enabled
test_config = """\
env:
specs:
- mpileaks
"""
- _env_create('test', StringIO(test_config))
+ _env_create("test", StringIO(test_config))
- with ev.read('test'):
- install('--fake')
+ with ev.read("test"):
+ install("--fake")
- e = ev.read('test')
+ e = ev.read("test")
# Check that metadata folder for this spec exists
- assert os.path.isdir(os.path.join(e.default_view.view()._root,
- '.spack', 'mpileaks'))
+ assert os.path.isdir(os.path.join(e.default_view.view()._root, ".spack", "mpileaks"))
-def test_env_updates_view_install_package(
- tmpdir, mock_stage, mock_fetch, install_mockery):
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- with ev.read('test'):
- install('--fake', 'mpileaks')
+def test_env_updates_view_install_package(tmpdir, mock_stage, mock_fetch, install_mockery):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ with ev.read("test"):
+ install("--fake", "mpileaks")
- assert os.path.exists(str(view_dir.join('.spack/mpileaks')))
+ assert os.path.exists(str(view_dir.join(".spack/mpileaks")))
-def test_env_updates_view_add_concretize(
- tmpdir, mock_stage, mock_fetch, install_mockery):
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- install('--fake', 'mpileaks')
- with ev.read('test'):
- add('mpileaks')
+def test_env_updates_view_add_concretize(tmpdir, mock_stage, mock_fetch, install_mockery):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ install("--fake", "mpileaks")
+ with ev.read("test"):
+ add("mpileaks")
concretize()
check_mpileaks_and_deps_in_view(view_dir)
-def test_env_updates_view_uninstall(
- tmpdir, mock_stage, mock_fetch, install_mockery):
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- with ev.read('test'):
- install('--fake', 'mpileaks')
+def test_env_updates_view_uninstall(tmpdir, mock_stage, mock_fetch, install_mockery):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ with ev.read("test"):
+ install("--fake", "mpileaks")
check_mpileaks_and_deps_in_view(view_dir)
- with ev.read('test'):
- uninstall('-ay')
+ with ev.read("test"):
+ uninstall("-ay")
check_viewdir_removal(view_dir)
def test_env_updates_view_uninstall_referenced_elsewhere(
- tmpdir, mock_stage, mock_fetch, install_mockery):
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- install('--fake', 'mpileaks')
- with ev.read('test'):
- add('mpileaks')
+ tmpdir, mock_stage, mock_fetch, install_mockery
+):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ install("--fake", "mpileaks")
+ with ev.read("test"):
+ add("mpileaks")
concretize()
check_mpileaks_and_deps_in_view(view_dir)
- with ev.read('test'):
- uninstall('-ay')
+ with ev.read("test"):
+ uninstall("-ay")
check_viewdir_removal(view_dir)
-def test_env_updates_view_remove_concretize(
- tmpdir, mock_stage, mock_fetch, install_mockery):
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- install('--fake', 'mpileaks')
- with ev.read('test'):
- add('mpileaks')
+def test_env_updates_view_remove_concretize(tmpdir, mock_stage, mock_fetch, install_mockery):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ install("--fake", "mpileaks")
+ with ev.read("test"):
+ add("mpileaks")
concretize()
check_mpileaks_and_deps_in_view(view_dir)
- with ev.read('test'):
- remove('mpileaks')
+ with ev.read("test"):
+ remove("mpileaks")
concretize()
check_viewdir_removal(view_dir)
-def test_env_updates_view_force_remove(
- tmpdir, mock_stage, mock_fetch, install_mockery):
- view_dir = tmpdir.join('view')
- env('create', '--with-view=%s' % view_dir, 'test')
- with ev.read('test'):
- install('--fake', 'mpileaks')
+def test_env_updates_view_force_remove(tmpdir, mock_stage, mock_fetch, install_mockery):
+ view_dir = tmpdir.join("view")
+ env("create", "--with-view=%s" % view_dir, "test")
+ with ev.read("test"):
+ install("--fake", "mpileaks")
check_mpileaks_and_deps_in_view(view_dir)
- with ev.read('test'):
- remove('-f', 'mpileaks')
+ with ev.read("test"):
+ remove("-f", "mpileaks")
check_viewdir_removal(view_dir)
-def test_env_activate_view_fails(
- tmpdir, mock_stage, mock_fetch, install_mockery):
+def test_env_activate_view_fails(tmpdir, mock_stage, mock_fetch, install_mockery):
"""Sanity check on env activate to make sure it requires shell support"""
- out = env('activate', 'test')
+ out = env("activate", "test")
assert "To set up shell support" in out
def test_stack_yaml_definitions(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- test = ev.read('test')
+ env("create", "test", "./spack.yaml")
+ test = ev.read("test")
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_yaml_definitions_as_constraints(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1300,21 +1321,23 @@ env:
- matrix:
- [$packages]
- [$^mpis]
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- test = ev.read('test')
+ env("create", "test", "./spack.yaml")
+ test = ev.read("test")
- assert Spec('mpileaks^mpich') in test.user_specs
- assert Spec('callpath^mpich') in test.user_specs
- assert Spec('mpileaks^openmpi') in test.user_specs
- assert Spec('callpath^openmpi') in test.user_specs
+ assert Spec("mpileaks^mpich") in test.user_specs
+ assert Spec("callpath^mpich") in test.user_specs
+ assert Spec("mpileaks^openmpi") in test.user_specs
+ assert Spec("callpath^openmpi") in test.user_specs
def test_stack_yaml_definitions_as_constraints_on_matrix(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1326,87 +1349,95 @@ env:
- matrix:
- [$packages]
- [$^mpis]
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- test = ev.read('test')
+ env("create", "test", "./spack.yaml")
+ test = ev.read("test")
- assert Spec('mpileaks^mpich@3.0.4') in test.user_specs
- assert Spec('callpath^mpich@3.0.4') in test.user_specs
- assert Spec('mpileaks^mpich@3.0.3') in test.user_specs
- assert Spec('callpath^mpich@3.0.3') in test.user_specs
+ assert Spec("mpileaks^mpich@3.0.4") in test.user_specs
+ assert Spec("callpath^mpich@3.0.4") in test.user_specs
+ assert Spec("mpileaks^mpich@3.0.3") in test.user_specs
+ assert Spec("callpath^mpich@3.0.3") in test.user_specs
-@pytest.mark.regression('12095')
+@pytest.mark.regression("12095")
def test_stack_yaml_definitions_write_reference(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
- indirect: [$packages]
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
- with ev.read('test'):
+ with ev.read("test"):
concretize()
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_yaml_add_to_list(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
- add('-l', 'packages', 'libelf')
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
+ add("-l", "packages", "libelf")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('libelf') in test.user_specs
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("libelf") in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_yaml_remove_from_list(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
- remove('-l', 'packages', 'mpileaks')
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
+ remove("-l", "packages", "mpileaks")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('mpileaks') not in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("mpileaks") not in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_yaml_remove_from_list_force(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1414,26 +1445,28 @@ env:
- matrix:
- [$packages]
- [^mpich, ^zmpi]
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
concretize()
- remove('-f', '-l', 'packages', 'mpileaks')
- find_output = find('-c')
+ remove("-f", "-l", "packages", "mpileaks")
+ find_output = find("-c")
- assert 'mpileaks' not in find_output
+ assert "mpileaks" not in find_output
- test = ev.read('test')
+ test = ev.read("test")
assert len(test.user_specs) == 2
- assert Spec('callpath ^zmpi') in test.user_specs
- assert Spec('callpath ^mpich') in test.user_specs
+ assert Spec("callpath ^zmpi") in test.user_specs
+ assert Spec("callpath ^mpich") in test.user_specs
def test_stack_yaml_remove_from_matrix_no_effect(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages:
@@ -1442,21 +1475,23 @@ env:
- [target=be]
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test') as e:
+ env("create", "test", "./spack.yaml")
+ with ev.read("test") as e:
before = e.user_specs.specs
- remove('-l', 'packages', 'mpileaks')
+ remove("-l", "packages", "mpileaks")
after = e.user_specs.specs
assert before == after
def test_stack_yaml_force_remove_from_matrix(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages:
@@ -1465,23 +1500,24 @@ env:
- [target=be]
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test') as e:
+ env("create", "test", "./spack.yaml")
+ with ev.read("test") as e:
concretize()
before_user = e.user_specs.specs
before_conc = e.concretized_user_specs
- remove('-f', '-l', 'packages', 'mpileaks')
+ remove("-f", "-l", "packages", "mpileaks")
after_user = e.user_specs.specs
after_conc = e.concretized_user_specs
assert before_user == after_user
- mpileaks_spec = Spec('mpileaks target=be')
+ mpileaks_spec = Spec("mpileaks target=be")
assert mpileaks_spec in before_conc
assert mpileaks_spec not in after_conc
@@ -1494,9 +1530,10 @@ def test_stack_concretize_extraneous_deps(tmpdir, config, mock_packages):
# if spack.config.get('config:concretizer') == 'clingo':
# pytest.skip('Clingo concretizer does not support soft constraints')
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1506,27 +1543,29 @@ env:
- ['^zmpi', '^mpich']
specs:
- $install
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
concretize()
- test = ev.read('test')
+ test = ev.read("test")
for user, concrete in test.concretized_specs():
assert concrete.concrete
assert not user.concrete
- if user.name == 'libelf':
- assert not concrete.satisfies('^mpi', strict=True)
- elif user.name == 'mpileaks':
- assert concrete.satisfies('^mpi', strict=True)
+ if user.name == "libelf":
+ assert not concrete.satisfies("^mpi", strict=True)
+ elif user.name == "mpileaks":
+ assert concrete.satisfies("^mpi", strict=True)
def test_stack_concretize_extraneous_variants(tmpdir, config, mock_packages):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1536,29 +1575,29 @@ env:
- ['~shared', '+shared']
specs:
- $install
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
concretize()
- test = ev.read('test')
+ test = ev.read("test")
for user, concrete in test.concretized_specs():
assert concrete.concrete
assert not user.concrete
- if user.name == 'libelf':
- assert 'shared' not in concrete.variants
- if user.name == 'mpileaks':
- assert (concrete.variants['shared'].value ==
- user.variants['shared'].value)
-
-
-def test_stack_concretize_extraneous_variants_with_dash(tmpdir, config,
- mock_packages):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ if user.name == "libelf":
+ assert "shared" not in concrete.variants
+ if user.name == "mpileaks":
+ assert concrete.variants["shared"].value == user.variants["shared"].value
+
+
+def test_stack_concretize_extraneous_variants_with_dash(tmpdir, config, mock_packages):
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1568,13 +1607,14 @@ env:
- ['shared=False', '+shared-libs']
specs:
- $install
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
concretize()
- ev.read('test')
+ ev.read("test")
# Regression test for handling of variants with dashes in them
# will fail before this point if code regresses
@@ -1582,30 +1622,33 @@ env:
def test_stack_definition_extension(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
- packages: [callpath]
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('libelf') in test.user_specs
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("libelf") in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_definition_conditional_false(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1613,21 +1656,23 @@ env:
when: 'False'
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('libelf') in test.user_specs
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') not in test.user_specs
+ assert Spec("libelf") in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") not in test.user_specs
def test_stack_definition_conditional_true(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1635,21 +1680,23 @@ env:
when: 'True'
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('libelf') in test.user_specs
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("libelf") in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_definition_conditional_with_variable(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1657,21 +1704,23 @@ env:
when: platform == 'test'
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('libelf') in test.user_specs
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("libelf") in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_definition_conditional_with_satisfaction(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1680,21 +1729,23 @@ env:
when: arch.satisfies('platform=test')
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('libelf') not in test.user_specs
- assert Spec('mpileaks') not in test.user_specs
- assert Spec('callpath') in test.user_specs
+ assert Spec("libelf") not in test.user_specs
+ assert Spec("mpileaks") not in test.user_specs
+ assert Spec("callpath") in test.user_specs
def test_stack_definition_complex_conditional(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1702,21 +1753,23 @@ env:
when: re.search(r'foo', hostname) and env['test'] == 'THISSHOULDBEFALSE'
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
- test = ev.read('test')
+ test = ev.read("test")
- assert Spec('libelf') in test.user_specs
- assert Spec('mpileaks') in test.user_specs
- assert Spec('callpath') not in test.user_specs
+ assert Spec("libelf") in test.user_specs
+ assert Spec("mpileaks") in test.user_specs
+ assert Spec("callpath") not in test.user_specs
def test_stack_definition_conditional_invalid_variable(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1724,16 +1777,18 @@ env:
when: bad_variable == 'test'
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
with pytest.raises(NameError):
- env('create', 'test', './spack.yaml')
+ env("create", "test", "./spack.yaml")
def test_stack_definition_conditional_add_write(tmpdir):
- filename = str(tmpdir.join('spack.yaml'))
- with open(filename, 'w') as f:
- f.write("""\
+ filename = str(tmpdir.join("spack.yaml"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [libelf, mpileaks]
@@ -1741,30 +1796,32 @@ env:
when: platform == 'test'
specs:
- $packages
-""")
+"""
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
- add('-l', 'packages', 'zmpi')
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
+ add("-l", "packages", "zmpi")
- test = ev.read('test')
+ test = ev.read("test")
- packages_lists = list(filter(lambda x: 'packages' in x,
- test.yaml['env']['definitions']))
+ packages_lists = list(filter(lambda x: "packages" in x, test.yaml["env"]["definitions"]))
assert len(packages_lists) == 2
- assert 'callpath' not in packages_lists[0]['packages']
- assert 'callpath' in packages_lists[1]['packages']
- assert 'zmpi' in packages_lists[0]['packages']
- assert 'zmpi' not in packages_lists[1]['packages']
-
-
-def test_stack_combinatorial_view(tmpdir, mock_fetch, mock_packages,
- mock_archive, install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+ assert "callpath" not in packages_lists[0]["packages"]
+ assert "callpath" in packages_lists[1]["packages"]
+ assert "zmpi" in packages_lists[0]["packages"]
+ assert "zmpi" not in packages_lists[1]["packages"]
+
+
+def test_stack_combinatorial_view(
+ tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
+):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1778,25 +1835,27 @@ env:
combinatorial:
root: %s
projections:
- 'all': '{name}/{version}-{compiler.name}'""" % viewdir)
+ 'all': '{name}/{version}-{compiler.name}'"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- test = ev.read('test')
+ test = ev.read("test")
for spec in test._get_environment_specs():
assert os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
-def test_stack_view_select(tmpdir, mock_fetch, mock_packages,
- mock_archive, install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_stack_view_select(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1811,30 +1870,32 @@ env:
root: %s
select: ['%%gcc']
projections:
- 'all': '{name}/{version}-{compiler.name}'""" % viewdir)
+ 'all': '{name}/{version}-{compiler.name}'"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- test = ev.read('test')
+ test = ev.read("test")
for spec in test._get_environment_specs():
- if spec.satisfies('%gcc'):
+ if spec.satisfies("%gcc"):
assert os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
else:
assert not os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
-def test_stack_view_exclude(tmpdir, mock_fetch, mock_packages,
- mock_archive, install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_stack_view_exclude(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1849,30 +1910,34 @@ env:
root: %s
exclude: [callpath]
projections:
- 'all': '{name}/{version}-{compiler.name}'""" % viewdir)
+ 'all': '{name}/{version}-{compiler.name}'"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- test = ev.read('test')
+ test = ev.read("test")
for spec in test._get_environment_specs():
- if not spec.satisfies('callpath'):
+ if not spec.satisfies("callpath"):
assert os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
else:
assert not os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
-def test_stack_view_select_and_exclude(tmpdir, mock_fetch, mock_packages,
- mock_archive, install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_stack_view_select_and_exclude(
+ tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
+):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1888,30 +1953,32 @@ env:
select: ['%%gcc']
exclude: [callpath]
projections:
- 'all': '{name}/{version}-{compiler.name}'""" % viewdir)
+ 'all': '{name}/{version}-{compiler.name}'"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- test = ev.read('test')
+ test = ev.read("test")
for spec in test._get_environment_specs():
- if spec.satisfies('%gcc') and not spec.satisfies('callpath'):
+ if spec.satisfies("%gcc") and not spec.satisfies("callpath"):
assert os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
else:
assert not os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
-def test_view_link_roots(tmpdir, mock_fetch, mock_packages, mock_archive,
- install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_view_link_roots(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -1928,32 +1995,35 @@ env:
exclude: [callpath]
link: 'roots'
projections:
- 'all': '{name}/{version}-{compiler.name}'""" % viewdir)
+ 'all': '{name}/{version}-{compiler.name}'"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- test = ev.read('test')
+ test = ev.read("test")
for spec in test._get_environment_specs():
- if spec in test.roots() and (spec.satisfies('%gcc') and
- not spec.satisfies('callpath')):
+ if spec in test.roots() and (
+ spec.satisfies("%gcc") and not spec.satisfies("callpath")
+ ):
assert os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
else:
assert not os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
-def test_view_link_run(tmpdir, mock_fetch, mock_packages, mock_archive,
- install_mockery):
- yaml = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
+def test_view_link_run(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
+ yaml = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
envdir = str(tmpdir)
- with open(yaml, 'w') as f:
- f.write("""
+ with open(yaml, "w") as f:
+ f.write(
+ """
spack:
specs:
- dttop
@@ -1963,56 +2033,68 @@ spack:
root: %s
link: run
projections:
- all: '{name}'""" % viewdir)
+ all: '{name}'"""
+ % viewdir
+ )
with ev.Environment(envdir):
install()
# make sure transitive run type deps are in the view
- for pkg in ('dtrun1', 'dtrun3'):
+ for pkg in ("dtrun1", "dtrun3"):
assert os.path.exists(os.path.join(viewdir, pkg))
# and non-run-type deps are not.
- for pkg in ('dtlink1', 'dtlink2', 'dtlink3', 'dtlink4', 'dtlink5'
- 'dtbuild1', 'dtbuild2', 'dtbuild3'):
+ for pkg in (
+ "dtlink1",
+ "dtlink2",
+ "dtlink3",
+ "dtlink4",
+ "dtlink5" "dtbuild1",
+ "dtbuild2",
+ "dtbuild3",
+ ):
assert not os.path.exists(os.path.join(viewdir, pkg))
-@pytest.mark.parametrize('link_type', ['hardlink', 'copy', 'symlink'])
-def test_view_link_type(link_type, tmpdir, mock_fetch, mock_packages, mock_archive,
- install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+@pytest.mark.parametrize("link_type", ["hardlink", "copy", "symlink"])
+def test_view_link_type(
+ link_type, tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
+):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
specs:
- mpileaks
view:
default:
root: %s
- link_type: %s""" % (viewdir, link_type))
+ link_type: %s"""
+ % (viewdir, link_type)
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- test = ev.read('test')
+ test = ev.read("test")
for spec in test.roots():
file_path = test.default_view.view()._root
- file_to_test = os.path.join(
- file_path, spec.name)
+ file_to_test = os.path.join(file_path, spec.name)
assert os.path.isfile(file_to_test)
- assert os.path.islink(file_to_test) == (link_type == 'symlink')
+ assert os.path.islink(file_to_test) == (link_type == "symlink")
-def test_view_link_all(tmpdir, mock_fetch, mock_packages, mock_archive,
- install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_view_link_all(tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, callpath]
@@ -2029,30 +2111,34 @@ env:
exclude: [callpath]
link: 'all'
projections:
- 'all': '{name}/{version}-{compiler.name}'""" % viewdir)
+ 'all': '{name}/{version}-{compiler.name}'"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- test = ev.read('test')
+ test = ev.read("test")
for spec in test._get_environment_specs():
- if spec.satisfies('%gcc') and not spec.satisfies('callpath'):
+ if spec.satisfies("%gcc") and not spec.satisfies("callpath"):
assert os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
else:
assert not os.path.exists(
- os.path.join(viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name))
+ )
-def test_stack_view_activate_from_default(tmpdir, mock_fetch, mock_packages,
- mock_archive, install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_stack_view_activate_from_default(
+ tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
+):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, cmake]
@@ -2065,26 +2151,29 @@ env:
view:
default:
root: %s
- select: ['%%gcc']""" % viewdir)
+ select: ['%%gcc']"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- shell = env('activate', '--sh', 'test')
+ shell = env("activate", "--sh", "test")
- assert 'PATH' in shell
- assert os.path.join(viewdir, 'bin') in shell
- assert 'FOOBAR=mpileaks' in shell
+ assert "PATH" in shell
+ assert os.path.join(viewdir, "bin") in shell
+ assert "FOOBAR=mpileaks" in shell
-def test_stack_view_no_activate_without_default(tmpdir, mock_fetch,
- mock_packages, mock_archive,
- install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- viewdir = str(tmpdir.join('view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_stack_view_no_activate_without_default(
+ tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
+):
+ filename = str(tmpdir.join("spack.yaml"))
+ viewdir = str(tmpdir.join("view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, cmake]
@@ -2097,24 +2186,28 @@ env:
view:
not-default:
root: %s
- select: ['%%gcc']""" % viewdir)
+ select: ['%%gcc']"""
+ % viewdir
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- shell = env('activate', '--sh', 'test')
- assert 'PATH' not in shell
+ shell = env("activate", "--sh", "test")
+ assert "PATH" not in shell
assert viewdir not in shell
-def test_stack_view_multiple_views(tmpdir, mock_fetch, mock_packages,
- mock_archive, install_mockery):
- filename = str(tmpdir.join('spack.yaml'))
- default_viewdir = str(tmpdir.join('default-view'))
- combin_viewdir = str(tmpdir.join('combinatorial-view'))
- with open(filename, 'w') as f:
- f.write("""\
+def test_stack_view_multiple_views(
+ tmpdir, mock_fetch, mock_packages, mock_archive, install_mockery
+):
+ filename = str(tmpdir.join("spack.yaml"))
+ default_viewdir = str(tmpdir.join("default-view"))
+ combin_viewdir = str(tmpdir.join("combinatorial-view"))
+ with open(filename, "w") as f:
+ f.write(
+ """\
env:
definitions:
- packages: [mpileaks, cmake]
@@ -2132,149 +2225,152 @@ env:
root: %s
exclude: [callpath %%gcc]
projections:
- 'all': '{name}/{version}-{compiler.name}'""" % (default_viewdir,
- combin_viewdir))
+ 'all': '{name}/{version}-{compiler.name}'"""
+ % (default_viewdir, combin_viewdir)
+ )
with tmpdir.as_cwd():
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
install()
- shell = env('activate', '--sh', 'test')
- assert 'PATH' in shell
- assert os.path.join(default_viewdir, 'bin') in shell
+ shell = env("activate", "--sh", "test")
+ assert "PATH" in shell
+ assert os.path.join(default_viewdir, "bin") in shell
- test = ev.read('test')
+ test = ev.read("test")
for spec in test._get_environment_specs():
- if not spec.satisfies('callpath%gcc'):
+ if not spec.satisfies("callpath%gcc"):
assert os.path.exists(
- os.path.join(combin_viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(
+ combin_viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name)
+ )
+ )
else:
assert not os.path.exists(
- os.path.join(combin_viewdir, spec.name, '%s-%s' %
- (spec.version, spec.compiler.name)))
+ os.path.join(
+ combin_viewdir, spec.name, "%s-%s" % (spec.version, spec.compiler.name)
+ )
+ )
-def test_env_activate_sh_prints_shell_output(
- tmpdir, mock_stage, mock_fetch, install_mockery
-):
+def test_env_activate_sh_prints_shell_output(tmpdir, mock_stage, mock_fetch, install_mockery):
"""Check the shell commands output by ``spack env activate --sh``.
This is a cursory check; ``share/spack/qa/setup-env-test.sh`` checks
for correctness.
"""
- env('create', 'test', add_view=True)
+ env("create", "test", add_view=True)
- out = env('activate', '--sh', 'test')
+ out = env("activate", "--sh", "test")
assert "export SPACK_ENV=" in out
assert "export PS1=" not in out
assert "alias despacktivate=" in out
- out = env('activate', '--sh', '--prompt', 'test')
+ out = env("activate", "--sh", "--prompt", "test")
assert "export SPACK_ENV=" in out
assert "export PS1=" in out
assert "alias despacktivate=" in out
-def test_env_activate_csh_prints_shell_output(
- tmpdir, mock_stage, mock_fetch, install_mockery
-):
+def test_env_activate_csh_prints_shell_output(tmpdir, mock_stage, mock_fetch, install_mockery):
"""Check the shell commands output by ``spack env activate --csh``."""
- env('create', 'test', add_view=True)
+ env("create", "test", add_view=True)
- out = env('activate', '--csh', 'test')
+ out = env("activate", "--csh", "test")
assert "setenv SPACK_ENV" in out
assert "setenv set prompt" not in out
assert "alias despacktivate" in out
- out = env('activate', '--csh', '--prompt', 'test')
+ out = env("activate", "--csh", "--prompt", "test")
assert "setenv SPACK_ENV" in out
assert "set prompt=" in out
assert "alias despacktivate" in out
-@pytest.mark.regression('12719')
+@pytest.mark.regression("12719")
def test_env_activate_default_view_root_unconditional(mutable_mock_env_path):
"""Check that the root of the default view in the environment is added
to the shell unconditionally."""
- env('create', 'test', add_view=True)
+ env("create", "test", add_view=True)
- with ev.read('test') as e:
+ with ev.read("test") as e:
viewdir = e.default_view.root
- out = env('activate', '--sh', 'test')
- viewdir_bin = os.path.join(viewdir, 'bin')
+ out = env("activate", "--sh", "test")
+ viewdir_bin = os.path.join(viewdir, "bin")
- assert "export PATH={0}".format(viewdir_bin) in out or \
- "export PATH='{0}".format(viewdir_bin) in out or \
- 'export PATH="{0}'.format(viewdir_bin) in out
+ assert (
+ "export PATH={0}".format(viewdir_bin) in out
+ or "export PATH='{0}".format(viewdir_bin) in out
+ or 'export PATH="{0}'.format(viewdir_bin) in out
+ )
def test_concretize_user_specs_together():
- e = ev.create('coconcretization')
+ e = ev.create("coconcretization")
e.unify = True
# Concretize a first time using 'mpich' as the MPI provider
- e.add('mpileaks')
- e.add('mpich')
+ e.add("mpileaks")
+ e.add("mpich")
e.concretize()
- assert all('mpich' in spec for _, spec in e.concretized_specs())
- assert all('mpich2' not in spec for _, spec in e.concretized_specs())
+ assert all("mpich" in spec for _, spec in e.concretized_specs())
+ assert all("mpich2" not in spec for _, spec in e.concretized_specs())
# Concretize a second time using 'mpich2' as the MPI provider
- e.remove('mpich')
- e.add('mpich2')
+ e.remove("mpich")
+ e.add("mpich2")
e.concretize()
- assert all('mpich2' in spec for _, spec in e.concretized_specs())
- assert all('mpich' not in spec for _, spec in e.concretized_specs())
+ assert all("mpich2" in spec for _, spec in e.concretized_specs())
+ assert all("mpich" not in spec for _, spec in e.concretized_specs())
# Concretize again without changing anything, check everything
# stays the same
e.concretize()
- assert all('mpich2' in spec for _, spec in e.concretized_specs())
- assert all('mpich' not in spec for _, spec in e.concretized_specs())
+ assert all("mpich2" in spec for _, spec in e.concretized_specs())
+ assert all("mpich" not in spec for _, spec in e.concretized_specs())
def test_cant_install_single_spec_when_concretizing_together():
- e = ev.create('coconcretization')
+ e = ev.create("coconcretization")
e.unify = True
- with pytest.raises(ev.SpackEnvironmentError, match=r'cannot install'):
- e.concretize_and_add('zlib')
+ with pytest.raises(ev.SpackEnvironmentError, match=r"cannot install"):
+ e.concretize_and_add("zlib")
e.install_all()
def test_duplicate_packages_raise_when_concretizing_together():
- e = ev.create('coconcretization')
+ e = ev.create("coconcretization")
e.unify = True
- e.add('mpileaks+opt')
- e.add('mpileaks~opt')
- e.add('mpich')
+ e.add("mpileaks+opt")
+ e.add("mpileaks~opt")
+ e.add("mpich")
- with pytest.raises(ev.SpackEnvironmentError, match=r'cannot contain more'):
+ with pytest.raises(ev.SpackEnvironmentError, match=r"cannot contain more"):
e.concretize()
def test_env_write_only_non_default():
- env('create', 'test')
+ env("create", "test")
- e = ev.read('test')
- with open(e.manifest_path, 'r') as f:
+ e = ev.read("test")
+ with open(e.manifest_path, "r") as f:
yaml = f.read()
assert yaml == ev.default_manifest_yaml()
-@pytest.mark.regression('20526')
+@pytest.mark.regression("20526")
def test_env_write_only_non_default_nested(tmpdir):
# setup an environment file
# the environment includes configuration because nested configs proved the
# most difficult to avoid writing.
- filename = 'spack.yaml'
+ filename = "spack.yaml"
filepath = str(tmpdir.join(filename))
contents = """\
env:
@@ -2288,42 +2384,46 @@ env:
"""
# create environment with some structure
- with open(filepath, 'w') as f:
+ with open(filepath, "w") as f:
f.write(contents)
- env('create', 'test', filepath)
+ env("create", "test", filepath)
# concretize
- with ev.read('test') as e:
+ with ev.read("test") as e:
concretize()
e.write()
- with open(e.manifest_path, 'r') as f:
+ with open(e.manifest_path, "r") as f:
manifest = f.read()
assert manifest == contents
-@pytest.mark.parametrize('concretization,unify', [
- ('together', 'true'),
- ('separately', 'false')
-])
+@pytest.mark.parametrize("concretization,unify", [("together", "true"), ("separately", "false")])
def test_update_concretization_to_concretizer_unify(concretization, unify, tmpdir):
spack_yaml = """\
spack:
concretization: {}
-""".format(concretization)
- tmpdir.join('spack.yaml').write(spack_yaml)
+""".format(
+ concretization
+ )
+ tmpdir.join("spack.yaml").write(spack_yaml)
# Update the environment
- env('update', '-y', str(tmpdir))
- with open(str(tmpdir.join('spack.yaml'))) as f:
- assert f.read() == """\
+ env("update", "-y", str(tmpdir))
+ with open(str(tmpdir.join("spack.yaml"))) as f:
+ assert (
+ f.read()
+ == """\
spack:
concretizer:
unify: {}
-""".format(unify)
+""".format(
+ unify
+ )
+ )
-@pytest.mark.regression('18147')
+@pytest.mark.regression("18147")
def test_can_update_attributes_with_override(tmpdir):
spack_yaml = """
spack:
@@ -2336,14 +2436,14 @@ spack:
specs:
- hdf5
"""
- abspath = tmpdir.join('spack.yaml')
+ abspath = tmpdir.join("spack.yaml")
abspath.write(spack_yaml)
# Check that an update does not raise
- env('update', '-y', str(abspath.dirname))
+ env("update", "-y", str(abspath.dirname))
-@pytest.mark.regression('18338')
+@pytest.mark.regression("18338")
def test_newline_in_commented_sequence_is_not_an_issue(tmpdir):
spack_yaml = """
spack:
@@ -2359,12 +2459,12 @@ spack:
concretizer:
unify: false
"""
- abspath = tmpdir.join('spack.yaml')
+ abspath = tmpdir.join("spack.yaml")
abspath.write(spack_yaml)
def extract_dag_hash(environment):
_, dyninst = next(iter(environment.specs_by_hash.items()))
- return dyninst['libelf'].dag_hash()
+ return dyninst["libelf"].dag_hash()
# Concretize a first time and create a lockfile
with ev.Environment(str(tmpdir)) as e:
@@ -2379,7 +2479,7 @@ spack:
assert libelf_first_hash == libelf_second_hash
-@pytest.mark.regression('18441')
+@pytest.mark.regression("18441")
def test_lockfile_not_deleted_on_write_error(tmpdir, monkeypatch):
raw_yaml = """
spack:
@@ -2391,9 +2491,9 @@ spack:
- spec: libelf@0.8.13
prefix: /usr
"""
- spack_yaml = tmpdir.join('spack.yaml')
+ spack_yaml = tmpdir.join("spack.yaml")
spack_yaml.write(raw_yaml)
- spack_lock = tmpdir.join('spack.lock')
+ spack_lock = tmpdir.join("spack.lock")
# Concretize a first time and create a lockfile
with ev.Environment(str(tmpdir)):
@@ -2403,11 +2503,9 @@ spack:
# If I run concretize again and there's an error during write,
# the spack.lock file shouldn't disappear from disk
def _write_helper_raise(self, x, y):
- raise RuntimeError('some error')
+ raise RuntimeError("some error")
- monkeypatch.setattr(
- ev.Environment, '_update_and_write_manifest', _write_helper_raise
- )
+ monkeypatch.setattr(ev.Environment, "_update_and_write_manifest", _write_helper_raise)
with ev.Environment(str(tmpdir)) as e:
e.concretize(force=True)
with pytest.raises(RuntimeError):
@@ -2418,10 +2516,10 @@ spack:
def _setup_develop_packages(tmpdir):
"""Sets up a structure ./init_env/spack.yaml, ./build_folder, ./dest_env
- where spack.yaml has a relative develop path to build_folder"""
- init_env = tmpdir.join('init_env')
- build_folder = tmpdir.join('build_folder')
- dest_env = tmpdir.join('dest_env')
+ where spack.yaml has a relative develop path to build_folder"""
+ init_env = tmpdir.join("init_env")
+ build_folder = tmpdir.join("build_folder")
+ dest_env = tmpdir.join("dest_env")
fs.mkdirp(str(init_env))
fs.mkdirp(str(build_folder))
@@ -2438,7 +2536,7 @@ spack:
path: /some/other/path
spec: mypkg@main
"""
- spack_yaml = init_env.join('spack.yaml')
+ spack_yaml = init_env.join("spack.yaml")
spack_yaml.write(raw_yaml)
return init_env, build_folder, dest_env, spack_yaml
@@ -2446,69 +2544,67 @@ spack:
def test_rewrite_rel_dev_path_new_dir(tmpdir):
"""Relative develop paths should be rewritten for new environments in
- a different directory from the original manifest file"""
+ a different directory from the original manifest file"""
_, build_folder, dest_env, spack_yaml = _setup_develop_packages(tmpdir)
- env('create', '-d', str(dest_env), str(spack_yaml))
+ env("create", "-d", str(dest_env), str(spack_yaml))
with ev.Environment(str(dest_env)) as e:
- assert e.dev_specs['mypkg1']['path'] == str(build_folder)
- assert e.dev_specs['mypkg2']['path'] == sep + os.path.join('some',
- 'other', 'path')
+ assert e.dev_specs["mypkg1"]["path"] == str(build_folder)
+ assert e.dev_specs["mypkg2"]["path"] == sep + os.path.join("some", "other", "path")
def test_rewrite_rel_dev_path_named_env(tmpdir):
"""Relative develop paths should by default be rewritten for new named
- environment"""
+ environment"""
_, build_folder, _, spack_yaml = _setup_develop_packages(tmpdir)
- env('create', 'named_env', str(spack_yaml))
- with ev.read('named_env') as e:
- assert e.dev_specs['mypkg1']['path'] == str(build_folder)
- assert e.dev_specs['mypkg2']['path'] == sep + os.path.join('some',
- 'other', 'path')
+ env("create", "named_env", str(spack_yaml))
+ with ev.read("named_env") as e:
+ assert e.dev_specs["mypkg1"]["path"] == str(build_folder)
+ assert e.dev_specs["mypkg2"]["path"] == sep + os.path.join("some", "other", "path")
def test_rewrite_rel_dev_path_original_dir(tmpdir):
"""Relative devevelop paths should not be rewritten when initializing an
- environment with root path set to the same directory"""
+ environment with root path set to the same directory"""
init_env, _, _, spack_yaml = _setup_develop_packages(tmpdir)
with ev.Environment(str(init_env), str(spack_yaml)) as e:
- assert e.dev_specs['mypkg1']['path'] == '../build_folder'
- assert e.dev_specs['mypkg2']['path'] == '/some/other/path'
+ assert e.dev_specs["mypkg1"]["path"] == "../build_folder"
+ assert e.dev_specs["mypkg2"]["path"] == "/some/other/path"
def test_rewrite_rel_dev_path_create_original_dir(tmpdir):
"""Relative develop paths should not be rewritten when creating an
- environment in the original directory"""
+ environment in the original directory"""
init_env, _, _, spack_yaml = _setup_develop_packages(tmpdir)
- env('create', '-d', str(init_env), str(spack_yaml))
+ env("create", "-d", str(init_env), str(spack_yaml))
with ev.Environment(str(init_env)) as e:
- assert e.dev_specs['mypkg1']['path'] == '../build_folder'
- assert e.dev_specs['mypkg2']['path'] == '/some/other/path'
+ assert e.dev_specs["mypkg1"]["path"] == "../build_folder"
+ assert e.dev_specs["mypkg2"]["path"] == "/some/other/path"
def test_does_not_rewrite_rel_dev_path_when_keep_relative_is_set(tmpdir):
"""Relative develop paths should not be rewritten when --keep-relative is
- passed to create"""
+ passed to create"""
_, _, _, spack_yaml = _setup_develop_packages(tmpdir)
- env('create', '--keep-relative', 'named_env', str(spack_yaml))
- with ev.read('named_env') as e:
- assert e.dev_specs['mypkg1']['path'] == '../build_folder'
- assert e.dev_specs['mypkg2']['path'] == '/some/other/path'
+ env("create", "--keep-relative", "named_env", str(spack_yaml))
+ with ev.read("named_env") as e:
+ assert e.dev_specs["mypkg1"]["path"] == "../build_folder"
+ assert e.dev_specs["mypkg2"]["path"] == "/some/other/path"
-@pytest.mark.regression('23440')
+@pytest.mark.regression("23440")
def test_custom_version_concretize_together(tmpdir):
# Custom versions should be permitted in specs when
# concretizing together
- e = ev.create('custom_version')
+ e = ev.create("custom_version")
e.unify = True
# Concretize a first time using 'mpich' as the MPI provider
- e.add('hdf5@myversion')
- e.add('mpich')
+ e.add("hdf5@myversion")
+ e.add("mpich")
e.concretize()
- assert any('hdf5@myversion' in spec for _, spec in e.concretized_specs())
+ assert any("hdf5@myversion" in spec for _, spec in e.concretized_specs())
def test_modules_relative_to_views(tmpdir, install_mockery, mock_fetch):
@@ -2523,19 +2619,19 @@ spack:
roots:
tcl: modules
"""
- _env_create('test', StringIO(spack_yaml))
+ _env_create("test", StringIO(spack_yaml))
- with ev.read('test') as e:
+ with ev.read("test") as e:
install()
spec = e.specs_by_hash[e.concretized_order[0]]
view_prefix = e.default_view.get_projection_for_spec(spec)
- modules_glob = '%s/modules/**/*' % e.path
+ modules_glob = "%s/modules/**/*" % e.path
modules = glob.glob(modules_glob)
assert len(modules) == 1
module = modules[0]
- with open(module, 'r') as f:
+ with open(module, "r") as f:
contents = f.read()
assert view_prefix in contents
@@ -2558,27 +2654,27 @@ spack:
roots:
tcl: full_modules
"""
- _env_create('test', StringIO(spack_yaml))
+ _env_create("test", StringIO(spack_yaml))
- with ev.read('test') as e:
+ with ev.read("test") as e:
install()
spec = e.specs_by_hash[e.concretized_order[0]]
view_prefix = e.default_view.get_projection_for_spec(spec)
- modules_glob = '%s/modules/**/*' % e.path
+ modules_glob = "%s/modules/**/*" % e.path
modules = glob.glob(modules_glob)
assert len(modules) == 1
module = modules[0]
- full_modules_glob = '%s/full_modules/**/*' % e.path
+ full_modules_glob = "%s/full_modules/**/*" % e.path
full_modules = glob.glob(full_modules_glob)
assert len(full_modules) == 1
- full_module = full_modules[0]
+ full_module = full_modules[0]
- with open(module, 'r') as f:
+ with open(module, "r") as f:
contents = f.read()
- with open(full_module, 'r') as f:
+ with open(full_module, "r") as f:
full_contents = f.read()
assert view_prefix in contents
@@ -2588,70 +2684,69 @@ spack:
assert spec.prefix in full_contents
-@pytest.mark.regression('24148')
+@pytest.mark.regression("24148")
def test_virtual_spec_concretize_together(tmpdir):
# An environment should permit to concretize "mpi"
- e = ev.create('virtual_spec')
+ e = ev.create("virtual_spec")
e.unify = True
- e.add('mpi')
+ e.add("mpi")
e.concretize()
- assert any(s.package.provides('mpi') for _, s in e.concretized_specs())
+ assert any(s.package.provides("mpi") for _, s in e.concretized_specs())
def test_query_develop_specs():
"""Test whether a spec is develop'ed or not"""
- env('create', 'test')
- with ev.read('test') as e:
- e.add('mpich')
- e.add('mpileaks')
- e.develop(Spec('mpich@1'), 'here', clone=False)
+ env("create", "test")
+ with ev.read("test") as e:
+ e.add("mpich")
+ e.add("mpileaks")
+ e.develop(Spec("mpich@1"), "here", clone=False)
- assert e.is_develop(Spec('mpich'))
- assert not e.is_develop(Spec('mpileaks'))
+ assert e.is_develop(Spec("mpich"))
+ assert not e.is_develop(Spec("mpileaks"))
-@pytest.mark.parametrize('method', [
- spack.cmd.env.env_activate,
- spack.cmd.env.env_deactivate
-])
+@pytest.mark.parametrize("method", [spack.cmd.env.env_activate, spack.cmd.env.env_deactivate])
@pytest.mark.parametrize(
- 'env,no_env,env_dir',
+ "env,no_env,env_dir",
[
- ('b', False, None),
+ ("b", False, None),
(None, True, None),
- (None, False, 'path/'),
- ])
+ (None, False, "path/"),
+ ],
+)
def test_activation_and_deactiviation_ambiguities(method, env, no_env, env_dir, capsys):
"""spack [-e x | -E | -D x/] env [activate | deactivate] y are ambiguous"""
- args = Namespace(shell='sh', activate_env='a',
- env=env, no_env=no_env, env_dir=env_dir)
+ args = Namespace(shell="sh", activate_env="a", env=env, no_env=no_env, env_dir=env_dir)
with pytest.raises(SystemExit):
method(args)
_, err = capsys.readouterr()
- assert 'is ambiguous' in err
+ assert "is ambiguous" in err
-@pytest.mark.regression('26548')
+@pytest.mark.regression("26548")
def test_custom_store_in_environment(mutable_config, tmpdir):
- spack_yaml = tmpdir.join('spack.yaml')
- spack_yaml.write("""
+ spack_yaml = tmpdir.join("spack.yaml")
+ spack_yaml.write(
+ """
spack:
specs:
- libelf
config:
install_tree:
root: /tmp/store
-""")
- if sys.platform == 'win32':
- sep = '\\'
+"""
+ )
+ if sys.platform == "win32":
+ sep = "\\"
else:
- sep = '/'
+ sep = "/"
current_store_root = str(spack.store.root)
- assert str(current_store_root) != sep + os.path.join('tmp', 'store')
+ assert str(current_store_root) != sep + os.path.join("tmp", "store")
with spack.environment.Environment(str(tmpdir)):
- assert str(spack.store.root) == sep + os.path.join('tmp', 'store')
+ assert str(spack.store.root) == sep + os.path.join("tmp", "store")
assert str(spack.store.root) == current_store_root
@@ -2660,30 +2755,29 @@ def test_activate_temp(monkeypatch, tmpdir):
temporary directory"""
env_dir = lambda: str(tmpdir)
monkeypatch.setattr(spack.cmd.env, "create_temp_env_directory", env_dir)
- shell = env('activate', '--temp', '--sh')
- active_env_var = next(line for line in shell.splitlines()
- if ev.spack_env_var in line)
+ shell = env("activate", "--temp", "--sh")
+ active_env_var = next(line for line in shell.splitlines() if ev.spack_env_var in line)
assert str(tmpdir) in active_env_var
assert ev.is_env_dir(str(tmpdir))
def test_env_view_fail_if_symlink_points_elsewhere(tmpdir, install_mockery, mock_fetch):
- view = str(tmpdir.join('view'))
+ view = str(tmpdir.join("view"))
# Put a symlink to an actual directory in view
- non_view_dir = str(tmpdir.mkdir('dont-delete-me'))
+ non_view_dir = str(tmpdir.mkdir("dont-delete-me"))
os.symlink(non_view_dir, view)
- with ev.create('env', with_view=view):
- add('libelf')
- install('--fake')
+ with ev.create("env", with_view=view):
+ add("libelf")
+ install("--fake")
assert os.path.isdir(non_view_dir)
def test_failed_view_cleanup(tmpdir, mock_stage, mock_fetch, install_mockery):
"""Tests whether Spack cleans up after itself when a view fails to create"""
- view = str(tmpdir.join('view'))
- with ev.create('env', with_view=view):
- add('libelf')
- install('--fake')
+ view = str(tmpdir.join("view"))
+ with ev.create("env", with_view=view):
+ add("libelf")
+ install("--fake")
# Save the current view directory.
resolved_view = os.path.realpath(view)
@@ -2691,10 +2785,10 @@ def test_failed_view_cleanup(tmpdir, mock_stage, mock_fetch, install_mockery):
views_before = os.listdir(all_views)
# Add a spec that results in MergeConflictError's when creating a view
- with ev.read('env'):
- add('libelf cflags=-O3')
+ with ev.read("env"):
+ add("libelf cflags=-O3")
with pytest.raises(llnl.util.link_tree.MergeConflictError):
- install('--fake')
+ install("--fake")
# Make sure there is no broken view in the views directory, and the current
# view is the original view from before the failed regenerate attempt.
@@ -2703,19 +2797,17 @@ def test_failed_view_cleanup(tmpdir, mock_stage, mock_fetch, install_mockery):
assert os.path.samefile(resolved_view, view)
-def test_environment_view_target_already_exists(
- tmpdir, mock_stage, mock_fetch, install_mockery
-):
+def test_environment_view_target_already_exists(tmpdir, mock_stage, mock_fetch, install_mockery):
"""When creating a new view, Spack should check whether
the new view dir already exists. If so, it should not be
removed or modified."""
# Create a new environment
- view = str(tmpdir.join('view'))
- env('create', '--with-view={0}'.format(view), 'test')
- with ev.read('test'):
- add('libelf')
- install('--fake')
+ view = str(tmpdir.join("view"))
+ env("create", "--with-view={0}".format(view), "test")
+ with ev.read("test"):
+ add("libelf")
+ install("--fake")
# Empty the underlying view
real_view = os.path.realpath(view)
@@ -2724,33 +2816,33 @@ def test_environment_view_target_already_exists(
# Replace it with something new.
os.mkdir(real_view)
- fs.touch(os.path.join(real_view, 'file'))
+ fs.touch(os.path.join(real_view, "file"))
# Remove the symlink so Spack can't know about the "previous root"
os.unlink(view)
# Regenerate the view, which should realize it can't write into the same dir.
- msg = 'Failed to generate environment view'
- with ev.read('test'):
+ msg = "Failed to generate environment view"
+ with ev.read("test"):
with pytest.raises(ev.SpackEnvironmentViewError, match=msg):
- env('view', 'regenerate')
+ env("view", "regenerate")
# Make sure the dir was left untouched.
assert not os.path.lexists(view)
- assert os.listdir(real_view) == ['file']
+ assert os.listdir(real_view) == ["file"]
def test_environment_query_spec_by_hash(mock_stage, mock_fetch, install_mockery):
- env('create', 'test')
- with ev.read('test'):
- add('libdwarf')
+ env("create", "test")
+ with ev.read("test"):
+ add("libdwarf")
concretize()
- with ev.read('test') as e:
- spec = e.matching_spec('libelf')
- install('/{0}'.format(spec.dag_hash()))
- with ev.read('test') as e:
- assert not e.matching_spec('libdwarf').installed
- assert e.matching_spec('libelf').installed
+ with ev.read("test") as e:
+ spec = e.matching_spec("libelf")
+ install("/{0}".format(spec.dag_hash()))
+ with ev.read("test") as e:
+ assert not e.matching_spec("libdwarf").installed
+ assert e.matching_spec("libelf").installed
@pytest.mark.parametrize("lockfile", ["v1", "v2", "v3"])
@@ -2773,9 +2865,7 @@ def test_read_old_lock_and_write_new(config, tmpdir, lockfile):
# the environment, anyway.
#
# This test ensures the behavior described above.
- lockfile_path = os.path.join(
- spack.paths.test_path, "data", "legacy_env", "%s.lock" % lockfile
- )
+ lockfile_path = os.path.join(spack.paths.test_path, "data", "legacy_env", "%s.lock" % lockfile)
# read in the JSON from a legacy lockfile
with open(lockfile_path) as f:
@@ -2827,9 +2917,7 @@ def test_read_v1_lock_creates_backup(config, tmpdir):
is created.
"""
# read in the JSON from a legacy v1 lockfile
- v1_lockfile_path = os.path.join(
- spack.paths.test_path, "data", "legacy_env", "v1.lock"
- )
+ v1_lockfile_path = os.path.join(spack.paths.test_path, "data", "legacy_env", "v1.lock")
# make an env out of the old lockfile
test_lockfile_path = str(tmpdir.join(ev.lockfile_name))
@@ -2841,9 +2929,7 @@ def test_read_v1_lock_creates_backup(config, tmpdir):
@pytest.mark.parametrize("lockfile", ["v1", "v2", "v3"])
-def test_read_legacy_lockfile_and_reconcretize(
- mock_stage, mock_fetch, install_mockery, lockfile
-):
+def test_read_legacy_lockfile_and_reconcretize(mock_stage, mock_fetch, install_mockery, lockfile):
# In legacy lockfiles v2 and v3 (keyed by build hash), there may be multiple
# versions of the same spec with different build dependencies, which means
# they will have different build hashes but the same DAG hash.
@@ -2865,8 +2951,8 @@ def test_read_legacy_lockfile_and_reconcretize(
# So in v2 and v3 lockfiles we have two versions of dttop with the same DAG
# hash but different build hashes.
- env('create', 'test', legacy_lockfile_path)
- test = ev.read('test')
+ env("create", "test", legacy_lockfile_path)
+ test = ev.read("test")
assert len(test.specs_by_hash) == 1
single_root = next(iter(test.specs_by_hash.values()))
@@ -2875,72 +2961,73 @@ def test_read_legacy_lockfile_and_reconcretize(
# v0.5 on lockfile creation. v2 only has v0.5, because we specifically prefer
# the one that would be installed when we read old lockfiles.
if lockfile == "v1":
- assert single_root['dtbuild1'].version == Version('1.0')
+ assert single_root["dtbuild1"].version == Version("1.0")
else:
- assert single_root['dtbuild1'].version == Version('0.5')
+ assert single_root["dtbuild1"].version == Version("0.5")
# Now forcefully reconcretize
- with ev.read('test'):
- concretize('-f')
+ with ev.read("test"):
+ concretize("-f")
# After reconcretizing, we should again see two roots, one depending on each
# of the dtbuild1 versions specified in the roots of the original lockfile.
- test = ev.read('test')
+ test = ev.read("test")
assert len(test.specs_by_hash) == 2
- expected_versions = set([Version('0.5'), Version('1.0')])
- current_versions = set(s['dtbuild1'].version for s in test.specs_by_hash.values())
+ expected_versions = set([Version("0.5"), Version("1.0")])
+ current_versions = set(s["dtbuild1"].version for s in test.specs_by_hash.values())
assert current_versions == expected_versions
def test_environment_depfile_makefile(tmpdir, mock_packages):
- env('create', 'test')
- make = Executable('make')
- makefile = str(tmpdir.join('Makefile'))
- with ev.read('test'):
- add('libdwarf')
+ env("create", "test")
+ make = Executable("make")
+ makefile = str(tmpdir.join("Makefile"))
+ with ev.read("test"):
+ add("libdwarf")
concretize()
# Disable jobserver so we can do a dry run.
- with ev.read('test'):
- env('depfile', '-o', makefile, '--make-disable-jobserver',
- '--make-target-prefix', 'prefix')
+ with ev.read("test"):
+ env(
+ "depfile", "-o", makefile, "--make-disable-jobserver", "--make-target-prefix", "prefix"
+ )
# Do make dry run.
- all_out = make('-n', '-f', makefile, output=str)
+ all_out = make("-n", "-f", makefile, output=str)
# Check whether `make` installs everything
- with ev.read('test') as e:
+ with ev.read("test") as e:
for _, root in e.concretized_specs():
for spec in root.traverse(root=True):
- tgt = os.path.join('prefix', '.install', spec.dag_hash())
- assert 'touch {}'.format(tgt) in all_out
+ tgt = os.path.join("prefix", ".install", spec.dag_hash())
+ assert "touch {}".format(tgt) in all_out
def test_environment_depfile_out(tmpdir, mock_packages):
- env('create', 'test')
- makefile_path = str(tmpdir.join('Makefile'))
- with ev.read('test'):
- add('libdwarf')
+ env("create", "test")
+ makefile_path = str(tmpdir.join("Makefile"))
+ with ev.read("test"):
+ add("libdwarf")
concretize()
- with ev.read('test'):
- env('depfile', '-G', 'make', '-o', makefile_path)
- stdout = env('depfile', '-G', 'make')
- with open(makefile_path, 'r') as f:
+ with ev.read("test"):
+ env("depfile", "-G", "make", "-o", makefile_path)
+ stdout = env("depfile", "-G", "make")
+ with open(makefile_path, "r") as f:
assert stdout == f.read()
def test_unify_when_possible_works_around_conflicts():
- e = ev.create('coconcretization')
- e.unify = 'when_possible'
+ e = ev.create("coconcretization")
+ e.unify = "when_possible"
- e.add('mpileaks+opt')
- e.add('mpileaks~opt')
- e.add('mpich')
+ e.add("mpileaks+opt")
+ e.add("mpileaks~opt")
+ e.add("mpich")
e.concretize()
- assert len([x for x in e.all_specs() if x.satisfies('mpileaks')]) == 2
- assert len([x for x in e.all_specs() if x.satisfies('mpileaks+opt')]) == 1
- assert len([x for x in e.all_specs() if x.satisfies('mpileaks~opt')]) == 1
- assert len([x for x in e.all_specs() if x.satisfies('mpich')]) == 1
+ assert len([x for x in e.all_specs() if x.satisfies("mpileaks")]) == 2
+ assert len([x for x in e.all_specs() if x.satisfies("mpileaks+opt")]) == 1
+ assert len([x for x in e.all_specs() if x.satisfies("mpileaks~opt")]) == 1
+ assert len([x for x in e.all_specs() if x.satisfies("mpich")]) == 1
diff --git a/lib/spack/spack/test/cmd/extensions.py b/lib/spack/spack/test/cmd/extensions.py
index 88f073519c..c4e849365f 100644
--- a/lib/spack/spack/test/cmd/extensions.py
+++ b/lib/spack/spack/test/cmd/extensions.py
@@ -10,16 +10,19 @@ import pytest
from spack.main import SpackCommand, SpackCommandError
from spack.spec import Spec
-extensions = SpackCommand('extensions')
+extensions = SpackCommand("extensions")
@pytest.fixture
def python_database(mock_packages, mutable_database):
- specs = [Spec(s).concretized() for s in [
- 'python',
- 'py-extension1',
- 'py-extension2',
- ]]
+ specs = [
+ Spec(s).concretized()
+ for s in [
+ "python",
+ "py-extension1",
+ "py-extension2",
+ ]
+ ]
for spec in specs:
spec.package.do_install(fake=True, explicit=True)
@@ -27,10 +30,10 @@ def python_database(mock_packages, mutable_database):
yield
-@pytest.mark.skipif(sys.platform == 'win32', reason="All Fetchers Failed")
+@pytest.mark.skipif(sys.platform == "win32", reason="All Fetchers Failed")
@pytest.mark.db
def test_extensions(mock_packages, python_database, config, capsys):
- ext2 = Spec("py-extension2").concretized()
+ ext2 = Spec("py-extension2").concretized()
def check_output(ni, na):
with capsys.disabled():
@@ -71,7 +74,7 @@ def test_extensions(mock_packages, python_database, config, capsys):
def test_extensions_no_arguments(mock_packages):
out = extensions()
- assert 'python' in out
+ assert "python" in out
def test_extensions_raises_if_not_extendable(mock_packages):
diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py
index 0ee857ae53..ec9923139c 100644
--- a/lib/spack/spack/test/cmd/external.py
+++ b/lib/spack/spack/test/cmd/external.py
@@ -16,7 +16,7 @@ import spack.detection.path
from spack.main import SpackCommand
from spack.spec import Spec
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
@pytest.fixture
@@ -25,237 +25,248 @@ def executables_found(monkeypatch):
def _mock_search(path_hints=None):
return result
- monkeypatch.setattr(spack.detection.path, 'executables_in_path', _mock_search)
+ monkeypatch.setattr(spack.detection.path, "executables_in_path", _mock_search)
+
return _factory
@pytest.fixture
def _platform_executables(monkeypatch):
def _win_exe_ext():
- return '.bat'
+ return ".bat"
- monkeypatch.setattr(spack.util.path, 'win_exe_ext', _win_exe_ext)
+ monkeypatch.setattr(spack.util.path, "win_exe_ext", _win_exe_ext)
def define_plat_exe(exe):
if is_windows:
- exe += '.bat'
+ exe += ".bat"
return exe
-def test_find_external_single_package(mock_executable, executables_found,
- _platform_executables):
- pkgs_to_check = [spack.repo.path.get_pkg_class('cmake')]
- executables_found({
- mock_executable("cmake", output='echo cmake version 1.foo'):
- define_plat_exe('cmake')
- })
+def test_find_external_single_package(mock_executable, executables_found, _platform_executables):
+ pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
+ executables_found(
+ {mock_executable("cmake", output="echo cmake version 1.foo"): define_plat_exe("cmake")}
+ )
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
pkg, entries = next(iter(pkg_to_entries.items()))
single_entry = next(iter(entries))
- assert single_entry.spec == Spec('cmake@1.foo')
+ assert single_entry.spec == Spec("cmake@1.foo")
-def test_find_external_two_instances_same_package(mock_executable, executables_found,
- _platform_executables):
- pkgs_to_check = [spack.repo.path.get_pkg_class('cmake')]
+def test_find_external_two_instances_same_package(
+ mock_executable, executables_found, _platform_executables
+):
+ pkgs_to_check = [spack.repo.path.get_pkg_class("cmake")]
# Each of these cmake instances is created in a different prefix
 # On Windows, quoted strings are echoed with the quotes included;
 # we need to avoid that for proper regex matching.
cmake_path1 = mock_executable(
- "cmake", output='echo cmake version 1.foo', subdir=('base1', 'bin')
+ "cmake", output="echo cmake version 1.foo", subdir=("base1", "bin")
)
cmake_path2 = mock_executable(
- "cmake", output='echo cmake version 3.17.2', subdir=('base2', 'bin')
+ "cmake", output="echo cmake version 3.17.2", subdir=("base2", "bin")
)
- cmake_exe = define_plat_exe('cmake')
- executables_found({
- cmake_path1: cmake_exe,
- cmake_path2: cmake_exe
- })
+ cmake_exe = define_plat_exe("cmake")
+ executables_found({cmake_path1: cmake_exe, cmake_path2: cmake_exe})
pkg_to_entries = spack.detection.by_executable(pkgs_to_check)
pkg, entries = next(iter(pkg_to_entries.items()))
spec_to_path = dict((e.spec, e.prefix) for e in entries)
- assert spec_to_path[Spec('cmake@1.foo')] == (
- spack.detection.executable_prefix(os.path.dirname(cmake_path1)))
- assert spec_to_path[Spec('cmake@3.17.2')] == (
- spack.detection.executable_prefix(os.path.dirname(cmake_path2)))
+ assert spec_to_path[Spec("cmake@1.foo")] == (
+ spack.detection.executable_prefix(os.path.dirname(cmake_path1))
+ )
+ assert spec_to_path[Spec("cmake@3.17.2")] == (
+ spack.detection.executable_prefix(os.path.dirname(cmake_path2))
+ )
def test_find_external_update_config(mutable_config):
entries = [
- spack.detection.DetectedPackage(Spec.from_detection('cmake@1.foo'), '/x/y1/'),
- spack.detection.DetectedPackage(Spec.from_detection('cmake@3.17.2'), '/x/y2/'),
+ spack.detection.DetectedPackage(Spec.from_detection("cmake@1.foo"), "/x/y1/"),
+ spack.detection.DetectedPackage(Spec.from_detection("cmake@3.17.2"), "/x/y2/"),
]
- pkg_to_entries = {'cmake': entries}
+ pkg_to_entries = {"cmake": entries}
- scope = spack.config.default_modify_scope('packages')
+ scope = spack.config.default_modify_scope("packages")
spack.detection.update_configuration(pkg_to_entries, scope=scope, buildable=True)
- pkgs_cfg = spack.config.get('packages')
- cmake_cfg = pkgs_cfg['cmake']
- cmake_externals = cmake_cfg['externals']
+ pkgs_cfg = spack.config.get("packages")
+ cmake_cfg = pkgs_cfg["cmake"]
+ cmake_externals = cmake_cfg["externals"]
- assert {'spec': 'cmake@1.foo', 'prefix': '/x/y1/'} in cmake_externals
- assert {'spec': 'cmake@3.17.2', 'prefix': '/x/y2/'} in cmake_externals
+ assert {"spec": "cmake@1.foo", "prefix": "/x/y1/"} in cmake_externals
+ assert {"spec": "cmake@3.17.2", "prefix": "/x/y2/"} in cmake_externals
def test_get_executables(working_env, mock_executable):
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
- os.environ['PATH'] = os.pathsep.join([os.path.dirname(cmake_path1)])
+ os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
path_to_exe = spack.detection.executables_in_path()
- cmake_exe = define_plat_exe('cmake')
+ cmake_exe = define_plat_exe("cmake")
assert path_to_exe[cmake_path1] == cmake_exe
-external = SpackCommand('external')
+external = SpackCommand("external")
-def test_find_external_cmd(mutable_config, working_env, mock_executable,
- _platform_executables):
+def test_find_external_cmd(mutable_config, working_env, mock_executable, _platform_executables):
"""Test invoking 'spack external find' with additional package arguments,
which restricts the set of packages that Spack looks for.
"""
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
prefix = os.path.dirname(os.path.dirname(cmake_path1))
- os.environ['PATH'] = os.pathsep.join([os.path.dirname(cmake_path1)])
- external('find', 'cmake')
+ os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
+ external("find", "cmake")
- pkgs_cfg = spack.config.get('packages')
- cmake_cfg = pkgs_cfg['cmake']
- cmake_externals = cmake_cfg['externals']
+ pkgs_cfg = spack.config.get("packages")
+ cmake_cfg = pkgs_cfg["cmake"]
+ cmake_externals = cmake_cfg["externals"]
- assert {'spec': 'cmake@1.foo', 'prefix': prefix} in cmake_externals
+ assert {"spec": "cmake@1.foo", "prefix": prefix} in cmake_externals
-def test_find_external_cmd_not_buildable(
- mutable_config, working_env, mock_executable):
+def test_find_external_cmd_not_buildable(mutable_config, working_env, mock_executable):
"""When the user invokes 'spack external find --not-buildable', the config
for any package where Spack finds an external version should be marked as
not buildable.
"""
cmake_path1 = mock_executable("cmake", output="echo cmake version 1.foo")
- os.environ['PATH'] = os.pathsep.join([os.path.dirname(cmake_path1)])
- external('find', '--not-buildable', 'cmake')
- pkgs_cfg = spack.config.get('packages')
- assert not pkgs_cfg['cmake']['buildable']
+ os.environ["PATH"] = os.pathsep.join([os.path.dirname(cmake_path1)])
+ external("find", "--not-buildable", "cmake")
+ pkgs_cfg = spack.config.get("packages")
+ assert not pkgs_cfg["cmake"]["buildable"]
def test_find_external_cmd_full_repo(
- mutable_config, working_env, mock_executable, mutable_mock_repo,
- _platform_executables):
+ mutable_config, working_env, mock_executable, mutable_mock_repo, _platform_executables
+):
"""Test invoking 'spack external find' with no additional arguments, which
iterates through each package in the repository.
"""
- exe_path1 = mock_executable(
- "find-externals1-exe", output="echo find-externals1 version 1.foo"
- )
+ exe_path1 = mock_executable("find-externals1-exe", output="echo find-externals1 version 1.foo")
prefix = os.path.dirname(os.path.dirname(exe_path1))
- os.environ['PATH'] = os.pathsep.join([os.path.dirname(exe_path1)])
- external('find', '--all')
+ os.environ["PATH"] = os.pathsep.join([os.path.dirname(exe_path1)])
+ external("find", "--all")
- pkgs_cfg = spack.config.get('packages')
- pkg_cfg = pkgs_cfg['find-externals1']
- pkg_externals = pkg_cfg['externals']
+ pkgs_cfg = spack.config.get("packages")
+ pkg_cfg = pkgs_cfg["find-externals1"]
+ pkg_externals = pkg_cfg["externals"]
- assert {'spec': 'find-externals1@1.foo', 'prefix': prefix} in pkg_externals
+ assert {"spec": "find-externals1@1.foo", "prefix": prefix} in pkg_externals
def test_find_external_no_manifest(
- mutable_config, working_env, mock_executable, mutable_mock_repo,
- _platform_executables, monkeypatch):
+ mutable_config,
+ working_env,
+ mock_executable,
+ mutable_mock_repo,
+ _platform_executables,
+ monkeypatch,
+):
"""The user runs 'spack external find'; the default path for storing
manifest files does not exist. Ensure that the command does not
fail.
"""
- monkeypatch.setenv('PATH', '')
- monkeypatch.setattr(spack.cray_manifest, 'default_path',
- os.path.join('a', 'path', 'that', 'doesnt', 'exist'))
- external('find')
+ monkeypatch.setenv("PATH", "")
+ monkeypatch.setattr(
+ spack.cray_manifest, "default_path", os.path.join("a", "path", "that", "doesnt", "exist")
+ )
+ external("find")
def test_find_external_empty_default_manifest_dir(
- mutable_config, working_env, mock_executable, mutable_mock_repo,
- _platform_executables, tmpdir, monkeypatch):
+ mutable_config,
+ working_env,
+ mock_executable,
+ mutable_mock_repo,
+ _platform_executables,
+ tmpdir,
+ monkeypatch,
+):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but is empty. Ensure that the command does not
fail.
"""
- empty_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
- monkeypatch.setenv('PATH', '')
- monkeypatch.setattr(spack.cray_manifest, 'default_path',
- empty_manifest_dir)
- external('find')
+ empty_manifest_dir = str(tmpdir.mkdir("manifest_dir"))
+ monkeypatch.setenv("PATH", "")
+ monkeypatch.setattr(spack.cray_manifest, "default_path", empty_manifest_dir)
+ external("find")
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Can't chmod on Windows")
-@pytest.mark.skipif(getuid() == 0, reason='user is root')
+@pytest.mark.skipif(sys.platform == "win32", reason="Can't chmod on Windows")
+@pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_find_external_manifest_with_bad_permissions(
- mutable_config, working_env, mock_executable, mutable_mock_repo,
- _platform_executables, tmpdir, monkeypatch):
+ mutable_config,
+ working_env,
+ mock_executable,
+ mutable_mock_repo,
+ _platform_executables,
+ tmpdir,
+ monkeypatch,
+):
"""The user runs 'spack external find'; the default path for storing
manifest files exists but with insufficient permissions. Check that
the command does not fail.
"""
- test_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
- test_manifest_file_path = os.path.join(test_manifest_dir, 'badperms.json')
+ test_manifest_dir = str(tmpdir.mkdir("manifest_dir"))
+ test_manifest_file_path = os.path.join(test_manifest_dir, "badperms.json")
touch(test_manifest_file_path)
- monkeypatch.setenv('PATH', '')
- monkeypatch.setattr(spack.cray_manifest, 'default_path',
- test_manifest_dir)
+ monkeypatch.setenv("PATH", "")
+ monkeypatch.setattr(spack.cray_manifest, "default_path", test_manifest_dir)
try:
os.chmod(test_manifest_file_path, 0)
- output = external('find')
- assert 'insufficient permissions' in output
- assert 'Skipping manifest and continuing' in output
+ output = external("find")
+ assert "insufficient permissions" in output
+ assert "Skipping manifest and continuing" in output
finally:
os.chmod(test_manifest_file_path, 0o700)
-def test_find_external_manifest_failure(
- mutable_config, mutable_mock_repo, tmpdir, monkeypatch):
+def test_find_external_manifest_failure(mutable_config, mutable_mock_repo, tmpdir, monkeypatch):
"""The user runs 'spack external find'; the manifest parsing fails with
some exception. Ensure that the command still succeeds (i.e. moves on
to other external detection mechanisms).
"""
# First, create an empty manifest file (without a file to read, the
# manifest parsing is skipped)
- test_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
- test_manifest_file_path = os.path.join(test_manifest_dir, 'test.json')
+ test_manifest_dir = str(tmpdir.mkdir("manifest_dir"))
+ test_manifest_file_path = os.path.join(test_manifest_dir, "test.json")
touch(test_manifest_file_path)
def fail():
raise Exception()
- monkeypatch.setattr(
- spack.cmd.external, '_collect_and_consume_cray_manifest_files', fail)
- monkeypatch.setenv('PATH', '')
- output = external('find')
- assert 'Skipping manifest and continuing' in output
+ monkeypatch.setattr(spack.cmd.external, "_collect_and_consume_cray_manifest_files", fail)
+ monkeypatch.setenv("PATH", "")
+ output = external("find")
+ assert "Skipping manifest and continuing" in output
def test_find_external_nonempty_default_manifest_dir(
- mutable_database, mutable_mock_repo,
- _platform_executables, tmpdir, monkeypatch,
- directory_with_manifest):
+ mutable_database,
+ mutable_mock_repo,
+ _platform_executables,
+ tmpdir,
+ monkeypatch,
+ directory_with_manifest,
+):
"""The user runs 'spack external find'; the default manifest directory
contains a manifest file. Ensure that the specs are read.
"""
- monkeypatch.setenv('PATH', '')
- monkeypatch.setattr(spack.cray_manifest, 'default_path',
- str(directory_with_manifest))
- external('find')
- specs = spack.store.db.query('hwloc')
- assert any(x.dag_hash() == 'hwlocfakehashaaa' for x in specs)
+ monkeypatch.setenv("PATH", "")
+ monkeypatch.setattr(spack.cray_manifest, "default_path", str(directory_with_manifest))
+ external("find")
+ specs = spack.store.db.query("hwloc")
+ assert any(x.dag_hash() == "hwlocfakehashaaa" for x in specs)
def test_find_external_merge(mutable_config, mutable_mock_repo):
@@ -263,36 +274,27 @@ def test_find_external_merge(mutable_config, mutable_mock_repo):
entry in packages.yaml.
"""
pkgs_cfg_init = {
- 'find-externals1': {
- 'externals': [{
- 'spec': 'find-externals1@1.1',
- 'prefix': '/preexisting-prefix/'
- }],
- 'buildable': False
+ "find-externals1": {
+ "externals": [{"spec": "find-externals1@1.1", "prefix": "/preexisting-prefix/"}],
+ "buildable": False,
}
}
- mutable_config.update_config('packages', pkgs_cfg_init)
+ mutable_config.update_config("packages", pkgs_cfg_init)
entries = [
- spack.detection.DetectedPackage(
- Spec.from_detection('find-externals1@1.1'), '/x/y1/'
- ),
- spack.detection.DetectedPackage(
- Spec.from_detection('find-externals1@1.2'), '/x/y2/'
- )
+ spack.detection.DetectedPackage(Spec.from_detection("find-externals1@1.1"), "/x/y1/"),
+ spack.detection.DetectedPackage(Spec.from_detection("find-externals1@1.2"), "/x/y2/"),
]
- pkg_to_entries = {'find-externals1': entries}
- scope = spack.config.default_modify_scope('packages')
+ pkg_to_entries = {"find-externals1": entries}
+ scope = spack.config.default_modify_scope("packages")
spack.detection.update_configuration(pkg_to_entries, scope=scope, buildable=True)
- pkgs_cfg = spack.config.get('packages')
- pkg_cfg = pkgs_cfg['find-externals1']
- pkg_externals = pkg_cfg['externals']
+ pkgs_cfg = spack.config.get("packages")
+ pkg_cfg = pkgs_cfg["find-externals1"]
+ pkg_externals = pkg_cfg["externals"]
- assert {'spec': 'find-externals1@1.1',
- 'prefix': '/preexisting-prefix/'} in pkg_externals
- assert {'spec': 'find-externals1@1.2',
- 'prefix': '/x/y2/'} in pkg_externals
+ assert {"spec": "find-externals1@1.1", "prefix": "/preexisting-prefix/"} in pkg_externals
+ assert {"spec": "find-externals1@1.2", "prefix": "/x/y2/"} in pkg_externals
def test_list_detectable_packages(mutable_config, mutable_mock_repo):
@@ -300,97 +302,93 @@ def test_list_detectable_packages(mutable_config, mutable_mock_repo):
assert external.returncode == 0
-def test_packages_yaml_format(
- mock_executable, mutable_config, monkeypatch, _platform_executables):
+def test_packages_yaml_format(mock_executable, mutable_config, monkeypatch, _platform_executables):
# Prepare an environment to detect a fake gcc
- gcc_exe = mock_executable('gcc', output="echo 4.2.1")
+ gcc_exe = mock_executable("gcc", output="echo 4.2.1")
prefix = os.path.dirname(gcc_exe)
- monkeypatch.setenv('PATH', prefix)
+ monkeypatch.setenv("PATH", prefix)
# Find the external spec
- external('find', 'gcc')
+ external("find", "gcc")
# Check entries in 'packages.yaml'
- packages_yaml = spack.config.get('packages')
- assert 'gcc' in packages_yaml
- assert 'externals' in packages_yaml['gcc']
- externals = packages_yaml['gcc']['externals']
+ packages_yaml = spack.config.get("packages")
+ assert "gcc" in packages_yaml
+ assert "externals" in packages_yaml["gcc"]
+ externals = packages_yaml["gcc"]["externals"]
assert len(externals) == 1
external_gcc = externals[0]
- assert external_gcc['spec'] == 'gcc@4.2.1 languages=c'
- assert external_gcc['prefix'] == os.path.dirname(prefix)
- assert 'extra_attributes' in external_gcc
- extra_attributes = external_gcc['extra_attributes']
- assert 'prefix' not in extra_attributes
- assert extra_attributes['compilers']['c'] == gcc_exe
+ assert external_gcc["spec"] == "gcc@4.2.1 languages=c"
+ assert external_gcc["prefix"] == os.path.dirname(prefix)
+ assert "extra_attributes" in external_gcc
+ extra_attributes = external_gcc["extra_attributes"]
+ assert "prefix" not in extra_attributes
+ assert extra_attributes["compilers"]["c"] == gcc_exe
-def test_overriding_prefix(
- mock_executable, mutable_config, monkeypatch, _platform_executables):
+def test_overriding_prefix(mock_executable, mutable_config, monkeypatch, _platform_executables):
 # Prepare an environment to detect a fake gcc that
 # overrides its external prefix
- gcc_exe = mock_executable('gcc', output="echo 4.2.1")
+ gcc_exe = mock_executable("gcc", output="echo 4.2.1")
prefix = os.path.dirname(gcc_exe)
- monkeypatch.setenv('PATH', prefix)
+ monkeypatch.setenv("PATH", prefix)
@classmethod
def _determine_variants(cls, exes, version_str):
- return 'languages=c', {
- 'prefix': '/opt/gcc/bin',
- 'compilers': {'c': exes[0]}
- }
+ return "languages=c", {"prefix": "/opt/gcc/bin", "compilers": {"c": exes[0]}}
- gcc_cls = spack.repo.path.get_pkg_class('gcc')
- monkeypatch.setattr(gcc_cls, 'determine_variants', _determine_variants)
+ gcc_cls = spack.repo.path.get_pkg_class("gcc")
+ monkeypatch.setattr(gcc_cls, "determine_variants", _determine_variants)
# Find the external spec
- external('find', 'gcc')
+ external("find", "gcc")
# Check entries in 'packages.yaml'
- packages_yaml = spack.config.get('packages')
- assert 'gcc' in packages_yaml
- assert 'externals' in packages_yaml['gcc']
- externals = packages_yaml['gcc']['externals']
+ packages_yaml = spack.config.get("packages")
+ assert "gcc" in packages_yaml
+ assert "externals" in packages_yaml["gcc"]
+ externals = packages_yaml["gcc"]["externals"]
assert len(externals) == 1
- assert externals[0]['prefix'] == '/opt/gcc/bin'
+ assert externals[0]["prefix"] == "/opt/gcc/bin"
def test_new_entries_are_reported_correctly(
- mock_executable, mutable_config, monkeypatch, _platform_executables
+ mock_executable, mutable_config, monkeypatch, _platform_executables
):
# Prepare an environment to detect a fake gcc
- gcc_exe = mock_executable('gcc', output="echo 4.2.1")
+ gcc_exe = mock_executable("gcc", output="echo 4.2.1")
prefix = os.path.dirname(gcc_exe)
- monkeypatch.setenv('PATH', prefix)
+ monkeypatch.setenv("PATH", prefix)
# The first run will find and add the external gcc
- output = external('find', 'gcc')
- assert 'The following specs have been' in output
+ output = external("find", "gcc")
+ assert "The following specs have been" in output
# The second run should report that no new external
# has been found
- output = external('find', 'gcc')
- assert 'No new external packages detected' in output
-
-
-@pytest.mark.parametrize('command_args', [
- ('-t', 'build-tools'),
- ('-t', 'build-tools', 'cmake'),
-])
-def test_use_tags_for_detection(
- command_args, mock_executable, mutable_config, monkeypatch
-):
+ output = external("find", "gcc")
+ assert "No new external packages detected" in output
+
+
+@pytest.mark.parametrize(
+ "command_args",
+ [
+ ("-t", "build-tools"),
+ ("-t", "build-tools", "cmake"),
+ ],
+)
+def test_use_tags_for_detection(command_args, mock_executable, mutable_config, monkeypatch):
# Prepare an environment to detect a fake cmake
- cmake_exe = mock_executable('cmake', output="echo cmake version 3.19.1")
+ cmake_exe = mock_executable("cmake", output="echo cmake version 3.19.1")
prefix = os.path.dirname(cmake_exe)
- monkeypatch.setenv('PATH', prefix)
+ monkeypatch.setenv("PATH", prefix)
- openssl_exe = mock_executable('openssl', output="OpenSSL 2.8.3")
+ openssl_exe = mock_executable("openssl", output="OpenSSL 2.8.3")
prefix = os.path.dirname(openssl_exe)
- monkeypatch.setenv('PATH', prefix)
+ monkeypatch.setenv("PATH", prefix)
# Test that we detect specs
- output = external('find', *command_args)
- assert 'The following specs have been' in output
- assert 'cmake' in output
- assert 'openssl' not in output
+ output = external("find", *command_args)
+ assert "The following specs have been" in output
+ assert "cmake" in output
+ assert "openssl" not in output
diff --git a/lib/spack/spack/test/cmd/fetch.py b/lib/spack/spack/test/cmd/fetch.py
index da56fbd528..264ed283d1 100644
--- a/lib/spack/spack/test/cmd/fetch.py
+++ b/lib/spack/spack/test/cmd/fetch.py
@@ -9,15 +9,11 @@ import spack.environment as ev
from spack.main import SpackCommand, SpackCommandError
# everything here uses the mock_env_path
-pytestmark = pytest.mark.usefixtures(
- "mutable_mock_env_path", "config", "mutable_mock_repo"
-)
+pytestmark = pytest.mark.usefixtures("mutable_mock_env_path", "config", "mutable_mock_repo")
@pytest.mark.disable_clean_stage_check
-def test_fetch_in_env(
- tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
-):
+def test_fetch_in_env(tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery):
SpackCommand("env")("create", "test")
with ev.read("test"):
SpackCommand("add")("python")
@@ -28,16 +24,12 @@ def test_fetch_in_env(
@pytest.mark.disable_clean_stage_check
-def test_fetch_single_spec(
- tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
-):
+def test_fetch_single_spec(tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery):
SpackCommand("fetch")("mpileaks")
@pytest.mark.disable_clean_stage_check
-def test_fetch_multiple_specs(
- tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
-):
+def test_fetch_multiple_specs(tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery):
SpackCommand("fetch")("mpileaks", "gcc@10.2.0", "python")
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
index 7760ef49a4..0047f9da23 100644
--- a/lib/spack/spack/test/cmd/find.py
+++ b/lib/spack/spack/test/cmd/find.py
@@ -18,17 +18,16 @@ from spack.main import SpackCommand
from spack.spec import Spec
from spack.util.pattern import Bunch
-find = SpackCommand('find')
-env = SpackCommand('env')
-install = SpackCommand('install')
+find = SpackCommand("find")
+env = SpackCommand("env")
+install = SpackCommand("install")
-base32_alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
+base32_alphabet = "abcdefghijklmnopqrstuvwxyz234567"
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def parser():
"""Returns the parser for the module command"""
prs = argparse.ArgumentParser()
@@ -49,7 +48,7 @@ def mock_display(monkeypatch, specs):
def display(x, *args, **kwargs):
specs.extend(x)
- monkeypatch.setattr(spack.cmd, 'display_specs', display)
+ monkeypatch.setattr(spack.cmd, "display_specs", display)
def test_query_arguments():
@@ -65,56 +64,56 @@ def test_query_arguments():
explicit=False,
implicit=False,
start_date="2018-02-23",
- end_date=None
+ end_date=None,
)
q_args = query_arguments(args)
- assert 'installed' in q_args
- assert 'known' in q_args
- assert 'explicit' in q_args
- assert q_args['installed'] == ['installed']
- assert q_args['known'] is any
- assert q_args['explicit'] is any
- assert 'start_date' in q_args
- assert 'end_date' not in q_args
+ assert "installed" in q_args
+ assert "known" in q_args
+ assert "explicit" in q_args
+ assert q_args["installed"] == ["installed"]
+ assert q_args["known"] is any
+ assert q_args["explicit"] is any
+ assert "start_date" in q_args
+ assert "end_date" not in q_args
# Check that explicit works correctly
args.explicit = True
q_args = query_arguments(args)
- assert q_args['explicit'] is True
+ assert q_args["explicit"] is True
args.explicit = False
args.implicit = True
q_args = query_arguments(args)
- assert q_args['explicit'] is False
+ assert q_args["explicit"] is False
@pytest.mark.db
-@pytest.mark.usefixtures('database', 'mock_display')
+@pytest.mark.usefixtures("database", "mock_display")
def test_tag1(parser, specs):
- args = parser.parse_args(['--tag', 'tag1'])
+ args = parser.parse_args(["--tag", "tag1"])
spack.cmd.find.find(parser, args)
assert len(specs) == 2
- assert 'mpich' in [x.name for x in specs]
- assert 'mpich2' in [x.name for x in specs]
+ assert "mpich" in [x.name for x in specs]
+ assert "mpich2" in [x.name for x in specs]
@pytest.mark.db
-@pytest.mark.usefixtures('database', 'mock_display')
+@pytest.mark.usefixtures("database", "mock_display")
def test_tag2(parser, specs):
- args = parser.parse_args(['--tag', 'tag2'])
+ args = parser.parse_args(["--tag", "tag2"])
spack.cmd.find.find(parser, args)
assert len(specs) == 1
- assert 'mpich' in [x.name for x in specs]
+ assert "mpich" in [x.name for x in specs]
@pytest.mark.db
-@pytest.mark.usefixtures('database', 'mock_display')
+@pytest.mark.usefixtures("database", "mock_display")
def test_tag2_tag3(parser, specs):
- args = parser.parse_args(['--tag', 'tag2', '--tag', 'tag3'])
+ args = parser.parse_args(["--tag", "tag2", "--tag", "tag3"])
spack.cmd.find.find(parser, args)
assert len(specs) == 0
@@ -123,10 +122,10 @@ def test_tag2_tag3(parser, specs):
@pytest.mark.db
def test_namespaces_shown_correctly(database):
out = find()
- assert 'builtin.mock.zmpi' not in out
+ assert "builtin.mock.zmpi" not in out
- out = find('--namespace')
- assert 'builtin.mock.zmpi' in out
+ out = find("--namespace")
+ assert "builtin.mock.zmpi" in out
def _check_json_output(spec_list):
@@ -157,25 +156,28 @@ def _check_json_output_deps(spec_list):
@pytest.mark.db
def test_find_json(database):
- output = find('--json', 'mpileaks')
+ output = find("--json", "mpileaks")
spec_list = json.loads(output)
_check_json_output(spec_list)
@pytest.mark.db
def test_find_json_deps(database):
- output = find('-d', '--json', 'mpileaks')
+ output = find("-d", "--json", "mpileaks")
spec_list = json.loads(output)
_check_json_output_deps(spec_list)
@pytest.mark.db
def test_display_json(database, capsys):
- specs = [Spec(s).concretized() for s in [
- "mpileaks ^zmpi",
- "mpileaks ^mpich",
- "mpileaks ^mpich2",
- ]]
+ specs = [
+ Spec(s).concretized()
+ for s in [
+ "mpileaks ^zmpi",
+ "mpileaks ^mpich",
+ "mpileaks ^mpich2",
+ ]
+ ]
cmd.display_specs_as_json(specs)
spec_list = json.loads(capsys.readouterr()[0])
@@ -188,11 +190,14 @@ def test_display_json(database, capsys):
@pytest.mark.db
def test_display_json_deps(database, capsys):
- specs = [Spec(s).concretized() for s in [
- "mpileaks ^zmpi",
- "mpileaks ^mpich",
- "mpileaks ^mpich2",
- ]]
+ specs = [
+ Spec(s).concretized()
+ for s in [
+ "mpileaks ^zmpi",
+ "mpileaks ^mpich",
+ "mpileaks ^mpich2",
+ ]
+ ]
cmd.display_specs_as_json(specs, deps=True)
spec_list = json.loads(capsys.readouterr()[0])
@@ -205,30 +210,34 @@ def test_display_json_deps(database, capsys):
@pytest.mark.db
def test_find_format(database, config):
- output = find('--format', '{name}-{^mpi.name}', 'mpileaks')
- assert set(output.strip().split('\n')) == set([
- "mpileaks-zmpi",
- "mpileaks-mpich",
- "mpileaks-mpich2",
- ])
-
- output = find('--format', '{name}-{version}-{compiler.name}-{^mpi.name}',
- 'mpileaks')
+ output = find("--format", "{name}-{^mpi.name}", "mpileaks")
+ assert set(output.strip().split("\n")) == set(
+ [
+ "mpileaks-zmpi",
+ "mpileaks-mpich",
+ "mpileaks-mpich2",
+ ]
+ )
+
+ output = find("--format", "{name}-{version}-{compiler.name}-{^mpi.name}", "mpileaks")
assert "installed package" not in output
- assert set(output.strip().split('\n')) == set([
- "mpileaks-2.3-gcc-zmpi",
- "mpileaks-2.3-gcc-mpich",
- "mpileaks-2.3-gcc-mpich2",
- ])
-
- output = find('--format', '{name}-{^mpi.name}-{hash:7}',
- 'mpileaks')
- elements = output.strip().split('\n')
- assert set(e[:-7] for e in elements) == set([
- "mpileaks-zmpi-",
- "mpileaks-mpich-",
- "mpileaks-mpich2-",
- ])
+ assert set(output.strip().split("\n")) == set(
+ [
+ "mpileaks-2.3-gcc-zmpi",
+ "mpileaks-2.3-gcc-mpich",
+ "mpileaks-2.3-gcc-mpich2",
+ ]
+ )
+
+ output = find("--format", "{name}-{^mpi.name}-{hash:7}", "mpileaks")
+ elements = output.strip().split("\n")
+ assert set(e[:-7] for e in elements) == set(
+ [
+ "mpileaks-zmpi-",
+ "mpileaks-mpich-",
+ "mpileaks-mpich2-",
+ ]
+ )
# hashes are in base32
for e in elements:
@@ -238,8 +247,10 @@ def test_find_format(database, config):
@pytest.mark.db
def test_find_format_deps(database, config):
- output = find('-d', '--format', '{name}-{version}', 'mpileaks', '^zmpi')
- assert output == """\
+ output = find("-d", "--format", "{name}-{version}", "mpileaks", "^zmpi")
+ assert (
+ output
+ == """\
mpileaks-2.3
callpath-1.0
dyninst-8.2
@@ -249,16 +260,19 @@ mpileaks-2.3
fake-1.0
"""
+ )
@pytest.mark.db
def test_find_format_deps_paths(database, config):
- output = find('-dp', '--format', '{name}-{version}', 'mpileaks', '^zmpi')
+ output = find("-dp", "--format", "{name}-{version}", "mpileaks", "^zmpi")
spec = Spec("mpileaks ^zmpi").concretized()
prefixes = [s.prefix for s in spec.traverse()]
- assert output == """\
+ assert (
+ output
+ == """\
mpileaks-2.3 {0}
callpath-1.0 {1}
dyninst-8.2 {2}
@@ -267,27 +281,33 @@ mpileaks-2.3 {0}
zmpi-1.0 {5}
fake-1.0 {6}
-""".format(*prefixes)
+""".format(
+ *prefixes
+ )
+ )
@pytest.mark.db
def test_find_very_long(database, config):
- output = find('-L', '--no-groups', "mpileaks")
-
- specs = [Spec(s).concretized() for s in [
- "mpileaks ^zmpi",
- "mpileaks ^mpich",
- "mpileaks ^mpich2",
- ]]
-
- assert set(output.strip().split("\n")) == set([
- ("%s mpileaks@2.3" % s.dag_hash()) for s in specs
- ])
+ output = find("-L", "--no-groups", "mpileaks")
+
+ specs = [
+ Spec(s).concretized()
+ for s in [
+ "mpileaks ^zmpi",
+ "mpileaks ^mpich",
+ "mpileaks ^mpich2",
+ ]
+ ]
+
+ assert set(output.strip().split("\n")) == set(
+ [("%s mpileaks@2.3" % s.dag_hash()) for s in specs]
+ )
@pytest.mark.db
def test_find_show_compiler(database, config):
- output = find('--no-groups', '--show-full-compiler', "mpileaks")
+ output = find("--no-groups", "--show-full-compiler", "mpileaks")
assert "mpileaks@2.3%gcc@4.5.0" in output
@@ -312,33 +332,35 @@ def test_find_no_sections(database, config):
@pytest.mark.db
def test_find_command_basic_usage(database):
output = find()
- assert 'mpileaks' in output
+ assert "mpileaks" in output
-@pytest.mark.regression('9875')
-def test_find_prefix_in_env(mutable_mock_env_path, install_mockery, mock_fetch,
- mock_packages, mock_archive, config):
+@pytest.mark.regression("9875")
+def test_find_prefix_in_env(
+ mutable_mock_env_path, install_mockery, mock_fetch, mock_packages, mock_archive, config
+):
"""Test `find` formats requiring concrete specs work in environments."""
- env('create', 'test')
- with ev.read('test'):
- install('mpileaks')
- find('-p')
- find('-l')
- find('-L')
+ env("create", "test")
+ with ev.read("test"):
+ install("mpileaks")
+ find("-p")
+ find("-l")
+ find("-L")
# Would throw error on regression
def test_find_loaded(database, working_env):
- output = find('--loaded', '--group')
- assert output == ''
+ output = find("--loaded", "--group")
+ assert output == ""
- os.environ[uenv.spack_loaded_hashes_var] = ':'.join(
- [x.dag_hash() for x in spack.store.db.query()])
- output = find('--loaded')
+ os.environ[uenv.spack_loaded_hashes_var] = ":".join(
+ [x.dag_hash() for x in spack.store.db.query()]
+ )
+ output = find("--loaded")
expected = find()
assert output == expected
def test_bootstrap_deprecated():
- output = find('--bootstrap')
+ output = find("--bootstrap")
assert "`spack find --bootstrap` is deprecated" in output
diff --git a/lib/spack/spack/test/cmd/gc.py b/lib/spack/spack/test/cmd/gc.py
index 822564f0a4..53705519bb 100644
--- a/lib/spack/spack/test/cmd/gc.py
+++ b/lib/spack/spack/test/cmd/gc.py
@@ -11,41 +11,38 @@ import spack.environment as ev
import spack.main
import spack.spec
-gc = spack.main.SpackCommand('gc')
+gc = spack.main.SpackCommand("gc")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.mark.db
def test_no_packages_to_remove(config, mutable_database, capsys):
with capsys.disabled():
- output = gc('-y')
- assert 'There are no unused specs.' in output
+ output = gc("-y")
+ assert "There are no unused specs." in output
@pytest.mark.db
def test_packages_are_removed(config, mutable_database, capsys):
- s = spack.spec.Spec('simple-inheritance')
+ s = spack.spec.Spec("simple-inheritance")
s.concretize()
s.package.do_install(fake=True, explicit=True)
with capsys.disabled():
- output = gc('-y')
- assert 'Successfully uninstalled cmake' in output
+ output = gc("-y")
+ assert "Successfully uninstalled cmake" in output
@pytest.mark.db
-def test_gc_with_environment(
- config, mutable_database, mutable_mock_env_path, capsys
-):
- s = spack.spec.Spec('simple-inheritance')
+def test_gc_with_environment(config, mutable_database, mutable_mock_env_path, capsys):
+ s = spack.spec.Spec("simple-inheritance")
s.concretize()
s.package.do_install(fake=True, explicit=True)
- e = ev.create('test_gc')
- e.add('cmake')
+ e = ev.create("test_gc")
+ e.add("cmake")
with e:
with capsys.disabled():
- output = gc('-y')
- assert 'Restricting the garbage collection' in output
- assert 'There are no unused specs' in output
+ output = gc("-y")
+ assert "Restricting the garbage collection" in output
+ assert "There are no unused specs" in output
diff --git a/lib/spack/spack/test/cmd/gpg.py b/lib/spack/spack/test/cmd/gpg.py
index 1cca88fdc2..9d698275ed 100644
--- a/lib/spack/spack/test/cmd/gpg.py
+++ b/lib/spack/spack/test/cmd/gpg.py
@@ -18,27 +18,26 @@ from spack.paths import mock_gpg_data_path, mock_gpg_keys_path
from spack.util.executable import ProcessError
#: spack command used by tests below
-gpg = SpackCommand('gpg')
-bootstrap = SpackCommand('bootstrap')
-mirror = SpackCommand('mirror')
+gpg = SpackCommand("gpg")
+bootstrap = SpackCommand("bootstrap")
+mirror = SpackCommand("mirror")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture
def tmp_scope():
"""Creates a temporary configuration scope"""
- base_name = 'internal-testing-scope'
+ base_name = "internal-testing-scope"
current_overrides = set(
- x.name for x in
- spack.config.config.matching_scopes(r'^{0}'.format(base_name)))
+ x.name for x in spack.config.config.matching_scopes(r"^{0}".format(base_name))
+ )
num_overrides = 0
scope_name = base_name
while scope_name in current_overrides:
- scope_name = '{0}{1}'.format(base_name, num_overrides)
+ scope_name = "{0}{1}".format(base_name, num_overrides)
num_overrides += 1
with spack.config.override(spack.config.InternalConfigScope(scope_name)):
@@ -46,24 +45,26 @@ def tmp_scope():
# test gpg command detection
-@pytest.mark.parametrize('cmd_name,version', [
- ('gpg', 'undetectable'), # undetectable version
- ('gpg', 'gpg (GnuPG) 1.3.4'), # insufficient version
- ('gpg', 'gpg (GnuPG) 2.2.19'), # sufficient version
- ('gpg2', 'gpg (GnuPG) 2.2.19'), # gpg2 command
-])
+@pytest.mark.parametrize(
+ "cmd_name,version",
+ [
+ ("gpg", "undetectable"), # undetectable version
+ ("gpg", "gpg (GnuPG) 1.3.4"), # insufficient version
+ ("gpg", "gpg (GnuPG) 2.2.19"), # sufficient version
+ ("gpg2", "gpg (GnuPG) 2.2.19"), # gpg2 command
+ ],
+)
def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch):
- TEMPLATE = ('#!/bin/sh\n'
- 'echo "{version}"\n')
+ TEMPLATE = "#!/bin/sh\n" 'echo "{version}"\n'
with tmpdir.as_cwd():
- for fname in (cmd_name, 'gpgconf'):
- with open(fname, 'w') as f:
+ for fname in (cmd_name, "gpgconf"):
+ with open(fname, "w") as f:
f.write(TEMPLATE.format(version=version))
fs.set_executable(fname)
monkeypatch.setitem(os.environ, "PATH", str(tmpdir))
- if version == 'undetectable' or version.endswith('1.3.4'):
+ if version == "undetectable" or version.endswith("1.3.4"):
with pytest.raises(spack.util.gpg.SpackGPGError):
spack.util.gpg.init(force=True)
else:
@@ -74,7 +75,7 @@ def test_find_gpg(cmd_name, version, tmpdir, mock_gnupghome, monkeypatch):
def test_no_gpg_in_path(tmpdir, mock_gnupghome, monkeypatch, mutable_config):
monkeypatch.setitem(os.environ, "PATH", str(tmpdir))
- bootstrap('disable')
+ bootstrap("disable")
with pytest.raises(RuntimeError):
spack.util.gpg.init(force=True)
@@ -83,133 +84,133 @@ def test_no_gpg_in_path(tmpdir, mock_gnupghome, monkeypatch, mutable_config):
def test_gpg(tmpdir, tmp_scope, mock_gnupghome):
# Verify a file with an empty keyring.
with pytest.raises(ProcessError):
- gpg('verify', os.path.join(mock_gpg_data_path, 'content.txt'))
+ gpg("verify", os.path.join(mock_gpg_data_path, "content.txt"))
# Import the default key.
- gpg('init', '--from', mock_gpg_keys_path)
+ gpg("init", "--from", mock_gpg_keys_path)
# List the keys.
# TODO: Test the output here.
- gpg('list', '--trusted')
- gpg('list', '--signing')
+ gpg("list", "--trusted")
+ gpg("list", "--signing")
# Verify the file now that the key has been trusted.
- gpg('verify', os.path.join(mock_gpg_data_path, 'content.txt'))
+ gpg("verify", os.path.join(mock_gpg_data_path, "content.txt"))
# Untrust the default key.
- gpg('untrust', 'Spack testing')
+ gpg("untrust", "Spack testing")
# Now that the key is untrusted, verification should fail.
with pytest.raises(ProcessError):
- gpg('verify', os.path.join(mock_gpg_data_path, 'content.txt'))
+ gpg("verify", os.path.join(mock_gpg_data_path, "content.txt"))
# Create a file to test signing.
- test_path = tmpdir.join('to-sign.txt')
- with open(str(test_path), 'w+') as fout:
- fout.write('Test content for signing.\n')
+ test_path = tmpdir.join("to-sign.txt")
+ with open(str(test_path), "w+") as fout:
+ fout.write("Test content for signing.\n")
# Signing without a private key should fail.
with pytest.raises(RuntimeError) as exc_info:
- gpg('sign', str(test_path))
- assert exc_info.value.args[0] == 'no signing keys are available'
+ gpg("sign", str(test_path))
+ assert exc_info.value.args[0] == "no signing keys are available"
# Create a key for use in the tests.
- keypath = tmpdir.join('testing-1.key')
- gpg('create',
- '--comment', 'Spack testing key',
- '--export', str(keypath),
- 'Spack testing 1',
- 'spack@googlegroups.com')
+ keypath = tmpdir.join("testing-1.key")
+ gpg(
+ "create",
+ "--comment",
+ "Spack testing key",
+ "--export",
+ str(keypath),
+ "Spack testing 1",
+ "spack@googlegroups.com",
+ )
keyfp = spack.util.gpg.signing_keys()[0]
# List the keys.
# TODO: Test the output here.
- gpg('list')
- gpg('list', '--trusted')
- gpg('list', '--signing')
+ gpg("list")
+ gpg("list", "--trusted")
+ gpg("list", "--signing")
# Signing with the default (only) key.
- gpg('sign', str(test_path))
+ gpg("sign", str(test_path))
# Verify the file we just verified.
- gpg('verify', str(test_path))
+ gpg("verify", str(test_path))
# Export the key for future use.
- export_path = tmpdir.join('export.testing.key')
- gpg('export', str(export_path))
+ export_path = tmpdir.join("export.testing.key")
+ gpg("export", str(export_path))
# Test exporting the private key
- private_export_path = tmpdir.join('export-secret.testing.key')
- gpg('export', '--secret', str(private_export_path))
+ private_export_path = tmpdir.join("export-secret.testing.key")
+ gpg("export", "--secret", str(private_export_path))
# Ensure we exported the right content!
- with open(str(private_export_path), 'r') as fd:
+ with open(str(private_export_path), "r") as fd:
content = fd.read()
assert "BEGIN PGP PRIVATE KEY BLOCK" in content
# and for the public key
- with open(str(export_path), 'r') as fd:
+ with open(str(export_path), "r") as fd:
content = fd.read()
assert "BEGIN PGP PUBLIC KEY BLOCK" in content
# Create a second key for use in the tests.
- gpg('create',
- '--comment', 'Spack testing key',
- 'Spack testing 2',
- 'spack@googlegroups.com')
+ gpg("create", "--comment", "Spack testing key", "Spack testing 2", "spack@googlegroups.com")
# List the keys.
# TODO: Test the output here.
- gpg('list', '--trusted')
- gpg('list', '--signing')
+ gpg("list", "--trusted")
+ gpg("list", "--signing")
- test_path = tmpdir.join('to-sign-2.txt')
- with open(str(test_path), 'w+') as fout:
- fout.write('Test content for signing.\n')
+ test_path = tmpdir.join("to-sign-2.txt")
+ with open(str(test_path), "w+") as fout:
+ fout.write("Test content for signing.\n")
# Signing with multiple signing keys is ambiguous.
with pytest.raises(RuntimeError) as exc_info:
- gpg('sign', str(test_path))
- assert exc_info.value.args[0] == \
- 'multiple signing keys are available; please choose one'
+ gpg("sign", str(test_path))
+ assert exc_info.value.args[0] == "multiple signing keys are available; please choose one"
# Signing with a specified key.
- gpg('sign', '--key', keyfp, str(test_path))
+ gpg("sign", "--key", keyfp, str(test_path))
# Untrusting signing keys needs a flag.
with pytest.raises(ProcessError):
- gpg('untrust', 'Spack testing 1')
+ gpg("untrust", "Spack testing 1")
# Untrust the key we created.
- gpg('untrust', '--signing', keyfp)
+ gpg("untrust", "--signing", keyfp)
# Verification should now fail.
with pytest.raises(ProcessError):
- gpg('verify', str(test_path))
+ gpg("verify", str(test_path))
# Trust the exported key.
- gpg('trust', str(export_path))
+ gpg("trust", str(export_path))
# Verification should now succeed again.
- gpg('verify', str(test_path))
+ gpg("verify", str(test_path))
# Publish the keys using a directory path
- test_path = tmpdir.join('dir_cache')
- os.makedirs('%s' % test_path)
- gpg('publish', '--rebuild-index', '-d', str(test_path))
- assert os.path.exists('%s/build_cache/_pgp/index.json' % test_path)
+ test_path = tmpdir.join("dir_cache")
+ os.makedirs("%s" % test_path)
+ gpg("publish", "--rebuild-index", "-d", str(test_path))
+ assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
# Publish the keys using a mirror url
- test_path = tmpdir.join('url_cache')
- os.makedirs('%s' % test_path)
- test_url = 'file://%s' % test_path
- gpg('publish', '--rebuild-index', '--mirror-url', test_url)
- assert os.path.exists('%s/build_cache/_pgp/index.json' % test_path)
+ test_path = tmpdir.join("url_cache")
+ os.makedirs("%s" % test_path)
+ test_url = "file://%s" % test_path
+ gpg("publish", "--rebuild-index", "--mirror-url", test_url)
+ assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
# Publish the keys using a mirror name
- test_path = tmpdir.join('named_cache')
- os.makedirs('%s' % test_path)
- mirror_url = 'file://%s' % test_path
- mirror('add', '--scope', tmp_scope, 'gpg', mirror_url)
- gpg('publish', '--rebuild-index', '-m', 'gpg')
- assert os.path.exists('%s/build_cache/_pgp/index.json' % test_path)
+ test_path = tmpdir.join("named_cache")
+ os.makedirs("%s" % test_path)
+ mirror_url = "file://%s" % test_path
+ mirror("add", "--scope", tmp_scope, "gpg", mirror_url)
+ gpg("publish", "--rebuild-index", "-m", "gpg")
+ assert os.path.exists("%s/build_cache/_pgp/index.json" % test_path)
diff --git a/lib/spack/spack/test/cmd/graph.py b/lib/spack/spack/test/cmd/graph.py
index 06900cb0ac..c7999d6629 100644
--- a/lib/spack/spack/test/cmd/graph.py
+++ b/lib/spack/spack/test/cmd/graph.py
@@ -9,49 +9,48 @@ import pytest
from spack.main import SpackCommand, SpackCommandError
-graph = SpackCommand('graph')
+graph = SpackCommand("graph")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.mark.db
-@pytest.mark.usefixtures('mock_packages', 'database')
+@pytest.mark.usefixtures("mock_packages", "database")
def test_graph_ascii():
"""Tests spack graph --ascii"""
- graph('--ascii', 'dt-diamond')
+ graph("--ascii", "dt-diamond")
@pytest.mark.db
-@pytest.mark.usefixtures('mock_packages', 'database')
+@pytest.mark.usefixtures("mock_packages", "database")
def test_graph_dot():
"""Tests spack graph --dot"""
- graph('--dot', 'dt-diamond')
+ graph("--dot", "dt-diamond")
@pytest.mark.db
-@pytest.mark.usefixtures('mock_packages', 'database')
+@pytest.mark.usefixtures("mock_packages", "database")
def test_graph_static():
"""Tests spack graph --static"""
- graph('--static', 'dt-diamond')
+ graph("--static", "dt-diamond")
@pytest.mark.db
-@pytest.mark.usefixtures('mock_packages', 'database')
+@pytest.mark.usefixtures("mock_packages", "database")
def test_graph_installed():
"""Tests spack graph --installed"""
- graph('--installed')
+ graph("--installed")
with pytest.raises(SpackCommandError):
- graph('--installed', 'dt-diamond')
+ graph("--installed", "dt-diamond")
@pytest.mark.db
-@pytest.mark.usefixtures('mock_packages', 'database')
+@pytest.mark.usefixtures("mock_packages", "database")
def test_graph_deptype():
"""Tests spack graph --deptype"""
- graph('--deptype', 'all', 'dt-diamond')
+ graph("--deptype", "all", "dt-diamond")
def test_graph_no_specs():
diff --git a/lib/spack/spack/test/cmd/help.py b/lib/spack/spack/test/cmd/help.py
index 75e9e92ddb..ed7f872275 100644
--- a/lib/spack/spack/test/cmd/help.py
+++ b/lib/spack/spack/test/cmd/help.py
@@ -11,7 +11,7 @@ from spack.main import SpackCommand
@pytest.mark.xfail
def test_reuse_after_help():
"""Test `spack help` can be called twice with the same SpackCommand."""
- help_cmd = SpackCommand('help')
+ help_cmd = SpackCommand("help")
help_cmd()
# This second invocation will somehow fail because the parser no
@@ -30,27 +30,27 @@ def test_reuse_after_help():
def test_help():
"""Sanity check the help command to make sure it works."""
- help_cmd = SpackCommand('help')
+ help_cmd = SpackCommand("help")
out = help_cmd()
- assert 'These are common spack commands:' in out
+ assert "These are common spack commands:" in out
def test_help_all():
"""Test the spack help --all flag"""
- help_cmd = SpackCommand('help')
- out = help_cmd('--all')
- assert 'Complete list of spack commands:' in out
+ help_cmd = SpackCommand("help")
+ out = help_cmd("--all")
+ assert "Complete list of spack commands:" in out
def test_help_spec():
"""Test the spack help --spec flag"""
- help_cmd = SpackCommand('help')
- out = help_cmd('--spec')
- assert 'spec expression syntax:' in out
+ help_cmd = SpackCommand("help")
+ out = help_cmd("--spec")
+ assert "spec expression syntax:" in out
def test_help_subcommand():
"""Test the spack help subcommand argument"""
- help_cmd = SpackCommand('help')
- out = help_cmd('help')
- assert 'get help on spack and its commands' in out
+ help_cmd = SpackCommand("help")
+ out = help_cmd("help")
+ assert "get help on spack and its commands" in out
diff --git a/lib/spack/spack/test/cmd/info.py b/lib/spack/spack/test/cmd/info.py
index 57176a4784..55b875e022 100644
--- a/lib/spack/spack/test/cmd/info.py
+++ b/lib/spack/spack/test/cmd/info.py
@@ -11,13 +11,12 @@ import pytest
import spack.cmd.info
from spack.main import SpackCommand
-info = SpackCommand('info')
+info = SpackCommand("info")
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="Not yet implemented on Windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Not yet implemented on Windows")
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def parser():
"""Returns the parser for the module command"""
prs = argparse.ArgumentParser()
@@ -32,72 +31,73 @@ def print_buffer(monkeypatch):
def _print(*args):
buffer.extend(args)
- monkeypatch.setattr(spack.cmd.info.color, 'cprint', _print, raising=False)
+ monkeypatch.setattr(spack.cmd.info.color, "cprint", _print, raising=False)
return buffer
-@pytest.mark.parametrize('pkg', [
- 'openmpi',
- 'trilinos',
- 'boost',
- 'python',
- 'dealii',
- 'xsdk' # a BundlePackage
-])
+@pytest.mark.parametrize(
+ "pkg", ["openmpi", "trilinos", "boost", "python", "dealii", "xsdk"] # a BundlePackage
+)
def test_it_just_runs(pkg):
info(pkg)
def test_info_noversion(mock_packages, print_buffer):
"""Check that a mock package with no versions or variants outputs None."""
- info('noversion')
+ info("noversion")
line_iter = iter(print_buffer)
for line in line_iter:
- if 'version' in line:
- has = [desc in line for desc in ['Preferred', 'Safe', 'Deprecated']]
+ if "version" in line:
+ has = [desc in line for desc in ["Preferred", "Safe", "Deprecated"]]
if not any(has):
continue
- elif 'Variants' not in line:
+ elif "Variants" not in line:
continue
- assert 'None' in next(line_iter).strip()
+ assert "None" in next(line_iter).strip()
-@pytest.mark.parametrize('pkg_query,expected', [
- ('zlib', 'False'),
- ('gcc', 'True (version, variants)'),
-])
+@pytest.mark.parametrize(
+ "pkg_query,expected",
+ [
+ ("zlib", "False"),
+ ("gcc", "True (version, variants)"),
+ ],
+)
def test_is_externally_detectable(pkg_query, expected, parser, print_buffer):
- args = parser.parse_args(['--detectable', pkg_query])
+ args = parser.parse_args(["--detectable", pkg_query])
spack.cmd.info.info(parser, args)
line_iter = iter(print_buffer)
for line in line_iter:
- if 'Externally Detectable' in line:
+ if "Externally Detectable" in line:
is_externally_detectable = next(line_iter).strip()
assert is_externally_detectable == expected
-@pytest.mark.parametrize('pkg_query', [
- 'hdf5',
- 'cloverleaf3d',
- 'trilinos',
- 'gcc' # This should ensure --test's c_names processing loop covered
-])
+@pytest.mark.parametrize(
+ "pkg_query",
+ [
+ "hdf5",
+ "cloverleaf3d",
+ "trilinos",
+ "gcc", # This should ensure --test's c_names processing loop covered
+ ],
+)
def test_info_fields(pkg_query, parser, print_buffer):
expected_fields = (
- 'Description:',
- 'Homepage:',
- 'Externally Detectable:',
- 'Safe versions:',
- 'Variants:',
- 'Installation Phases:',
- 'Virtual Packages:',
- 'Tags:'
+ "Description:",
+ "Homepage:",
+ "Externally Detectable:",
+ "Safe versions:",
+ "Variants:",
+ "Installation Phases:",
+ "Virtual Packages:",
+ "Tags:",
)
- args = parser.parse_args(['--all', pkg_query])
+ args = parser.parse_args(["--all", pkg_query])
spack.cmd.info.info(parser, args)
for text in expected_fields:
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index f9194f664d..73e3cf15cf 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -26,34 +26,34 @@ from spack.error import SpackError
from spack.main import SpackCommand
from spack.spec import CompilerSpec, Spec
-install = SpackCommand('install')
-env = SpackCommand('env')
-add = SpackCommand('add')
-mirror = SpackCommand('mirror')
-uninstall = SpackCommand('uninstall')
-buildcache = SpackCommand('buildcache')
-find = SpackCommand('find')
+install = SpackCommand("install")
+env = SpackCommand("env")
+add = SpackCommand("add")
+mirror = SpackCommand("mirror")
+uninstall = SpackCommand("uninstall")
+buildcache = SpackCommand("buildcache")
+find = SpackCommand("find")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture()
def noop_install(monkeypatch):
def noop(*args, **kwargs):
pass
- monkeypatch.setattr(spack.installer.PackageInstaller, 'install', noop)
+
+ monkeypatch.setattr(spack.installer.PackageInstaller, "install", noop)
def test_install_package_and_dependency(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
with tmpdir.as_cwd():
- install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
+ install("--log-format=junit", "--log-file=test.xml", "libdwarf")
files = tmpdir.listdir()
- filename = tmpdir.join('test.xml')
+ filename = tmpdir.join("test.xml")
assert filename in files
content = filename.open().read()
@@ -66,17 +66,18 @@ def test_install_package_and_dependency(
def test_install_runtests_notests(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert not pkg.run_tests
- monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', check)
- install('-v', 'dttop')
+
+ monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
+ install("-v", "dttop")
@pytest.mark.disable_clean_stage_check
def test_install_runtests_root(monkeypatch, mock_packages, install_mockery):
def check(pkg):
- assert pkg.run_tests == (pkg.name == 'dttop')
+ assert pkg.run_tests == (pkg.name == "dttop")
- monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', check)
- install('--test=root', 'dttop')
+ monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
+ install("--test=root", "dttop")
@pytest.mark.disable_clean_stage_check
@@ -84,20 +85,20 @@ def test_install_runtests_all(monkeypatch, mock_packages, install_mockery):
def check(pkg):
assert pkg.run_tests
- monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', check)
- install('--test=all', 'a')
+ monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", check)
+ install("--test=all", "a")
def test_install_package_already_installed(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
with tmpdir.as_cwd():
- install('libdwarf')
- install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
+ install("libdwarf")
+ install("--log-format=junit", "--log-file=test.xml", "libdwarf")
files = tmpdir.listdir()
- filename = tmpdir.join('test.xml')
+ filename = tmpdir.join("test.xml")
assert filename in files
content = filename.open().read()
@@ -105,15 +106,18 @@ def test_install_package_already_installed(
assert 'failures="0"' in content
assert 'errors="0"' in content
- skipped = [line for line in content.split('\n') if 'skipped' in line]
+ skipped = [line for line in content.split("\n") if "skipped" in line]
assert len(skipped) == 2
-@pytest.mark.parametrize('arguments,expected', [
- ([], spack.config.get('config:dirty')), # default from config file
- (['--clean'], False),
- (['--dirty'], True),
-])
+@pytest.mark.parametrize(
+ "arguments,expected",
+ [
+ ([], spack.config.get("config:dirty")), # default from config file
+ (["--clean"], False),
+ (["--dirty"], True),
+ ],
+)
def test_install_dirty_flag(arguments, expected):
parser = argparse.ArgumentParser()
spack.cmd.install.setup_parser(parser)
@@ -128,7 +132,7 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):
# we can't use output capture here because it interferes with Spack's
# logging. TODO: see whether we can get multiple log_outputs to work
# when nested AND in pytest
- spec = Spec('printing-package').concretized()
+ spec = Spec("printing-package").concretized()
pkg = spec.package
pkg.do_install(verbose=True)
@@ -143,8 +147,9 @@ def test_package_output(tmpdir, capsys, install_mockery, mock_fetch):
@pytest.mark.disable_clean_stage_check
-def test_install_output_on_build_error(mock_packages, mock_archive, mock_fetch,
- config, install_mockery, capfd):
+def test_install_output_on_build_error(
+ mock_packages, mock_archive, mock_fetch, config, install_mockery, capfd
+):
"""
This test used to assume receiving full output, but since we've updated
spack to generate logs on the level of phases, it will only return the
@@ -152,68 +157,65 @@ def test_install_output_on_build_error(mock_packages, mock_archive, mock_fetch,
"""
# capfd interferes with Spack's capturing
with capfd.disabled():
- out = install('-v', 'build-error', fail_on_error=False)
- assert 'Installing build-error' in out
+ out = install("-v", "build-error", fail_on_error=False)
+ assert "Installing build-error" in out
@pytest.mark.disable_clean_stage_check
def test_install_output_on_python_error(
- mock_packages, mock_archive, mock_fetch, config, install_mockery):
- out = install('failing-build', fail_on_error=False)
+ mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ out = install("failing-build", fail_on_error=False)
assert isinstance(install.error, spack.build_environment.ChildError)
- assert install.error.name == 'InstallError'
+ assert install.error.name == "InstallError"
assert 'raise InstallError("Expected failure.")' in out
@pytest.mark.disable_clean_stage_check
-def test_install_with_source(
- mock_packages, mock_archive, mock_fetch, config, install_mockery):
+def test_install_with_source(mock_packages, mock_archive, mock_fetch, config, install_mockery):
"""Verify that source has been copied into place."""
- install('--source', '--keep-stage', 'trivial-install-test-package')
- spec = Spec('trivial-install-test-package').concretized()
- src = os.path.join(
- spec.prefix.share, 'trivial-install-test-package', 'src')
- assert filecmp.cmp(os.path.join(mock_archive.path, 'configure'),
- os.path.join(src, 'configure'))
+ install("--source", "--keep-stage", "trivial-install-test-package")
+ spec = Spec("trivial-install-test-package").concretized()
+ src = os.path.join(spec.prefix.share, "trivial-install-test-package", "src")
+ assert filecmp.cmp(
+ os.path.join(mock_archive.path, "configure"), os.path.join(src, "configure")
+ )
-def test_install_env_variables(
- mock_packages, mock_archive, mock_fetch, config, install_mockery
-):
- spec = Spec('libdwarf')
+def test_install_env_variables(mock_packages, mock_archive, mock_fetch, config, install_mockery):
+ spec = Spec("libdwarf")
spec.concretize()
- install('libdwarf')
+ install("libdwarf")
assert os.path.isfile(spec.package.install_env_path)
@pytest.mark.disable_clean_stage_check
-def test_show_log_on_error(mock_packages, mock_archive, mock_fetch,
- config, install_mockery, capfd):
+def test_show_log_on_error(
+ mock_packages, mock_archive, mock_fetch, config, install_mockery, capfd
+):
"""
Make sure --show-log-on-error works.
"""
with capfd.disabled():
- out = install('--show-log-on-error', 'build-error',
- fail_on_error=False)
+ out = install("--show-log-on-error", "build-error", fail_on_error=False)
assert isinstance(install.error, spack.build_environment.ChildError)
- assert install.error.pkg.name == 'build-error'
+ assert install.error.pkg.name == "build-error"
- assert '==> Installing build-error' in out
- assert 'See build log for details:' in out
+ assert "==> Installing build-error" in out
+ assert "See build log for details:" in out
-def test_install_overwrite(
- mock_packages, mock_archive, mock_fetch, config, install_mockery
-):
+def test_install_overwrite(mock_packages, mock_archive, mock_fetch, config, install_mockery):
# Try to install a spec and then to reinstall it.
- spec = Spec('libdwarf')
+ spec = Spec("libdwarf")
spec.concretize()
- install('libdwarf')
+ install("libdwarf")
# Ignore manifest and install times
- manifest = os.path.join(spec.prefix, spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ manifest = os.path.join(
+ spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
ignores = [manifest, spec.package.times_log_path]
assert os.path.exists(spec.prefix)
@@ -221,14 +223,14 @@ def test_install_overwrite(
# Modify the first installation to be sure the content is not the same
# as the one after we reinstalled
- with open(os.path.join(spec.prefix, 'only_in_old'), 'w') as f:
- f.write('This content is here to differentiate installations.')
+ with open(os.path.join(spec.prefix, "only_in_old"), "w") as f:
+ f.write("This content is here to differentiate installations.")
bad_md5 = fs.hash_directory(spec.prefix, ignore=ignores)
assert bad_md5 != expected_md5
- install('--overwrite', '-y', 'libdwarf')
+ install("--overwrite", "-y", "libdwarf")
assert os.path.exists(spec.prefix)
assert fs.hash_directory(spec.prefix, ignore=ignores) == expected_md5
@@ -236,20 +238,23 @@ def test_install_overwrite(
def test_install_overwrite_not_installed(
- mock_packages, mock_archive, mock_fetch, config, install_mockery,
+ mock_packages,
+ mock_archive,
+ mock_fetch,
+ config,
+ install_mockery,
):
# Try to install a spec and then to reinstall it.
- spec = Spec('libdwarf')
+ spec = Spec("libdwarf")
spec.concretize()
assert not os.path.exists(spec.prefix)
- install('--overwrite', '-y', 'libdwarf')
+ install("--overwrite", "-y", "libdwarf")
assert os.path.exists(spec.prefix)
-def test_install_commit(
- mock_git_version_info, install_mockery, mock_packages, monkeypatch):
+def test_install_commit(mock_git_version_info, install_mockery, mock_packages, monkeypatch):
"""Test installing a git package from a commit.
This ensures Spack associates commit versions with their packages in time to do
@@ -257,13 +262,13 @@ def test_install_commit(
"""
repo_path, filename, commits = mock_git_version_info
- monkeypatch.setattr(spack.package_base.PackageBase,
- 'git', 'file://%s' % repo_path,
- raising=False)
+ monkeypatch.setattr(
+ spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False
+ )
    # Use the earliest commit in the repository
commit = commits[-1]
- spec = spack.spec.Spec('git-test-commit@%s' % commit)
+ spec = spack.spec.Spec("git-test-commit@%s" % commit)
spec.concretize()
print(spec)
spec.package.do_install()
@@ -271,38 +276,37 @@ def test_install_commit(
# Ensure first commit file contents were written
installed = os.listdir(spec.prefix.bin)
assert filename in installed
- with open(spec.prefix.bin.join(filename), 'r') as f:
+ with open(spec.prefix.bin.join(filename), "r") as f:
content = f.read().strip()
- assert content == '[]' # contents are weird for another test
+ assert content == "[]" # contents are weird for another test
def test_install_overwrite_multiple(
- mock_packages, mock_archive, mock_fetch, config, install_mockery
+ mock_packages, mock_archive, mock_fetch, config, install_mockery
):
# Try to install a spec and then to reinstall it.
- libdwarf = Spec('libdwarf')
+ libdwarf = Spec("libdwarf")
libdwarf.concretize()
- install('libdwarf')
+ install("libdwarf")
- cmake = Spec('cmake')
+ cmake = Spec("cmake")
cmake.concretize()
- install('cmake')
+ install("cmake")
- ld_manifest = os.path.join(libdwarf.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ ld_manifest = os.path.join(
+ libdwarf.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
ld_ignores = [ld_manifest, libdwarf.package.times_log_path]
assert os.path.exists(libdwarf.prefix)
- expected_libdwarf_md5 = fs.hash_directory(libdwarf.prefix,
- ignore=ld_ignores)
+ expected_libdwarf_md5 = fs.hash_directory(libdwarf.prefix, ignore=ld_ignores)
- cm_manifest = os.path.join(cmake.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ cm_manifest = os.path.join(
+ cmake.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
cm_ignores = [cm_manifest, cmake.package.times_log_path]
assert os.path.exists(cmake.prefix)
@@ -310,10 +314,10 @@ def test_install_overwrite_multiple(
# Modify the first installation to be sure the content is not the same
# as the one after we reinstalled
- with open(os.path.join(libdwarf.prefix, 'only_in_old'), 'w') as f:
- f.write('This content is here to differentiate installations.')
- with open(os.path.join(cmake.prefix, 'only_in_old'), 'w') as f:
- f.write('This content is here to differentiate installations.')
+ with open(os.path.join(libdwarf.prefix, "only_in_old"), "w") as f:
+ f.write("This content is here to differentiate installations.")
+ with open(os.path.join(cmake.prefix, "only_in_old"), "w") as f:
+ f.write("This content is here to differentiate installations.")
bad_libdwarf_md5 = fs.hash_directory(libdwarf.prefix, ignore=ld_ignores)
bad_cmake_md5 = fs.hash_directory(cmake.prefix, ignore=cm_ignores)
@@ -321,7 +325,7 @@ def test_install_overwrite_multiple(
assert bad_libdwarf_md5 != expected_libdwarf_md5
assert bad_cmake_md5 != expected_cmake_md5
- install('--overwrite', '-y', 'libdwarf', 'cmake')
+ install("--overwrite", "-y", "libdwarf", "cmake")
assert os.path.exists(libdwarf.prefix)
assert os.path.exists(cmake.prefix)
@@ -334,7 +338,11 @@ def test_install_overwrite_multiple(
@pytest.mark.usefixtures(
- 'mock_packages', 'mock_archive', 'mock_fetch', 'config', 'install_mockery',
+ "mock_packages",
+ "mock_archive",
+ "mock_fetch",
+ "config",
+ "install_mockery",
)
def test_install_conflicts(conflict_spec):
# Make sure that spec with conflicts raises a SpackError
@@ -343,70 +351,78 @@ def test_install_conflicts(conflict_spec):
@pytest.mark.usefixtures(
- 'mock_packages', 'mock_archive', 'mock_fetch', 'config', 'install_mockery',
+ "mock_packages",
+ "mock_archive",
+ "mock_fetch",
+ "config",
+ "install_mockery",
)
def test_install_invalid_spec(invalid_spec):
# Make sure that invalid specs raise a SpackError
- with pytest.raises(SpackError, match='Unexpected token'):
+ with pytest.raises(SpackError, match="Unexpected token"):
install(invalid_spec)
-@pytest.mark.usefixtures('noop_install', 'mock_packages', 'config')
-@pytest.mark.parametrize('spec,concretize,error_code', [
- (Spec('mpi'), False, 1),
- (Spec('mpi'), True, 0),
- (Spec('boost'), False, 1),
- (Spec('boost'), True, 0)
-])
+@pytest.mark.usefixtures("noop_install", "mock_packages", "config")
+@pytest.mark.parametrize(
+ "spec,concretize,error_code",
+ [
+ (Spec("mpi"), False, 1),
+ (Spec("mpi"), True, 0),
+ (Spec("boost"), False, 1),
+ (Spec("boost"), True, 0),
+ ],
+)
def test_install_from_file(spec, concretize, error_code, tmpdir):
if concretize:
spec.concretize()
- specfile = tmpdir.join('spec.yaml')
+ specfile = tmpdir.join("spec.yaml")
- with specfile.open('w') as f:
+ with specfile.open("w") as f:
spec.to_yaml(f)
- err_msg = 'does not contain a concrete spec' if error_code else ''
+ err_msg = "does not contain a concrete spec" if error_code else ""
# Relative path to specfile (regression for #6906)
with fs.working_dir(specfile.dirname):
# A non-concrete spec will fail to be installed
- out = install('-f', specfile.basename, fail_on_error=False)
+ out = install("-f", specfile.basename, fail_on_error=False)
assert install.returncode == error_code
assert err_msg in out
# Absolute path to specfile (regression for #6983)
- out = install('-f', str(specfile), fail_on_error=False)
+ out = install("-f", str(specfile), fail_on_error=False)
assert install.returncode == error_code
assert err_msg in out
@pytest.mark.disable_clean_stage_check
@pytest.mark.usefixtures(
- 'mock_packages', 'mock_archive', 'mock_fetch', 'config', 'install_mockery'
+ "mock_packages", "mock_archive", "mock_fetch", "config", "install_mockery"
+)
+@pytest.mark.parametrize(
+ "exc_typename,msg",
+ [("RuntimeError", "something weird happened"), ("ValueError", "spec is not concrete")],
)
-@pytest.mark.parametrize('exc_typename,msg', [
- ('RuntimeError', 'something weird happened'),
- ('ValueError', 'spec is not concrete')
-])
def test_junit_output_with_failures(tmpdir, exc_typename, msg):
with tmpdir.as_cwd():
install(
- '--log-format=junit', '--log-file=test.xml',
- 'raiser',
- 'exc_type={0}'.format(exc_typename),
+ "--log-format=junit",
+ "--log-file=test.xml",
+ "raiser",
+ "exc_type={0}".format(exc_typename),
'msg="{0}"'.format(msg),
fail_on_error=False,
)
assert isinstance(install.error, spack.build_environment.ChildError)
assert install.error.name == exc_typename
- assert install.error.pkg.name == 'raiser'
+ assert install.error.pkg.name == "raiser"
files = tmpdir.listdir()
- filename = tmpdir.join('test.xml')
+ filename = tmpdir.join("test.xml")
assert filename in files
content = filename.open().read()
@@ -421,35 +437,43 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):
assert 'failures="0"' not in content
# We want to have both stdout and stderr
- assert '<system-out>' in content
+ assert "<system-out>" in content
assert msg in content
@pytest.mark.disable_clean_stage_check
-@pytest.mark.parametrize('exc_typename,expected_exc,msg', [
- ('RuntimeError', spack.installer.InstallError, 'something weird happened'),
- ('KeyboardInterrupt', KeyboardInterrupt, 'Ctrl-C strikes again')
-])
+@pytest.mark.parametrize(
+ "exc_typename,expected_exc,msg",
+ [
+ ("RuntimeError", spack.installer.InstallError, "something weird happened"),
+ ("KeyboardInterrupt", KeyboardInterrupt, "Ctrl-C strikes again"),
+ ],
+)
def test_junit_output_with_errors(
- exc_typename, expected_exc, msg,
- mock_packages, mock_archive, mock_fetch, install_mockery,
- config, tmpdir, monkeypatch):
-
+ exc_typename,
+ expected_exc,
+ msg,
+ mock_packages,
+ mock_archive,
+ mock_fetch,
+ install_mockery,
+ config,
+ tmpdir,
+ monkeypatch,
+):
def just_throw(*args, **kwargs):
exc_type = getattr(builtins, exc_typename)
raise exc_type(msg)
- monkeypatch.setattr(spack.installer.PackageInstaller, '_install_task',
- just_throw)
+ monkeypatch.setattr(spack.installer.PackageInstaller, "_install_task", just_throw)
with tmpdir.as_cwd():
- install('--log-format=junit', '--log-file=test.xml', 'libdwarf',
- fail_on_error=False)
+ install("--log-format=junit", "--log-file=test.xml", "libdwarf", fail_on_error=False)
assert isinstance(install.error, expected_exc)
files = tmpdir.listdir()
- filename = tmpdir.join('test.xml')
+ filename = tmpdir.join("test.xml")
assert filename in files
content = filename.open().read()
@@ -464,97 +488,95 @@ def test_junit_output_with_errors(
assert 'errors="0"' not in content
# We want to have both stdout and stderr
- assert '<system-out>' in content
+ assert "<system-out>" in content
assert 'error message="{0}"'.format(msg) in content
-@pytest.mark.usefixtures('noop_install', 'mock_packages', 'config')
-@pytest.mark.parametrize('clispecs,filespecs', [
- [[], ['mpi']],
- [[], ['mpi', 'boost']],
- [['cmake'], ['mpi']],
- [['cmake', 'libelf'], []],
- [['cmake', 'libelf'], ['mpi', 'boost']],
-])
+@pytest.mark.usefixtures("noop_install", "mock_packages", "config")
+@pytest.mark.parametrize(
+ "clispecs,filespecs",
+ [
+ [[], ["mpi"]],
+ [[], ["mpi", "boost"]],
+ [["cmake"], ["mpi"]],
+ [["cmake", "libelf"], []],
+ [["cmake", "libelf"], ["mpi", "boost"]],
+ ],
+)
def test_install_mix_cli_and_files(clispecs, filespecs, tmpdir):
args = clispecs
for spec in filespecs:
- filepath = tmpdir.join(spec + '.yaml')
- args = ['-f', str(filepath)] + args
+ filepath = tmpdir.join(spec + ".yaml")
+ args = ["-f", str(filepath)] + args
s = Spec(spec)
s.concretize()
- with filepath.open('w') as f:
+ with filepath.open("w") as f:
s.to_yaml(f)
install(*args, fail_on_error=False)
assert install.returncode == 0
-def test_extra_files_are_archived(mock_packages, mock_archive, mock_fetch,
- config, install_mockery):
- s = Spec('archive-files')
+def test_extra_files_are_archived(
+ mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ s = Spec("archive-files")
s.concretize()
- install('archive-files')
+ install("archive-files")
- archive_dir = os.path.join(
- spack.store.layout.metadata_path(s), 'archived-files'
- )
- config_log = os.path.join(archive_dir,
- mock_archive.expanded_archive_basedir,
- 'config.log')
+ archive_dir = os.path.join(spack.store.layout.metadata_path(s), "archived-files")
+ config_log = os.path.join(archive_dir, mock_archive.expanded_archive_basedir, "config.log")
assert os.path.exists(config_log)
- errors_txt = os.path.join(archive_dir, 'errors.txt')
+ errors_txt = os.path.join(archive_dir, "errors.txt")
assert os.path.exists(errors_txt)
@pytest.mark.disable_clean_stage_check
-def test_cdash_report_concretization_error(tmpdir, mock_fetch, install_mockery,
- capfd, conflict_spec):
+def test_cdash_report_concretization_error(
+ tmpdir, mock_fetch, install_mockery, capfd, conflict_spec
+):
# capfd interferes with Spack's capturing
with capfd.disabled():
with tmpdir.as_cwd():
with pytest.raises(SpackError):
- install(
- '--log-format=cdash',
- '--log-file=cdash_reports',
- conflict_spec)
- report_dir = tmpdir.join('cdash_reports')
+ install("--log-format=cdash", "--log-file=cdash_reports", conflict_spec)
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('Update.xml')
+ report_file = report_dir.join("Update.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
- assert '<UpdateReturnStatus>' in content
+ assert "<UpdateReturnStatus>" in content
# The message is different based on using the
# new or the old concretizer
expected_messages = (
- 'Conflicts in concretized spec',
- 'conflicts with',
+ "Conflicts in concretized spec",
+ "conflicts with",
)
assert any(x in content for x in expected_messages)
@pytest.mark.disable_clean_stage_check
-def test_cdash_upload_build_error(tmpdir, mock_fetch, install_mockery,
- capfd):
+def test_cdash_upload_build_error(tmpdir, mock_fetch, install_mockery, capfd):
# capfd interferes with Spack's capturing
with capfd.disabled():
with tmpdir.as_cwd():
with pytest.raises(SpackError):
install(
- '--log-format=cdash',
- '--log-file=cdash_reports',
- '--cdash-upload-url=http://localhost/fakeurl/submit.php?project=Spack',
- 'build-error')
- report_dir = tmpdir.join('cdash_reports')
+ "--log-format=cdash",
+ "--log-file=cdash_reports",
+ "--cdash-upload-url=http://localhost/fakeurl/submit.php?project=Spack",
+ "build-error",
+ )
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('Build.xml')
+ report_file = report_dir.join("Build.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
- assert '<Text>configure: error: in /path/to/some/file:</Text>' in content
+ assert "<Text>configure: error: in /path/to/some/file:</Text>" in content
@pytest.mark.disable_clean_stage_check
@@ -562,17 +584,14 @@ def test_cdash_upload_clean_build(tmpdir, mock_fetch, install_mockery, capfd):
# capfd interferes with Spack's capturing of e.g., Build.xml output
with capfd.disabled():
with tmpdir.as_cwd():
- install(
- '--log-file=cdash_reports',
- '--log-format=cdash',
- 'a')
- report_dir = tmpdir.join('cdash_reports')
+ install("--log-file=cdash_reports", "--log-format=cdash", "a")
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('a_Build.xml')
+ report_file = report_dir.join("a_Build.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
- assert '</Build>' in content
- assert '<Text>' not in content
+ assert "</Build>" in content
+ assert "<Text>" not in content
@pytest.mark.disable_clean_stage_check
@@ -581,20 +600,21 @@ def test_cdash_upload_extra_params(tmpdir, mock_fetch, install_mockery, capfd):
with capfd.disabled():
with tmpdir.as_cwd():
install(
- '--log-file=cdash_reports',
- '--log-format=cdash',
- '--cdash-build=my_custom_build',
- '--cdash-site=my_custom_site',
- '--cdash-track=my_custom_track',
- 'a')
- report_dir = tmpdir.join('cdash_reports')
+ "--log-file=cdash_reports",
+ "--log-format=cdash",
+ "--cdash-build=my_custom_build",
+ "--cdash-site=my_custom_site",
+ "--cdash-track=my_custom_track",
+ "a",
+ )
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('a_Build.xml')
+ report_file = report_dir.join("a_Build.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
assert 'Site BuildName="my_custom_build - a"' in content
assert 'Name="my_custom_site"' in content
- assert '-my_custom_track' in content
+ assert "-my_custom_track" in content
@pytest.mark.disable_clean_stage_check
@@ -602,167 +622,172 @@ def test_cdash_buildstamp_param(tmpdir, mock_fetch, install_mockery, capfd):
# capfd interferes with Spack's capture of e.g., Build.xml output
with capfd.disabled():
with tmpdir.as_cwd():
- cdash_track = 'some_mocked_track'
+ cdash_track = "some_mocked_track"
buildstamp_format = "%Y%m%d-%H%M-{0}".format(cdash_track)
- buildstamp = time.strftime(buildstamp_format,
- time.localtime(int(time.time())))
+ buildstamp = time.strftime(buildstamp_format, time.localtime(int(time.time())))
install(
- '--log-file=cdash_reports',
- '--log-format=cdash',
- '--cdash-buildstamp={0}'.format(buildstamp),
- 'a')
- report_dir = tmpdir.join('cdash_reports')
+ "--log-file=cdash_reports",
+ "--log-format=cdash",
+ "--cdash-buildstamp={0}".format(buildstamp),
+ "a",
+ )
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('a_Build.xml')
+ report_file = report_dir.join("a_Build.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
assert buildstamp in content
@pytest.mark.disable_clean_stage_check
-def test_cdash_install_from_spec_json(tmpdir, mock_fetch, install_mockery,
- capfd, mock_packages, mock_archive,
- config):
+def test_cdash_install_from_spec_json(
+ tmpdir, mock_fetch, install_mockery, capfd, mock_packages, mock_archive, config
+):
# capfd interferes with Spack's capturing
with capfd.disabled():
with tmpdir.as_cwd():
- spec_json_path = str(tmpdir.join('spec.json'))
+ spec_json_path = str(tmpdir.join("spec.json"))
- pkg_spec = Spec('a')
+ pkg_spec = Spec("a")
pkg_spec.concretize()
- with open(spec_json_path, 'w') as fd:
+ with open(spec_json_path, "w") as fd:
fd.write(pkg_spec.to_json(hash=ht.dag_hash))
install(
- '--log-format=cdash',
- '--log-file=cdash_reports',
- '--cdash-build=my_custom_build',
- '--cdash-site=my_custom_site',
- '--cdash-track=my_custom_track',
- '-f', spec_json_path)
-
- report_dir = tmpdir.join('cdash_reports')
+ "--log-format=cdash",
+ "--log-file=cdash_reports",
+ "--cdash-build=my_custom_build",
+ "--cdash-site=my_custom_site",
+ "--cdash-track=my_custom_track",
+ "-f",
+ spec_json_path,
+ )
+
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('a_Configure.xml')
+ report_file = report_dir.join("a_Configure.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
install_command_regex = re.compile(
- r'<ConfigureCommand>(.+)</ConfigureCommand>',
- re.MULTILINE | re.DOTALL)
+ r"<ConfigureCommand>(.+)</ConfigureCommand>", re.MULTILINE | re.DOTALL
+ )
m = install_command_regex.search(content)
assert m
install_command = m.group(1)
- assert 'a@' in install_command
+ assert "a@" in install_command
@pytest.mark.disable_clean_stage_check
def test_build_error_output(tmpdir, mock_fetch, install_mockery, capfd):
with capfd.disabled():
- msg = ''
+ msg = ""
try:
- install('build-error')
+ install("build-error")
assert False, "no exception was raised!"
except spack.build_environment.ChildError as e:
msg = e.long_message
- assert 'configure: error: in /path/to/some/file:' in msg
- assert 'configure: error: cannot run C compiled programs.' in msg
+ assert "configure: error: in /path/to/some/file:" in msg
+ assert "configure: error: cannot run C compiled programs." in msg
@pytest.mark.disable_clean_stage_check
def test_build_warning_output(tmpdir, mock_fetch, install_mockery, capfd):
with capfd.disabled():
- msg = ''
+ msg = ""
try:
- install('build-warnings')
+ install("build-warnings")
assert False, "no exception was raised!"
except spack.build_environment.ChildError as e:
msg = e.long_message
- assert 'WARNING: ALL CAPITAL WARNING!' in msg
- assert 'foo.c:89: warning: some weird warning!' in msg
+ assert "WARNING: ALL CAPITAL WARNING!" in msg
+ assert "foo.c:89: warning: some weird warning!" in msg
def test_cache_only_fails(tmpdir, mock_fetch, install_mockery, capfd):
# libelf from cache fails to install, which automatically removes the
    # libdwarf build task
with capfd.disabled():
- out = install('--cache-only', 'libdwarf', fail_on_error=False)
+ out = install("--cache-only", "libdwarf", fail_on_error=False)
- assert 'Failed to install libelf' in out
- assert 'Skipping build of libdwarf' in out
- assert 'was not installed' in out
+ assert "Failed to install libelf" in out
+ assert "Skipping build of libdwarf" in out
+ assert "was not installed" in out
# Check that failure prefix locks are still cached
- failure_lock_prefixes = ','.join(spack.store.db._prefix_failures.keys())
- assert 'libelf' in failure_lock_prefixes
- assert 'libdwarf' in failure_lock_prefixes
+ failure_lock_prefixes = ",".join(spack.store.db._prefix_failures.keys())
+ assert "libelf" in failure_lock_prefixes
+ assert "libdwarf" in failure_lock_prefixes
def test_install_only_dependencies(tmpdir, mock_fetch, install_mockery):
- dep = Spec('dependency-install').concretized()
- root = Spec('dependent-install').concretized()
+ dep = Spec("dependency-install").concretized()
+ root = Spec("dependent-install").concretized()
- install('--only', 'dependencies', 'dependent-install')
+ install("--only", "dependencies", "dependent-install")
assert os.path.exists(dep.prefix)
assert not os.path.exists(root.prefix)
def test_install_only_package(tmpdir, mock_fetch, install_mockery, capfd):
- msg = ''
+ msg = ""
with capfd.disabled():
try:
- install('--only', 'package', 'dependent-install')
+ install("--only", "package", "dependent-install")
except spack.installer.InstallError as e:
msg = str(e)
- assert 'Cannot proceed with dependent-install' in msg
- assert '1 uninstalled dependency' in msg
+ assert "Cannot proceed with dependent-install" in msg
+ assert "1 uninstalled dependency" in msg
def test_install_deps_then_package(tmpdir, mock_fetch, install_mockery):
- dep = Spec('dependency-install').concretized()
- root = Spec('dependent-install').concretized()
+ dep = Spec("dependency-install").concretized()
+ root = Spec("dependent-install").concretized()
- install('--only', 'dependencies', 'dependent-install')
+ install("--only", "dependencies", "dependent-install")
assert os.path.exists(dep.prefix)
assert not os.path.exists(root.prefix)
- install('--only', 'package', 'dependent-install')
+ install("--only", "package", "dependent-install")
assert os.path.exists(root.prefix)
-@pytest.mark.regression('12002')
-def test_install_only_dependencies_in_env(tmpdir, mock_fetch, install_mockery,
- mutable_mock_env_path):
- env('create', 'test')
+@pytest.mark.regression("12002")
+def test_install_only_dependencies_in_env(
+ tmpdir, mock_fetch, install_mockery, mutable_mock_env_path
+):
+ env("create", "test")
- with ev.read('test'):
- dep = Spec('dependency-install').concretized()
- root = Spec('dependent-install').concretized()
+ with ev.read("test"):
+ dep = Spec("dependency-install").concretized()
+ root = Spec("dependent-install").concretized()
- install('-v', '--only', 'dependencies', 'dependent-install')
+ install("-v", "--only", "dependencies", "dependent-install")
assert os.path.exists(dep.prefix)
assert not os.path.exists(root.prefix)
-@pytest.mark.regression('12002')
+@pytest.mark.regression("12002")
def test_install_only_dependencies_of_all_in_env(
tmpdir, mock_fetch, install_mockery, mutable_mock_env_path
):
- env('create', '--without-view', 'test')
+ env("create", "--without-view", "test")
- with ev.read('test'):
- roots = [Spec('dependent-install@1.0').concretized(),
- Spec('dependent-install@2.0').concretized()]
+ with ev.read("test"):
+ roots = [
+ Spec("dependent-install@1.0").concretized(),
+ Spec("dependent-install@2.0").concretized(),
+ ]
- add('dependent-install@1.0')
- add('dependent-install@2.0')
- install('--only', 'dependencies')
+ add("dependent-install@1.0")
+ add("dependent-install@2.0")
+ install("--only", "dependencies")
for root in roots:
assert not os.path.exists(root.prefix)
@@ -770,8 +795,7 @@ def test_install_only_dependencies_of_all_in_env(
assert os.path.exists(dep.prefix)
-def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
- mutable_mock_env_path):
+def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery, mutable_mock_env_path):
# To test behavior of --no-add option, we create the following environment:
#
# mpileaks
@@ -785,11 +809,11 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
# ^b
# a
# ^b
- e = ev.create('test')
- e.add('mpileaks')
- e.add('libelf@0.8.10') # so env has both root and dep libelf specs
- e.add('a')
- e.add('a ~bvv')
+ e = ev.create("test")
+ e.add("mpileaks")
+ e.add("libelf@0.8.10") # so env has both root and dep libelf specs
+ e.add("a")
+ e.add("a ~bvv")
e.concretize()
env_specs = e.all_specs()
@@ -799,48 +823,46 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
# First find and remember some target concrete specs in the environment
for e_spec in env_specs:
- if e_spec.satisfies(Spec('a ~bvv')):
+ if e_spec.satisfies(Spec("a ~bvv")):
a_spec = e_spec
- elif e_spec.name == 'b':
+ elif e_spec.name == "b":
b_spec = e_spec
- elif e_spec.satisfies(Spec('mpi')):
+ elif e_spec.satisfies(Spec("mpi")):
mpi_spec = e_spec
- assert(a_spec)
- assert(a_spec.concrete)
+ assert a_spec
+ assert a_spec.concrete
- assert(b_spec)
- assert(b_spec.concrete)
- assert(b_spec not in e.roots())
+ assert b_spec
+ assert b_spec.concrete
+ assert b_spec not in e.roots()
- assert(mpi_spec)
- assert(mpi_spec.concrete)
+ assert mpi_spec
+ assert mpi_spec.concrete
# Activate the environment
with e:
# Assert using --no-add with a spec not in the env fails
- inst_out = install(
- '--no-add', 'boost', fail_on_error=False, output=str)
+ inst_out = install("--no-add", "boost", fail_on_error=False, output=str)
- assert('no such spec exists in environment' in inst_out)
+ assert "no such spec exists in environment" in inst_out
# Ensure using --no-add with an ambiguous spec fails
with pytest.raises(ev.SpackEnvironmentError) as err:
- inst_out = install(
- '--no-add', 'a', output=str)
+ inst_out = install("--no-add", "a", output=str)
- assert('a matches multiple specs in the env' in str(err))
+ assert "a matches multiple specs in the env" in str(err)
# With "--no-add", install an unambiguous dependency spec (that already
# exists as a dep in the environment) using --no-add and make sure it
# gets installed (w/ deps), but is not added to the environment.
- install('--no-add', 'dyninst')
+ install("--no-add", "dyninst")
- find_output = find('-l', output=str)
- assert('dyninst' in find_output)
- assert('libdwarf' in find_output)
- assert('libelf' in find_output)
- assert('callpath' not in find_output)
+ find_output = find("-l", output=str)
+ assert "dyninst" in find_output
+ assert "libdwarf" in find_output
+ assert "libelf" in find_output
+ assert "callpath" not in find_output
post_install_specs = e.all_specs()
assert all([s in env_specs for s in post_install_specs])
@@ -848,32 +870,32 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
# Make sure we can install a concrete dependency spec from a spec.json
# file on disk, using the ``--no-add` option, and the spec is installed
# but not added as a root
- mpi_spec_json_path = tmpdir.join('{0}.json'.format(mpi_spec.name))
- with open(mpi_spec_json_path.strpath, 'w') as fd:
+ mpi_spec_json_path = tmpdir.join("{0}.json".format(mpi_spec.name))
+ with open(mpi_spec_json_path.strpath, "w") as fd:
fd.write(mpi_spec.to_json(hash=ht.dag_hash))
- install('--no-add', '-f', mpi_spec_json_path.strpath)
- assert(mpi_spec not in e.roots())
+ install("--no-add", "-f", mpi_spec_json_path.strpath)
+ assert mpi_spec not in e.roots()
- find_output = find('-l', output=str)
- assert(mpi_spec.name in find_output)
+ find_output = find("-l", output=str)
+ assert mpi_spec.name in find_output
# Without "--no-add", install an unambiguous depependency spec (that
# already exists as a dep in the environment) without --no-add and make
# sure it is added as a root of the environment as well as installed.
- assert(b_spec not in e.roots())
+ assert b_spec not in e.roots()
- install('b')
+ install("b")
- assert(b_spec in e.roots())
- assert(b_spec not in e.uninstalled_specs())
+ assert b_spec in e.roots()
+ assert b_spec not in e.uninstalled_specs()
# Without "--no-add", install a novel spec and make sure it is added
# as a root and installed.
- install('bowtie')
+ install("bowtie")
- assert(any([s.name == 'bowtie' for s in e.roots()]))
- assert(not any([s.name == 'bowtie' for s in e.uninstalled_specs()]))
+ assert any([s.name == "bowtie" for s in e.roots()])
+ assert not any([s.name == "bowtie" for s in e.uninstalled_specs()])
def test_install_help_does_not_show_cdash_options(capsys):
@@ -881,16 +903,16 @@ def test_install_help_does_not_show_cdash_options(capsys):
Make sure `spack install --help` does not describe CDash arguments
"""
with pytest.raises(SystemExit):
- install('--help')
+ install("--help")
captured = capsys.readouterr()
- assert 'CDash URL' not in captured.out
+ assert "CDash URL" not in captured.out
def test_install_help_cdash(capsys):
"""Make sure `spack install --help-cdash` describes CDash arguments"""
- install_cmd = SpackCommand('install')
- out = install_cmd('--help-cdash')
- assert 'CDash URL' in out
+ install_cmd = SpackCommand("install")
+ out = install_cmd("--help-cdash")
+ assert "CDash URL" in out
@pytest.mark.disable_clean_stage_check
@@ -898,13 +920,9 @@ def test_cdash_auth_token(tmpdir, mock_fetch, install_mockery, capfd):
# capfd interferes with Spack's capturing
with tmpdir.as_cwd():
with capfd.disabled():
- os.environ['SPACK_CDASH_AUTH_TOKEN'] = 'asdf'
- out = install(
- '-v',
- '--log-file=cdash_reports',
- '--log-format=cdash',
- 'a')
- assert 'Using CDash auth token from environment' in out
+ os.environ["SPACK_CDASH_AUTH_TOKEN"] = "asdf"
+ out = install("-v", "--log-file=cdash_reports", "--log-format=cdash", "a")
+ assert "Using CDash auth token from environment" in out
@pytest.mark.disable_clean_stage_check
@@ -913,82 +931,89 @@ def test_cdash_configure_warning(tmpdir, mock_fetch, install_mockery, capfd):
with capfd.disabled():
with tmpdir.as_cwd():
# Test would fail if install raised an error.
- install(
- '--log-file=cdash_reports',
- '--log-format=cdash',
- 'configure-warning')
+ install("--log-file=cdash_reports", "--log-format=cdash", "configure-warning")
# Verify Configure.xml exists with expected contents.
- report_dir = tmpdir.join('cdash_reports')
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('Configure.xml')
+ report_file = report_dir.join("Configure.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
- assert 'foo: No such file or directory' in content
+ assert "foo: No such file or directory" in content
def test_compiler_bootstrap(
- install_mockery_mutable_config, mock_packages, mock_fetch,
- mock_archive, mutable_config, monkeypatch):
- monkeypatch.setattr(spack.concretize.Concretizer,
- 'check_for_compiler_existence', False)
- spack.config.set('config:install_missing_compilers', True)
- assert CompilerSpec('gcc@2.0') not in compilers.all_compiler_specs()
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_archive,
+ mutable_config,
+ monkeypatch,
+):
+ monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
+ spack.config.set("config:install_missing_compilers", True)
+ assert CompilerSpec("gcc@2.0") not in compilers.all_compiler_specs()
# Test succeeds if it does not raise an error
- install('a%gcc@2.0')
+ install("a%gcc@2.0")
def test_compiler_bootstrap_from_binary_mirror(
- install_mockery_mutable_config, mock_packages, mock_fetch,
- mock_archive, mutable_config, monkeypatch, tmpdir):
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_archive,
+ mutable_config,
+ monkeypatch,
+ tmpdir,
+):
"""
Make sure installing compiler from buildcache registers compiler
"""
# Create a temp mirror directory for buildcache usage
- mirror_dir = tmpdir.join('mirror_dir')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+ mirror_dir = tmpdir.join("mirror_dir")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
# Install a compiler, because we want to put it in a buildcache
- install('gcc@10.2.0')
+ install("gcc@10.2.0")
# Put installed compiler in the buildcache
- buildcache(
- 'create', '-u', '-a', '-f', '-d', mirror_dir.strpath, 'gcc@10.2.0'
- )
+ buildcache("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, "gcc@10.2.0")
# Now uninstall the compiler
- uninstall('-y', 'gcc@10.2.0')
+ uninstall("-y", "gcc@10.2.0")
- monkeypatch.setattr(spack.concretize.Concretizer,
- 'check_for_compiler_existence', False)
- spack.config.set('config:install_missing_compilers', True)
- assert CompilerSpec('gcc@10.2.0') not in compilers.all_compiler_specs()
+ monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
+ spack.config.set("config:install_missing_compilers", True)
+ assert CompilerSpec("gcc@10.2.0") not in compilers.all_compiler_specs()
# Configure the mirror where we put that buildcache w/ the compiler
- mirror('add', 'test-mirror', mirror_url)
+ mirror("add", "test-mirror", mirror_url)
# Now make sure that when the compiler is installed from binary mirror,
# it also gets configured as a compiler. Test succeeds if it does not
# raise an error
- install('--no-check-signature', '--cache-only', '--only',
- 'dependencies', 'b%gcc@10.2.0')
- install('--no-cache', '--only', 'package', 'b%gcc@10.2.0')
+ install("--no-check-signature", "--cache-only", "--only", "dependencies", "b%gcc@10.2.0")
+ install("--no-cache", "--only", "package", "b%gcc@10.2.0")
-@pytest.mark.regression('16221')
+@pytest.mark.regression("16221")
def test_compiler_bootstrap_already_installed(
- install_mockery_mutable_config, mock_packages, mock_fetch,
- mock_archive, mutable_config, monkeypatch):
- monkeypatch.setattr(spack.concretize.Concretizer,
- 'check_for_compiler_existence', False)
- spack.config.set('config:install_missing_compilers', True)
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_archive,
+ mutable_config,
+ monkeypatch,
+):
+ monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
+ spack.config.set("config:install_missing_compilers", True)
- assert CompilerSpec('gcc@2.0') not in compilers.all_compiler_specs()
+ assert CompilerSpec("gcc@2.0") not in compilers.all_compiler_specs()
# Test succeeds if it does not raise an error
- install('gcc@2.0')
- install('a%gcc@2.0')
+ install("gcc@2.0")
+ install("a%gcc@2.0")
def test_install_fails_no_args(tmpdir):
@@ -997,64 +1022,70 @@ def test_install_fails_no_args(tmpdir):
output = install(fail_on_error=False)
# check we got the short version of the error message with no spack.yaml
- assert 'requires a package argument or active environment' in output
- assert 'spack env activate .' not in output
- assert 'using the `spack.yaml` in this directory' not in output
+ assert "requires a package argument or active environment" in output
+ assert "spack env activate ." not in output
+ assert "using the `spack.yaml` in this directory" not in output
def test_install_fails_no_args_suggests_env_activation(tmpdir):
# ensure spack.yaml in directory
- tmpdir.ensure('spack.yaml')
+ tmpdir.ensure("spack.yaml")
with tmpdir.as_cwd():
output = install(fail_on_error=False)
# check we got the long version of the error message with spack.yaml
- assert 'requires a package argument or active environment' in output
- assert 'spack env activate .' in output
- assert 'using the `spack.yaml` in this directory' in output
-
-
-def test_install_env_with_tests_all(tmpdir, mock_packages, mock_fetch,
- install_mockery, mutable_mock_env_path):
- env('create', 'test')
- with ev.read('test'):
- test_dep = Spec('test-dependency').concretized()
- add('depb')
- install('--test', 'all')
+ assert "requires a package argument or active environment" in output
+ assert "spack env activate ." in output
+ assert "using the `spack.yaml` in this directory" in output
+
+
+def test_install_env_with_tests_all(
+ tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
+):
+ env("create", "test")
+ with ev.read("test"):
+ test_dep = Spec("test-dependency").concretized()
+ add("depb")
+ install("--test", "all")
assert os.path.exists(test_dep.prefix)
-def test_install_env_with_tests_root(tmpdir, mock_packages, mock_fetch,
- install_mockery, mutable_mock_env_path):
- env('create', 'test')
- with ev.read('test'):
- test_dep = Spec('test-dependency').concretized()
- add('depb')
- install('--test', 'root')
+def test_install_env_with_tests_root(
+ tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
+):
+ env("create", "test")
+ with ev.read("test"):
+ test_dep = Spec("test-dependency").concretized()
+ add("depb")
+ install("--test", "root")
assert not os.path.exists(test_dep.prefix)
-def test_install_empty_env(tmpdir, mock_packages, mock_fetch,
- install_mockery, mutable_mock_env_path):
- env_name = 'empty'
- env('create', env_name)
+def test_install_empty_env(
+ tmpdir, mock_packages, mock_fetch, install_mockery, mutable_mock_env_path
+):
+ env_name = "empty"
+ env("create", env_name)
with ev.read(env_name):
out = install(fail_on_error=False)
assert env_name in out
- assert 'environment' in out
- assert 'no specs to install' in out
+ assert "environment" in out
+ assert "no specs to install" in out
@pytest.mark.disable_clean_stage_check
-@pytest.mark.parametrize('name,method', [
- ('test-build-callbacks', 'undefined-build-test'),
- ('test-install-callbacks', 'undefined-install-test')
-])
+@pytest.mark.parametrize(
+ "name,method",
+ [
+ ("test-build-callbacks", "undefined-build-test"),
+ ("test-install-callbacks", "undefined-install-test"),
+ ],
+)
def test_install_callbacks_fail(install_mockery, mock_fetch, name, method):
- output = install('--test=root', '--no-cache', name, fail_on_error=False)
+ output = install("--test=root", "--no-cache", name, fail_on_error=False)
assert output.count(method) == 2
- assert output.count('method not implemented') == 1
- assert output.count('TestFailure: 1 tests failed') == 1
+ assert output.count("method not implemented") == 1
+ assert output.count("TestFailure: 1 tests failed") == 1
diff --git a/lib/spack/spack/test/cmd/is_git_repo.py b/lib/spack/spack/test/cmd/is_git_repo.py
index 025d04fff0..e094476a2e 100644
--- a/lib/spack/spack/test/cmd/is_git_repo.py
+++ b/lib/spack/spack/test/cmd/is_git_repo.py
@@ -17,7 +17,7 @@ from spack.util.executable import which
from spack.version import ver
git = which("git")
-git_required_version = '2.17.0'
+git_required_version = "2.17.0"
def check_git_version():
@@ -35,8 +35,7 @@ def check_git_version():
pytestmark = pytest.mark.skipif(
- not git or not check_git_version(),
- reason="we need git to test if we are in a git repo"
+ not git or not check_git_version(), reason="we need git to test if we are in a git repo"
)
@@ -49,7 +48,7 @@ def git_tmp_worktree(tmpdir):
# follow up fixes. 27021
# Path length is occasionally too long on Windows
# the following reduces the path length to acceptable levels
- if sys.platform == 'win32':
+ if sys.platform == "win32":
long_pth = str(tmpdir).split(os.path.sep)
tmp_worktree = os.path.sep.join(long_pth[:-1])
else:
diff --git a/lib/spack/spack/test/cmd/license.py b/lib/spack/spack/test/cmd/license.py
index 8501434659..4c920c5dff 100644
--- a/lib/spack/spack/test/cmd/license.py
+++ b/lib/spack/spack/test/cmd/license.py
@@ -15,81 +15,89 @@ import spack.cmd.license
import spack.paths
from spack.main import SpackCommand
-license = SpackCommand('license')
+license = SpackCommand("license")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_list_files():
- files = license('list-files').strip().split('\n')
+ files = license("list-files").strip().split("\n")
assert all(f.startswith(spack.paths.prefix) for f in files)
- assert os.path.join(spack.paths.bin_path, 'spack') in files
+ assert os.path.join(spack.paths.bin_path, "spack") in files
assert os.path.abspath(__file__) in files
def test_verify(tmpdir):
- source_dir = tmpdir.join('lib', 'spack', 'spack')
+ source_dir = tmpdir.join("lib", "spack", "spack")
mkdirp(str(source_dir))
- no_header = source_dir.join('no_header.py')
+ no_header = source_dir.join("no_header.py")
touch(str(no_header))
- lgpl_header = source_dir.join('lgpl_header.py')
- with lgpl_header.open('w') as f:
- f.write("""\
+ lgpl_header = source_dir.join("lgpl_header.py")
+ with lgpl_header.open("w") as f:
+ f.write(
+ """\
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: LGPL-2.1-only
-""")
+"""
+ )
- old_lgpl_header = source_dir.join('old_lgpl_header.py')
- with old_lgpl_header.open('w') as f:
- f.write("""\
+ old_lgpl_header = source_dir.join("old_lgpl_header.py")
+ with old_lgpl_header.open("w") as f:
+ f.write(
+ """\
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
-""")
+"""
+ )
- correct_header = source_dir.join('correct_header.py')
- with correct_header.open('w') as f:
- f.write("""\
+ correct_header = source_dir.join("correct_header.py")
+ with correct_header.open("w") as f:
+ f.write(
+ """\
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-""")
+"""
+ )
- out = license('--root', str(tmpdir), 'verify', fail_on_error=False)
+ out = license("--root", str(tmpdir), "verify", fail_on_error=False)
assert str(no_header) in out
assert str(lgpl_header) in out
assert str(old_lgpl_header) in out
assert str(correct_header) not in out
- assert '3 improperly licensed files' in out
- assert re.search(r'files not containing expected license:\s*1', out)
- assert re.search(r'files with wrong SPDX-License-Identifier:\s*1', out)
- assert re.search(r'files with old license header:\s*1', out)
+ assert "3 improperly licensed files" in out
+ assert re.search(r"files not containing expected license:\s*1", out)
+ assert re.search(r"files with wrong SPDX-License-Identifier:\s*1", out)
+ assert re.search(r"files with old license header:\s*1", out)
assert license.returncode == 1
def test_update_copyright_year(tmpdir):
- source_dir = tmpdir.join('lib', 'spack', 'spack')
+ source_dir = tmpdir.join("lib", "spack", "spack")
mkdirp(str(source_dir))
years = list(range(2018, 2021))
for year in years:
- outdated = source_dir.join('header_%d.py' % year)
- with outdated.open('w') as f:
- f.write("""\
+ outdated = source_dir.join("header_%d.py" % year)
+ with outdated.open("w") as f:
+ f.write(
+ """\
# Copyright 2013-%d Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-""" % year)
+"""
+ % year
+ )
# add an old MIT license at top level
mit_file = os.path.join(spack.paths.prefix, "LICENSE-MIT")
@@ -99,10 +107,10 @@ def test_update_copyright_year(tmpdir):
old_copyright = re.sub(r"\d{4}-\d{4}", "2018-2019", real.read())
dummy.write(old_copyright)
- license('--root', str(tmpdir), 'update-copyright-year')
+ license("--root", str(tmpdir), "update-copyright-year")
for year in years:
- outdated = source_dir.join('header_%d.py' % year)
+ outdated = source_dir.join("header_%d.py" % year)
first_line = outdated.open().read().split("\n")[0]
assert str(year) not in first_line
assert spack.cmd.license.strict_date in first_line
diff --git a/lib/spack/spack/test/cmd/list.py b/lib/spack/spack/test/cmd/list.py
index fe86a7f3b7..5a4b4b223e 100644
--- a/lib/spack/spack/test/cmd/list.py
+++ b/lib/spack/spack/test/cmd/list.py
@@ -5,81 +5,82 @@
from spack.main import SpackCommand
-list = SpackCommand('list')
+list = SpackCommand("list")
def test_list():
output = list()
- assert 'cloverleaf3d' in output
- assert 'hdf5' in output
+ assert "cloverleaf3d" in output
+ assert "hdf5" in output
def test_list_filter(mock_packages):
- output = list('py-*')
- assert 'py-extension1' in output
- assert 'py-extension2' in output
- assert 'py-extension3' in output
- assert 'python' not in output
- assert 'mpich' not in output
+ output = list("py-*")
+ assert "py-extension1" in output
+ assert "py-extension2" in output
+ assert "py-extension3" in output
+ assert "python" not in output
+ assert "mpich" not in output
- output = list('py')
- assert 'py-extension1' in output
- assert 'py-extension2' in output
- assert 'py-extension3' in output
- assert 'python' in output
- assert 'mpich' not in output
+ output = list("py")
+ assert "py-extension1" in output
+ assert "py-extension2" in output
+ assert "py-extension3" in output
+ assert "python" in output
+ assert "mpich" not in output
def test_list_search_description(mock_packages):
- output = list('--search-description', 'one build dependency')
- assert 'depb' in output
+ output = list("--search-description", "one build dependency")
+ assert "depb" in output
def test_list_format_name_only(mock_packages):
- output = list('--format', 'name_only')
- assert 'zmpi' in output
- assert 'hdf5' in output
+ output = list("--format", "name_only")
+ assert "zmpi" in output
+ assert "hdf5" in output
def test_list_format_version_json(mock_packages):
- output = list('--format', 'version_json')
+ output = list("--format", "version_json")
assert '{"name": "zmpi",' in output
assert '{"name": "dyninst",' in output
import json
+
json.loads(output)
def test_list_format_html(mock_packages):
- output = list('--format', 'html')
+ output = list("--format", "html")
assert '<div class="section" id="zmpi">' in output
- assert '<h1>zmpi' in output
+ assert "<h1>zmpi" in output
assert '<div class="section" id="hdf5">' in output
- assert '<h1>hdf5' in output
+ assert "<h1>hdf5" in output
def test_list_update(tmpdir, mock_packages):
- update_file = tmpdir.join('output')
+ update_file = tmpdir.join("output")
# not yet created when list is run
- list('--update', str(update_file))
+ list("--update", str(update_file))
assert update_file.exists()
with update_file.open() as f:
assert f.read()
# created but older than any package
- with update_file.open('w') as f:
- f.write('empty\n')
+ with update_file.open("w") as f:
+ f.write("empty\n")
update_file.setmtime(0)
- list('--update', str(update_file))
+ list("--update", str(update_file))
assert update_file.exists()
with update_file.open() as f:
- assert f.read() != 'empty\n'
+ assert f.read() != "empty\n"
# newer than any packages
- with update_file.open('w') as f:
- f.write('empty\n')
- list('--update', str(update_file))
+ with update_file.open("w") as f:
+ f.write("empty\n")
+ list("--update", str(update_file))
assert update_file.exists()
with update_file.open() as f:
- assert f.read() == 'empty\n'
+ assert f.read() == "empty\n"
diff --git a/lib/spack/spack/test/cmd/load.py b/lib/spack/spack/test/cmd/load.py
index 01902c3143..93cb7348ce 100644
--- a/lib/spack/spack/test/cmd/load.py
+++ b/lib/spack/spack/test/cmd/load.py
@@ -12,31 +12,31 @@ import spack.spec
import spack.user_environment as uenv
from spack.main import SpackCommand, SpackCommandError
-load = SpackCommand('load')
-unload = SpackCommand('unload')
-install = SpackCommand('install')
-location = SpackCommand('location')
+load = SpackCommand("load")
+unload = SpackCommand("unload")
+install = SpackCommand("install")
+location = SpackCommand("location")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-def test_manpath_trailing_colon(install_mockery, mock_fetch, mock_archive,
- mock_packages, working_env):
+def test_manpath_trailing_colon(
+ install_mockery, mock_fetch, mock_archive, mock_packages, working_env
+):
"""Test that the commands generated by load add the MANPATH prefix
inspections. Also test that Spack correctly preserves the default/existing
manpath search path via a trailing colon"""
- install('mpileaks')
+ install("mpileaks")
- sh_out = load('--sh', '--only', 'package', 'mpileaks')
- lines = sh_out.split('\n')
- assert any(re.match(r'export MANPATH=.*:;', ln) for ln in lines)
+ sh_out = load("--sh", "--only", "package", "mpileaks")
+ lines = sh_out.split("\n")
+ assert any(re.match(r"export MANPATH=.*:;", ln) for ln in lines)
- os.environ['MANPATH'] = '/tmp/man:'
+ os.environ["MANPATH"] = "/tmp/man:"
- sh_out = load('--sh', '--only', 'package', 'mpileaks')
- lines = sh_out.split('\n')
- assert any(re.match(r'export MANPATH=.*:/tmp/man:;', ln) for ln in lines)
+ sh_out = load("--sh", "--only", "package", "mpileaks")
+ lines = sh_out.split("\n")
+ assert any(re.match(r"export MANPATH=.*:/tmp/man:;", ln) for ln in lines)
def test_load(install_mockery, mock_fetch, mock_archive, mock_packages):
@@ -46,120 +46,118 @@ def test_load(install_mockery, mock_fetch, mock_archive, mock_packages):
CMAKE_PREFIX_PATH is the only prefix inspection guaranteed for fake
packages, since it keys on the prefix instead of a subdir."""
- install_out = install('mpileaks', output=str, fail_on_error=False)
- print('spack install mpileaks')
+ install_out = install("mpileaks", output=str, fail_on_error=False)
+ print("spack install mpileaks")
print(install_out)
- mpileaks_spec = spack.spec.Spec('mpileaks').concretized()
+ mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
- sh_out = load('--sh', '--only', 'package', 'mpileaks')
- csh_out = load('--csh', '--only', 'package', 'mpileaks')
+ sh_out = load("--sh", "--only", "package", "mpileaks")
+ csh_out = load("--csh", "--only", "package", "mpileaks")
# Test prefix inspections
- sh_out_test = 'export CMAKE_PREFIX_PATH=%s' % mpileaks_spec.prefix
- csh_out_test = 'setenv CMAKE_PREFIX_PATH %s' % mpileaks_spec.prefix
+ sh_out_test = "export CMAKE_PREFIX_PATH=%s" % mpileaks_spec.prefix
+ csh_out_test = "setenv CMAKE_PREFIX_PATH %s" % mpileaks_spec.prefix
assert sh_out_test in sh_out
assert csh_out_test in csh_out
# Test hashes recorded properly
- hash_test_replacements = (uenv.spack_loaded_hashes_var,
- mpileaks_spec.dag_hash())
- sh_hash_test = 'export %s=%s' % hash_test_replacements
- csh_hash_test = 'setenv %s %s' % hash_test_replacements
+ hash_test_replacements = (uenv.spack_loaded_hashes_var, mpileaks_spec.dag_hash())
+ sh_hash_test = "export %s=%s" % hash_test_replacements
+ csh_hash_test = "setenv %s %s" % hash_test_replacements
assert sh_hash_test in sh_out
assert csh_hash_test in csh_out
-def test_load_recursive(install_mockery, mock_fetch, mock_archive,
- mock_packages):
+def test_load_recursive(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test that the '-r' option to the load command prepends dependency prefix
inspections in post-order"""
- install('mpileaks')
- mpileaks_spec = spack.spec.Spec('mpileaks').concretized()
+ install("mpileaks")
+ mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
- sh_out = load('--sh', 'mpileaks')
- csh_out = load('--csh', 'mpileaks')
+ sh_out = load("--sh", "mpileaks")
+ csh_out = load("--csh", "mpileaks")
# Test prefix inspections
- prefix_test_replacement = ':'.join(reversed(
- [s.prefix for s in mpileaks_spec.traverse(order='post')]))
+ prefix_test_replacement = ":".join(
+ reversed([s.prefix for s in mpileaks_spec.traverse(order="post")])
+ )
- sh_prefix_test = 'export CMAKE_PREFIX_PATH=%s' % prefix_test_replacement
- csh_prefix_test = 'setenv CMAKE_PREFIX_PATH %s' % prefix_test_replacement
+ sh_prefix_test = "export CMAKE_PREFIX_PATH=%s" % prefix_test_replacement
+ csh_prefix_test = "setenv CMAKE_PREFIX_PATH %s" % prefix_test_replacement
assert sh_prefix_test in sh_out
assert csh_prefix_test in csh_out
# Test spack records loaded hashes properly
- hash_test_replacement = (uenv.spack_loaded_hashes_var, ':'.join(reversed(
- [s.dag_hash() for s in mpileaks_spec.traverse(order='post')])))
- sh_hash_test = 'export %s=%s' % hash_test_replacement
- csh_hash_test = 'setenv %s %s' % hash_test_replacement
+ hash_test_replacement = (
+ uenv.spack_loaded_hashes_var,
+ ":".join(reversed([s.dag_hash() for s in mpileaks_spec.traverse(order="post")])),
+ )
+ sh_hash_test = "export %s=%s" % hash_test_replacement
+ csh_hash_test = "setenv %s %s" % hash_test_replacement
assert sh_hash_test in sh_out
assert csh_hash_test in csh_out
-def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive,
- mock_packages):
+def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Tests that environment changes from the package's
`setup_run_environment` method are added to the user environment in
addition to the prefix inspections"""
- install('mpileaks')
+ install("mpileaks")
- sh_out = load('--sh', 'mpileaks')
- csh_out = load('--csh', 'mpileaks')
+ sh_out = load("--sh", "mpileaks")
+ csh_out = load("--csh", "mpileaks")
- assert 'export FOOBAR=mpileaks' in sh_out
- assert 'setenv FOOBAR mpileaks' in csh_out
+ assert "export FOOBAR=mpileaks" in sh_out
+ assert "setenv FOOBAR mpileaks" in csh_out
def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test with and without the --first option"""
- install('libelf@0.8.12')
- install('libelf@0.8.13')
+ install("libelf@0.8.12")
+ install("libelf@0.8.13")
# Now there are two versions of libelf
with pytest.raises(SpackCommandError):
# This should cause an error due to multiple versions
- load('--sh', 'libelf')
+ load("--sh", "libelf")
# Using --first should avoid the error condition
- load('--sh', '--first', 'libelf')
+ load("--sh", "--first", "libelf")
-def test_load_fails_no_shell(install_mockery, mock_fetch, mock_archive,
- mock_packages):
+def test_load_fails_no_shell(install_mockery, mock_fetch, mock_archive, mock_packages):
"""Test that spack load prints an error message without a shell."""
- install('mpileaks')
+ install("mpileaks")
- out = load('mpileaks', fail_on_error=False)
+ out = load("mpileaks", fail_on_error=False)
assert "To set up shell support" in out
-def test_unload(install_mockery, mock_fetch, mock_archive, mock_packages,
- working_env):
+def test_unload(install_mockery, mock_fetch, mock_archive, mock_packages, working_env):
"""Tests that any variables set in the user environment are undone by the
unload command"""
- install('mpileaks')
- mpileaks_spec = spack.spec.Spec('mpileaks').concretized()
+ install("mpileaks")
+ mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
# Set so unload has something to do
- os.environ['FOOBAR'] = 'mpileaks'
- os.environ[uenv.spack_loaded_hashes_var] = '%s:%s' % (
- mpileaks_spec.dag_hash(), 'garbage')
+ os.environ["FOOBAR"] = "mpileaks"
+ os.environ[uenv.spack_loaded_hashes_var] = "%s:%s" % (mpileaks_spec.dag_hash(), "garbage")
- sh_out = unload('--sh', 'mpileaks')
- csh_out = unload('--csh', 'mpileaks')
+ sh_out = unload("--sh", "mpileaks")
+ csh_out = unload("--csh", "mpileaks")
- assert 'unset FOOBAR' in sh_out
- assert 'unsetenv FOOBAR' in csh_out
+ assert "unset FOOBAR" in sh_out
+ assert "unsetenv FOOBAR" in csh_out
- assert 'export %s=garbage' % uenv.spack_loaded_hashes_var in sh_out
- assert 'setenv %s garbage' % uenv.spack_loaded_hashes_var in csh_out
+ assert "export %s=garbage" % uenv.spack_loaded_hashes_var in sh_out
+ assert "setenv %s garbage" % uenv.spack_loaded_hashes_var in csh_out
-def test_unload_fails_no_shell(install_mockery, mock_fetch, mock_archive,
- mock_packages, working_env):
+def test_unload_fails_no_shell(
+ install_mockery, mock_fetch, mock_archive, mock_packages, working_env
+):
"""Test that spack unload prints an error message without a shell."""
- install('mpileaks')
- mpileaks_spec = spack.spec.Spec('mpileaks').concretized()
+ install("mpileaks")
+ mpileaks_spec = spack.spec.Spec("mpileaks").concretized()
os.environ[uenv.spack_loaded_hashes_var] = mpileaks_spec.dag_hash()
- out = unload('mpileaks', fail_on_error=False)
+ out = unload("mpileaks", fail_on_error=False)
assert "To set up shell support" in out
diff --git a/lib/spack/spack/test/cmd/location.py b/lib/spack/spack/test/cmd/location.py
index ca7b302214..7e62b5902e 100644
--- a/lib/spack/spack/test/cmd/location.py
+++ b/lib/spack/spack/test/cmd/location.py
@@ -17,18 +17,19 @@ import spack.stage
from spack.main import SpackCommand, SpackCommandError
# Everything here uses (or can use) the mock config and database.
-pytestmark = [pytest.mark.usefixtures('config', 'database'),
- pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")]
+pytestmark = [
+ pytest.mark.usefixtures("config", "database"),
+ pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
+]
# location prints out "locations of packages and spack directories"
-location = SpackCommand('location')
-env = SpackCommand('env')
+location = SpackCommand("location")
+env = SpackCommand("env")
@pytest.fixture
def mock_spec():
# Make it look like the source was actually expanded.
- s = spack.spec.Spec('externaltest').concretized()
+ s = spack.spec.Spec("externaltest").concretized()
source_path = s.package.stage.source_path
mkdirp(source_path)
yield s, s.package
@@ -39,31 +40,33 @@ def mock_spec():
def test_location_build_dir(mock_spec):
"""Tests spack location --build-dir."""
spec, pkg = mock_spec
- assert location('--build-dir', spec.name).strip() == pkg.stage.source_path
+ assert location("--build-dir", spec.name).strip() == pkg.stage.source_path
-@pytest.mark.regression('22738')
+@pytest.mark.regression("22738")
def test_location_source_dir(mock_spec):
"""Tests spack location --source-dir."""
spec, pkg = mock_spec
- assert location('--source-dir', spec.name).strip() == pkg.stage.source_path
+ assert location("--source-dir", spec.name).strip() == pkg.stage.source_path
assert location(spec.name).strip() == pkg.stage.source_path
def test_location_source_dir_missing():
"""Tests spack location --source-dir with a missing source directory."""
- spec = 'mpileaks'
+ spec = "mpileaks"
prefix = "==> Error: "
- expected = "%sSource directory does not exist yet. Run this to create it:"\
- "%s spack stage %s" % (prefix, os.linesep, spec)
- out = location('--source-dir', spec, fail_on_error=False).strip()
+ expected = (
+ "%sSource directory does not exist yet. Run this to create it:"
+ "%s spack stage %s" % (prefix, os.linesep, spec)
+ )
+ out = location("--source-dir", spec, fail_on_error=False).strip()
assert out == expected
-@pytest.mark.parametrize('options', [([]),
- (['--source-dir', 'mpileaks']),
- (['--env', 'missing-env']),
- (['spec1', 'spec2'])])
+@pytest.mark.parametrize(
+ "options",
+ [([]), (["--source-dir", "mpileaks"]), (["--env", "missing-env"]), (["spec1", "spec2"])],
+)
def test_location_cmd_error(options):
"""Ensure the proper error is raised with problematic location options."""
with pytest.raises(SpackCommandError, match="Command exited with code 1"):
@@ -74,7 +77,7 @@ def test_location_env_exists(mutable_mock_env_path):
"""Tests spack location --env <name> for an existing environment."""
e = ev.create("example")
e.write()
- assert location('--env', "example").strip() == e.path
+ assert location("--env", "example").strip() == e.path
def test_location_with_active_env(mutable_mock_env_path):
@@ -82,7 +85,7 @@ def test_location_with_active_env(mutable_mock_env_path):
e = ev.create("example")
e.write()
with e:
- assert location('--env').strip() == e.path
+ assert location("--env").strip() == e.path
def test_location_env_flag_interference(mutable_mock_env_path, tmpdir):
@@ -92,25 +95,25 @@ def test_location_env_flag_interference(mutable_mock_env_path, tmpdir):
"""
# create two environments
- env('create', 'first_env')
- env('create', 'second_env')
+ env("create", "first_env")
+ env("create", "second_env")
- global_args = ['-e', 'first_env']
+ global_args = ["-e", "first_env"]
# `spack -e first_env location -e second_env` should print the env
# path of second_env
- assert 'first_env' not in location('-e', 'second_env', global_args=global_args)
+ assert "first_env" not in location("-e", "second_env", global_args=global_args)
# `spack -e first_env location --packages` should not print
# the environment path of first_env.
- assert 'first_env' not in location('--packages', global_args=global_args)
+ assert "first_env" not in location("--packages", global_args=global_args)
def test_location_env_missing():
"""Tests spack location --env."""
- missing_env_name = 'missing-env'
+ missing_env_name = "missing-env"
error = "==> Error: no such environment: '%s'" % missing_env_name
- out = location('--env', missing_env_name, fail_on_error=False).strip()
+ out = location("--env", missing_env_name, fail_on_error=False).strip()
assert out == error
@@ -118,29 +121,34 @@ def test_location_env_missing():
def test_location_install_dir(mock_spec):
"""Tests spack location --install-dir."""
spec, _ = mock_spec
- assert location('--install-dir', spec.name).strip() == spec.prefix
+ assert location("--install-dir", spec.name).strip() == spec.prefix
@pytest.mark.db
def test_location_package_dir(mock_spec):
"""Tests spack location --package-dir."""
spec, pkg = mock_spec
- assert location('--package-dir', spec.name).strip() == pkg.package_dir
+ assert location("--package-dir", spec.name).strip() == pkg.package_dir
@pytest.mark.db
-@pytest.mark.parametrize('option,expected', [
- ('--module-dir', spack.paths.module_path),
- ('--packages', spack.paths.mock_packages_path),
- ('--spack-root', spack.paths.prefix)])
+@pytest.mark.parametrize(
+ "option,expected",
+ [
+ ("--module-dir", spack.paths.module_path),
+ ("--packages", spack.paths.mock_packages_path),
+ ("--spack-root", spack.paths.prefix),
+ ],
+)
def test_location_paths_options(option, expected):
"""Tests basic spack.paths location command options."""
assert location(option).strip() == expected
-@pytest.mark.parametrize('specs,expected', [
- ([], "You must supply a spec."),
- (['spec1', 'spec2'], "Too many specs. Supply only one.")])
+@pytest.mark.parametrize(
+ "specs,expected",
+ [([], "You must supply a spec."), (["spec1", "spec2"], "Too many specs. Supply only one.")],
+)
def test_location_spec_errors(specs, expected):
"""Tests spack location with bad spec options."""
error = "==> Error: %s" % expected
@@ -151,10 +159,10 @@ def test_location_spec_errors(specs, expected):
def test_location_stage_dir(mock_spec):
"""Tests spack location --stage-dir."""
spec, pkg = mock_spec
- assert location('--stage-dir', spec.name).strip() == pkg.stage.path
+ assert location("--stage-dir", spec.name).strip() == pkg.stage.path
@pytest.mark.db
def test_location_stages(mock_spec):
"""Tests spack location --stages."""
- assert location('--stages').strip() == spack.stage.get_stage_root()
+ assert location("--stages").strip() == spack.stage.get_stage_root()
diff --git a/lib/spack/spack/test/cmd/maintainers.py b/lib/spack/spack/test/cmd/maintainers.py
index e3f665d5da..2ac2fdf444 100644
--- a/lib/spack/spack/test/cmd/maintainers.py
+++ b/lib/spack/spack/test/cmd/maintainers.py
@@ -12,56 +12,69 @@ import pytest
import spack.main
import spack.repo
-maintainers = spack.main.SpackCommand('maintainers')
+maintainers = spack.main.SpackCommand("maintainers")
def split(output):
"""Split command line output into an array."""
output = output.strip()
- return re.split(r'\s+', output) if output else []
+ return re.split(r"\s+", output) if output else []
def test_maintained(mock_packages):
- out = split(maintainers('--maintained'))
- assert out == ['maintainers-1', 'maintainers-2']
+ out = split(maintainers("--maintained"))
+ assert out == ["maintainers-1", "maintainers-2"]
def test_unmaintained(mock_packages):
- out = split(maintainers('--unmaintained'))
+ out = split(maintainers("--unmaintained"))
assert out == sorted(
- set(spack.repo.all_package_names()) -
- set(['maintainers-1', 'maintainers-2']))
+ set(spack.repo.all_package_names()) - set(["maintainers-1", "maintainers-2"])
+ )
def test_all(mock_packages, capfd):
with capfd.disabled():
- out = split(maintainers('--all'))
+ out = split(maintainers("--all"))
assert out == [
- 'maintainers-1:', 'user1,', 'user2',
- 'maintainers-2:', 'user2,', 'user3',
+ "maintainers-1:",
+ "user1,",
+ "user2",
+ "maintainers-2:",
+ "user2,",
+ "user3",
]
with capfd.disabled():
- out = split(maintainers('--all', 'maintainers-1'))
+ out = split(maintainers("--all", "maintainers-1"))
assert out == [
- 'maintainers-1:', 'user1,', 'user2',
+ "maintainers-1:",
+ "user1,",
+ "user2",
]
def test_all_by_user(mock_packages, capfd):
with capfd.disabled():
- out = split(maintainers('--all', '--by-user'))
+ out = split(maintainers("--all", "--by-user"))
assert out == [
- 'user1:', 'maintainers-1',
- 'user2:', 'maintainers-1,', 'maintainers-2',
- 'user3:', 'maintainers-2',
+ "user1:",
+ "maintainers-1",
+ "user2:",
+ "maintainers-1,",
+ "maintainers-2",
+ "user3:",
+ "maintainers-2",
]
with capfd.disabled():
- out = split(maintainers('--all', '--by-user', 'user1', 'user2'))
+ out = split(maintainers("--all", "--by-user", "user1", "user2"))
assert out == [
- 'user1:', 'maintainers-1',
- 'user2:', 'maintainers-1,', 'maintainers-2',
+ "user1:",
+ "maintainers-1",
+ "user2:",
+ "maintainers-1,",
+ "maintainers-2",
]
@@ -72,47 +85,47 @@ def test_no_args(mock_packages):
def test_no_args_by_user(mock_packages):
with pytest.raises(spack.main.SpackCommandError):
- maintainers('--by-user')
+ maintainers("--by-user")
def test_mutex_args_fail(mock_packages):
with pytest.raises(SystemExit):
- maintainers('--maintained', '--unmaintained')
+ maintainers("--maintained", "--unmaintained")
def test_maintainers_list_packages(mock_packages, capfd):
with capfd.disabled():
- out = split(maintainers('maintainers-1'))
- assert out == ['user1', 'user2']
+ out = split(maintainers("maintainers-1"))
+ assert out == ["user1", "user2"]
with capfd.disabled():
- out = split(maintainers('maintainers-1', 'maintainers-2'))
- assert out == ['user1', 'user2', 'user3']
+ out = split(maintainers("maintainers-1", "maintainers-2"))
+ assert out == ["user1", "user2", "user3"]
with capfd.disabled():
- out = split(maintainers('maintainers-2'))
- assert out == ['user2', 'user3']
+ out = split(maintainers("maintainers-2"))
+ assert out == ["user2", "user3"]
def test_maintainers_list_fails(mock_packages, capfd):
- out = maintainers('a', fail_on_error=False)
+ out = maintainers("a", fail_on_error=False)
assert not out
assert maintainers.returncode == 1
def test_maintainers_list_by_user(mock_packages, capfd):
with capfd.disabled():
- out = split(maintainers('--by-user', 'user1'))
- assert out == ['maintainers-1']
+ out = split(maintainers("--by-user", "user1"))
+ assert out == ["maintainers-1"]
with capfd.disabled():
- out = split(maintainers('--by-user', 'user1', 'user2'))
- assert out == ['maintainers-1', 'maintainers-2']
+ out = split(maintainers("--by-user", "user1", "user2"))
+ assert out == ["maintainers-1", "maintainers-2"]
with capfd.disabled():
- out = split(maintainers('--by-user', 'user2'))
- assert out == ['maintainers-1', 'maintainers-2']
+ out = split(maintainers("--by-user", "user2"))
+ assert out == ["maintainers-1", "maintainers-2"]
with capfd.disabled():
- out = split(maintainers('--by-user', 'user3'))
- assert out == ['maintainers-2']
+ out = split(maintainers("--by-user", "user3"))
+ assert out == ["maintainers-2"]
diff --git a/lib/spack/spack/test/cmd/mark.py b/lib/spack/spack/test/cmd/mark.py
index d74c5df783..de708db694 100644
--- a/lib/spack/spack/test/cmd/mark.py
+++ b/lib/spack/spack/test/cmd/mark.py
@@ -10,64 +10,63 @@ import pytest
import spack.store
from spack.main import SpackCommand, SpackCommandError
-gc = SpackCommand('gc')
-mark = SpackCommand('mark')
-install = SpackCommand('install')
-uninstall = SpackCommand('uninstall')
+gc = SpackCommand("gc")
+mark = SpackCommand("mark")
+install = SpackCommand("install")
+uninstall = SpackCommand("uninstall")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.mark.db
def test_mark_mode_required(mutable_database):
with pytest.raises(SystemExit):
- mark('-a')
+ mark("-a")
@pytest.mark.db
def test_mark_spec_required(mutable_database):
with pytest.raises(SpackCommandError):
- mark('-i')
+ mark("-i")
@pytest.mark.db
def test_mark_all_explicit(mutable_database):
- mark('-e', '-a')
- gc('-y')
+ mark("-e", "-a")
+ gc("-y")
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 15
@pytest.mark.db
def test_mark_all_implicit(mutable_database):
- mark('-i', '-a')
- gc('-y')
+ mark("-i", "-a")
+ gc("-y")
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 0
@pytest.mark.db
def test_mark_one_explicit(mutable_database):
- mark('-e', 'libelf')
- uninstall('-y', '-a', 'mpileaks')
- gc('-y')
+ mark("-e", "libelf")
+ uninstall("-y", "-a", "mpileaks")
+ gc("-y")
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 3
@pytest.mark.db
def test_mark_one_implicit(mutable_database):
- mark('-i', 'externaltest')
- gc('-y')
+ mark("-i", "externaltest")
+ gc("-y")
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 14
@pytest.mark.db
def test_mark_all_implicit_then_explicit(mutable_database):
- mark('-i', '-a')
- mark('-e', '-a')
- gc('-y')
+ mark("-i", "-a")
+ mark("-e", "-a")
+ gc("-y")
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 15
diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py
index 0090e78e86..c3e322487a 100644
--- a/lib/spack/spack/test/cmd/mirror.py
+++ b/lib/spack/spack/test/cmd/mirror.py
@@ -12,31 +12,30 @@ import spack.config
import spack.environment as ev
from spack.main import SpackCommand, SpackCommandError
-mirror = SpackCommand('mirror')
-env = SpackCommand('env')
-add = SpackCommand('add')
-concretize = SpackCommand('concretize')
-install = SpackCommand('install')
-buildcache = SpackCommand('buildcache')
-uninstall = SpackCommand('uninstall')
+mirror = SpackCommand("mirror")
+env = SpackCommand("env")
+add = SpackCommand("add")
+concretize = SpackCommand("concretize")
+install = SpackCommand("install")
+buildcache = SpackCommand("buildcache")
+uninstall = SpackCommand("uninstall")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@pytest.fixture
def tmp_scope():
"""Creates a temporary configuration scope"""
- base_name = 'internal-testing-scope'
+ base_name = "internal-testing-scope"
current_overrides = set(
- x.name for x in
- spack.config.config.matching_scopes(r'^{0}'.format(base_name)))
+ x.name for x in spack.config.config.matching_scopes(r"^{0}".format(base_name))
+ )
num_overrides = 0
scope_name = base_name
while scope_name in current_overrides:
- scope_name = '{0}{1}'.format(base_name, num_overrides)
+ scope_name = "{0}{1}".format(base_name, num_overrides)
num_overrides += 1
with spack.config.override(spack.config.InternalConfigScope(scope_name)):
@@ -49,67 +48,72 @@ def _validate_url(url):
@pytest.fixture(autouse=True)
def url_check(monkeypatch):
- monkeypatch.setattr(spack.util.url, 'require_url_format', _validate_url)
+ monkeypatch.setattr(spack.util.url, "require_url_format", _validate_url)
@pytest.mark.disable_clean_stage_check
-@pytest.mark.regression('8083')
+@pytest.mark.regression("8083")
def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
with capfd.disabled():
- output = mirror('create', '-d', str(tmpdir), 'externaltool')
- assert 'Skipping' in output
- assert 'as it is an external spec' in output
+ output = mirror("create", "-d", str(tmpdir), "externaltool")
+ assert "Skipping" in output
+ assert "as it is an external spec" in output
-@pytest.mark.regression('12345')
-def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config,
- mutable_mock_env_path):
+@pytest.mark.regression("12345")
+def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config, mutable_mock_env_path):
mirror_dir = str(tmpdir)
- env_name = 'test'
+ env_name = "test"
- env('create', env_name)
+ env("create", env_name)
with ev.read(env_name):
- add('trivial-install-test-package')
- add('git-test')
+ add("trivial-install-test-package")
+ add("git-test")
concretize()
- with spack.config.override('config:checksum', False):
- mirror('create', '-d', mirror_dir, '--all')
+ with spack.config.override("config:checksum", False):
+ mirror("create", "-d", mirror_dir, "--all")
e = ev.read(env_name)
assert set(os.listdir(mirror_dir)) == set([s.name for s in e.user_specs])
for spec in e.specs_by_hash.values():
mirror_res = os.listdir(os.path.join(mirror_dir, spec.name))
- expected = ['%s.tar.gz' % spec.format('{name}-{version}')]
+ expected = ["%s.tar.gz" % spec.format("{name}-{version}")]
assert mirror_res == expected
@pytest.fixture
def source_for_pkg_with_hash(mock_packages, tmpdir):
- s = spack.spec.Spec('trivial-pkg-with-valid-hash').concretized()
+ s = spack.spec.Spec("trivial-pkg-with-valid-hash").concretized()
local_url_basename = os.path.basename(s.package.url)
local_path = os.path.join(str(tmpdir), local_url_basename)
- with open(local_path, 'w') as f:
+ with open(local_path, "w") as f:
f.write(s.package.hashed_content)
local_url = "file://" + local_path
- s.package.versions[spack.version.Version('1.0')]['url'] = local_url
+ s.package.versions[spack.version.Version("1.0")]["url"] = local_url
-def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config,
- source_for_pkg_with_hash):
- mirror_dir = str(tmpdir_factory.mktemp('mirror-dir'))
+def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config, source_for_pkg_with_hash):
+ mirror_dir = str(tmpdir_factory.mktemp("mirror-dir"))
- specs = [spack.spec.Spec(x).concretized() for x in
- ['git-test', 'trivial-pkg-with-valid-hash']]
+ specs = [spack.spec.Spec(x).concretized() for x in ["git-test", "trivial-pkg-with-valid-hash"]]
spack.mirror.create(mirror_dir, specs, skip_unstable_versions=True)
- assert (set(os.listdir(mirror_dir)) - set(['_source-cache']) ==
- set(['trivial-pkg-with-valid-hash']))
+ assert set(os.listdir(mirror_dir)) - set(["_source-cache"]) == set(
+ ["trivial-pkg-with-valid-hash"]
+ )
class MockMirrorArgs(object):
- def __init__(self, specs=None, all=False, file=None,
- versions_per_spec=None, dependencies=False,
- exclude_file=None, exclude_specs=None):
+ def __init__(
+ self,
+ specs=None,
+ all=False,
+ file=None,
+ versions_per_spec=None,
+ dependencies=False,
+ exclude_file=None,
+ exclude_specs=None,
+ ):
self.specs = specs or []
self.all = all
self.file = file
@@ -121,150 +125,166 @@ class MockMirrorArgs(object):
def test_exclude_specs(mock_packages, config):
args = MockMirrorArgs(
- specs=['mpich'],
- versions_per_spec='all',
- exclude_specs="mpich@3.0.1:3.0.2 mpich@1.0")
+ specs=["mpich"], versions_per_spec="all", exclude_specs="mpich@3.0.1:3.0.2 mpich@1.0"
+ )
mirror_specs = spack.cmd.mirror._determine_specs_to_mirror(args)
- expected_include = set(spack.spec.Spec(x) for x in
- ['mpich@3.0.3', 'mpich@3.0.4', 'mpich@3.0'])
- expected_exclude = set(spack.spec.Spec(x) for x in
- ['mpich@3.0.1', 'mpich@3.0.2', 'mpich@1.0'])
+ expected_include = set(spack.spec.Spec(x) for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"])
+ expected_exclude = set(spack.spec.Spec(x) for x in ["mpich@3.0.1", "mpich@3.0.2", "mpich@1.0"])
assert expected_include <= set(mirror_specs)
- assert (not expected_exclude & set(mirror_specs))
+ assert not expected_exclude & set(mirror_specs)
def test_exclude_file(mock_packages, tmpdir, config):
- exclude_path = os.path.join(str(tmpdir), 'test-exclude.txt')
- with open(exclude_path, 'w') as exclude_file:
- exclude_file.write("""\
+ exclude_path = os.path.join(str(tmpdir), "test-exclude.txt")
+ with open(exclude_path, "w") as exclude_file:
+ exclude_file.write(
+ """\
mpich@3.0.1:3.0.2
mpich@1.0
-""")
+"""
+ )
- args = MockMirrorArgs(
- specs=['mpich'],
- versions_per_spec='all',
- exclude_file=exclude_path)
+ args = MockMirrorArgs(specs=["mpich"], versions_per_spec="all", exclude_file=exclude_path)
mirror_specs = spack.cmd.mirror._determine_specs_to_mirror(args)
- expected_include = set(spack.spec.Spec(x) for x in
- ['mpich@3.0.3', 'mpich@3.0.4', 'mpich@3.0'])
- expected_exclude = set(spack.spec.Spec(x) for x in
- ['mpich@3.0.1', 'mpich@3.0.2', 'mpich@1.0'])
+ expected_include = set(spack.spec.Spec(x) for x in ["mpich@3.0.3", "mpich@3.0.4", "mpich@3.0"])
+ expected_exclude = set(spack.spec.Spec(x) for x in ["mpich@3.0.1", "mpich@3.0.2", "mpich@1.0"])
assert expected_include <= set(mirror_specs)
- assert (not expected_exclude & set(mirror_specs))
+ assert not expected_exclude & set(mirror_specs)
def test_mirror_crud(tmp_scope, capsys):
with capsys.disabled():
- mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
+ mirror("add", "--scope", tmp_scope, "mirror", "http://spack.io")
- output = mirror('remove', '--scope', tmp_scope, 'mirror')
- assert 'Removed mirror' in output
+ output = mirror("remove", "--scope", tmp_scope, "mirror")
+ assert "Removed mirror" in output
- mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
+ mirror("add", "--scope", tmp_scope, "mirror", "http://spack.io")
# no-op
- output = mirror('set-url', '--scope', tmp_scope,
- 'mirror', 'http://spack.io')
- assert 'No changes made' in output
+ output = mirror("set-url", "--scope", tmp_scope, "mirror", "http://spack.io")
+ assert "No changes made" in output
- output = mirror('set-url', '--scope', tmp_scope,
- '--push', 'mirror', 's3://spack-public')
- assert 'Changed (push) url' in output
+ output = mirror("set-url", "--scope", tmp_scope, "--push", "mirror", "s3://spack-public")
+ assert "Changed (push) url" in output
# no-op
- output = mirror('set-url', '--scope', tmp_scope,
- '--push', 'mirror', 's3://spack-public')
- assert 'No changes made' in output
+ output = mirror("set-url", "--scope", tmp_scope, "--push", "mirror", "s3://spack-public")
+ assert "No changes made" in output
- output = mirror('remove', '--scope', tmp_scope, 'mirror')
- assert 'Removed mirror' in output
+ output = mirror("remove", "--scope", tmp_scope, "mirror")
+ assert "Removed mirror" in output
# Test S3 connection info token
- mirror('add', '--scope', tmp_scope,
- '--s3-access-token', 'aaaaaazzzzz',
- 'mirror', 's3://spack-public')
-
- output = mirror('remove', '--scope', tmp_scope, 'mirror')
- assert 'Removed mirror' in output
+ mirror(
+ "add",
+ "--scope",
+ tmp_scope,
+ "--s3-access-token",
+ "aaaaaazzzzz",
+ "mirror",
+ "s3://spack-public",
+ )
+
+ output = mirror("remove", "--scope", tmp_scope, "mirror")
+ assert "Removed mirror" in output
# Test S3 connection info id/key
- mirror('add', '--scope', tmp_scope,
- '--s3-access-key-id', 'foo', '--s3-access-key-secret', 'bar',
- 'mirror', 's3://spack-public')
-
- output = mirror('remove', '--scope', tmp_scope, 'mirror')
- assert 'Removed mirror' in output
+ mirror(
+ "add",
+ "--scope",
+ tmp_scope,
+ "--s3-access-key-id",
+ "foo",
+ "--s3-access-key-secret",
+ "bar",
+ "mirror",
+ "s3://spack-public",
+ )
+
+ output = mirror("remove", "--scope", tmp_scope, "mirror")
+ assert "Removed mirror" in output
# Test S3 connection info with endpoint URL
- mirror('add', '--scope', tmp_scope,
- '--s3-access-token', 'aaaaaazzzzz',
- '--s3-endpoint-url', 'http://localhost/',
- 'mirror', 's3://spack-public')
-
- output = mirror('remove', '--scope', tmp_scope, 'mirror')
- assert 'Removed mirror' in output
-
- output = mirror('list', '--scope', tmp_scope)
- assert 'No mirrors configured' in output
+ mirror(
+ "add",
+ "--scope",
+ tmp_scope,
+ "--s3-access-token",
+ "aaaaaazzzzz",
+ "--s3-endpoint-url",
+ "http://localhost/",
+ "mirror",
+ "s3://spack-public",
+ )
+
+ output = mirror("remove", "--scope", tmp_scope, "mirror")
+ assert "Removed mirror" in output
+
+ output = mirror("list", "--scope", tmp_scope)
+ assert "No mirrors configured" in output
# Test GCS Mirror
- mirror('add', '--scope', tmp_scope,
- 'mirror', 'gs://spack-test')
+ mirror("add", "--scope", tmp_scope, "mirror", "gs://spack-test")
- output = mirror('remove', '--scope', tmp_scope, 'mirror')
- assert 'Removed mirror' in output
+ output = mirror("remove", "--scope", tmp_scope, "mirror")
+ assert "Removed mirror" in output
def test_mirror_nonexisting(tmp_scope):
with pytest.raises(SpackCommandError):
- mirror('remove', '--scope', tmp_scope, 'not-a-mirror')
+ mirror("remove", "--scope", tmp_scope, "not-a-mirror")
with pytest.raises(SpackCommandError):
- mirror('set-url', '--scope', tmp_scope,
- 'not-a-mirror', 'http://spack.io')
+ mirror("set-url", "--scope", tmp_scope, "not-a-mirror", "http://spack.io")
def test_mirror_name_collision(tmp_scope):
- mirror('add', '--scope', tmp_scope, 'first', '1')
+ mirror("add", "--scope", tmp_scope, "first", "1")
with pytest.raises(SpackCommandError):
- mirror('add', '--scope', tmp_scope, 'first', '1')
-
-
-def test_mirror_destroy(install_mockery_mutable_config,
- mock_packages, mock_fetch, mock_archive,
- mutable_config, monkeypatch, tmpdir):
+ mirror("add", "--scope", tmp_scope, "first", "1")
+
+
+def test_mirror_destroy(
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_fetch,
+ mock_archive,
+ mutable_config,
+ monkeypatch,
+ tmpdir,
+):
# Create a temp mirror directory for buildcache usage
- mirror_dir = tmpdir.join('mirror_dir')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
- mirror('add', 'atest', mirror_url)
+ mirror_dir = tmpdir.join("mirror_dir")
+ mirror_url = "file://{0}".format(mirror_dir.strpath)
+ mirror("add", "atest", mirror_url)
- spec_name = 'libdwarf'
+ spec_name = "libdwarf"
# Put a binary package in a buildcache
- install('--no-cache', spec_name)
- buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, spec_name)
+ install("--no-cache", spec_name)
+ buildcache("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
contents = os.listdir(mirror_dir.strpath)
- assert('build_cache' in contents)
+ assert "build_cache" in contents
# Destroy mirror by name
- mirror('destroy', '-m', 'atest')
+ mirror("destroy", "-m", "atest")
- assert(not os.path.exists(mirror_dir.strpath))
+ assert not os.path.exists(mirror_dir.strpath)
- buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, spec_name)
+ buildcache("create", "-u", "-a", "-f", "-d", mirror_dir.strpath, spec_name)
contents = os.listdir(mirror_dir.strpath)
- assert('build_cache' in contents)
+ assert "build_cache" in contents
# Destroy mirror by url
- mirror('destroy', '--mirror-url', mirror_url)
+ mirror("destroy", "--mirror-url", mirror_url)
- assert(not os.path.exists(mirror_dir.strpath))
+ assert not os.path.exists(mirror_dir.strpath)
- uninstall('-y', spec_name)
- mirror('remove', 'atest')
+ uninstall("-y", spec_name)
+ mirror("remove", "atest")
diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py
index 0ff7cb78a6..1af511b85c 100644
--- a/lib/spack/spack/test/cmd/module.py
+++ b/lib/spack/spack/test/cmd/module.py
@@ -14,37 +14,34 @@ import spack.main
import spack.modules
import spack.store
-module = spack.main.SpackCommand('module')
+module = spack.main.SpackCommand("module")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
#: make sure module files are generated for all the tests here
-@pytest.fixture(scope='module', autouse=True)
-def ensure_module_files_are_there(
- mock_repo_path, mock_store, mock_configuration_scopes
-):
+@pytest.fixture(scope="module", autouse=True)
+def ensure_module_files_are_there(mock_repo_path, mock_store, mock_configuration_scopes):
"""Generate module files for module tests."""
- module = spack.main.SpackCommand('module')
+ module = spack.main.SpackCommand("module")
with spack.store.use_store(str(mock_store)):
with spack.config.use_configuration(*mock_configuration_scopes):
with spack.repo.use_repositories(mock_repo_path):
- module('tcl', 'refresh', '-y')
+ module("tcl", "refresh", "-y")
def _module_files(module_type, *specs):
specs = [spack.spec.Spec(x).concretized() for x in specs]
writer_cls = spack.modules.module_types[module_type]
- return [writer_cls(spec, 'default').layout.filename for spec in specs]
+ return [writer_cls(spec, "default").layout.filename for spec in specs]
@pytest.fixture(
params=[
- ['rm', 'doesnotexist'], # Try to remove a non existing module
- ['find', 'mpileaks'], # Try to find a module with multiple matches
- ['find', 'doesnotexist'], # Try to find a module with no matches
- ['find', '--unkown_args'], # Try to give an unknown argument
+ ["rm", "doesnotexist"], # Try to remove a non existing module
+ ["find", "mpileaks"], # Try to find a module with multiple matches
+ ["find", "doesnotexist"], # Try to find a module with no matches
+ ["find", "--unkown_args"], # Try to give an unknown argument
]
)
def failure_args(request):
@@ -52,9 +49,7 @@ def failure_args(request):
return request.param
-@pytest.fixture(
- params=['tcl', 'lmod']
-)
+@pytest.fixture(params=["tcl", "lmod"])
def module_type(request):
return request.param
@@ -63,6 +58,7 @@ def module_type(request):
# TODO : this requires having a separate directory for test modules
# TODO : add tests for loads and find to check the prompt format
+
@pytest.mark.db
def test_exit_with_failure(database, module_type, failure_args):
with pytest.raises(spack.main.SpackCommandError):
@@ -73,13 +69,13 @@ def test_exit_with_failure(database, module_type, failure_args):
def test_remove_and_add(database, module_type):
"""Tests adding and removing a tcl module file."""
- if module_type == 'lmod':
+ if module_type == "lmod":
# TODO: Testing this with lmod requires mocking
# TODO: the core compilers
return
- rm_cli_args = ['rm', '-y', 'mpileaks']
- module_files = _module_files(module_type, 'mpileaks')
+ rm_cli_args = ["rm", "-y", "mpileaks"]
+ module_files = _module_files(module_type, "mpileaks")
for item in module_files:
assert os.path.exists(item)
@@ -87,64 +83,59 @@ def test_remove_and_add(database, module_type):
for item in module_files:
assert not os.path.exists(item)
- module(module_type, 'refresh', '-y', 'mpileaks')
+ module(module_type, "refresh", "-y", "mpileaks")
for item in module_files:
assert os.path.exists(item)
@pytest.mark.db
-@pytest.mark.parametrize('cli_args', [
- ['libelf'],
- ['--full-path', 'libelf']
-])
+@pytest.mark.parametrize("cli_args", [["libelf"], ["--full-path", "libelf"]])
def test_find(database, cli_args, module_type):
- if module_type == 'lmod':
+ if module_type == "lmod":
# TODO: Testing this with lmod requires mocking
# TODO: the core compilers
return
- module(module_type, *(['find'] + cli_args))
+ module(module_type, *(["find"] + cli_args))
@pytest.mark.db
-@pytest.mark.usefixtures('database')
-@pytest.mark.regression('2215')
+@pytest.mark.usefixtures("database")
+@pytest.mark.regression("2215")
def test_find_fails_on_multiple_matches():
# As we installed multiple versions of mpileaks, the command will
# fail because of multiple matches
- out = module('tcl', 'find', 'mpileaks', fail_on_error=False)
+ out = module("tcl", "find", "mpileaks", fail_on_error=False)
assert module.returncode == 1
- assert 'matches multiple packages' in out
+ assert "matches multiple packages" in out
# Passing multiple packages from the command line also results in the
# same failure
- out = module(
- 'tcl', 'find', 'mpileaks ^mpich', 'libelf', fail_on_error=False
- )
+ out = module("tcl", "find", "mpileaks ^mpich", "libelf", fail_on_error=False)
assert module.returncode == 1
- assert 'matches multiple packages' in out
+ assert "matches multiple packages" in out
@pytest.mark.db
-@pytest.mark.usefixtures('database')
-@pytest.mark.regression('2570')
+@pytest.mark.usefixtures("database")
+@pytest.mark.regression("2570")
def test_find_fails_on_non_existing_packages():
# Another way the command might fail is if the package does not exist
- out = module('tcl', 'find', 'doesnotexist', fail_on_error=False)
+ out = module("tcl", "find", "doesnotexist", fail_on_error=False)
assert module.returncode == 1
- assert 'matches no package' in out
+ assert "matches no package" in out
@pytest.mark.db
-@pytest.mark.usefixtures('database')
+@pytest.mark.usefixtures("database")
def test_find_recursive():
# If we call find without options it should return only one module
- out = module('tcl', 'find', 'mpileaks ^zmpi')
+ out = module("tcl", "find", "mpileaks ^zmpi")
assert len(out.split()) == 1
# If instead we call it with the recursive option the length should
# be greater
- out = module('tcl', 'find', '-r', 'mpileaks ^zmpi')
+ out = module("tcl", "find", "-r", "mpileaks ^zmpi")
assert len(out.split()) > 1
@@ -154,8 +145,8 @@ def test_find_recursive():
def test_find_recursive_excluded(database, module_configuration, config_name):
module_configuration(config_name)
- module('lmod', 'refresh', '-y', '--delete-tree')
- module('lmod', 'find', '-r', 'mpileaks ^mpich')
+ module("lmod", "refresh", "-y", "--delete-tree")
+ module("lmod", "find", "-r", "mpileaks ^mpich")
@pytest.mark.db
@@ -164,14 +155,13 @@ def test_find_recursive_excluded(database, module_configuration, config_name):
def test_loads_recursive_excluded(database, module_configuration, config_name):
module_configuration(config_name)
- module('lmod', 'refresh', '-y', '--delete-tree')
- output = module('lmod', 'loads', '-r', 'mpileaks ^mpich')
- lines = output.split('\n')
+ module("lmod", "refresh", "-y", "--delete-tree")
+ output = module("lmod", "loads", "-r", "mpileaks ^mpich")
+ lines = output.split("\n")
- assert any(re.match(r'[^#]*module load.*mpileaks', ln) for ln in lines)
- assert not any(re.match(r'[^#]module load.*callpath', ln) for ln in lines)
- assert any(re.match(r'## excluded or missing.*callpath', ln)
- for ln in lines)
+ assert any(re.match(r"[^#]*module load.*mpileaks", ln) for ln in lines)
+ assert not any(re.match(r"[^#]module load.*callpath", ln) for ln in lines)
+ assert any(re.match(r"## excluded or missing.*callpath", ln) for ln in lines)
# TODO: currently there is no way to separate stdout and stderr when
# invoking a SpackCommand. Supporting this requires refactoring
@@ -185,46 +175,36 @@ writer_cls = spack.modules.lmod.LmodModulefileWriter
@pytest.mark.db
-def test_setdefault_command(
- mutable_database, mutable_config
-):
+def test_setdefault_command(mutable_database, mutable_config):
data = {
- 'default': {
- 'enable': ['lmod'],
- 'lmod': {
- 'core_compilers': ['clang@3.3'],
- 'hierarchy': ['mpi']
- }
+ "default": {
+ "enable": ["lmod"],
+ "lmod": {"core_compilers": ["clang@3.3"], "hierarchy": ["mpi"]},
}
}
- spack.config.set('modules', data)
+ spack.config.set("modules", data)
# Install two different versions of a package
- other_spec, preferred = 'a@1.0', 'a@2.0'
+ other_spec, preferred = "a@1.0", "a@2.0"
spack.spec.Spec(other_spec).concretized().package.do_install(fake=True)
spack.spec.Spec(preferred).concretized().package.do_install(fake=True)
writers = {
- preferred: writer_cls(
- spack.spec.Spec(preferred).concretized(), 'default'),
- other_spec: writer_cls(
- spack.spec.Spec(other_spec).concretized(), 'default')
+ preferred: writer_cls(spack.spec.Spec(preferred).concretized(), "default"),
+ other_spec: writer_cls(spack.spec.Spec(other_spec).concretized(), "default"),
}
# Create two module files for the same software
- module('lmod', 'refresh', '-y', '--delete-tree', preferred, other_spec)
+ module("lmod", "refresh", "-y", "--delete-tree", preferred, other_spec)
# Assert initial directory state: no link and all module files present
- link_name = os.path.join(
- os.path.dirname(writers[preferred].layout.filename),
- 'default'
- )
+ link_name = os.path.join(os.path.dirname(writers[preferred].layout.filename), "default")
for k in preferred, other_spec:
assert os.path.exists(writers[k].layout.filename)
assert not os.path.exists(link_name)
# Set the default to be the other spec
- module('lmod', 'setdefault', other_spec)
+ module("lmod", "setdefault", other_spec)
# Check that a link named 'default' exists, and points to the right file
for k in preferred, other_spec:
@@ -233,7 +213,7 @@ def test_setdefault_command(
assert os.path.realpath(link_name) == writers[other_spec].layout.filename
# Reset the default to be the preferred spec
- module('lmod', 'setdefault', preferred)
+ module("lmod", "setdefault", preferred)
# Check that a link named 'default' exists, and points to the right file
for k in preferred, other_spec:
diff --git a/lib/spack/spack/test/cmd/pkg.py b/lib/spack/spack/test/cmd/pkg.py
index e43fca0535..141b3dd937 100644
--- a/lib/spack/spack/test/cmd/pkg.py
+++ b/lib/spack/spack/test/cmd/pkg.py
@@ -17,11 +17,10 @@ import spack.main
import spack.repo
from spack.util.executable import which
-pytestmark = pytest.mark.skipif(not which('git'),
- reason="spack pkg tests require git")
+pytestmark = pytest.mark.skipif(not which("git"), reason="spack pkg tests require git")
#: new fake package template
-pkg_template = '''\
+pkg_template = """\
from spack.package import *
class {name}(Package):
@@ -32,217 +31,208 @@ class {name}(Package):
def install(self, spec, prefix):
pass
-'''
+"""
-abc = set(('pkg-a', 'pkg-b', 'pkg-c'))
-abd = set(('pkg-a', 'pkg-b', 'pkg-d'))
+abc = set(("pkg-a", "pkg-b", "pkg-c"))
+abd = set(("pkg-a", "pkg-b", "pkg-d"))
# Force all tests to use a git repository *in* the mock packages repo.
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def mock_pkg_git_repo(tmpdir_factory):
"""Copy the builtin.mock repo and make a mutable git repo inside it."""
- tmproot = tmpdir_factory.mktemp('mock_pkg_git_repo')
- repo_path = tmproot.join('builtin.mock')
+ tmproot = tmpdir_factory.mktemp("mock_pkg_git_repo")
+ repo_path = tmproot.join("builtin.mock")
shutil.copytree(spack.paths.mock_packages_path, str(repo_path))
mock_repo = spack.repo.RepoPath(str(repo_path))
mock_repo_packages = mock_repo.repos[0].packages_path
- git = which('git', required=True)
+ git = which("git", required=True)
with working_dir(mock_repo_packages):
- git('init')
+ git("init")
# initial commit with mock packages
# the -f is necessary in case people ignore build-* in their ignores
- git('add', '-f', '.')
- git('config', 'user.email', 'testing@spack.io')
- git('config', 'user.name', 'Spack Testing')
- git('-c', 'commit.gpgsign=false', 'commit',
- '-m', 'initial mock repo commit')
+ git("add", "-f", ".")
+ git("config", "user.email", "testing@spack.io")
+ git("config", "user.name", "Spack Testing")
+ git("-c", "commit.gpgsign=false", "commit", "-m", "initial mock repo commit")
# add commit with pkg-a, pkg-b, pkg-c packages
- mkdirp('pkg-a', 'pkg-b', 'pkg-c')
- with open('pkg-a/package.py', 'w') as f:
- f.write(pkg_template.format(name='PkgA'))
- with open('pkg-b/package.py', 'w') as f:
- f.write(pkg_template.format(name='PkgB'))
- with open('pkg-c/package.py', 'w') as f:
- f.write(pkg_template.format(name='PkgC'))
- git('add', 'pkg-a', 'pkg-b', 'pkg-c')
- git('-c', 'commit.gpgsign=false', 'commit',
- '-m', 'add pkg-a, pkg-b, pkg-c')
+ mkdirp("pkg-a", "pkg-b", "pkg-c")
+ with open("pkg-a/package.py", "w") as f:
+ f.write(pkg_template.format(name="PkgA"))
+ with open("pkg-b/package.py", "w") as f:
+ f.write(pkg_template.format(name="PkgB"))
+ with open("pkg-c/package.py", "w") as f:
+ f.write(pkg_template.format(name="PkgC"))
+ git("add", "pkg-a", "pkg-b", "pkg-c")
+ git("-c", "commit.gpgsign=false", "commit", "-m", "add pkg-a, pkg-b, pkg-c")
# remove pkg-c, add pkg-d
- with open('pkg-b/package.py', 'a') as f:
- f.write('\n# change pkg-b')
- git('add', 'pkg-b')
- mkdirp('pkg-d')
- with open('pkg-d/package.py', 'w') as f:
- f.write(pkg_template.format(name='PkgD'))
- git('add', 'pkg-d')
- git('rm', '-rf', 'pkg-c')
- git('-c', 'commit.gpgsign=false', 'commit',
- '-m', 'change pkg-b, remove pkg-c, add pkg-d')
+ with open("pkg-b/package.py", "a") as f:
+ f.write("\n# change pkg-b")
+ git("add", "pkg-b")
+ mkdirp("pkg-d")
+ with open("pkg-d/package.py", "w") as f:
+ f.write(pkg_template.format(name="PkgD"))
+ git("add", "pkg-d")
+ git("rm", "-rf", "pkg-c")
+ git("-c", "commit.gpgsign=false", "commit", "-m", "change pkg-b, remove pkg-c, add pkg-d")
with spack.repo.use_repositories(mock_repo):
yield mock_repo_packages
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def mock_pkg_names():
- repo = spack.repo.path.get_repo('builtin.mock')
- names = set(name for name in repo.all_package_names()
- if not name.startswith('pkg-'))
+ repo = spack.repo.path.get_repo("builtin.mock")
+ names = set(name for name in repo.all_package_names() if not name.startswith("pkg-"))
return names
def split(output):
"""Split command line output into an array."""
output = output.strip()
- return re.split(r'\s+', output) if output else []
+ return re.split(r"\s+", output) if output else []
-pkg = spack.main.SpackCommand('pkg')
+pkg = spack.main.SpackCommand("pkg")
def test_packages_path():
- assert (spack.repo.packages_path() ==
- spack.repo.path.get_repo('builtin').packages_path)
+ assert spack.repo.packages_path() == spack.repo.path.get_repo("builtin").packages_path
def test_mock_packages_path(mock_packages):
- assert (spack.repo.packages_path() ==
- spack.repo.path.get_repo('builtin.mock').packages_path)
+ assert spack.repo.packages_path() == spack.repo.path.get_repo("builtin.mock").packages_path
def test_pkg_add(mock_pkg_git_repo):
with working_dir(mock_pkg_git_repo):
- mkdirp('pkg-e')
- with open('pkg-e/package.py', 'w') as f:
- f.write(pkg_template.format(name='PkgE'))
+ mkdirp("pkg-e")
+ with open("pkg-e/package.py", "w") as f:
+ f.write(pkg_template.format(name="PkgE"))
- pkg('add', 'pkg-e')
+ pkg("add", "pkg-e")
- git = which('git', required=True)
+ git = which("git", required=True)
with working_dir(mock_pkg_git_repo):
try:
- assert ('A pkg-e/package.py' in
- git('status', '--short', output=str))
+ assert "A pkg-e/package.py" in git("status", "--short", output=str)
finally:
- shutil.rmtree('pkg-e')
+ shutil.rmtree("pkg-e")
# Removing a package mid-run disrupts Spack's caching
if spack.repo.path.repos[0]._fast_package_checker:
spack.repo.path.repos[0]._fast_package_checker.invalidate()
with pytest.raises(spack.main.SpackCommandError):
- pkg('add', 'does-not-exist')
+ pkg("add", "does-not-exist")
-@pytest.mark.skipif(sys.platform == 'win32', reason="stdout format conflict")
+@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
def test_pkg_list(mock_pkg_git_repo, mock_pkg_names):
- out = split(pkg('list', 'HEAD^^'))
+ out = split(pkg("list", "HEAD^^"))
assert sorted(mock_pkg_names) == sorted(out)
- out = split(pkg('list', 'HEAD^'))
- assert sorted(
- mock_pkg_names.union(['pkg-a', 'pkg-b', 'pkg-c'])) == sorted(out)
+ out = split(pkg("list", "HEAD^"))
+ assert sorted(mock_pkg_names.union(["pkg-a", "pkg-b", "pkg-c"])) == sorted(out)
- out = split(pkg('list', 'HEAD'))
- assert sorted(
- mock_pkg_names.union(['pkg-a', 'pkg-b', 'pkg-d'])) == sorted(out)
+ out = split(pkg("list", "HEAD"))
+ assert sorted(mock_pkg_names.union(["pkg-a", "pkg-b", "pkg-d"])) == sorted(out)
# test with three dots to make sure pkg calls `git merge-base`
- out = split(pkg('list', 'HEAD^^...'))
+ out = split(pkg("list", "HEAD^^..."))
assert sorted(mock_pkg_names) == sorted(out)
-@pytest.mark.skipif(sys.platform == 'win32', reason="stdout format conflict")
+@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
def test_pkg_diff(mock_pkg_git_repo, mock_pkg_names):
- out = split(pkg('diff', 'HEAD^^', 'HEAD^'))
- assert out == ['HEAD^:', 'pkg-a', 'pkg-b', 'pkg-c']
+ out = split(pkg("diff", "HEAD^^", "HEAD^"))
+ assert out == ["HEAD^:", "pkg-a", "pkg-b", "pkg-c"]
- out = split(pkg('diff', 'HEAD^^', 'HEAD'))
- assert out == ['HEAD:', 'pkg-a', 'pkg-b', 'pkg-d']
+ out = split(pkg("diff", "HEAD^^", "HEAD"))
+ assert out == ["HEAD:", "pkg-a", "pkg-b", "pkg-d"]
- out = split(pkg('diff', 'HEAD^', 'HEAD'))
- assert out == ['HEAD^:', 'pkg-c', 'HEAD:', 'pkg-d']
+ out = split(pkg("diff", "HEAD^", "HEAD"))
+ assert out == ["HEAD^:", "pkg-c", "HEAD:", "pkg-d"]
-@pytest.mark.skipif(sys.platform == 'win32', reason="stdout format conflict")
+@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
def test_pkg_added(mock_pkg_git_repo):
- out = split(pkg('added', 'HEAD^^', 'HEAD^'))
- assert ['pkg-a', 'pkg-b', 'pkg-c'] == out
+ out = split(pkg("added", "HEAD^^", "HEAD^"))
+ assert ["pkg-a", "pkg-b", "pkg-c"] == out
- out = split(pkg('added', 'HEAD^^', 'HEAD'))
- assert ['pkg-a', 'pkg-b', 'pkg-d'] == out
+ out = split(pkg("added", "HEAD^^", "HEAD"))
+ assert ["pkg-a", "pkg-b", "pkg-d"] == out
- out = split(pkg('added', 'HEAD^', 'HEAD'))
- assert ['pkg-d'] == out
+ out = split(pkg("added", "HEAD^", "HEAD"))
+ assert ["pkg-d"] == out
- out = split(pkg('added', 'HEAD', 'HEAD'))
+ out = split(pkg("added", "HEAD", "HEAD"))
assert out == []
-@pytest.mark.skipif(sys.platform == 'win32', reason="stdout format conflict")
+@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
def test_pkg_removed(mock_pkg_git_repo):
- out = split(pkg('removed', 'HEAD^^', 'HEAD^'))
+ out = split(pkg("removed", "HEAD^^", "HEAD^"))
assert out == []
- out = split(pkg('removed', 'HEAD^^', 'HEAD'))
+ out = split(pkg("removed", "HEAD^^", "HEAD"))
assert out == []
- out = split(pkg('removed', 'HEAD^', 'HEAD'))
- assert out == ['pkg-c']
+ out = split(pkg("removed", "HEAD^", "HEAD"))
+ assert out == ["pkg-c"]
-@pytest.mark.skipif(sys.platform == 'win32', reason="stdout format conflict")
+@pytest.mark.skipif(sys.platform == "win32", reason="stdout format conflict")
def test_pkg_changed(mock_pkg_git_repo):
- out = split(pkg('changed', 'HEAD^^', 'HEAD^'))
+ out = split(pkg("changed", "HEAD^^", "HEAD^"))
assert out == []
- out = split(pkg('changed', '--type', 'c', 'HEAD^^', 'HEAD^'))
+ out = split(pkg("changed", "--type", "c", "HEAD^^", "HEAD^"))
assert out == []
- out = split(pkg('changed', '--type', 'a', 'HEAD^^', 'HEAD^'))
- assert out == ['pkg-a', 'pkg-b', 'pkg-c']
+ out = split(pkg("changed", "--type", "a", "HEAD^^", "HEAD^"))
+ assert out == ["pkg-a", "pkg-b", "pkg-c"]
- out = split(pkg('changed', '--type', 'r', 'HEAD^^', 'HEAD^'))
+ out = split(pkg("changed", "--type", "r", "HEAD^^", "HEAD^"))
assert out == []
- out = split(pkg('changed', '--type', 'ar', 'HEAD^^', 'HEAD^'))
- assert out == ['pkg-a', 'pkg-b', 'pkg-c']
+ out = split(pkg("changed", "--type", "ar", "HEAD^^", "HEAD^"))
+ assert out == ["pkg-a", "pkg-b", "pkg-c"]
- out = split(pkg('changed', '--type', 'arc', 'HEAD^^', 'HEAD^'))
- assert out == ['pkg-a', 'pkg-b', 'pkg-c']
+ out = split(pkg("changed", "--type", "arc", "HEAD^^", "HEAD^"))
+ assert out == ["pkg-a", "pkg-b", "pkg-c"]
- out = split(pkg('changed', 'HEAD^', 'HEAD'))
- assert out == ['pkg-b']
+ out = split(pkg("changed", "HEAD^", "HEAD"))
+ assert out == ["pkg-b"]
- out = split(pkg('changed', '--type', 'c', 'HEAD^', 'HEAD'))
- assert out == ['pkg-b']
+ out = split(pkg("changed", "--type", "c", "HEAD^", "HEAD"))
+ assert out == ["pkg-b"]
- out = split(pkg('changed', '--type', 'a', 'HEAD^', 'HEAD'))
- assert out == ['pkg-d']
+ out = split(pkg("changed", "--type", "a", "HEAD^", "HEAD"))
+ assert out == ["pkg-d"]
- out = split(pkg('changed', '--type', 'r', 'HEAD^', 'HEAD'))
- assert out == ['pkg-c']
+ out = split(pkg("changed", "--type", "r", "HEAD^", "HEAD"))
+ assert out == ["pkg-c"]
- out = split(pkg('changed', '--type', 'ar', 'HEAD^', 'HEAD'))
- assert out == ['pkg-c', 'pkg-d']
+ out = split(pkg("changed", "--type", "ar", "HEAD^", "HEAD"))
+ assert out == ["pkg-c", "pkg-d"]
- out = split(pkg('changed', '--type', 'arc', 'HEAD^', 'HEAD'))
- assert out == ['pkg-b', 'pkg-c', 'pkg-d']
+ out = split(pkg("changed", "--type", "arc", "HEAD^", "HEAD"))
+ assert out == ["pkg-b", "pkg-c", "pkg-d"]
# invalid type argument
with pytest.raises(spack.main.SpackCommandError):
- pkg('changed', '--type', 'foo')
+ pkg("changed", "--type", "foo")
def test_pkg_fails_when_not_git_repo(monkeypatch):
- monkeypatch.setattr(spack.cmd, 'spack_is_git_repo', lambda: False)
+ monkeypatch.setattr(spack.cmd, "spack_is_git_repo", lambda: False)
with pytest.raises(spack.main.SpackCommandError):
- pkg('added')
+ pkg("added")
def test_pkg_source_requires_one_arg(mock_packages):
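
For readers unfamiliar with these test drivers: SpackCommand wraps a spack subcommand as a callable that returns the command's captured output, and spack.main.SpackCommandError is raised when the wrapped command fails. A minimal sketch of that pattern, illustrative only and not part of the patch, assuming the mock_pkg_git_repo fixture defined earlier in this file:

    import pytest

    import spack.main
    from spack.main import SpackCommand

    pkg = SpackCommand("pkg")

    def example_pkg_added(mock_pkg_git_repo):
        # `spack pkg added A B` lists package names added between two revisions
        out = pkg("added", "HEAD^", "HEAD")
        assert "pkg-d" in out.split()
        # an unknown package makes the wrapped command raise SpackCommandError
        with pytest.raises(spack.main.SpackCommandError):
            pkg("add", "does-not-exist")
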
diff --git a/lib/spack/spack/test/cmd/print_shell_vars.py b/lib/spack/spack/test/cmd/print_shell_vars.py
index ccc8e4c87d..9fdf1f4b26 100644
--- a/lib/spack/spack/test/cmd/print_shell_vars.py
+++ b/lib/spack/spack/test/cmd/print_shell_vars.py
@@ -7,7 +7,7 @@ from spack.main import print_setup_info
def test_print_shell_vars_sh(capsys):
- print_setup_info('sh')
+ print_setup_info("sh")
out, _ = capsys.readouterr()
assert "_sp_sys_type=" in out
@@ -17,7 +17,7 @@ def test_print_shell_vars_sh(capsys):
def test_print_shell_vars_csh(capsys):
- print_setup_info('csh')
+ print_setup_info("csh")
out, _ = capsys.readouterr()
assert "set _sp_sys_type = " in out
@@ -27,7 +27,7 @@ def test_print_shell_vars_csh(capsys):
def test_print_shell_vars_sh_modules(capsys):
- print_setup_info('sh', 'modules')
+ print_setup_info("sh", "modules")
out, _ = capsys.readouterr()
assert "_sp_sys_type=" in out
@@ -37,7 +37,7 @@ def test_print_shell_vars_sh_modules(capsys):
def test_print_shell_vars_csh_modules(capsys):
- print_setup_info('csh', 'modules')
+ print_setup_info("csh", "modules")
out, _ = capsys.readouterr()
assert "set _sp_sys_type = " in out
diff --git a/lib/spack/spack/test/cmd/providers.py b/lib/spack/spack/test/cmd/providers.py
index 44d56ca1a1..8b7cd44219 100644
--- a/lib/spack/spack/test/cmd/providers.py
+++ b/lib/spack/spack/test/cmd/providers.py
@@ -10,45 +10,55 @@ import pytest
from spack.main import SpackCommand
-providers = SpackCommand('providers')
+providers = SpackCommand("providers")
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="Providers not currently supported on Windows")
+pytestmark = pytest.mark.skipif(
+ sys.platform == "win32", reason="Providers not currently supported on Windows"
+)
-@pytest.mark.parametrize('pkg', [
- ('mpi',),
- ('mpi@2',),
- ('mpi', 'lapack'),
- ('',) # Lists all the available virtual packages
-])
+@pytest.mark.parametrize(
+ "pkg",
+ [("mpi",), ("mpi@2",), ("mpi", "lapack"), ("",)], # Lists all the available virtual packages
+)
def test_it_just_runs(pkg):
providers(*pkg)
-@pytest.mark.parametrize('vpkg,provider_list', [
- (('mpi',), ['intel-mpi',
- 'intel-parallel-studio',
- 'mpich',
- 'mpilander',
- 'mvapich2',
- 'openmpi',
- 'openmpi@1.6.5',
- 'openmpi@1.7.5:',
- 'openmpi@2.0.0:',
- 'spectrum-mpi']),
- (('D', 'awk'), ['ldc', 'gawk', 'mawk']) # Call 2 virtual packages at once
-])
+@pytest.mark.parametrize(
+ "vpkg,provider_list",
+ [
+ (
+ ("mpi",),
+ [
+ "intel-mpi",
+ "intel-parallel-studio",
+ "mpich",
+ "mpilander",
+ "mvapich2",
+ "openmpi",
+ "openmpi@1.6.5",
+ "openmpi@1.7.5:",
+ "openmpi@2.0.0:",
+ "spectrum-mpi",
+ ],
+ ),
+ (("D", "awk"), ["ldc", "gawk", "mawk"]), # Call 2 virtual packages at once
+ ],
+)
def test_provider_lists(vpkg, provider_list):
output = providers(*vpkg)
for item in provider_list:
assert item in output
-@pytest.mark.parametrize('pkg,error_cls', [
- ('zlib', ValueError),
- ('foo', ValueError) # Trying to call with a package that does not exist
-])
+@pytest.mark.parametrize(
+ "pkg,error_cls",
+ [
+ ("zlib", ValueError),
+ ("foo", ValueError), # Trying to call with a package that does not exist
+ ],
+)
def test_it_just_fails(pkg, error_cls):
with pytest.raises(error_cls):
providers(pkg)
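
The providers tests assert substring membership in the command output rather than parsing it. A small sketch for context (not from the diff), with provider names taken from the parametrized case above; the full provider set depends on the builtin repository in use:

    from spack.main import SpackCommand

    providers = SpackCommand("providers")

    def example_mpi_providers():
        # `spack providers mpi` lists packages that provide the mpi virtual
        output = providers("mpi")
        for item in ("mpich", "openmpi", "mvapich2"):
            assert item in output
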
diff --git a/lib/spack/spack/test/cmd/python.py b/lib/spack/spack/test/cmd/python.py
index 919acc0407..34964d9806 100644
--- a/lib/spack/spack/test/cmd/python.py
+++ b/lib/spack/spack/test/cmd/python.py
@@ -11,21 +11,21 @@ import pytest
import spack
from spack.main import SpackCommand
-python = SpackCommand('python')
+python = SpackCommand("python")
def test_python():
- out = python('-c', 'import spack; print(spack.spack_version)')
+ out = python("-c", "import spack; print(spack.spack_version)")
assert out.strip() == spack.spack_version
def test_python_interpreter_path():
- out = python('--path')
+ out = python("--path")
assert out.strip() == sys.executable
def test_python_version():
- out = python('-V')
+ out = python("-V")
assert platform.python_version() in out
@@ -35,9 +35,9 @@ def test_python_with_module():
# has no code associated with it, raises an error reliably in python
# 2 and 3, which indicates we successfully ran runpy.run_module.
with pytest.raises(ImportError, match="No code object"):
- python('-m', 'sys')
+ python("-m", "sys")
def test_python_raises():
- out = python('--foobar', fail_on_error=False)
+ out = python("--foobar", fail_on_error=False)
assert "Error: Unknown arguments" in out
diff --git a/lib/spack/spack/test/cmd/reindex.py b/lib/spack/spack/test/cmd/reindex.py
index 361b6bb152..cfa80e4020 100644
--- a/lib/spack/spack/test/cmd/reindex.py
+++ b/lib/spack/spack/test/cmd/reindex.py
@@ -10,18 +10,16 @@ import pytest
import spack.store
from spack.main import SpackCommand
-install = SpackCommand('install')
-deprecate = SpackCommand('deprecate')
-reindex = SpackCommand('reindex')
+install = SpackCommand("install")
+deprecate = SpackCommand("deprecate")
+reindex = SpackCommand("reindex")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-def test_reindex_basic(mock_packages, mock_archive, mock_fetch,
- install_mockery):
- install('libelf@0.8.13')
- install('libelf@0.8.12')
+def test_reindex_basic(mock_packages, mock_archive, mock_fetch, install_mockery):
+ install("libelf@0.8.13")
+ install("libelf@0.8.12")
all_installed = spack.store.db.query()
@@ -30,10 +28,9 @@ def test_reindex_basic(mock_packages, mock_archive, mock_fetch,
assert spack.store.db.query() == all_installed
-def test_reindex_db_deleted(mock_packages, mock_archive, mock_fetch,
- install_mockery):
- install('libelf@0.8.13')
- install('libelf@0.8.12')
+def test_reindex_db_deleted(mock_packages, mock_archive, mock_fetch, install_mockery):
+ install("libelf@0.8.13")
+ install("libelf@0.8.12")
all_installed = spack.store.db.query()
@@ -43,12 +40,13 @@ def test_reindex_db_deleted(mock_packages, mock_archive, mock_fetch,
assert spack.store.db.query() == all_installed
-def test_reindex_with_deprecated_packages(mock_packages, mock_archive,
- mock_fetch, install_mockery):
- install('libelf@0.8.13')
- install('libelf@0.8.12')
+def test_reindex_with_deprecated_packages(
+ mock_packages, mock_archive, mock_fetch, install_mockery
+):
+ install("libelf@0.8.13")
+ install("libelf@0.8.12")
- deprecate('-y', 'libelf@0.8.12', 'libelf@0.8.13')
+ deprecate("-y", "libelf@0.8.12", "libelf@0.8.13")
all_installed = spack.store.db.query(installed=any)
non_deprecated = spack.store.db.query(installed=True)
diff --git a/lib/spack/spack/test/cmd/repo.py b/lib/spack/spack/test/cmd/repo.py
index 7fa5027699..8285af5039 100644
--- a/lib/spack/spack/test/cmd/repo.py
+++ b/lib/spack/spack/test/cmd/repo.py
@@ -8,29 +8,29 @@ import pytest
import spack.main
-repo = spack.main.SpackCommand('repo')
+repo = spack.main.SpackCommand("repo")
def test_help_option():
# Test 'spack repo --help' to check basic import works
# and the command exits successfully
with pytest.raises(SystemExit):
- repo('--help')
+ repo("--help")
assert repo.returncode in (None, 0)
def test_create_add_list_remove(mutable_config, tmpdir):
# Create a new repository and check that the expected
# files are there
- repo('create', str(tmpdir), 'mockrepo')
- assert os.path.exists(os.path.join(str(tmpdir), 'repo.yaml'))
+ repo("create", str(tmpdir), "mockrepo")
+ assert os.path.exists(os.path.join(str(tmpdir), "repo.yaml"))
# Add the new repository and check it appears in the list output
- repo('add', '--scope=site', str(tmpdir))
- output = repo('list', '--scope=site', output=str)
- assert 'mockrepo' in output
+ repo("add", "--scope=site", str(tmpdir))
+ output = repo("list", "--scope=site", output=str)
+ assert "mockrepo" in output
# Then remove it and check it's not there
- repo('remove', '--scope=site', str(tmpdir))
- output = repo('list', '--scope=site', output=str)
- assert 'mockrepo' not in output
+ repo("remove", "--scope=site", str(tmpdir))
+ output = repo("list", "--scope=site", output=str)
+ assert "mockrepo" not in output
diff --git a/lib/spack/spack/test/cmd/resource.py b/lib/spack/spack/test/cmd/resource.py
index c6dd537693..9fa99d3155 100644
--- a/lib/spack/spack/test/cmd/resource.py
+++ b/lib/spack/spack/test/cmd/resource.py
@@ -7,73 +7,79 @@ import sys
from spack.main import SpackCommand
-is_windows = sys.platform == 'win32'
-resource = SpackCommand('resource')
+is_windows = sys.platform == "win32"
+resource = SpackCommand("resource")
#: these are hashes used in mock packages
-mock_hashes = [
- 'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
- '1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd',
- 'b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c',
- 'c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8',
- '24eceabef5fe8f575ff4b438313dc3e7b30f6a2d1c78841fbbe3b9293a589277',
- '689b8f9b32cb1d2f9271d29ea3fca2e1de5df665e121fca14e1364b711450deb',
- '208fcfb50e5a965d5757d151b675ca4af4ce2dfd56401721b6168fae60ab798f',
- 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c',
- '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730',
-] if not is_windows else [
- 'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
- '1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd',
- 'd0df7988457ec999c148a4a2af25ce831bfaad13954ba18a4446374cb0aef55e',
- 'aeb16c4dec1087e39f2330542d59d9b456dd26d791338ae6d80b6ffd10c89dfa',
- 'mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
- 'ff34cb21271d16dbf928374f610bb5dd593d293d311036ddae86c4846ff79070',
- 'bf874c7dd3a83cf370fdc17e496e341de06cd596b5c66dbf3c9bb7f6c139e3ee',
- '3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11'
-]
+mock_hashes = (
+ [
+ "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
+ "1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd",
+ "b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c",
+ "c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8",
+ "24eceabef5fe8f575ff4b438313dc3e7b30f6a2d1c78841fbbe3b9293a589277",
+ "689b8f9b32cb1d2f9271d29ea3fca2e1de5df665e121fca14e1364b711450deb",
+ "208fcfb50e5a965d5757d151b675ca4af4ce2dfd56401721b6168fae60ab798f",
+ "bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c",
+ "7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730",
+ ]
+ if not is_windows
+ else [
+ "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
+ "1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd",
+ "d0df7988457ec999c148a4a2af25ce831bfaad13954ba18a4446374cb0aef55e",
+ "aeb16c4dec1087e39f2330542d59d9b456dd26d791338ae6d80b6ffd10c89dfa",
+ "mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
+ "ff34cb21271d16dbf928374f610bb5dd593d293d311036ddae86c4846ff79070",
+ "bf874c7dd3a83cf370fdc17e496e341de06cd596b5c66dbf3c9bb7f6c139e3ee",
+ "3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11",
+ ]
+)
def test_resource_list(mock_packages, capfd):
with capfd.disabled():
- out = resource('list')
+ out = resource("list")
for h in mock_hashes:
assert h in out
- assert 'url:' in out
- assert 'applies to:' in out
- assert 'patched by:' in out
- assert 'path:' in out
+ assert "url:" in out
+ assert "applies to:" in out
+ assert "patched by:" in out
+ assert "path:" in out
- assert os.path.join('repos', 'builtin.mock', 'packages',
- 'patch-a-dependency', 'libelf.patch') in out
- assert 'applies to: builtin.mock.libelf' in out
- assert 'patched by: builtin.mock.patch-a-dependency' in out
+ assert (
+ os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
+ in out
+ )
+ assert "applies to: builtin.mock.libelf" in out
+ assert "patched by: builtin.mock.patch-a-dependency" in out
def test_resource_list_only_hashes(mock_packages, capfd):
with capfd.disabled():
- out = resource('list', '--only-hashes')
+ out = resource("list", "--only-hashes")
for h in mock_hashes:
assert h in out
def test_resource_show(mock_packages, capfd):
- test_hash = 'c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8' \
- if not is_windows \
- else '3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11'
+ test_hash = (
+ "c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8"
+ if not is_windows
+ else "3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11"
+ )
with capfd.disabled():
- out = resource('show', test_hash)
+ out = resource("show", test_hash)
assert out.startswith(test_hash)
- assert os.path.join(
- 'repos',
- 'builtin.mock',
- 'packages',
- 'patch-a-dependency',
- 'libelf.patch') in out
- assert 'applies to: builtin.mock.libelf' in out
- assert 'patched by: builtin.mock.patch-a-dependency' in out
+ assert (
+ os.path.join("repos", "builtin.mock", "packages", "patch-a-dependency", "libelf.patch")
+ in out
+ )
+ assert "applies to: builtin.mock.libelf" in out
+ assert "patched by: builtin.mock.patch-a-dependency" in out
- assert len(out.strip().split('\n')) == 4
+ assert len(out.strip().split("\n")) == 4
diff --git a/lib/spack/spack/test/cmd/spec.py b/lib/spack/spack/test/cmd/spec.py
index 1dfe20c780..b6a6281bf7 100644
--- a/lib/spack/spack/test/cmd/spec.py
+++ b/lib/spack/spack/test/cmd/spec.py
@@ -14,20 +14,20 @@ import spack.spec
import spack.store
from spack.main import SpackCommand, SpackCommandError
-pytestmark = pytest.mark.usefixtures('config', 'mutable_mock_repo')
+pytestmark = pytest.mark.usefixtures("config", "mutable_mock_repo")
-spec = SpackCommand('spec')
+spec = SpackCommand("spec")
def test_spec():
- output = spec('mpileaks')
+ output = spec("mpileaks")
- assert 'mpileaks@2.3' in output
- assert 'callpath@1.0' in output
- assert 'dyninst@8.2' in output
- assert 'libdwarf@20130729' in output
- assert 'libelf@0.8.1' in output
- assert 'mpich@3.0.4' in output
+ assert "mpileaks@2.3" in output
+ assert "callpath@1.0" in output
+ assert "dyninst@8.2" in output
+ assert "libdwarf@20130729" in output
+ assert "libelf@0.8.1" in output
+ assert "mpich@3.0.4" in output
def test_spec_concretizer_args(mutable_config, mutable_database):
@@ -37,8 +37,8 @@ def test_spec_concretizer_args(mutable_config, mutable_database):
options to `solver.py`, and that config options are not
lost along the way.
"""
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
# remove two non-preferred mpileaks installations
# so that reuse will pick up the zmpi one
@@ -60,19 +60,19 @@ def test_spec_concretizer_args(mutable_config, mutable_database):
def test_spec_parse_dependency_variant_value():
"""Verify that we can provide multiple key=value variants to multiple separate
packages within a spec string."""
- output = spec('multivalue-variant fee=barbaz ^ a foobar=baz')
+ output = spec("multivalue-variant fee=barbaz ^ a foobar=baz")
- assert 'fee=barbaz' in output
- assert 'foobar=baz' in output
+ assert "fee=barbaz" in output
+ assert "foobar=baz" in output
def test_spec_parse_cflags_quoting():
"""Verify that compiler flags can be provided to a spec from the command line."""
- output = spec('--yaml', 'gcc cflags="-Os -pipe" cxxflags="-flto -Os"')
+ output = spec("--yaml", 'gcc cflags="-Os -pipe" cxxflags="-flto -Os"')
gh_flagged = spack.spec.Spec.from_yaml(output)
- assert ['-Os', '-pipe'] == gh_flagged.compiler_flags['cflags']
- assert ['-flto', '-Os'] == gh_flagged.compiler_flags['cxxflags']
+ assert ["-Os", "-pipe"] == gh_flagged.compiler_flags["cflags"]
+ assert ["-flto", "-Os"] == gh_flagged.compiler_flags["cxxflags"]
def test_spec_parse_unquoted_flags_report():
@@ -82,9 +82,10 @@ def test_spec_parse_unquoted_flags_report():
with pytest.raises(spack.error.SpackError) as cm:
# We don't try to figure out how many following args were intended to be part of
# cflags, we just explain how to fix it for the immediate next arg.
- spec('gcc cflags=-Os -pipe -other-arg-that-gets-ignored cflags=-I /usr/include')
+ spec("gcc cflags=-Os -pipe -other-arg-that-gets-ignored cflags=-I /usr/include")
# Verify that the generated error message is nicely formatted.
- assert str(cm.value) == dedent('''\
+ assert str(cm.value) == dedent(
+ '''\
No installed spec matches the hash: 'usr'
Some compiler or linker flags were provided without quoting their arguments,
@@ -94,12 +95,13 @@ def test_spec_parse_unquoted_flags_report():
Possible flag quotation errors (with the correctly-quoted version after the =>):
(1) cflags=-Os -pipe => cflags="-Os -pipe"
- (2) cflags=-I /usr/include => cflags="-I /usr/include"''')
+ (2) cflags=-I /usr/include => cflags="-I /usr/include"'''
+ )
# Verify that the same unquoted cflags report is generated in the error message even
# if it fails during concretization, not just during parsing.
with pytest.raises(spack.error.SpackError) as cm:
- spec('gcc cflags=-Os -pipe')
+ spec("gcc cflags=-Os -pipe")
cm = str(cm.value)
assert cm.startswith(
'trying to set variant "pipe" in package "gcc", but the package has no such '
@@ -109,41 +111,41 @@ def test_spec_parse_unquoted_flags_report():
def test_spec_yaml():
- output = spec('--yaml', 'mpileaks')
+ output = spec("--yaml", "mpileaks")
mpileaks = spack.spec.Spec.from_yaml(output)
- assert 'mpileaks' in mpileaks
- assert 'callpath' in mpileaks
- assert 'dyninst' in mpileaks
- assert 'libdwarf' in mpileaks
- assert 'libelf' in mpileaks
- assert 'mpich' in mpileaks
+ assert "mpileaks" in mpileaks
+ assert "callpath" in mpileaks
+ assert "dyninst" in mpileaks
+ assert "libdwarf" in mpileaks
+ assert "libelf" in mpileaks
+ assert "mpich" in mpileaks
def test_spec_json():
- output = spec('--json', 'mpileaks')
+ output = spec("--json", "mpileaks")
mpileaks = spack.spec.Spec.from_json(output)
- assert 'mpileaks' in mpileaks
- assert 'callpath' in mpileaks
- assert 'dyninst' in mpileaks
- assert 'libdwarf' in mpileaks
- assert 'libelf' in mpileaks
- assert 'mpich' in mpileaks
+ assert "mpileaks" in mpileaks
+ assert "callpath" in mpileaks
+ assert "dyninst" in mpileaks
+ assert "libdwarf" in mpileaks
+ assert "libelf" in mpileaks
+ assert "mpich" in mpileaks
def test_spec_format(database, config):
- output = spec('--format', '{name}-{^mpi.name}', 'mpileaks^mpich')
- assert output.rstrip('\n') == "mpileaks-mpich"
+ output = spec("--format", "{name}-{^mpi.name}", "mpileaks^mpich")
+ assert output.rstrip("\n") == "mpileaks-mpich"
def _parse_types(string):
"""Parse deptypes for specs from `spack spec -t` output."""
- lines = string.strip().split('\n')
+ lines = string.strip().split("\n")
result = {}
for line in lines:
- match = re.match(r'\[([^]]*)\]\s*\^?([^@]*)@', line)
+ match = re.match(r"\[([^]]*)\]\s*\^?([^@]*)@", line)
if match:
types, name = match.groups()
result.setdefault(name, []).append(types)
@@ -152,23 +154,23 @@ def _parse_types(string):
def test_spec_deptypes_nodes():
- output = spec('--types', '--cover', 'nodes', 'dt-diamond')
+ output = spec("--types", "--cover", "nodes", "dt-diamond")
types = _parse_types(output)
- assert types['dt-diamond'] == [' ']
- assert types['dt-diamond-left'] == ['bl ']
- assert types['dt-diamond-right'] == ['bl ']
- assert types['dt-diamond-bottom'] == ['blr ']
+ assert types["dt-diamond"] == [" "]
+ assert types["dt-diamond-left"] == ["bl "]
+ assert types["dt-diamond-right"] == ["bl "]
+ assert types["dt-diamond-bottom"] == ["blr "]
def test_spec_deptypes_edges():
- output = spec('--types', '--cover', 'edges', 'dt-diamond')
+ output = spec("--types", "--cover", "edges", "dt-diamond")
types = _parse_types(output)
- assert types['dt-diamond'] == [' ']
- assert types['dt-diamond-left'] == ['bl ']
- assert types['dt-diamond-right'] == ['bl ']
- assert types['dt-diamond-bottom'] == ['b ', 'blr ']
+ assert types["dt-diamond"] == [" "]
+ assert types["dt-diamond-left"] == ["bl "]
+ assert types["dt-diamond-right"] == ["bl "]
+ assert types["dt-diamond-bottom"] == ["b ", "blr "]
def test_spec_returncode():
@@ -189,14 +191,14 @@ def test_spec_parse_error():
def test_env_aware_spec(mutable_mock_env_path):
- env = ev.create('test')
- env.add('mpileaks')
+ env = ev.create("test")
+ env.add("mpileaks")
with env:
output = spec()
- assert 'mpileaks@2.3' in output
- assert 'callpath@1.0' in output
- assert 'dyninst@8.2' in output
- assert 'libdwarf@20130729' in output
- assert 'libelf@0.8.1' in output
- assert 'mpich@3.0.4' in output
+ assert "mpileaks@2.3" in output
+ assert "callpath@1.0" in output
+ assert "dyninst@8.2" in output
+ assert "libdwarf@20130729" in output
+ assert "libelf@0.8.1" in output
+ assert "mpich@3.0.4" in output
diff --git a/lib/spack/spack/test/cmd/stage.py b/lib/spack/spack/test/cmd/stage.py
index 5dec5ca3ff..497bd648cf 100644
--- a/lib/spack/spack/test/cmd/stage.py
+++ b/lib/spack/spack/test/cmd/stage.py
@@ -14,96 +14,94 @@ import spack.repo
from spack.main import SpackCommand
from spack.version import Version
-stage = SpackCommand('stage')
-env = SpackCommand('env')
+stage = SpackCommand("stage")
+env = SpackCommand("env")
-pytestmark = pytest.mark.usefixtures('install_mockery', 'mock_packages')
+pytestmark = pytest.mark.usefixtures("install_mockery", "mock_packages")
-@pytest.mark.skipif(sys.platform == 'win32', reason="not implemented on windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
def test_stage_spec(monkeypatch):
"""Verify that staging specs works."""
- expected = set(['trivial-install-test-package', 'mpileaks'])
+ expected = set(["trivial-install-test-package", "mpileaks"])
def fake_stage(pkg, mirror_only=False):
expected.remove(pkg.name)
- monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, "do_stage", fake_stage)
- stage('trivial-install-test-package', 'mpileaks')
+ stage("trivial-install-test-package", "mpileaks")
assert len(expected) == 0
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def check_stage_path(monkeypatch, tmpdir):
- expected_path = os.path.join(str(tmpdir), 'x')
+ expected_path = os.path.join(str(tmpdir), "x")
def fake_stage(pkg, mirror_only=False):
assert pkg.path == expected_path
- monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, "do_stage", fake_stage)
return expected_path
-@pytest.mark.skipif(sys.platform == 'win32', reason="PermissionError")
+@pytest.mark.skipif(sys.platform == "win32", reason="PermissionError")
def test_stage_path(check_stage_path):
"""Verify that --path only works with single specs."""
- stage('--path={0}'.format(check_stage_path), 'trivial-install-test-package')
+ stage("--path={0}".format(check_stage_path), "trivial-install-test-package")
def test_stage_path_errors_multiple_specs(check_stage_path):
"""Verify that --path only works with single specs."""
with pytest.raises(spack.main.SpackCommandError):
- stage('--path={0}'.format(check_stage_path),
- 'trivial-install-test-package',
- 'mpileaks')
+ stage("--path={0}".format(check_stage_path), "trivial-install-test-package", "mpileaks")
-@pytest.mark.skipif(sys.platform == 'win32', reason="not implemented on windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
def test_stage_with_env_outside_env(mutable_mock_env_path, monkeypatch):
"""Verify that stage concretizes specs not in environment instead of erroring."""
def fake_stage(pkg, mirror_only=False):
- assert pkg.name == 'trivial-install-test-package'
+ assert pkg.name == "trivial-install-test-package"
assert pkg.path is None
- monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, "do_stage", fake_stage)
- e = ev.create('test')
- e.add('mpileaks')
+ e = ev.create("test")
+ e.add("mpileaks")
e.concretize()
with e:
- stage('trivial-install-test-package')
+ stage("trivial-install-test-package")
-@pytest.mark.skipif(sys.platform == 'win32', reason="not implemented on windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
def test_stage_with_env_inside_env(mutable_mock_env_path, monkeypatch):
"""Verify that stage filters specs in environment instead of reconcretizing."""
def fake_stage(pkg, mirror_only=False):
- assert pkg.name == 'mpileaks'
- assert pkg.version == Version('100.100')
+ assert pkg.name == "mpileaks"
+ assert pkg.version == Version("100.100")
- monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, "do_stage", fake_stage)
- e = ev.create('test')
- e.add('mpileaks@100.100')
+ e = ev.create("test")
+ e.add("mpileaks@100.100")
e.concretize()
with e:
- stage('mpileaks')
+ stage("mpileaks")
-@pytest.mark.skipif(sys.platform == 'win32', reason="not implemented on windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="not implemented on windows")
def test_stage_full_env(mutable_mock_env_path, monkeypatch):
"""Verify that stage filters specs in environment."""
- e = ev.create('test')
- e.add('mpileaks@100.100')
+ e = ev.create("test")
+ e.add("mpileaks@100.100")
e.concretize()
# list all the package names that should be staged
@@ -116,7 +114,7 @@ def test_stage_full_env(mutable_mock_env_path, monkeypatch):
def fake_stage(pkg, mirror_only=False):
expected.remove(pkg.name)
- monkeypatch.setattr(spack.package_base.PackageBase, 'do_stage', fake_stage)
+ monkeypatch.setattr(spack.package_base.PackageBase, "do_stage", fake_stage)
with e:
stage()
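
Throughout the stage tests, PackageBase.do_stage is monkeypatched so nothing is actually fetched or expanded; the fake implementation only records or checks the package it was handed. A sketch of that stubbing pattern, illustrative only, assuming pytest's monkeypatch fixture and Spack's mock packages:

    import spack.package_base
    from spack.main import SpackCommand

    stage = SpackCommand("stage")

    def example_stage_stubbed(monkeypatch):
        staged = []

        def fake_stage(pkg, mirror_only=False):
            # record the package instead of staging real sources
            staged.append(pkg.name)

        monkeypatch.setattr(spack.package_base.PackageBase, "do_stage", fake_stage)
        stage("trivial-install-test-package")
        assert staged == ["trivial-install-test-package"]
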
diff --git a/lib/spack/spack/test/cmd/tags.py b/lib/spack/spack/test/cmd/tags.py
index 91c4621fb8..7304ad249a 100644
--- a/lib/spack/spack/test/cmd/tags.py
+++ b/lib/spack/spack/test/cmd/tags.py
@@ -7,55 +7,55 @@ import spack.main
import spack.repo
import spack.spec
-tags = spack.main.SpackCommand('tags')
+tags = spack.main.SpackCommand("tags")
def test_tags_bad_options():
- out = tags('-a', 'tag1', fail_on_error=False)
+ out = tags("-a", "tag1", fail_on_error=False)
assert "option OR provide" in out
def test_tags_no_installed(install_mockery, mock_fetch):
- out = tags('-i')
- assert 'No installed' in out
+ out = tags("-i")
+ assert "No installed" in out
def test_tags_invalid_tag(mock_packages):
- out = tags('nosuchtag')
- assert 'None' in out
+ out = tags("nosuchtag")
+ assert "None" in out
def test_tags_all_mock_tags(mock_packages):
out = tags()
- for tag in ['tag1', 'tag2', 'tag3']:
+ for tag in ["tag1", "tag2", "tag3"]:
assert tag in out
def test_tags_all_mock_tag_packages(mock_packages):
- out = tags('-a')
- for pkg in ['mpich\n', 'mpich2\n']:
+ out = tags("-a")
+ for pkg in ["mpich\n", "mpich2\n"]:
assert pkg in out
def test_tags_no_tags(monkeypatch):
- class tag_path():
+ class tag_path:
tag_index = dict()
- monkeypatch.setattr(spack.repo, 'path', tag_path)
+ monkeypatch.setattr(spack.repo, "path", tag_path)
out = tags()
assert "No tagged" in out
def test_tags_installed(install_mockery, mock_fetch):
- s = spack.spec.Spec('mpich').concretized()
+ s = spack.spec.Spec("mpich").concretized()
s.package.do_install()
- out = tags('-i')
- for tag in ['tag1', 'tag2']:
+ out = tags("-i")
+ for tag in ["tag1", "tag2"]:
assert tag in out
- out = tags('-i', 'tag1')
- assert 'mpich' in out
+ out = tags("-i", "tag1")
+ assert "mpich" in out
- out = tags('-i', 'tag3')
- assert 'No installed' in out
+ out = tags("-i", "tag3")
+ assert "No installed" in out
diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py
index 0555dc6ca1..bc5724e04e 100644
--- a/lib/spack/spack/test/cmd/test.py
+++ b/lib/spack/spack/test/cmd/test.py
@@ -18,27 +18,35 @@ import spack.paths
import spack.store
from spack.main import SpackCommand
-install = SpackCommand('install')
-spack_test = SpackCommand('test')
+install = SpackCommand("install")
+spack_test = SpackCommand("test")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_test_package_not_installed(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery_mutable_config, mock_test_stage):
+ tmpdir,
+ mock_packages,
+ mock_archive,
+ mock_fetch,
+ config,
+ install_mockery_mutable_config,
+ mock_test_stage,
+):
- output = spack_test('run', 'libdwarf')
+ output = spack_test("run", "libdwarf")
assert "No installed packages match spec libdwarf" in output
-@pytest.mark.parametrize('arguments,expected', [
- (['run'], spack.config.get('config:dirty')), # default from config file
- (['run', '--clean'], False),
- (['run', '--dirty'], True),
-])
+@pytest.mark.parametrize(
+ "arguments,expected",
+ [
+ (["run"], spack.config.get("config:dirty")), # default from config file
+ (["run", "--clean"], False),
+ (["run", "--dirty"], True),
+ ],
+)
def test_test_dirty_flag(arguments, expected):
parser = argparse.ArgumentParser()
spack.cmd.test.setup_parser(parser)
@@ -47,28 +55,28 @@ def test_test_dirty_flag(arguments, expected):
def test_test_dup_alias(
- mock_test_stage, mock_packages, mock_archive, mock_fetch,
- install_mockery_mutable_config, capfd):
+ mock_test_stage, mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config, capfd
+):
"""Ensure re-using an alias fails with suggestion to change."""
- install('libdwarf')
+ install("libdwarf")
# Run the tests with the alias once
- out = spack_test('run', '--alias', 'libdwarf', 'libdwarf')
+ out = spack_test("run", "--alias", "libdwarf", "libdwarf")
assert "Spack test libdwarf" in out
# Try again with the alias but don't let it fail on the error
with capfd.disabled():
- out = spack_test(
- 'run', '--alias', 'libdwarf', 'libdwarf', fail_on_error=False)
+ out = spack_test("run", "--alias", "libdwarf", "libdwarf", fail_on_error=False)
assert "already exists" in out
-def test_test_output(mock_test_stage, mock_packages, mock_archive, mock_fetch,
- install_mockery_mutable_config):
+def test_test_output(
+ mock_test_stage, mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config
+):
"""Ensure output printed from pkgs is captured by output redirection."""
- install('printing-package')
- spack_test('run', '--alias', 'printpkg', 'printing-package')
+ install("printing-package")
+ spack_test("run", "--alias", "printpkg", "printing-package")
stage_files = os.listdir(mock_test_stage)
assert len(stage_files) == 1
@@ -78,10 +86,9 @@ def test_test_output(mock_test_stage, mock_packages, mock_archive, mock_fetch,
testdir_files = os.listdir(testdir)
# Grab the output from the test log
- testlog = list(filter(lambda x: x.endswith('out.txt') and
- x != 'results.txt', testdir_files))
+ testlog = list(filter(lambda x: x.endswith("out.txt") and x != "results.txt", testdir_files))
outfile = os.path.join(testdir, testlog[0])
- with open(outfile, 'r') as f:
+ with open(outfile, "r") as f:
output = f.read()
assert "BEFORE TEST" in output
assert "true: expect command status in [" in output
@@ -90,61 +97,59 @@ def test_test_output(mock_test_stage, mock_packages, mock_archive, mock_fetch,
def test_test_output_on_error(
- mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config,
- capfd, mock_test_stage
+ mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config, capfd, mock_test_stage
):
- install('test-error')
+ install("test-error")
# capfd interferes with Spack's capturing
with capfd.disabled():
- out = spack_test('run', 'test-error', fail_on_error=False)
+ out = spack_test("run", "test-error", fail_on_error=False)
assert "TestFailure" in out
assert "Command exited with status 1" in out
def test_test_output_on_failure(
- mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config,
- capfd, mock_test_stage
+ mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config, capfd, mock_test_stage
):
- install('test-fail')
+ install("test-fail")
with capfd.disabled():
- out = spack_test('run', 'test-fail', fail_on_error=False)
+ out = spack_test("run", "test-fail", fail_on_error=False)
assert "Expected 'not in the output' to match output of `true`" in out
assert "TestFailure" in out
def test_show_log_on_error(
- mock_packages, mock_archive, mock_fetch,
- install_mockery_mutable_config, capfd, mock_test_stage
+ mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config, capfd, mock_test_stage
):
"""Make sure spack prints location of test log on failure."""
- install('test-error')
+ install("test-error")
with capfd.disabled():
- out = spack_test('run', 'test-error', fail_on_error=False)
+ out = spack_test("run", "test-error", fail_on_error=False)
- assert 'See test log' in out
+ assert "See test log" in out
assert mock_test_stage in out
@pytest.mark.usefixtures(
- 'mock_packages', 'mock_archive', 'mock_fetch',
- 'install_mockery_mutable_config'
+ "mock_packages", "mock_archive", "mock_fetch", "install_mockery_mutable_config"
+)
+@pytest.mark.parametrize(
+ "pkg_name,msgs",
+ [
+ ("test-error", ["FAILED: Command exited", "TestFailure"]),
+ ("test-fail", ["FAILED: Expected", "TestFailure"]),
+ ],
)
-@pytest.mark.parametrize('pkg_name,msgs', [
- ('test-error', ['FAILED: Command exited', 'TestFailure']),
- ('test-fail', ['FAILED: Expected', 'TestFailure'])
-])
def test_junit_output_with_failures(tmpdir, mock_test_stage, pkg_name, msgs):
install(pkg_name)
with tmpdir.as_cwd():
- spack_test('run',
- '--log-format=junit', '--log-file=test.xml',
- pkg_name,
- fail_on_error=False)
+ spack_test(
+ "run", "--log-format=junit", "--log-file=test.xml", pkg_name, fail_on_error=False
+ )
files = tmpdir.listdir()
- filename = tmpdir.join('test.xml')
+ filename = tmpdir.join("test.xml")
assert filename in files
content = filename.open().read()
@@ -155,94 +160,103 @@ def test_junit_output_with_failures(tmpdir, mock_test_stage, pkg_name, msgs):
assert 'errors="0"' in content
# We want to have both stdout and stderr
- assert '<system-out>' in content
+ assert "<system-out>" in content
for msg in msgs:
assert msg in content
def test_cdash_output_test_error(
- tmpdir, mock_fetch, install_mockery_mutable_config, mock_packages,
- mock_archive, mock_test_stage, capfd):
- install('test-error')
+ tmpdir,
+ mock_fetch,
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_archive,
+ mock_test_stage,
+ capfd,
+):
+ install("test-error")
with tmpdir.as_cwd():
- spack_test('run',
- '--log-format=cdash',
- '--log-file=cdash_reports',
- 'test-error',
- fail_on_error=False)
- report_dir = tmpdir.join('cdash_reports')
+ spack_test(
+ "run",
+ "--log-format=cdash",
+ "--log-file=cdash_reports",
+ "test-error",
+ fail_on_error=False,
+ )
+ report_dir = tmpdir.join("cdash_reports")
print(tmpdir.listdir())
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('test-error_Test.xml')
+ report_file = report_dir.join("test-error_Test.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
- assert 'FAILED: Command exited with status 1' in content
+ assert "FAILED: Command exited with status 1" in content
def test_cdash_upload_clean_test(
- tmpdir, mock_fetch, install_mockery_mutable_config, mock_packages,
- mock_archive, mock_test_stage):
- install('printing-package')
+ tmpdir,
+ mock_fetch,
+ install_mockery_mutable_config,
+ mock_packages,
+ mock_archive,
+ mock_test_stage,
+):
+ install("printing-package")
with tmpdir.as_cwd():
- spack_test('run',
- '--log-file=cdash_reports',
- '--log-format=cdash',
- 'printing-package')
- report_dir = tmpdir.join('cdash_reports')
+ spack_test("run", "--log-file=cdash_reports", "--log-format=cdash", "printing-package")
+ report_dir = tmpdir.join("cdash_reports")
assert report_dir in tmpdir.listdir()
- report_file = report_dir.join('printing-package_Test.xml')
+ report_file = report_dir.join("printing-package_Test.xml")
assert report_file in report_dir.listdir()
content = report_file.open().read()
- assert '</Test>' in content
- assert '<Text>' not in content
+ assert "</Test>" in content
+ assert "<Text>" not in content
def test_test_help_does_not_show_cdash_options(mock_test_stage, capsys):
"""Make sure `spack test --help` does not describe CDash arguments"""
with pytest.raises(SystemExit):
- spack_test('run', '--help')
+ spack_test("run", "--help")
captured = capsys.readouterr()
- assert 'CDash URL' not in captured.out
+ assert "CDash URL" not in captured.out
def test_test_help_cdash(mock_test_stage):
"""Make sure `spack test --help-cdash` describes CDash arguments"""
- out = spack_test('run', '--help-cdash')
- assert 'CDash URL' in out
+ out = spack_test("run", "--help-cdash")
+ assert "CDash URL" in out
def test_test_list_all(mock_packages):
"""make sure `spack test list --all` returns all packages with tests"""
pkgs = spack_test("list", "--all").strip().split()
- assert set(pkgs) == set([
- "printing-package",
- "py-extension1",
- "py-extension2",
- "simple-standalone-test",
- "test-error",
- "test-fail",
- "test-build-callbacks",
- "test-install-callbacks"
- ])
-
-
-def test_test_list(
- mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config
-):
- pkg_with_tests = 'printing-package'
+ assert set(pkgs) == set(
+ [
+ "printing-package",
+ "py-extension1",
+ "py-extension2",
+ "simple-standalone-test",
+ "test-error",
+ "test-fail",
+ "test-build-callbacks",
+ "test-install-callbacks",
+ ]
+ )
+
+
+def test_test_list(mock_packages, mock_archive, mock_fetch, install_mockery_mutable_config):
+ pkg_with_tests = "printing-package"
install(pkg_with_tests)
output = spack_test("list")
assert pkg_with_tests in output
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_has_test_method_fails(capsys):
with pytest.raises(SystemExit):
- spack.package_base.has_test_method('printing-package')
+ spack.package_base.has_test_method("printing-package")
captured = capsys.readouterr()[1]
- assert 'is not a class' in captured
+ assert "is not a class" in captured
def test_read_old_results(mock_test_stage):
@@ -252,54 +266,56 @@ def test_read_old_results(mock_test_stage):
# spack install printing-package
# spack test run --alias printpkg printing-package
- test_data_src = os.path.join(
- spack.paths.test_path, 'data', 'test', 'test_stage')
+ test_data_src = os.path.join(spack.paths.test_path, "data", "test", "test_stage")
# Copy the old test data into the mock stage directory
copy_tree(test_data_src, mock_test_stage)
# The find command should print info about the old test, under
# the alias used at test generation time
- find_output = spack_test('find')
- assert 'printpkg' in find_output
+ find_output = spack_test("find")
+ assert "printpkg" in find_output
# The results command should still print the old test results
- results_output = spack_test('results')
- assert 'PASSED' in results_output
+ results_output = spack_test("results")
+ assert "PASSED" in results_output
def test_test_results_none(mock_packages, mock_test_stage):
- name = 'trivial'
- spec = spack.spec.Spec('trivial-smoke-test').concretized()
+ name = "trivial"
+ spec = spack.spec.Spec("trivial-smoke-test").concretized()
suite = spack.install_test.TestSuite([spec], name)
suite.ensure_stage()
spack.install_test.write_test_suite_file(suite)
- results = spack_test('results', name)
- assert 'has no results' in results
- assert 'if it is running' in results
-
-
-@pytest.mark.parametrize('status,expected', [
- ('FAILED', '1 failed'),
- ('NO-TESTS', '1 no-tests'),
- ('SKIPPED', '1 skipped'),
- ('PASSED', '1 passed'),
-])
+ results = spack_test("results", name)
+ assert "has no results" in results
+ assert "if it is running" in results
+
+
+@pytest.mark.parametrize(
+ "status,expected",
+ [
+ ("FAILED", "1 failed"),
+ ("NO-TESTS", "1 no-tests"),
+ ("SKIPPED", "1 skipped"),
+ ("PASSED", "1 passed"),
+ ],
+)
def test_test_results_status(mock_packages, mock_test_stage, status, expected):
- name = 'trivial'
- spec = spack.spec.Spec('trivial-smoke-test').concretized()
+ name = "trivial"
+ spec = spack.spec.Spec("trivial-smoke-test").concretized()
suite = spack.install_test.TestSuite([spec], name)
suite.ensure_stage()
spack.install_test.write_test_suite_file(suite)
suite.write_test_result(spec, status)
- for opt in ['', '--failed', '--log']:
- args = ['results', name]
+ for opt in ["", "--failed", "--log"]:
+ args = ["results", name]
if opt:
args.insert(1, opt)
results = spack_test(*args)
- if opt == '--failed' and status != 'FAILED':
+ if opt == "--failed" and status != "FAILED":
assert status not in results
else:
assert status in results
diff --git a/lib/spack/spack/test/cmd/undevelop.py b/lib/spack/spack/test/cmd/undevelop.py
index 43a7f45ac3..41c455d2b1 100644
--- a/lib/spack/spack/test/cmd/undevelop.py
+++ b/lib/spack/spack/test/cmd/undevelop.py
@@ -11,20 +11,20 @@ import spack.environment as ev
import spack.spec
from spack.main import SpackCommand
-undevelop = SpackCommand('undevelop')
-env = SpackCommand('env')
-concretize = SpackCommand('concretize')
+undevelop = SpackCommand("undevelop")
+env = SpackCommand("env")
+concretize = SpackCommand("concretize")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_undevelop(tmpdir, config, mock_packages, mutable_mock_env_path):
# setup environment
- envdir = tmpdir.mkdir('env')
+ envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
- with open('spack.yaml', 'w') as f:
- f.write("""\
+ with open("spack.yaml", "w") as f:
+ f.write(
+ """\
env:
specs:
- mpich
@@ -33,25 +33,27 @@ env:
mpich:
spec: mpich@1.0
path: /fake/path
-""")
+"""
+ )
- env('create', 'test', './spack.yaml')
- with ev.read('test'):
- before = spack.spec.Spec('mpich').concretized()
- undevelop('mpich')
- after = spack.spec.Spec('mpich').concretized()
+ env("create", "test", "./spack.yaml")
+ with ev.read("test"):
+ before = spack.spec.Spec("mpich").concretized()
+ undevelop("mpich")
+ after = spack.spec.Spec("mpich").concretized()
# Removing dev spec from environment changes concretization
- assert before.satisfies('dev_path=*')
- assert not after.satisfies('dev_path=*')
+ assert before.satisfies("dev_path=*")
+ assert not after.satisfies("dev_path=*")
def test_undevelop_nonexistent(tmpdir, config, mock_packages, mutable_mock_env_path):
# setup environment
- envdir = tmpdir.mkdir('env')
+ envdir = tmpdir.mkdir("env")
with envdir.as_cwd():
- with open('spack.yaml', 'w') as f:
- f.write("""\
+ with open("spack.yaml", "w") as f:
+ f.write(
+ """\
env:
specs:
- mpich
@@ -60,14 +62,15 @@ env:
mpich:
spec: mpich@1.0
path: /fake/path
-""")
+"""
+ )
- env('create', 'test', './spack.yaml')
- with ev.read('test') as e:
+ env("create", "test", "./spack.yaml")
+ with ev.read("test") as e:
concretize()
before = e.specs_by_hash
- undevelop('package-not-in-develop') # does nothing
- concretize('-f')
+ undevelop("package-not-in-develop") # does nothing
+ concretize("-f")
after = e.specs_by_hash
# nothing should have changed
diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py
index 857f1ae35d..a47f6c60a5 100644
--- a/lib/spack/spack/test/cmd/uninstall.py
+++ b/lib/spack/spack/test/cmd/uninstall.py
@@ -10,12 +10,11 @@ import llnl.util.tty as tty
import spack.store
from spack.main import SpackCommand, SpackCommandError
-uninstall = SpackCommand('uninstall')
-install = SpackCommand('install')
+uninstall = SpackCommand("uninstall")
+install = SpackCommand("install")
class MockArgs(object):
-
def __init__(self, packages, all=False, force=False, dependents=False):
self.packages = packages
self.all = all
@@ -28,27 +27,27 @@ class MockArgs(object):
def test_multiple_matches(mutable_database):
"""Test unable to uninstall when multiple matches."""
with pytest.raises(SpackCommandError):
- uninstall('-y', 'mpileaks')
+ uninstall("-y", "mpileaks")
@pytest.mark.db
def test_installed_dependents(mutable_database):
"""Test can't uninstall when there are installed dependents."""
with pytest.raises(SpackCommandError):
- uninstall('-y', 'libelf')
+ uninstall("-y", "libelf")
@pytest.mark.db
def test_recursive_uninstall(mutable_database):
"""Test recursive uninstall."""
- uninstall('-y', '-a', '--dependents', 'callpath')
+ uninstall("-y", "-a", "--dependents", "callpath")
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 9
# query specs with multiple configurations
- mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
- callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
- mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+ mpileaks_specs = [s for s in all_specs if s.satisfies("mpileaks")]
+ callpath_specs = [s for s in all_specs if s.satisfies("callpath")]
+ mpi_specs = [s for s in all_specs if s.satisfies("mpi")]
assert len(mpileaks_specs) == 0
assert len(callpath_specs) == 0
@@ -56,27 +55,23 @@ def test_recursive_uninstall(mutable_database):
@pytest.mark.db
-@pytest.mark.regression('3690')
-@pytest.mark.parametrize('constraint,expected_number_of_specs', [
- ('dyninst', 8), ('libelf', 6)
-])
+@pytest.mark.regression("3690")
+@pytest.mark.parametrize("constraint,expected_number_of_specs", [("dyninst", 8), ("libelf", 6)])
def test_uninstall_spec_with_multiple_roots(
- constraint, expected_number_of_specs, mutable_database
+ constraint, expected_number_of_specs, mutable_database
):
- uninstall('-y', '-a', '--dependents', constraint)
+ uninstall("-y", "-a", "--dependents", constraint)
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == expected_number_of_specs
@pytest.mark.db
-@pytest.mark.parametrize('constraint,expected_number_of_specs', [
- ('dyninst', 14), ('libelf', 14)
-])
+@pytest.mark.parametrize("constraint,expected_number_of_specs", [("dyninst", 14), ("libelf", 14)])
def test_force_uninstall_spec_with_ref_count_not_zero(
- constraint, expected_number_of_specs, mutable_database
+ constraint, expected_number_of_specs, mutable_database
):
- uninstall('-f', '-y', constraint)
+ uninstall("-f", "-y", constraint)
all_specs = spack.store.layout.all_specs()
assert len(all_specs) == expected_number_of_specs
@@ -86,7 +81,7 @@ def test_force_uninstall_spec_with_ref_count_not_zero(
def test_force_uninstall_and_reinstall_by_hash(mutable_database):
"""Test forced uninstall and reinstall of old specs."""
# this is the spec to be removed
- callpath_spec = spack.store.db.query_one('callpath ^mpich')
+ callpath_spec = spack.store.db.query_one("callpath ^mpich")
dag_hash = callpath_spec.dag_hash()
# ensure can look up by hash and that it's a dependent of mpileaks
@@ -108,26 +103,24 @@ def test_force_uninstall_and_reinstall_by_hash(mutable_database):
specs = spack.store.db.get_by_hash(dag_hash, installed=not installed)
assert specs is None
- specs = spack.store.db.get_by_hash(dag_hash[:7],
- installed=not installed)
+ specs = spack.store.db.get_by_hash(dag_hash[:7], installed=not installed)
assert specs is None
- mpileaks_spec = spack.store.db.query_one('mpileaks ^mpich')
+ mpileaks_spec = spack.store.db.query_one("mpileaks ^mpich")
assert callpath_spec in mpileaks_spec
- spec = spack.store.db.query_one('callpath ^mpich', installed=installed)
+ spec = spack.store.db.query_one("callpath ^mpich", installed=installed)
assert spec == callpath_spec
- spec = spack.store.db.query_one('callpath ^mpich', installed=any)
+ spec = spack.store.db.query_one("callpath ^mpich", installed=any)
assert spec == callpath_spec
- spec = spack.store.db.query_one('callpath ^mpich',
- installed=not installed)
+ spec = spack.store.db.query_one("callpath ^mpich", installed=not installed)
assert spec is None
validate_callpath_spec(True)
- uninstall('-y', '-f', 'callpath ^mpich')
+ uninstall("-y", "-f", "callpath ^mpich")
# ensure that you can still look up by hash and see deps, EVEN though
# the callpath spec is missing.
@@ -138,10 +131,11 @@ def test_force_uninstall_and_reinstall_by_hash(mutable_database):
all_specs = spack.store.layout.all_specs()
return (
all_specs,
- [s for s in all_specs if s.satisfies('mpileaks')],
- [s for s in all_specs if s.satisfies('callpath')],
- [s for s in all_specs if s.satisfies('mpi')]
+ [s for s in all_specs if s.satisfies("mpileaks")],
+ [s for s in all_specs if s.satisfies("callpath")],
+ [s for s in all_specs if s.satisfies("mpi")],
)
+
all_specs, mpileaks_specs, callpath_specs, mpi_specs = db_specs()
total_specs = len(all_specs)
assert total_specs == 14
@@ -150,25 +144,25 @@ def test_force_uninstall_and_reinstall_by_hash(mutable_database):
assert len(mpi_specs) == 3
# Now, REINSTALL the spec and make sure everything still holds
- install('--fake', '/%s' % dag_hash[:7])
+ install("--fake", "/%s" % dag_hash[:7])
validate_callpath_spec(True)
all_specs, mpileaks_specs, callpath_specs, mpi_specs = db_specs()
- assert len(all_specs) == total_specs + 1 # back to total_specs+1
+ assert len(all_specs) == total_specs + 1 # back to total_specs+1
assert len(mpileaks_specs) == 3
assert len(callpath_specs) == 3 # back to 3
assert len(mpi_specs) == 3
@pytest.mark.db
-@pytest.mark.regression('15773')
-def test_in_memory_consistency_when_uninstalling(
- mutable_database, monkeypatch
-):
+@pytest.mark.regression("15773")
+def test_in_memory_consistency_when_uninstalling(mutable_database, monkeypatch):
"""Test that uninstalling doesn't raise warnings"""
+
def _warn(*args, **kwargs):
- raise RuntimeError('a warning was triggered!')
- monkeypatch.setattr(tty, 'warn', _warn)
+ raise RuntimeError("a warning was triggered!")
+
+ monkeypatch.setattr(tty, "warn", _warn)
# Now try to uninstall and check this doesn't trigger warnings
- uninstall('-y', '-a')
+ uninstall("-y", "-a")
diff --git a/lib/spack/spack/test/cmd/unit_test.py b/lib/spack/spack/test/cmd/unit_test.py
index aa31282fdb..7104879a57 100644
--- a/lib/spack/spack/test/cmd/unit_test.py
+++ b/lib/spack/spack/test/cmd/unit_test.py
@@ -7,19 +7,19 @@ import os
from spack.main import SpackCommand
-spack_test = SpackCommand('unit-test')
-cmd_test_py = os.path.join('lib', 'spack', 'spack', 'test', 'cmd', 'unit_test.py')
+spack_test = SpackCommand("unit-test")
+cmd_test_py = os.path.join("lib", "spack", "spack", "test", "cmd", "unit_test.py")
def test_list():
- output = spack_test('--list')
+ output = spack_test("--list")
assert "unit_test.py" in output
assert "spec_semantics.py" in output
assert "test_list" not in output
def test_list_with_pytest_arg():
- output = spack_test('--list', cmd_test_py)
+ output = spack_test("--list", cmd_test_py)
assert output.strip() == cmd_test_py
@@ -27,13 +27,13 @@ def test_list_with_keywords():
# Here we removed querying with a "/" to separate directories
# since the behavior is inconsistent across different pytest
# versions, see https://stackoverflow.com/a/48814787/771663
- output = spack_test('--list', '-k', 'unit_test.py')
+ output = spack_test("--list", "-k", "unit_test.py")
assert output.strip() == cmd_test_py
def test_list_long(capsys):
with capsys.disabled():
- output = spack_test('--list-long')
+ output = spack_test("--list-long")
assert "unit_test.py::\n" in output
assert "test_list" in output
assert "test_list_with_pytest_arg" in output
@@ -44,13 +44,13 @@ def test_list_long(capsys):
assert "test_list_names_with_pytest_arg" in output
assert "spec_dag.py::\n" in output
- assert 'test_installed_deps' in output
- assert 'test_test_deptype' in output
+ assert "test_installed_deps" in output
+ assert "test_test_deptype" in output
def test_list_long_with_pytest_arg(capsys):
with capsys.disabled():
- output = spack_test('--list-long', cmd_test_py)
+ output = spack_test("--list-long", cmd_test_py)
print(output)
assert "unit_test.py::\n" in output
assert "test_list" in output
@@ -62,12 +62,12 @@ def test_list_long_with_pytest_arg(capsys):
assert "test_list_names_with_pytest_arg" in output
assert "spec_dag.py::\n" not in output
- assert 'test_installed_deps' not in output
- assert 'test_test_deptype' not in output
+ assert "test_installed_deps" not in output
+ assert "test_test_deptype" not in output
def test_list_names():
- output = spack_test('--list-names')
+ output = spack_test("--list-names")
assert "unit_test.py::test_list\n" in output
assert "unit_test.py::test_list_with_pytest_arg\n" in output
assert "unit_test.py::test_list_with_keywords\n" in output
@@ -77,11 +77,11 @@ def test_list_names():
assert "unit_test.py::test_list_names_with_pytest_arg\n" in output
assert "spec_dag.py::test_installed_deps\n" in output
- assert 'spec_dag.py::test_test_deptype\n' in output
+ assert "spec_dag.py::test_test_deptype\n" in output
def test_list_names_with_pytest_arg():
- output = spack_test('--list-names', cmd_test_py)
+ output = spack_test("--list-names", cmd_test_py)
assert "unit_test.py::test_list\n" in output
assert "unit_test.py::test_list_with_pytest_arg\n" in output
assert "unit_test.py::test_list_with_keywords\n" in output
@@ -91,11 +91,11 @@ def test_list_names_with_pytest_arg():
assert "unit_test.py::test_list_names_with_pytest_arg\n" in output
assert "spec_dag.py::test_installed_deps\n" not in output
- assert 'spec_dag.py::test_test_deptype\n' not in output
+ assert "spec_dag.py::test_test_deptype\n" not in output
def test_pytest_help():
- output = spack_test('--pytest-help')
+ output = spack_test("--pytest-help")
assert "-k EXPRESSION" in output
assert "pytest-warnings:" in output
assert "--collect-only" in output
diff --git a/lib/spack/spack/test/cmd/url.py b/lib/spack/spack/test/cmd/url.py
index 301185a6f7..97607fad5c 100644
--- a/lib/spack/spack/test/cmd/url.py
+++ b/lib/spack/spack/test/cmd/url.py
@@ -12,7 +12,7 @@ from spack.cmd.url import name_parsed_correctly, url_summary, version_parsed_cor
from spack.main import SpackCommand
from spack.url import UndetectableVersionError
-url = SpackCommand('url')
+url = SpackCommand("url")
class MyPackage:
@@ -23,135 +23,129 @@ class MyPackage:
def test_name_parsed_correctly():
# Expected True
- assert name_parsed_correctly(MyPackage('netcdf', []), 'netcdf')
- assert name_parsed_correctly(MyPackage('r-devtools', []), 'devtools')
- assert name_parsed_correctly(MyPackage('py-numpy', []), 'numpy')
- assert name_parsed_correctly(MyPackage('octave-splines', []), 'splines')
- assert name_parsed_correctly(MyPackage('th-data', []), 'TH.data')
- assert name_parsed_correctly(
- MyPackage('imagemagick', []), 'ImageMagick')
+ assert name_parsed_correctly(MyPackage("netcdf", []), "netcdf")
+ assert name_parsed_correctly(MyPackage("r-devtools", []), "devtools")
+ assert name_parsed_correctly(MyPackage("py-numpy", []), "numpy")
+ assert name_parsed_correctly(MyPackage("octave-splines", []), "splines")
+ assert name_parsed_correctly(MyPackage("th-data", []), "TH.data")
+ assert name_parsed_correctly(MyPackage("imagemagick", []), "ImageMagick")
# Expected False
- assert not name_parsed_correctly(MyPackage('', []), 'hdf5')
- assert not name_parsed_correctly(MyPackage('hdf5', []), '')
- assert not name_parsed_correctly(MyPackage('yaml-cpp', []), 'yamlcpp')
- assert not name_parsed_correctly(MyPackage('yamlcpp', []), 'yaml-cpp')
- assert not name_parsed_correctly(MyPackage('r-py-parser', []), 'parser')
- assert not name_parsed_correctly(
- MyPackage('oce', []), 'oce-0.18.0')
+ assert not name_parsed_correctly(MyPackage("", []), "hdf5")
+ assert not name_parsed_correctly(MyPackage("hdf5", []), "")
+ assert not name_parsed_correctly(MyPackage("yaml-cpp", []), "yamlcpp")
+ assert not name_parsed_correctly(MyPackage("yamlcpp", []), "yaml-cpp")
+ assert not name_parsed_correctly(MyPackage("r-py-parser", []), "parser")
+ assert not name_parsed_correctly(MyPackage("oce", []), "oce-0.18.0")
def test_version_parsed_correctly():
# Expected True
- assert version_parsed_correctly(MyPackage('', ['1.2.3']), '1.2.3')
- assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4a')
- assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4b')
- assert version_parsed_correctly(MyPackage('', ['1.63.0']), '1_63_0')
- assert version_parsed_correctly(MyPackage('', ['0.94h']), '094h')
+ assert version_parsed_correctly(MyPackage("", ["1.2.3"]), "1.2.3")
+ assert version_parsed_correctly(MyPackage("", ["5.4a", "5.4b"]), "5.4a")
+ assert version_parsed_correctly(MyPackage("", ["5.4a", "5.4b"]), "5.4b")
+ assert version_parsed_correctly(MyPackage("", ["1.63.0"]), "1_63_0")
+ assert version_parsed_correctly(MyPackage("", ["0.94h"]), "094h")
# Expected False
- assert not version_parsed_correctly(MyPackage('', []), '1.2.3')
- assert not version_parsed_correctly(MyPackage('', ['1.2.3']), '')
- assert not version_parsed_correctly(MyPackage('', ['1.2.3']), '1.2.4')
- assert not version_parsed_correctly(MyPackage('', ['3.4a']), '3.4')
- assert not version_parsed_correctly(MyPackage('', ['3.4']), '3.4b')
- assert not version_parsed_correctly(
- MyPackage('', ['0.18.0']), 'oce-0.18.0')
+ assert not version_parsed_correctly(MyPackage("", []), "1.2.3")
+ assert not version_parsed_correctly(MyPackage("", ["1.2.3"]), "")
+ assert not version_parsed_correctly(MyPackage("", ["1.2.3"]), "1.2.4")
+ assert not version_parsed_correctly(MyPackage("", ["3.4a"]), "3.4")
+ assert not version_parsed_correctly(MyPackage("", ["3.4"]), "3.4b")
+ assert not version_parsed_correctly(MyPackage("", ["0.18.0"]), "oce-0.18.0")
def test_url_parse():
- url('parse', 'http://zlib.net/fossils/zlib-1.2.10.tar.gz')
+ url("parse", "http://zlib.net/fossils/zlib-1.2.10.tar.gz")
def test_url_with_no_version_fails():
# No version in URL
with pytest.raises(UndetectableVersionError):
- url('parse', 'http://www.netlib.org/voronoi/triangle.zip')
+ url("parse", "http://www.netlib.org/voronoi/triangle.zip")
def test_url_list(mock_packages):
- out = url('list')
- total_urls = len(out.split('\n'))
+ out = url("list")
+ total_urls = len(out.split("\n"))
# The following two options should not change the number of URLs printed.
- out = url('list', '--color', '--extrapolation')
- colored_urls = len(out.split('\n'))
+ out = url("list", "--color", "--extrapolation")
+ colored_urls = len(out.split("\n"))
assert colored_urls == total_urls
# The following options should print fewer URLs than the default.
# If they print the same number of URLs, something is horribly broken.
# If they say we missed 0 URLs, something is probably broken too.
- out = url('list', '--incorrect-name')
- incorrect_name_urls = len(out.split('\n'))
+ out = url("list", "--incorrect-name")
+ incorrect_name_urls = len(out.split("\n"))
assert 0 < incorrect_name_urls < total_urls
- out = url('list', '--incorrect-version')
- incorrect_version_urls = len(out.split('\n'))
+ out = url("list", "--incorrect-version")
+ incorrect_version_urls = len(out.split("\n"))
assert 0 < incorrect_version_urls < total_urls
- out = url('list', '--correct-name')
- correct_name_urls = len(out.split('\n'))
+ out = url("list", "--correct-name")
+ correct_name_urls = len(out.split("\n"))
assert 0 < correct_name_urls < total_urls
- out = url('list', '--correct-version')
- correct_version_urls = len(out.split('\n'))
+ out = url("list", "--correct-version")
+ correct_version_urls = len(out.split("\n"))
assert 0 < correct_version_urls < total_urls
def test_url_summary(mock_packages):
"""Test the URL summary command."""
# test url_summary, the internal function that does the work
- (total_urls, correct_names, correct_versions,
- name_count_dict, version_count_dict) = url_summary(None)
+ (
+ total_urls,
+ correct_names,
+ correct_versions,
+ name_count_dict,
+ version_count_dict,
+ ) = url_summary(None)
- assert (0 < correct_names <=
- sum(name_count_dict.values()) <= total_urls)
- assert (0 < correct_versions <=
- sum(version_count_dict.values()) <= total_urls)
+ assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls
+ assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls
# make sure it agrees with the actual command.
- out = url('summary')
- out_total_urls = int(
- re.search(r'Total URLs found:\s*(\d+)', out).group(1))
+ out = url("summary")
+ out_total_urls = int(re.search(r"Total URLs found:\s*(\d+)", out).group(1))
assert out_total_urls == total_urls
- out_correct_names = int(
- re.search(r'Names correctly parsed:\s*(\d+)', out).group(1))
+ out_correct_names = int(re.search(r"Names correctly parsed:\s*(\d+)", out).group(1))
assert out_correct_names == correct_names
- out_correct_versions = int(
- re.search(r'Versions correctly parsed:\s*(\d+)', out).group(1))
+ out_correct_versions = int(re.search(r"Versions correctly parsed:\s*(\d+)", out).group(1))
assert out_correct_versions == correct_versions
-@pytest.mark.skipif(
- sys.platform.startswith("win"),
- reason="Unsupported on Windows for now"
-)
+@pytest.mark.skipif(sys.platform.startswith("win"), reason="Unsupported on Windows for now")
def test_url_stats(capfd, mock_packages):
with capfd.disabled():
- output = url('stats')
- npkgs = '%d packages' % len(spack.repo.all_package_names())
+ output = url("stats")
+ npkgs = "%d packages" % len(spack.repo.all_package_names())
assert npkgs in output
- assert 'url' in output
- assert 'git' in output
- assert 'schemes' in output
- assert 'versions' in output
- assert 'resources' in output
-
- output = url('stats', '--show-issues')
- npkgs = '%d packages' % len(spack.repo.all_package_names())
+ assert "url" in output
+ assert "git" in output
+ assert "schemes" in output
+ assert "versions" in output
+ assert "resources" in output
+
+ output = url("stats", "--show-issues")
+ npkgs = "%d packages" % len(spack.repo.all_package_names())
assert npkgs in output
- assert 'url' in output
- assert 'git' in output
- assert 'schemes' in output
- assert 'versions' in output
- assert 'resources' in output
-
- assert 'Package URLs with md5 hashes' in output
- assert 'needs-relocation' in output
- assert 'https://cmake.org/files/v3.4/cmake-0.0.0.tar.gz' in output
-
- assert 'Package URLs with http urls' in output
- assert 'zmpi' in output
- assert 'http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz' in output
+ assert "url" in output
+ assert "git" in output
+ assert "schemes" in output
+ assert "versions" in output
+ assert "resources" in output
+
+ assert "Package URLs with md5 hashes" in output
+ assert "needs-relocation" in output
+ assert "https://cmake.org/files/v3.4/cmake-0.0.0.tar.gz" in output
+
+ assert "Package URLs with http urls" in output
+ assert "zmpi" in output
+ assert "http://www.spack-fake-zmpi.org/downloads/zmpi-1.0.tar.gz" in output
diff --git a/lib/spack/spack/test/cmd/verify.py b/lib/spack/spack/test/cmd/verify.py
index 357f3751c0..fb2b6ed11d 100644
--- a/lib/spack/spack/test/cmd/verify.py
+++ b/lib/spack/spack/test/cmd/verify.py
@@ -17,79 +17,78 @@ import spack.util.spack_json as sjson
import spack.verify
from spack.main import SpackCommand
-verify = SpackCommand('verify')
-install = SpackCommand('install')
+verify = SpackCommand("verify")
+install = SpackCommand("install")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_single_file_verify_cmd(tmpdir):
# Test the verify command interface to verifying a single file.
- filedir = os.path.join(str(tmpdir), 'a', 'b', 'c', 'd')
- filepath = os.path.join(filedir, 'file')
+ filedir = os.path.join(str(tmpdir), "a", "b", "c", "d")
+ filepath = os.path.join(filedir, "file")
metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)
fs.mkdirp(filedir)
fs.mkdirp(metadir)
- with open(filepath, 'w') as f:
+ with open(filepath, "w") as f:
f.write("I'm a file")
data = spack.verify.create_manifest_entry(filepath)
- manifest_file = os.path.join(metadir,
- spack.store.layout.manifest_file_name)
+ manifest_file = os.path.join(metadir, spack.store.layout.manifest_file_name)
- with open(manifest_file, 'w') as f:
+ with open(manifest_file, "w") as f:
sjson.dump({filepath: data}, f)
- results = verify('-f', filepath, fail_on_error=False)
+ results = verify("-f", filepath, fail_on_error=False)
print(results)
assert not results
os.utime(filepath, (0, 0))
- with open(filepath, 'w') as f:
+ with open(filepath, "w") as f:
f.write("I changed.")
- results = verify('-f', filepath, fail_on_error=False)
+ results = verify("-f", filepath, fail_on_error=False)
- expected = ['hash']
+ expected = ["hash"]
mtime = os.stat(filepath).st_mtime
- if mtime != data['time']:
- expected.append('mtime')
+ if mtime != data["time"]:
+ expected.append("mtime")
assert results
assert filepath in results
assert all(x in results for x in expected)
- results = verify('-fj', filepath, fail_on_error=False)
+ results = verify("-fj", filepath, fail_on_error=False)
res = sjson.load(results)
assert len(res) == 1
errors = res.pop(filepath)
assert sorted(errors) == sorted(expected)
-def test_single_spec_verify_cmd(tmpdir, mock_packages, mock_archive,
- mock_fetch, config, install_mockery):
+def test_single_spec_verify_cmd(
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
# Test the verify command interface to verify a single spec
- install('libelf')
- s = spack.spec.Spec('libelf').concretized()
+ install("libelf")
+ s = spack.spec.Spec("libelf").concretized()
prefix = s.prefix
hash = s.dag_hash()
- results = verify('/%s' % hash, fail_on_error=False)
+ results = verify("/%s" % hash, fail_on_error=False)
assert not results
- new_file = os.path.join(prefix, 'new_file_for_verify_test')
- with open(new_file, 'w') as f:
- f.write('New file')
+ new_file = os.path.join(prefix, "new_file_for_verify_test")
+ with open(new_file, "w") as f:
+ f.write("New file")
- results = verify('/%s' % hash, fail_on_error=False)
+ results = verify("/%s" % hash, fail_on_error=False)
assert new_file in results
- assert 'added' in results
+ assert "added" in results
- results = verify('-j', '/%s' % hash, fail_on_error=False)
+ results = verify("-j", "/%s" % hash, fail_on_error=False)
res = sjson.load(results)
assert len(res) == 1
- assert res[new_file] == ['added']
+ assert res[new_file] == ["added"]
diff --git a/lib/spack/spack/test/cmd/versions.py b/lib/spack/spack/test/cmd/versions.py
index f3be1ed3fe..0acc8a7df7 100644
--- a/lib/spack/spack/test/cmd/versions.py
+++ b/lib/spack/spack/test/cmd/versions.py
@@ -10,40 +10,39 @@ import pytest
from spack.main import SpackCommand
from spack.version import Version
-versions = SpackCommand('versions')
+versions = SpackCommand("versions")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_safe_only_versions():
"""Only test the safe versions of a package.
- (Using the deprecated command line argument)
+ (Using the deprecated command line argument)
"""
- versions('--safe-only', 'zlib')
+ versions("--safe-only", "zlib")
def test_safe_versions():
"""Only test the safe versions of a package."""
- versions('--safe', 'zlib')
+ versions("--safe", "zlib")
@pytest.mark.maybeslow
def test_remote_versions():
"""Test a package for which remote versions should be available."""
- versions('zlib')
+ versions("zlib")
@pytest.mark.maybeslow
def test_remote_versions_only():
"""Test a package for which remote versions should be available."""
- versions('--remote', 'zlib')
+ versions("--remote", "zlib")
-@pytest.mark.usefixtures('mock_packages')
+@pytest.mark.usefixtures("mock_packages")
def test_new_versions_only(monkeypatch):
"""Test a package for which new versions should be available."""
from spack.pkg.builtin.mock.brillig import Brillig # type: ignore[import]
@@ -51,49 +50,50 @@ def test_new_versions_only(monkeypatch):
def mock_fetch_remote_versions(*args, **kwargs):
mock_remote_versions = {
# new version, we expect this to be in output:
- Version('99.99.99'): {},
+ Version("99.99.99"): {},
# some packages use '3.2' equivalently to '3.2.0'
# thus '3.2.1' is considered to be a new version
# and expected in the output also
- Version('3.2.1'): {}, # new version, we expect this to be in output
- Version('3.2'): {},
- Version('1.0.0'): {},
+ Version("3.2.1"): {}, # new version, we expect this to be in output
+ Version("3.2"): {},
+ Version("1.0.0"): {},
}
return mock_remote_versions
+
mock_versions = {
# already checksummed versions:
- Version('3.2'): {},
- Version('1.0.0'): {},
+ Version("3.2"): {},
+ Version("1.0.0"): {},
}
- monkeypatch.setattr(Brillig, 'versions', mock_versions)
- monkeypatch.setattr(Brillig, 'fetch_remote_versions', mock_fetch_remote_versions)
- v = versions('--new', 'brillig')
- assert(v.strip(' \n\t') == "99.99.99\n 3.2.1")
+ monkeypatch.setattr(Brillig, "versions", mock_versions)
+ monkeypatch.setattr(Brillig, "fetch_remote_versions", mock_fetch_remote_versions)
+ v = versions("--new", "brillig")
+ assert v.strip(" \n\t") == "99.99.99\n 3.2.1"
@pytest.mark.maybeslow
def test_no_versions():
"""Test a package for which no remote versions are available."""
- versions('converge')
+ versions("converge")
@pytest.mark.maybeslow
def test_no_unchecksummed_versions():
"""Test a package for which no unchecksummed versions are available."""
- versions('bzip2')
+ versions("bzip2")
@pytest.mark.maybeslow
def test_versions_no_url():
"""Test a package with versions but without a ``url`` attribute."""
- versions('graphviz')
+ versions("graphviz")
@pytest.mark.maybeslow
def test_no_versions_no_url():
"""Test a package without versions or a ``url`` attribute."""
- versions('opengl')
+ versions("opengl")
diff --git a/lib/spack/spack/test/cmd/view.py b/lib/spack/spack/test/cmd/view.py
index d1d265bade..51af2bae2a 100644
--- a/lib/spack/spack/test/cmd/view.py
+++ b/lib/spack/spack/test/cmd/view.py
@@ -11,280 +11,253 @@ import pytest
import spack.util.spack_yaml as s_yaml
from spack.main import SpackCommand
-activate = SpackCommand('activate')
-extensions = SpackCommand('extensions')
-install = SpackCommand('install')
-view = SpackCommand('view')
+activate = SpackCommand("activate")
+extensions = SpackCommand("extensions")
+install = SpackCommand("install")
+view = SpackCommand("view")
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def create_projection_file(tmpdir, projection):
- if 'projections' not in projection:
- projection = {'projections': projection}
+ if "projections" not in projection:
+ projection = {"projections": projection}
- projection_file = tmpdir.mkdir('projection').join('projection.yaml')
+ projection_file = tmpdir.mkdir("projection").join("projection.yaml")
projection_file.write(s_yaml.dump(projection))
return projection_file
-@pytest.mark.parametrize('cmd', ['hardlink', 'symlink', 'hard', 'add',
- 'copy', 'relocate'])
+@pytest.mark.parametrize("cmd", ["hardlink", "symlink", "hard", "add", "copy", "relocate"])
def test_view_link_type(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery, cmd):
- install('libdwarf')
- viewpath = str(tmpdir.mkdir('view_{0}'.format(cmd)))
- view(cmd, viewpath, 'libdwarf')
- package_prefix = os.path.join(viewpath, 'libdwarf')
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery, cmd
+):
+ install("libdwarf")
+ viewpath = str(tmpdir.mkdir("view_{0}".format(cmd)))
+ view(cmd, viewpath, "libdwarf")
+ package_prefix = os.path.join(viewpath, "libdwarf")
assert os.path.exists(package_prefix)
# Check that we use symlinks for and only for the appropriate subcommands
- is_link_cmd = cmd in ('symlink', 'add')
+ is_link_cmd = cmd in ("symlink", "add")
assert os.path.islink(package_prefix) == is_link_cmd
-@pytest.mark.parametrize('add_cmd', ['hardlink', 'symlink', 'hard', 'add',
- 'copy', 'relocate'])
+@pytest.mark.parametrize("add_cmd", ["hardlink", "symlink", "hard", "add", "copy", "relocate"])
def test_view_link_type_remove(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery, add_cmd):
- install('needs-relocation')
- viewpath = str(tmpdir.mkdir('view_{0}'.format(add_cmd)))
- view(add_cmd, viewpath, 'needs-relocation')
- bindir = os.path.join(viewpath, 'bin')
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery, add_cmd
+):
+ install("needs-relocation")
+ viewpath = str(tmpdir.mkdir("view_{0}".format(add_cmd)))
+ view(add_cmd, viewpath, "needs-relocation")
+ bindir = os.path.join(viewpath, "bin")
assert os.path.exists(bindir)
- view('remove', viewpath, 'needs-relocation')
+ view("remove", viewpath, "needs-relocation")
assert not os.path.exists(bindir)
-@pytest.mark.parametrize('cmd', ['hardlink', 'symlink', 'hard', 'add',
- 'copy', 'relocate'])
+@pytest.mark.parametrize("cmd", ["hardlink", "symlink", "hard", "add", "copy", "relocate"])
def test_view_projections(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery, cmd):
- install('libdwarf@20130207')
-
- viewpath = str(tmpdir.mkdir('view_{0}'.format(cmd)))
- view_projection = {
- 'projections': {
- 'all': '{name}-{version}'
- }
- }
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery, cmd
+):
+ install("libdwarf@20130207")
+
+ viewpath = str(tmpdir.mkdir("view_{0}".format(cmd)))
+ view_projection = {"projections": {"all": "{name}-{version}"}}
projection_file = create_projection_file(tmpdir, view_projection)
- view(cmd, viewpath, '--projection-file={0}'.format(projection_file),
- 'libdwarf')
+ view(cmd, viewpath, "--projection-file={0}".format(projection_file), "libdwarf")
- package_prefix = os.path.join(viewpath, 'libdwarf-20130207/libdwarf')
+ package_prefix = os.path.join(viewpath, "libdwarf-20130207/libdwarf")
assert os.path.exists(package_prefix)
# Check that we use symlinks for and only for the appropriate subcommands
- is_symlink_cmd = cmd in ('symlink', 'add')
+ is_symlink_cmd = cmd in ("symlink", "add")
assert os.path.islink(package_prefix) == is_symlink_cmd
def test_view_multiple_projections(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('libdwarf@20130207')
- install('extendee@1.0%gcc')
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("libdwarf@20130207")
+ install("extendee@1.0%gcc")
- viewpath = str(tmpdir.mkdir('view'))
+ viewpath = str(tmpdir.mkdir("view"))
view_projection = s_yaml.syaml_dict(
- [('extendee', '{name}-{compiler.name}'),
- ('all', '{name}-{version}')]
+ [("extendee", "{name}-{compiler.name}"), ("all", "{name}-{version}")]
)
projection_file = create_projection_file(tmpdir, view_projection)
- view('add', viewpath, '--projection-file={0}'.format(projection_file),
- 'libdwarf', 'extendee')
+ view("add", viewpath, "--projection-file={0}".format(projection_file), "libdwarf", "extendee")
- libdwarf_prefix = os.path.join(viewpath, 'libdwarf-20130207/libdwarf')
- extendee_prefix = os.path.join(viewpath, 'extendee-gcc/bin')
+ libdwarf_prefix = os.path.join(viewpath, "libdwarf-20130207/libdwarf")
+ extendee_prefix = os.path.join(viewpath, "extendee-gcc/bin")
assert os.path.exists(libdwarf_prefix)
assert os.path.exists(extendee_prefix)
def test_view_multiple_projections_all_first(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('libdwarf@20130207')
- install('extendee@1.0%gcc')
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("libdwarf@20130207")
+ install("extendee@1.0%gcc")
- viewpath = str(tmpdir.mkdir('view'))
+ viewpath = str(tmpdir.mkdir("view"))
view_projection = s_yaml.syaml_dict(
- [('all', '{name}-{version}'),
- ('extendee', '{name}-{compiler.name}')]
+ [("all", "{name}-{version}"), ("extendee", "{name}-{compiler.name}")]
)
projection_file = create_projection_file(tmpdir, view_projection)
- view('add', viewpath, '--projection-file={0}'.format(projection_file),
- 'libdwarf', 'extendee')
+ view("add", viewpath, "--projection-file={0}".format(projection_file), "libdwarf", "extendee")
- libdwarf_prefix = os.path.join(viewpath, 'libdwarf-20130207/libdwarf')
- extendee_prefix = os.path.join(viewpath, 'extendee-gcc/bin')
+ libdwarf_prefix = os.path.join(viewpath, "libdwarf-20130207/libdwarf")
+ extendee_prefix = os.path.join(viewpath, "extendee-gcc/bin")
assert os.path.exists(libdwarf_prefix)
assert os.path.exists(extendee_prefix)
-def test_view_external(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('externaltool')
- viewpath = str(tmpdir.mkdir('view'))
- output = view('symlink', viewpath, 'externaltool')
- assert 'Skipping external package: externaltool' in output
-
-
-def test_view_extension(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extendee')
- install('extension1@1.0')
- install('extension1@2.0')
- install('extension2@1.0')
- viewpath = str(tmpdir.mkdir('view'))
- view('symlink', viewpath, 'extension1@1.0')
- all_installed = extensions('--show', 'installed', 'extendee')
- assert 'extension1@1.0' in all_installed
- assert 'extension1@2.0' in all_installed
- assert 'extension2@1.0' in all_installed
- global_activated = extensions('--show', 'activated', 'extendee')
- assert 'extension1@1.0' not in global_activated
- assert 'extension1@2.0' not in global_activated
- assert 'extension2@1.0' not in global_activated
- view_activated = extensions('--show', 'activated',
- '-v', viewpath,
- 'extendee')
- assert 'extension1@1.0' in view_activated
- assert 'extension1@2.0' not in view_activated
- assert 'extension2@1.0' not in view_activated
- assert os.path.exists(os.path.join(viewpath, 'bin', 'extension1'))
+def test_view_external(tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery):
+ install("externaltool")
+ viewpath = str(tmpdir.mkdir("view"))
+ output = view("symlink", viewpath, "externaltool")
+ assert "Skipping external package: externaltool" in output
+
+
+def test_view_extension(tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery):
+ install("extendee")
+ install("extension1@1.0")
+ install("extension1@2.0")
+ install("extension2@1.0")
+ viewpath = str(tmpdir.mkdir("view"))
+ view("symlink", viewpath, "extension1@1.0")
+ all_installed = extensions("--show", "installed", "extendee")
+ assert "extension1@1.0" in all_installed
+ assert "extension1@2.0" in all_installed
+ assert "extension2@1.0" in all_installed
+ global_activated = extensions("--show", "activated", "extendee")
+ assert "extension1@1.0" not in global_activated
+ assert "extension1@2.0" not in global_activated
+ assert "extension2@1.0" not in global_activated
+ view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
+ assert "extension1@1.0" in view_activated
+ assert "extension1@2.0" not in view_activated
+ assert "extension2@1.0" not in view_activated
+ assert os.path.exists(os.path.join(viewpath, "bin", "extension1"))
def test_view_extension_projection(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extendee@1.0')
- install('extension1@1.0')
- install('extension1@2.0')
- install('extension2@1.0')
-
- viewpath = str(tmpdir.mkdir('view'))
- view_projection = {'all': '{name}-{version}'}
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("extendee@1.0")
+ install("extension1@1.0")
+ install("extension1@2.0")
+ install("extension2@1.0")
+
+ viewpath = str(tmpdir.mkdir("view"))
+ view_projection = {"all": "{name}-{version}"}
projection_file = create_projection_file(tmpdir, view_projection)
- view('symlink', viewpath, '--projection-file={0}'.format(projection_file),
- 'extension1@1.0')
-
- all_installed = extensions('--show', 'installed', 'extendee')
- assert 'extension1@1.0' in all_installed
- assert 'extension1@2.0' in all_installed
- assert 'extension2@1.0' in all_installed
- global_activated = extensions('--show', 'activated', 'extendee')
- assert 'extension1@1.0' not in global_activated
- assert 'extension1@2.0' not in global_activated
- assert 'extension2@1.0' not in global_activated
- view_activated = extensions('--show', 'activated',
- '-v', viewpath,
- 'extendee')
- assert 'extension1@1.0' in view_activated
- assert 'extension1@2.0' not in view_activated
- assert 'extension2@1.0' not in view_activated
-
- assert os.path.exists(os.path.join(viewpath, 'extendee-1.0',
- 'bin', 'extension1'))
+ view("symlink", viewpath, "--projection-file={0}".format(projection_file), "extension1@1.0")
+
+ all_installed = extensions("--show", "installed", "extendee")
+ assert "extension1@1.0" in all_installed
+ assert "extension1@2.0" in all_installed
+ assert "extension2@1.0" in all_installed
+ global_activated = extensions("--show", "activated", "extendee")
+ assert "extension1@1.0" not in global_activated
+ assert "extension1@2.0" not in global_activated
+ assert "extension2@1.0" not in global_activated
+ view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
+ assert "extension1@1.0" in view_activated
+ assert "extension1@2.0" not in view_activated
+ assert "extension2@1.0" not in view_activated
+
+ assert os.path.exists(os.path.join(viewpath, "extendee-1.0", "bin", "extension1"))
def test_view_extension_remove(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extendee')
- install('extension1@1.0')
- viewpath = str(tmpdir.mkdir('view'))
- view('symlink', viewpath, 'extension1@1.0')
- view('remove', viewpath, 'extension1@1.0')
- all_installed = extensions('--show', 'installed', 'extendee')
- assert 'extension1@1.0' in all_installed
- global_activated = extensions('--show', 'activated', 'extendee')
- assert 'extension1@1.0' not in global_activated
- view_activated = extensions('--show', 'activated',
- '-v', viewpath,
- 'extendee')
- assert 'extension1@1.0' not in view_activated
- assert not os.path.exists(os.path.join(viewpath, 'bin', 'extension1'))
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("extendee")
+ install("extension1@1.0")
+ viewpath = str(tmpdir.mkdir("view"))
+ view("symlink", viewpath, "extension1@1.0")
+ view("remove", viewpath, "extension1@1.0")
+ all_installed = extensions("--show", "installed", "extendee")
+ assert "extension1@1.0" in all_installed
+ global_activated = extensions("--show", "activated", "extendee")
+ assert "extension1@1.0" not in global_activated
+ view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
+ assert "extension1@1.0" not in view_activated
+ assert not os.path.exists(os.path.join(viewpath, "bin", "extension1"))
def test_view_extension_conflict(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extendee')
- install('extension1@1.0')
- install('extension1@2.0')
- viewpath = str(tmpdir.mkdir('view'))
- view('symlink', viewpath, 'extension1@1.0')
- output = view('symlink', viewpath, 'extension1@2.0')
- assert 'Package conflict detected' in output
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("extendee")
+ install("extension1@1.0")
+ install("extension1@2.0")
+ viewpath = str(tmpdir.mkdir("view"))
+ view("symlink", viewpath, "extension1@1.0")
+ output = view("symlink", viewpath, "extension1@2.0")
+ assert "Package conflict detected" in output
def test_view_extension_conflict_ignored(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extendee')
- install('extension1@1.0')
- install('extension1@2.0')
- viewpath = str(tmpdir.mkdir('view'))
- view('symlink', viewpath, 'extension1@1.0')
- view('symlink', viewpath, '-i', 'extension1@2.0')
- with open(os.path.join(viewpath, 'bin', 'extension1'), 'r') as fin:
- assert fin.read() == '1.0'
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("extendee")
+ install("extension1@1.0")
+ install("extension1@2.0")
+ viewpath = str(tmpdir.mkdir("view"))
+ view("symlink", viewpath, "extension1@1.0")
+ view("symlink", viewpath, "-i", "extension1@2.0")
+ with open(os.path.join(viewpath, "bin", "extension1"), "r") as fin:
+ assert fin.read() == "1.0"
def test_view_extension_global_activation(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extendee')
- install('extension1@1.0')
- install('extension1@2.0')
- install('extension2@1.0')
- viewpath = str(tmpdir.mkdir('view'))
- view('symlink', viewpath, 'extension1@1.0')
- activate('extension1@2.0')
- activate('extension2@1.0')
- all_installed = extensions('--show', 'installed', 'extendee')
- assert 'extension1@1.0' in all_installed
- assert 'extension1@2.0' in all_installed
- assert 'extension2@1.0' in all_installed
- global_activated = extensions('--show', 'activated', 'extendee')
- assert 'extension1@1.0' not in global_activated
- assert 'extension1@2.0' in global_activated
- assert 'extension2@1.0' in global_activated
- view_activated = extensions('--show', 'activated',
- '-v', viewpath,
- 'extendee')
- assert 'extension1@1.0' in view_activated
- assert 'extension1@2.0' not in view_activated
- assert 'extension2@1.0' not in view_activated
- assert os.path.exists(os.path.join(viewpath, 'bin', 'extension1'))
- assert not os.path.exists(os.path.join(viewpath, 'bin', 'extension2'))
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("extendee")
+ install("extension1@1.0")
+ install("extension1@2.0")
+ install("extension2@1.0")
+ viewpath = str(tmpdir.mkdir("view"))
+ view("symlink", viewpath, "extension1@1.0")
+ activate("extension1@2.0")
+ activate("extension2@1.0")
+ all_installed = extensions("--show", "installed", "extendee")
+ assert "extension1@1.0" in all_installed
+ assert "extension1@2.0" in all_installed
+ assert "extension2@1.0" in all_installed
+ global_activated = extensions("--show", "activated", "extendee")
+ assert "extension1@1.0" not in global_activated
+ assert "extension1@2.0" in global_activated
+ assert "extension2@1.0" in global_activated
+ view_activated = extensions("--show", "activated", "-v", viewpath, "extendee")
+ assert "extension1@1.0" in view_activated
+ assert "extension1@2.0" not in view_activated
+ assert "extension2@1.0" not in view_activated
+ assert os.path.exists(os.path.join(viewpath, "bin", "extension1"))
+ assert not os.path.exists(os.path.join(viewpath, "bin", "extension2"))
def test_view_extendee_with_global_activations(
- tmpdir, mock_packages, mock_archive, mock_fetch, config,
- install_mockery):
- install('extendee')
- install('extension1@1.0')
- install('extension1@2.0')
- install('extension2@1.0')
- viewpath = str(tmpdir.mkdir('view'))
- activate('extension1@2.0')
- output = view('symlink', viewpath, 'extension1@1.0')
- assert 'Error: Globally activated extensions cannot be used' in output
+ tmpdir, mock_packages, mock_archive, mock_fetch, config, install_mockery
+):
+ install("extendee")
+ install("extension1@1.0")
+ install("extension1@2.0")
+ install("extension2@1.0")
+ viewpath = str(tmpdir.mkdir("view"))
+ activate("extension1@2.0")
+ output = view("symlink", viewpath, "extension1@1.0")
+ assert "Error: Globally activated extensions cannot be used" in output
def test_view_fails_with_missing_projections_file(tmpdir):
- viewpath = str(tmpdir.mkdir('view'))
- projection_file = os.path.join(str(tmpdir), 'nonexistent')
+ viewpath = str(tmpdir.mkdir("view"))
+ projection_file = os.path.join(str(tmpdir), "nonexistent")
with pytest.raises(SystemExit):
- view('symlink', '--projection-file', projection_file, viewpath, 'foo')
+ view("symlink", "--projection-file", projection_file, viewpath, "foo")
diff --git a/lib/spack/spack/test/cmd_extensions.py b/lib/spack/spack/test/cmd_extensions.py
index dbe79e603a..e68ac434fc 100644
--- a/lib/spack/spack/test/cmd_extensions.py
+++ b/lib/spack/spack/test/cmd_extensions.py
@@ -14,13 +14,14 @@ import spack.config
import spack.extensions
import spack.main
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
class Extension:
"""Helper class to simplify the creation of simple command extension
directory structures with a conventional format for testing.
"""
+
def __init__(self, name, root):
"""Create a command extension.
@@ -33,7 +34,7 @@ class Extension:
self.pname = spack.cmd.python_name(name)
self.root = root
self.main = self.root.ensure(self.pname, dir=True)
- self.cmd = self.main.ensure('cmd', dir=True)
+ self.cmd = self.main.ensure("cmd", dir=True)
def add_command(self, command_name, contents):
"""Add a command to this command extension.
@@ -44,20 +45,21 @@ class Extension:
file."""
spack.cmd.require_cmd_name(command_name)
python_name = spack.cmd.python_name(command_name)
- cmd = self.cmd.ensure(python_name + '.py')
+ cmd = self.cmd.ensure(python_name + ".py")
cmd.write(contents)
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def extension_creator(tmpdir, config):
"""Create a basic extension command directory structure"""
+
@contextlib.contextmanager
- def _ce(extension_name='testcommand'):
- root = tmpdir.mkdir('spack-' + extension_name)
+ def _ce(extension_name="testcommand"):
+ root = tmpdir.mkdir("spack-" + extension_name)
extension = Extension(extension_name, root)
- with spack.config.override('config:extensions',
- [str(extension.root)]):
+ with spack.config.override("config:extensions", [str(extension.root)]):
yield extension
+
list_of_modules = list(sys.modules.keys())
try:
yield _ce
@@ -67,11 +69,13 @@ def extension_creator(tmpdir, config):
del sys.modules[module_name]
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def hello_world_extension(extension_creator):
"""Create an extension with a hello-world command."""
with extension_creator() as extension:
- extension.add_command('hello-world', """
+ extension.add_command(
+ "hello-world",
+ """
description = "hello world extension command"
section = "test command"
level = "long"
@@ -82,29 +86,33 @@ def setup_parser(subparser):
def hello_world(parser, args):
print('Hello world!')
-""")
+""",
+ )
yield extension
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def hello_world_cmd(hello_world_extension):
"""Create and return an invokable "hello-world" extension command."""
- yield spack.main.SpackCommand('hello-world')
+ yield spack.main.SpackCommand("hello-world")
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def hello_world_with_module_in_root(extension_creator):
"""Create a "hello-world" extension command with additional code in the
root folder.
"""
+
@contextlib.contextmanager
def _hwwmir(extension_name=None):
- with extension_creator(extension_name) \
- if extension_name else \
- extension_creator() as extension:
+ with extension_creator(
+ extension_name
+ ) if extension_name else extension_creator() as extension:
# Note that the namespace of the extension is derived from the
# fixture.
- extension.add_command('hello', """
+ extension.add_command(
+ "hello",
+ """
# Test an absolute import
from spack.extensions.{ext_pname}.implementation import hello_world
@@ -134,19 +142,23 @@ def hello(parser, args):
hello_folks()
elif args.subcommand == 'global':
print(global_message)
-""".format(ext_pname=extension.pname))
-
- extension.main.ensure('__init__.py')
- implementation \
- = extension.main.ensure('implementation.py')
- implementation.write("""
+""".format(
+ ext_pname=extension.pname
+ ),
+ )
+
+ extension.main.ensure("__init__.py")
+ implementation = extension.main.ensure("implementation.py")
+ implementation.write(
+ """
def hello_world():
print('Hello world!')
def hello_folks():
print('Hello folks!')
-""")
- yield spack.main.SpackCommand('hello')
+"""
+ )
+ yield spack.main.SpackCommand("hello")
yield _hwwmir
@@ -154,7 +166,7 @@ def hello_folks():
def test_simple_command_extension(hello_world_cmd):
"""Basic test of a functioning command."""
output = hello_world_cmd()
- assert 'Hello world!' in output
+ assert "Hello world!" in output
def test_multi_extension_search(hello_world_extension, extension_creator):
@@ -162,8 +174,8 @@ def test_multi_extension_search(hello_world_extension, extension_creator):
place we look.
"""
- with extension_creator('testcommand2'):
- assert ('Hello world') in spack.main.SpackCommand('hello-world')()
+ with extension_creator("testcommand2"):
+ assert ("Hello world") in spack.main.SpackCommand("hello-world")()
def test_duplicate_module_load(hello_world_cmd, capsys):
@@ -177,23 +189,23 @@ def test_duplicate_module_load(hello_world_cmd, capsys):
hw_cmd = spack.cmd.get_command(hello_world_cmd.command_name)
hw_cmd(parser, args)
captured = capsys.readouterr()
- assert captured == ('Hello world!\n', '')
+ assert captured == ("Hello world!\n", "")
-@pytest.mark.parametrize('extension_name',
- [None, 'hyphenated-extension'],
- ids=['simple', 'hyphenated_extension_name'])
+@pytest.mark.parametrize(
+ "extension_name", [None, "hyphenated-extension"], ids=["simple", "hyphenated_extension_name"]
+)
def test_command_with_import(extension_name, hello_world_with_module_in_root):
"""Ensure we can write a functioning command with multiple imported
subcommands, including where the extension name contains a hyphen.
"""
with hello_world_with_module_in_root(extension_name) as hello_world:
- output = hello_world('world')
- assert 'Hello world!' in output
- output = hello_world('folks')
- assert 'Hello folks!' in output
- output = hello_world('global')
- assert 'bar' in output
+ output = hello_world("world")
+ assert "Hello world!" in output
+ output = hello_world("folks")
+ assert "Hello folks!" in output
+ output = hello_world("global")
+ assert "bar" in output
def test_missing_command():
@@ -204,27 +216,24 @@ def test_missing_command():
spack.cmd.get_module("no-such-command")
-@pytest.mark.\
- parametrize('extension_path,expected_exception',
- [('/my/bad/extension',
- spack.extensions.ExtensionNamingError),
- ('', spack.extensions.ExtensionNamingError),
- ('/my/bad/spack--extra-hyphen',
- spack.extensions.ExtensionNamingError),
- ('/my/good/spack-extension',
- spack.extensions.CommandNotFoundError),
- ('/my/still/good/spack-extension/',
- spack.extensions.CommandNotFoundError),
- ('/my/spack-hyphenated-extension',
- spack.extensions.CommandNotFoundError)],
- ids=['no_stem', 'vacuous', 'leading_hyphen',
- 'basic_good', 'trailing_slash', 'hyphenated'])
+@pytest.mark.parametrize(
+ "extension_path,expected_exception",
+ [
+ ("/my/bad/extension", spack.extensions.ExtensionNamingError),
+ ("", spack.extensions.ExtensionNamingError),
+ ("/my/bad/spack--extra-hyphen", spack.extensions.ExtensionNamingError),
+ ("/my/good/spack-extension", spack.extensions.CommandNotFoundError),
+ ("/my/still/good/spack-extension/", spack.extensions.CommandNotFoundError),
+ ("/my/spack-hyphenated-extension", spack.extensions.CommandNotFoundError),
+ ],
+ ids=["no_stem", "vacuous", "leading_hyphen", "basic_good", "trailing_slash", "hyphenated"],
+)
def test_extension_naming(extension_path, expected_exception, config):
"""Ensure that we are correctly validating configured extension paths
for conformity with the rules: the basename should match
``spack-<name>``; <name> may have embedded hyphens but not begin with one.
"""
- with spack.config.override('config:extensions', [extension_path]):
+ with spack.config.override("config:extensions", [extension_path]):
with pytest.raises(expected_exception):
spack.cmd.get_module("no-such-command")
@@ -234,53 +243,51 @@ def test_missing_command_function(extension_creator, capsys):
expected command function defined.
"""
with extension_creator() as extension:
- extension.\
- add_command('bad-cmd',
- """\ndescription = "Empty command implementation"\n""")
+ extension.add_command("bad-cmd", """\ndescription = "Empty command implementation"\n""")
with pytest.raises(SystemExit):
- spack.cmd.get_module('bad-cmd')
+ spack.cmd.get_module("bad-cmd")
capture = capsys.readouterr()
assert "must define function 'bad_cmd'." in capture[1]
def test_get_command_paths(config):
"""Exercise the construction of extension command search paths."""
- extensions = ('extension-1', 'extension-2')
+ extensions = ("extension-1", "extension-2")
ext_paths = []
expected_cmd_paths = []
for ext in extensions:
- ext_path = os.path.join('my', 'path', 'to', 'spack-' + ext)
+ ext_path = os.path.join("my", "path", "to", "spack-" + ext)
ext_paths.append(ext_path)
- path = os.path.join(ext_path, spack.cmd.python_name(ext), 'cmd')
+ path = os.path.join(ext_path, spack.cmd.python_name(ext), "cmd")
path = os.path.abspath(path)
expected_cmd_paths.append(path)
- with spack.config.override('config:extensions', ext_paths):
+ with spack.config.override("config:extensions", ext_paths):
assert spack.extensions.get_command_paths() == expected_cmd_paths
def test_variable_in_extension_path(config, working_env):
"""Test variables in extension paths."""
- os.environ['_MY_VAR'] = os.path.join('my', 'var')
- ext_paths = [
- os.path.join("~", "${_MY_VAR}", "spack-extension-1")
- ]
+ os.environ["_MY_VAR"] = os.path.join("my", "var")
+ ext_paths = [os.path.join("~", "${_MY_VAR}", "spack-extension-1")]
# Home env variable is USERPROFILE on Windows
- home_env = 'USERPROFILE' if is_windows else 'HOME'
+ home_env = "USERPROFILE" if is_windows else "HOME"
expected_ext_paths = [
- os.path.join(os.environ[home_env], os.environ['_MY_VAR'], "spack-extension-1")
+ os.path.join(os.environ[home_env], os.environ["_MY_VAR"], "spack-extension-1")
]
- with spack.config.override('config:extensions', ext_paths):
+ with spack.config.override("config:extensions", ext_paths):
assert spack.extensions.get_extension_paths() == expected_ext_paths
-@pytest.mark.parametrize('command_name,contents,exception',
- [('bad-cmd', 'from oopsie.daisy import bad\n',
- ImportError),
- ('bad-cmd', """var = bad_function_call('blech')\n""",
- NameError),
- ('bad-cmd', ')\n', SyntaxError)],
- ids=['ImportError', 'NameError', 'SyntaxError'])
+@pytest.mark.parametrize(
+ "command_name,contents,exception",
+ [
+ ("bad-cmd", "from oopsie.daisy import bad\n", ImportError),
+ ("bad-cmd", """var = bad_function_call('blech')\n""", NameError),
+ ("bad-cmd", ")\n", SyntaxError),
+ ],
+ ids=["ImportError", "NameError", "SyntaxError"],
+)
def test_failing_command(command_name, contents, exception, extension_creator):
"""Ensure that the configured command fails to import with the specified
error.
diff --git a/lib/spack/spack/test/compilers/basics.py b/lib/spack/spack/test/compilers/basics.py
index 4d4794663d..6405acc7d1 100644
--- a/lib/spack/spack/test/compilers/basics.py
+++ b/lib/spack/spack/test/compilers/basics.py
@@ -23,21 +23,18 @@ from spack.util.executable import ProcessError
@pytest.fixture()
def make_args_for_version(monkeypatch):
-
- def _factory(version, path='/usr/bin/gcc'):
+ def _factory(version, path="/usr/bin/gcc"):
class MockOs(object):
pass
- compiler_name = 'gcc'
+ compiler_name = "gcc"
compiler_cls = compilers.class_for_compiler_name(compiler_name)
- monkeypatch.setattr(compiler_cls, 'cc_version', lambda x: version)
+ monkeypatch.setattr(compiler_cls, "cc_version", lambda x: version)
- compiler_id = compilers.CompilerID(
- os=MockOs, compiler_name=compiler_name, version=None
- )
- variation = compilers.NameVariation(prefix='', suffix='')
+ compiler_id = compilers.CompilerID(os=MockOs, compiler_name=compiler_name, version=None)
+ variation = compilers.NameVariation(prefix="", suffix="")
return compilers.DetectVersionArgs(
- id=compiler_id, variation=variation, language='cc', path=path
+ id=compiler_id, variation=variation, language="cc", path=path
)
return _factory
@@ -45,28 +42,26 @@ def make_args_for_version(monkeypatch):
def test_multiple_conflicting_compiler_definitions(mutable_config):
compiler_def = {
- 'compiler': {
- 'flags': {},
- 'modules': [],
- 'paths': {
- 'cc': 'cc',
- 'cxx': 'cxx',
- 'f77': 'null',
- 'fc': 'null'},
- 'extra_rpaths': [],
- 'operating_system': 'test',
- 'target': 'test',
- 'environment': {},
- 'spec': 'clang@0.0.0'}}
+ "compiler": {
+ "flags": {},
+ "modules": [],
+ "paths": {"cc": "cc", "cxx": "cxx", "f77": "null", "fc": "null"},
+ "extra_rpaths": [],
+ "operating_system": "test",
+ "target": "test",
+ "environment": {},
+ "spec": "clang@0.0.0",
+ }
+ }
compiler_config = [compiler_def, compiler_def]
- compiler_config[0]['compiler']['paths']['f77'] = 'f77'
- mutable_config.update_config('compilers', compiler_config)
+ compiler_config[0]["compiler"]["paths"]["f77"] = "f77"
+ mutable_config.update_config("compilers", compiler_config)
- arch_spec = spack.spec.ArchSpec(('test', 'test', 'test'))
- cspec = compiler_config[0]['compiler']['spec']
+ arch_spec = spack.spec.ArchSpec(("test", "test", "test"))
+ cspec = compiler_config[0]["compiler"]["spec"]
cmp = compilers.compiler_for_spec(cspec, arch_spec)
- assert cmp.f77 == 'f77'
+ assert cmp.f77 == "f77"
def test_get_compiler_duplicates(config):
@@ -74,7 +69,8 @@ def test_get_compiler_duplicates(config):
# the test configuration (so it is not actually a duplicate), but the
# method behaves the same.
cfg_file_to_duplicates = compilers.get_compiler_duplicates(
- 'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
+ "gcc@4.5.0", spack.spec.ArchSpec("cray-CNL-xeon")
+ )
assert len(cfg_file_to_duplicates) == 1
cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
@@ -83,20 +79,18 @@ def test_get_compiler_duplicates(config):
def test_all_compilers(config):
all_compilers = compilers.all_compilers()
- filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
- filtered = [x for x in filtered if x.operating_system == 'SuSE11']
+ filtered = [x for x in all_compilers if str(x.spec) == "clang@3.3"]
+ filtered = [x for x in filtered if x.operating_system == "SuSE11"]
assert len(filtered) == 1
-@pytest.mark.skipif(
- sys.version_info[0] == 2, reason='make_args_for_version requires python 3'
+@pytest.mark.skipif(sys.version_info[0] == 2, reason="make_args_for_version requires python 3")
+@pytest.mark.parametrize(
+ "input_version,expected_version,expected_error",
+ [(None, None, "Couldn't get version for compiler /usr/bin/gcc"), ("4.9", "4.9", None)],
)
-@pytest.mark.parametrize('input_version,expected_version,expected_error', [
- (None, None, "Couldn't get version for compiler /usr/bin/gcc"),
- ('4.9', '4.9', None)
-])
def test_version_detection_is_empty(
- make_args_for_version, input_version, expected_version, expected_error
+ make_args_for_version, input_version, expected_version, expected_error
):
args = make_args_for_version(version=input_version)
result, error = compilers.detect_version(args)
@@ -108,38 +102,26 @@ def test_version_detection_is_empty(
def test_compiler_flags_from_config_are_grouped():
compiler_entry = {
- 'spec': 'intel@17.0.2',
- 'operating_system': 'foo-os',
- 'paths': {
- 'cc': 'cc-path',
- 'cxx': 'cxx-path',
- 'fc': None,
- 'f77': None
- },
- 'flags': {
- 'cflags': '-O0 -foo-flag foo-val'
- },
- 'modules': None
+ "spec": "intel@17.0.2",
+ "operating_system": "foo-os",
+ "paths": {"cc": "cc-path", "cxx": "cxx-path", "fc": None, "f77": None},
+ "flags": {"cflags": "-O0 -foo-flag foo-val"},
+ "modules": None,
}
compiler = compilers.compiler_from_dict(compiler_entry)
- assert any(x == '-foo-flag foo-val' for x in compiler.flags['cflags'])
+ assert any(x == "-foo-flag foo-val" for x in compiler.flags["cflags"])
# Test behavior of flags and UnsupportedCompilerFlag.
# Utility function to test most flags.
default_compiler_entry = {
- 'spec': 'apple-clang@2.0.0',
- 'operating_system': 'foo-os',
- 'paths': {
- 'cc': 'cc-path',
- 'cxx': 'cxx-path',
- 'fc': 'fc-path',
- 'f77': 'f77-path'
- },
- 'flags': {},
- 'modules': None
+ "spec": "apple-clang@2.0.0",
+ "operating_system": "foo-os",
+ "paths": {"cc": "cc-path", "cxx": "cxx-path", "fc": "fc-path", "f77": "f77-path"},
+ "flags": {},
+ "modules": None,
}
@@ -148,13 +130,16 @@ class MockCompiler(Compiler):
def __init__(self):
super(MockCompiler, self).__init__(
cspec="badcompiler@1.0.0",
- operating_system=default_compiler_entry['operating_system'],
+ operating_system=default_compiler_entry["operating_system"],
target=None,
- paths=[default_compiler_entry['paths']['cc'],
- default_compiler_entry['paths']['cxx'],
- default_compiler_entry['paths']['fc'],
- default_compiler_entry['paths']['f77']],
- environment={})
+ paths=[
+ default_compiler_entry["paths"]["cc"],
+ default_compiler_entry["paths"]["cxx"],
+ default_compiler_entry["paths"]["fc"],
+ default_compiler_entry["paths"]["f77"],
+ ],
+ environment={},
+ )
def _get_compiler_link_paths(self, paths):
# Mock os.path.isdir so the link paths don't have to exist
@@ -178,7 +163,7 @@ class MockCompiler(Compiler):
def verbose_flag(self):
return self._verbose_flag
- required_libs = ['libgfortran']
+ required_libs = ["libgfortran"]
def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
@@ -187,52 +172,52 @@ def test_implicit_rpaths(dirs_with_libfiles, monkeypatch):
def try_all_dirs(*args):
return all_dirs
- monkeypatch.setattr(MockCompiler, '_get_compiler_link_paths', try_all_dirs)
+ monkeypatch.setattr(MockCompiler, "_get_compiler_link_paths", try_all_dirs)
- expected_rpaths = set(lib_to_dirs['libstdc++'] +
- lib_to_dirs['libgfortran'])
+ expected_rpaths = set(lib_to_dirs["libstdc++"] + lib_to_dirs["libgfortran"])
compiler = MockCompiler()
retrieved_rpaths = compiler.implicit_rpaths()
assert set(retrieved_rpaths) == expected_rpaths
-no_flag_dirs = ['/path/to/first/lib', '/path/to/second/lib64']
-no_flag_output = 'ld -L%s -L%s' % tuple(no_flag_dirs)
+no_flag_dirs = ["/path/to/first/lib", "/path/to/second/lib64"]
+no_flag_output = "ld -L%s -L%s" % tuple(no_flag_dirs)
-flag_dirs = ['/path/to/first/with/flag/lib', '/path/to/second/lib64']
-flag_output = 'ld -L%s -L%s' % tuple(flag_dirs)
+flag_dirs = ["/path/to/first/with/flag/lib", "/path/to/second/lib64"]
+flag_output = "ld -L%s -L%s" % tuple(flag_dirs)
def call_compiler(exe, *args, **kwargs):
# This method can replace Executable.__call__ to emulate a compiler that
# changes libraries depending on a flag.
- if '--correct-flag' in exe.exe:
+ if "--correct-flag" in exe.exe:
return flag_output
return no_flag_output
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize('exe,flagname', [
- ('cxx', ''),
- ('cxx', 'cxxflags'),
- ('cxx', 'cppflags'),
- ('cxx', 'ldflags'),
- ('cc', ''),
- ('cc', 'cflags'),
- ('cc', 'cppflags'),
- ('fc', ''),
- ('fc', 'fflags'),
- ('f77', 'fflags'),
- ('f77', 'cppflags'),
-])
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.parametrize(
+ "exe,flagname",
+ [
+ ("cxx", ""),
+ ("cxx", "cxxflags"),
+ ("cxx", "cppflags"),
+ ("cxx", "ldflags"),
+ ("cc", ""),
+ ("cc", "cflags"),
+ ("cc", "cppflags"),
+ ("fc", ""),
+ ("fc", "fflags"),
+ ("f77", "fflags"),
+ ("f77", "cppflags"),
+ ],
+)
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths(monkeypatch, exe, flagname):
# create fake compiler that emits mock verbose output
compiler = MockCompiler()
- monkeypatch.setattr(
- spack.util.executable.Executable, '__call__', call_compiler)
+ monkeypatch.setattr(spack.util.executable.Executable, "__call__", call_compiler)
# Grab executable path to test
paths = [getattr(compiler, exe)]
@@ -243,7 +228,7 @@ def test_get_compiler_link_paths(monkeypatch, exe, flagname):
if flagname:
# set flags and test
- setattr(compiler, 'flags', {flagname: ['--correct-flag']})
+ setattr(compiler, "flags", {flagname: ["--correct-flag"]})
dirs = compiler._get_compiler_link_paths(paths)
assert dirs == flag_dirs
@@ -267,30 +252,34 @@ def test_get_compiler_link_paths_no_verbose_flag():
assert dirs == []
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir):
- gcc = str(tmpdir.join('gcc'))
- with open(gcc, 'w') as f:
- f.write("""#!/bin/bash
+ gcc = str(tmpdir.join("gcc"))
+ with open(gcc, "w") as f:
+ f.write(
+ """#!/bin/bash
if [[ $ENV_SET == "1" && $MODULE_LOADED == "1" ]]; then
- echo '""" + no_flag_output + """'
+ echo '"""
+ + no_flag_output
+ + """'
fi
-""")
+"""
+ )
fs.set_executable(gcc)
# Set module load to turn compiler on
def module(*args):
- if args[0] == 'show':
- return ''
- elif args[0] == 'load':
- os.environ['MODULE_LOADED'] = "1"
- monkeypatch.setattr(spack.util.module_cmd, 'module', module)
+ if args[0] == "show":
+ return ""
+ elif args[0] == "load":
+ os.environ["MODULE_LOADED"] = "1"
+
+ monkeypatch.setattr(spack.util.module_cmd, "module", module)
compiler = MockCompiler()
- compiler.environment = {'set': {'ENV_SET': '1'}}
- compiler.modules = ['turn_on']
+ compiler.environment = {"set": {"ENV_SET": "1"}}
+ compiler.modules = ["turn_on"]
dirs = compiler._get_compiler_link_paths([gcc])
assert dirs == no_flag_dirs
@@ -303,7 +292,7 @@ def flag_value(flag, spec):
compiler = MockCompiler()
else:
compiler_entry = copy(default_compiler_entry)
- compiler_entry['spec'] = spec
+ compiler_entry["spec"] = spec
compiler = compilers.compiler_from_dict(compiler_entry)
return getattr(compiler, flag)
@@ -318,21 +307,21 @@ def unsupported_flag_test(flag, spec=None):
except spack.compiler.UnsupportedCompilerFlag:
caught_exception = True
- assert(caught_exception and "Expected exception not thrown.")
+ assert caught_exception and "Expected exception not thrown."
# Verify the expected flag value for the give compiler spec.
def supported_flag_test(flag, flag_value_ref, spec=None):
- assert(flag_value(flag, spec) == flag_value_ref)
+ assert flag_value(flag, spec) == flag_value_ref
# Tests for UnsupportedCompilerFlag exceptions from default
# implementations of flags.
def test_default_flags():
- supported_flag_test("cc_rpath_arg", "-Wl,-rpath,")
+ supported_flag_test("cc_rpath_arg", "-Wl,-rpath,")
supported_flag_test("cxx_rpath_arg", "-Wl,-rpath,")
supported_flag_test("f77_rpath_arg", "-Wl,-rpath,")
- supported_flag_test("fc_rpath_arg", "-Wl,-rpath,")
+ supported_flag_test("fc_rpath_arg", "-Wl,-rpath,")
supported_flag_test("linker_arg", "-Wl,")
unsupported_flag_test("openmp_flag")
unsupported_flag_test("cxx11_flag")
@@ -341,10 +330,10 @@ def test_default_flags():
supported_flag_test("cxx98_flag", "")
unsupported_flag_test("c99_flag")
unsupported_flag_test("c11_flag")
- supported_flag_test("cc_pic_flag", "-fPIC")
+ supported_flag_test("cc_pic_flag", "-fPIC")
supported_flag_test("cxx_pic_flag", "-fPIC")
supported_flag_test("f77_pic_flag", "-fPIC")
- supported_flag_test("fc_pic_flag", "-fPIC")
+ supported_flag_test("fc_pic_flag", "-fPIC")
supported_flag_test("debug_flags", ["-g"])
supported_flag_test("opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3"])
@@ -357,13 +346,11 @@ def test_arm_flags():
supported_flag_test("cxx17_flag", "-std=c++1z", "arm@1.0")
supported_flag_test("c99_flag", "-std=c99", "arm@1.0")
supported_flag_test("c11_flag", "-std=c11", "arm@1.0")
- supported_flag_test("cc_pic_flag", "-fPIC", "arm@1.0")
+ supported_flag_test("cc_pic_flag", "-fPIC", "arm@1.0")
supported_flag_test("cxx_pic_flag", "-fPIC", "arm@1.0")
supported_flag_test("f77_pic_flag", "-fPIC", "arm@1.0")
- supported_flag_test("fc_pic_flag", "-fPIC", "arm@1.0")
- supported_flag_test("opt_flags",
- ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast'],
- 'arm@1.0')
+ supported_flag_test("fc_pic_flag", "-fPIC", "arm@1.0")
+ supported_flag_test("opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast"], "arm@1.0")
def test_cce_flags():
@@ -382,23 +369,20 @@ def test_cce_flags():
unsupported_flag_test("c11_flag", "cce@8.4")
supported_flag_test("c11_flag", "-std=c11", "cce@9.0.1")
supported_flag_test("c11_flag", "-h std=c11,noconform,gnu", "cce@8.5")
- supported_flag_test("cc_pic_flag", "-h PIC", "cce@1.0")
+ supported_flag_test("cc_pic_flag", "-h PIC", "cce@1.0")
supported_flag_test("cxx_pic_flag", "-h PIC", "cce@1.0")
supported_flag_test("f77_pic_flag", "-h PIC", "cce@1.0")
- supported_flag_test("fc_pic_flag", "-h PIC", "cce@1.0")
- supported_flag_test("cc_pic_flag", "-fPIC", "cce@9.1.0")
+ supported_flag_test("fc_pic_flag", "-h PIC", "cce@1.0")
+ supported_flag_test("cc_pic_flag", "-fPIC", "cce@9.1.0")
supported_flag_test("cxx_pic_flag", "-fPIC", "cce@9.1.0")
supported_flag_test("f77_pic_flag", "-fPIC", "cce@9.1.0")
- supported_flag_test("fc_pic_flag", "-fPIC", "cce@9.1.0")
+ supported_flag_test("fc_pic_flag", "-fPIC", "cce@9.1.0")
supported_flag_test("stdcxx_libs", (), "cce@1.0")
- supported_flag_test("debug_flags", ['-g', '-G0', '-G1', '-G2', '-Gfast'],
- 'cce@1.0')
+ supported_flag_test("debug_flags", ["-g", "-G0", "-G1", "-G2", "-Gfast"], "cce@1.0")
def test_apple_clang_flags():
- supported_flag_test(
- "openmp_flag", "-Xpreprocessor -fopenmp", "apple-clang@2.0.0"
- )
+ supported_flag_test("openmp_flag", "-Xpreprocessor -fopenmp", "apple-clang@2.0.0")
unsupported_flag_test("cxx11_flag", "apple-clang@2.0.0")
supported_flag_test("cxx11_flag", "-std=c++11", "apple-clang@4.0.0")
unsupported_flag_test("cxx14_flag", "apple-clang@5.0.0")
@@ -429,32 +413,53 @@ def test_clang_flags():
supported_flag_test("c99_flag", "-std=c99", "clang@3.3")
unsupported_flag_test("c11_flag", "clang@6.0.0")
supported_flag_test("c11_flag", "-std=c11", "clang@6.1.0")
- supported_flag_test("cc_pic_flag", "-fPIC", "clang@3.3")
+ supported_flag_test("cc_pic_flag", "-fPIC", "clang@3.3")
supported_flag_test("cxx_pic_flag", "-fPIC", "clang@3.3")
supported_flag_test("f77_pic_flag", "-fPIC", "clang@3.3")
- supported_flag_test("fc_pic_flag", "-fPIC", "clang@3.3")
- supported_flag_test("debug_flags",
- ['-gcodeview', '-gdwarf-2', '-gdwarf-3', '-gdwarf-4',
- '-gdwarf-5', '-gline-tables-only', '-gmodules', '-gz',
- '-g'],
- 'clang@3.3')
- supported_flag_test("opt_flags",
- ['-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os', '-Oz',
- '-Og', '-O', '-O4'],
- 'clang@3.3')
+ supported_flag_test("fc_pic_flag", "-fPIC", "clang@3.3")
+ supported_flag_test(
+ "debug_flags",
+ [
+ "-gcodeview",
+ "-gdwarf-2",
+ "-gdwarf-3",
+ "-gdwarf-4",
+ "-gdwarf-5",
+ "-gline-tables-only",
+ "-gmodules",
+ "-gz",
+ "-g",
+ ],
+ "clang@3.3",
+ )
+ supported_flag_test(
+ "opt_flags",
+ ["-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz", "-Og", "-O", "-O4"],
+ "clang@3.3",
+ )
def test_aocc_flags():
- supported_flag_test("debug_flags",
- ['-gcodeview', '-gdwarf-2', '-gdwarf-3',
- '-gdwarf-4', '-gdwarf-5', '-gline-tables-only',
- '-gmodules', '-gz', '-g'],
- 'aocc@2.2.0')
- supported_flag_test("opt_flags",
- ['-O0', '-O1', '-O2', '-O3', '-Ofast',
- '-Os', '-Oz', '-Og',
- '-O', '-O4'],
- 'aocc@2.2.0')
+ supported_flag_test(
+ "debug_flags",
+ [
+ "-gcodeview",
+ "-gdwarf-2",
+ "-gdwarf-3",
+ "-gdwarf-4",
+ "-gdwarf-5",
+ "-gline-tables-only",
+ "-gmodules",
+ "-gz",
+ "-g",
+ ],
+ "aocc@2.2.0",
+ )
+ supported_flag_test(
+ "opt_flags",
+ ["-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os", "-Oz", "-Og", "-O", "-O4"],
+ "aocc@2.2.0",
+ )
supported_flag_test("stdcxx_libs", ("-lstdc++",), "aocc@2.2.0")
supported_flag_test("openmp_flag", "-fopenmp", "aocc@2.2.0")
@@ -482,12 +487,11 @@ def test_fj_flags():
supported_flag_test("cxx17_flag", "-std=c++17", "fj@4.0.0")
supported_flag_test("c99_flag", "-std=c99", "fj@4.0.0")
supported_flag_test("c11_flag", "-std=c11", "fj@4.0.0")
- supported_flag_test("cc_pic_flag", "-KPIC", "fj@4.0.0")
+ supported_flag_test("cc_pic_flag", "-KPIC", "fj@4.0.0")
supported_flag_test("cxx_pic_flag", "-KPIC", "fj@4.0.0")
supported_flag_test("f77_pic_flag", "-KPIC", "fj@4.0.0")
- supported_flag_test("fc_pic_flag", "-KPIC", "fj@4.0.0")
- supported_flag_test("opt_flags", ['-O0', '-O1', '-O2', '-O3', '-Ofast'],
- 'fj@4.0.0')
+ supported_flag_test("fc_pic_flag", "-KPIC", "fj@4.0.0")
+ supported_flag_test("opt_flags", ["-O0", "-O1", "-O2", "-O3", "-Ofast"], "fj@4.0.0")
supported_flag_test("debug_flags", "-g", "fj@4.0.0")
@@ -509,19 +513,17 @@ def test_gcc_flags():
supported_flag_test("c99_flag", "-std=c99", "gcc@4.5")
unsupported_flag_test("c11_flag", "gcc@4.6")
supported_flag_test("c11_flag", "-std=c11", "gcc@4.7")
- supported_flag_test("cc_pic_flag", "-fPIC", "gcc@4.0")
+ supported_flag_test("cc_pic_flag", "-fPIC", "gcc@4.0")
supported_flag_test("cxx_pic_flag", "-fPIC", "gcc@4.0")
supported_flag_test("f77_pic_flag", "-fPIC", "gcc@4.0")
- supported_flag_test("fc_pic_flag", "-fPIC", "gcc@4.0")
+ supported_flag_test("fc_pic_flag", "-fPIC", "gcc@4.0")
supported_flag_test("stdcxx_libs", ("-lstdc++",), "gcc@4.1")
- supported_flag_test("debug_flags",
- ['-g', '-gstabs+', '-gstabs', '-gxcoff+', '-gxcoff',
- '-gvms'],
- 'gcc@4.0')
- supported_flag_test("opt_flags",
- ['-O', '-O0', '-O1', '-O2', '-O3', '-Os', '-Ofast',
- '-Og'],
- 'gcc@4.0')
+ supported_flag_test(
+ "debug_flags", ["-g", "-gstabs+", "-gstabs", "-gxcoff+", "-gxcoff", "-gvms"], "gcc@4.0"
+ )
+ supported_flag_test(
+ "opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-Os", "-Ofast", "-Og"], "gcc@4.0"
+ )
def test_intel_flags():
@@ -537,17 +539,15 @@ def test_intel_flags():
supported_flag_test("c99_flag", "-std=c99", "intel@12.0")
unsupported_flag_test("c11_flag", "intel@15.0")
supported_flag_test("c11_flag", "-std=c1x", "intel@16.0")
- supported_flag_test("cc_pic_flag", "-fPIC", "intel@1.0")
+ supported_flag_test("cc_pic_flag", "-fPIC", "intel@1.0")
supported_flag_test("cxx_pic_flag", "-fPIC", "intel@1.0")
supported_flag_test("f77_pic_flag", "-fPIC", "intel@1.0")
- supported_flag_test("fc_pic_flag", "-fPIC", "intel@1.0")
+ supported_flag_test("fc_pic_flag", "-fPIC", "intel@1.0")
supported_flag_test("stdcxx_libs", ("-cxxlib",), "intel@1.0")
- supported_flag_test("debug_flags",
- ['-debug', '-g', '-g0', '-g1', '-g2', '-g3'],
- 'intel@1.0')
- supported_flag_test("opt_flags",
- ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os'],
- 'intel@1.0')
+ supported_flag_test("debug_flags", ["-debug", "-g", "-g0", "-g1", "-g2", "-g3"], "intel@1.0")
+ supported_flag_test(
+ "opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os"], "intel@1.0"
+ )
def test_oneapi_flags():
@@ -556,34 +556,33 @@ def test_oneapi_flags():
supported_flag_test("cxx14_flag", "-std=c++14", "oneapi@2020.8.0.0827")
supported_flag_test("c99_flag", "-std=c99", "oneapi@2020.8.0.0827")
supported_flag_test("c11_flag", "-std=c1x", "oneapi@2020.8.0.0827")
- supported_flag_test("cc_pic_flag", "-fPIC", "oneapi@2020.8.0.0827")
+ supported_flag_test("cc_pic_flag", "-fPIC", "oneapi@2020.8.0.0827")
supported_flag_test("cxx_pic_flag", "-fPIC", "oneapi@2020.8.0.0827")
supported_flag_test("f77_pic_flag", "-fPIC", "oneapi@2020.8.0.0827")
- supported_flag_test("fc_pic_flag", "-fPIC", "oneapi@2020.8.0.0827")
+ supported_flag_test("fc_pic_flag", "-fPIC", "oneapi@2020.8.0.0827")
supported_flag_test("stdcxx_libs", ("-cxxlib",), "oneapi@2020.8.0.0827")
- supported_flag_test("debug_flags",
- ['-debug', '-g', '-g0', '-g1', '-g2', '-g3'],
- 'oneapi@2020.8.0.0827')
- supported_flag_test("opt_flags",
- ['-O', '-O0', '-O1', '-O2', '-O3', '-Ofast', '-Os'],
- 'oneapi@2020.8.0.0827')
+ supported_flag_test(
+ "debug_flags", ["-debug", "-g", "-g0", "-g1", "-g2", "-g3"], "oneapi@2020.8.0.0827"
+ )
+ supported_flag_test(
+ "opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-Ofast", "-Os"], "oneapi@2020.8.0.0827"
+ )
def test_nag_flags():
supported_flag_test("openmp_flag", "-openmp", "nag@1.0")
supported_flag_test("cxx11_flag", "-std=c++11", "nag@1.0")
- supported_flag_test("cc_pic_flag", "-fPIC", "nag@1.0")
+ supported_flag_test("cc_pic_flag", "-fPIC", "nag@1.0")
supported_flag_test("cxx_pic_flag", "-fPIC", "nag@1.0")
- supported_flag_test("f77_pic_flag", "-PIC", "nag@1.0")
- supported_flag_test("fc_pic_flag", "-PIC", "nag@1.0")
- supported_flag_test("cc_rpath_arg", "-Wl,-rpath,", "nag@1.0")
+ supported_flag_test("f77_pic_flag", "-PIC", "nag@1.0")
+ supported_flag_test("fc_pic_flag", "-PIC", "nag@1.0")
+ supported_flag_test("cc_rpath_arg", "-Wl,-rpath,", "nag@1.0")
supported_flag_test("cxx_rpath_arg", "-Wl,-rpath,", "nag@1.0")
supported_flag_test("f77_rpath_arg", "-Wl,-Wl,,-rpath,,", "nag@1.0")
- supported_flag_test("fc_rpath_arg", "-Wl,-Wl,,-rpath,,", "nag@1.0")
+ supported_flag_test("fc_rpath_arg", "-Wl,-Wl,,-rpath,,", "nag@1.0")
supported_flag_test("linker_arg", "-Wl,-Wl,,", "nag@1.0")
- supported_flag_test("debug_flags", ['-g', '-gline', '-g90'], 'nag@1.0')
- supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'],
- 'nag@1.0')
+ supported_flag_test("debug_flags", ["-g", "-gline", "-g90"], "nag@1.0")
+ supported_flag_test("opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"], "nag@1.0")
def test_nvhpc_flags():
@@ -593,14 +592,13 @@ def test_nvhpc_flags():
supported_flag_test("cxx17_flag", "--c++17", "nvhpc@20.9")
supported_flag_test("c99_flag", "-c99", "nvhpc@20.9")
supported_flag_test("c11_flag", "-c11", "nvhpc@20.9")
- supported_flag_test("cc_pic_flag", "-fpic", "nvhpc@20.9")
+ supported_flag_test("cc_pic_flag", "-fpic", "nvhpc@20.9")
supported_flag_test("cxx_pic_flag", "-fpic", "nvhpc@20.9")
supported_flag_test("f77_pic_flag", "-fpic", "nvhpc@20.9")
- supported_flag_test("fc_pic_flag", "-fpic", "nvhpc@20.9")
- supported_flag_test("debug_flags", ['-g', '-gopt'], 'nvhpc@20.9')
- supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'],
- 'nvhpc@20.9')
- supported_flag_test("stdcxx_libs", ('-c++libs',), 'nvhpc@20.9')
+ supported_flag_test("fc_pic_flag", "-fpic", "nvhpc@20.9")
+ supported_flag_test("debug_flags", ["-g", "-gopt"], "nvhpc@20.9")
+ supported_flag_test("opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"], "nvhpc@20.9")
+ supported_flag_test("stdcxx_libs", ("-c++libs",), "nvhpc@20.9")
def test_pgi_flags():
@@ -610,14 +608,13 @@ def test_pgi_flags():
supported_flag_test("c99_flag", "-c99", "pgi@12.10")
unsupported_flag_test("c11_flag", "pgi@15.2")
supported_flag_test("c11_flag", "-c11", "pgi@15.3")
- supported_flag_test("cc_pic_flag", "-fpic", "pgi@1.0")
+ supported_flag_test("cc_pic_flag", "-fpic", "pgi@1.0")
supported_flag_test("cxx_pic_flag", "-fpic", "pgi@1.0")
supported_flag_test("f77_pic_flag", "-fpic", "pgi@1.0")
- supported_flag_test("fc_pic_flag", "-fpic", "pgi@1.0")
+ supported_flag_test("fc_pic_flag", "-fpic", "pgi@1.0")
supported_flag_test("stdcxx_libs", ("-pgc++libs",), "pgi@1.0")
- supported_flag_test("debug_flags", ['-g', '-gopt'], 'pgi@1.0')
- supported_flag_test("opt_flags", ['-O', '-O0', '-O1', '-O2', '-O3', '-O4'],
- 'pgi@1.0')
+ supported_flag_test("debug_flags", ["-g", "-gopt"], "pgi@1.0")
+ supported_flag_test("opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-O4"], "pgi@1.0")
def test_xl_flags():
@@ -630,18 +627,15 @@ def test_xl_flags():
unsupported_flag_test("c11_flag", "xl@12.0")
supported_flag_test("c11_flag", "-qlanglvl=extc1x", "xl@12.1")
supported_flag_test("c11_flag", "-std=gnu11", "xl@13.1.2")
- supported_flag_test("cc_pic_flag", "-qpic", "xl@1.0")
+ supported_flag_test("cc_pic_flag", "-qpic", "xl@1.0")
supported_flag_test("cxx_pic_flag", "-qpic", "xl@1.0")
supported_flag_test("f77_pic_flag", "-qpic", "xl@1.0")
- supported_flag_test("fc_pic_flag", "-qpic", "xl@1.0")
+ supported_flag_test("fc_pic_flag", "-qpic", "xl@1.0")
supported_flag_test("fflags", "-qzerosize", "xl@1.0")
- supported_flag_test("debug_flags",
- ['-g', '-g0', '-g1', '-g2', '-g8', '-g9'],
- 'xl@1.0')
- supported_flag_test("opt_flags",
- ['-O', '-O0', '-O1', '-O2', '-O3', '-O4', '-O5',
- '-Ofast'],
- 'xl@1.0')
+ supported_flag_test("debug_flags", ["-g", "-g0", "-g1", "-g2", "-g8", "-g9"], "xl@1.0")
+ supported_flag_test(
+ "opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-O4", "-O5", "-Ofast"], "xl@1.0"
+ )
def test_xl_r_flags():
@@ -654,96 +648,100 @@ def test_xl_r_flags():
unsupported_flag_test("c11_flag", "xl_r@12.0")
supported_flag_test("c11_flag", "-qlanglvl=extc1x", "xl_r@12.1")
supported_flag_test("c11_flag", "-std=gnu11", "xl_r@13.1.2")
- supported_flag_test("cc_pic_flag", "-qpic", "xl_r@1.0")
+ supported_flag_test("cc_pic_flag", "-qpic", "xl_r@1.0")
supported_flag_test("cxx_pic_flag", "-qpic", "xl_r@1.0")
supported_flag_test("f77_pic_flag", "-qpic", "xl_r@1.0")
- supported_flag_test("fc_pic_flag", "-qpic", "xl_r@1.0")
+ supported_flag_test("fc_pic_flag", "-qpic", "xl_r@1.0")
supported_flag_test("fflags", "-qzerosize", "xl_r@1.0")
- supported_flag_test("debug_flags",
- ['-g', '-g0', '-g1', '-g2', '-g8', '-g9'],
- 'xl@1.0')
- supported_flag_test("opt_flags",
- ['-O', '-O0', '-O1', '-O2', '-O3', '-O4', '-O5',
- '-Ofast'],
- 'xl@1.0')
-
-
-@pytest.mark.parametrize('compiler_spec,expected_result', [
- ('gcc@4.7.2', False), ('clang@3.3', False), ('clang@8.0.0', True)
-])
+ supported_flag_test("debug_flags", ["-g", "-g0", "-g1", "-g2", "-g8", "-g9"], "xl@1.0")
+ supported_flag_test(
+ "opt_flags", ["-O", "-O0", "-O1", "-O2", "-O3", "-O4", "-O5", "-Ofast"], "xl@1.0"
+ )
+
+
+@pytest.mark.parametrize(
+ "compiler_spec,expected_result",
+ [("gcc@4.7.2", False), ("clang@3.3", False), ("clang@8.0.0", True)],
+)
def test_detecting_mixed_toolchains(compiler_spec, expected_result, config):
compiler = spack.compilers.compilers_for_spec(compiler_spec).pop()
assert spack.compilers.is_mixed_toolchain(compiler) is expected_result
-@pytest.mark.regression('14798,13733')
+@pytest.mark.regression("14798,13733")
def test_raising_if_compiler_target_is_over_specific(config):
# Compiler entry with an overly specific target
- compilers = [{'compiler': {
- 'spec': 'gcc@9.0.1',
- 'paths': {
- 'cc': '/usr/bin/gcc-9',
- 'cxx': '/usr/bin/g++-9',
- 'f77': '/usr/bin/gfortran-9',
- 'fc': '/usr/bin/gfortran-9'
- },
- 'flags': {},
- 'operating_system': 'ubuntu18.04',
- 'target': 'haswell',
- 'modules': [],
- 'environment': {},
- 'extra_rpaths': []
- }}]
- arch_spec = spack.spec.ArchSpec(('linux', 'ubuntu18.04', 'haswell'))
- with spack.config.override('compilers', compilers):
+ compilers = [
+ {
+ "compiler": {
+ "spec": "gcc@9.0.1",
+ "paths": {
+ "cc": "/usr/bin/gcc-9",
+ "cxx": "/usr/bin/g++-9",
+ "f77": "/usr/bin/gfortran-9",
+ "fc": "/usr/bin/gfortran-9",
+ },
+ "flags": {},
+ "operating_system": "ubuntu18.04",
+ "target": "haswell",
+ "modules": [],
+ "environment": {},
+ "extra_rpaths": [],
+ }
+ }
+ ]
+ arch_spec = spack.spec.ArchSpec(("linux", "ubuntu18.04", "haswell"))
+ with spack.config.override("compilers", compilers):
cfg = spack.compilers.get_compiler_config()
with pytest.raises(ValueError):
- spack.compilers.get_compilers(cfg, 'gcc@9.0.1', arch_spec)
+ spack.compilers.get_compilers(cfg, "gcc@9.0.1", arch_spec)
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_compiler_get_real_version(working_env, monkeypatch, tmpdir):
# Test variables
- test_version = '2.2.2'
+ test_version = "2.2.2"
# Create compiler
- gcc = str(tmpdir.join('gcc'))
- with open(gcc, 'w') as f:
- f.write("""#!/bin/bash
+ gcc = str(tmpdir.join("gcc"))
+ with open(gcc, "w") as f:
+ f.write(
+ """#!/bin/bash
if [[ $CMP_ON == "1" ]]; then
echo "$CMP_VER"
fi
-""")
+"""
+ )
fs.set_executable(gcc)
# Add compiler to config
compiler_info = {
- 'spec': 'gcc@foo',
- 'paths': {
- 'cc': gcc,
- 'cxx': None,
- 'f77': None,
- 'fc': None,
+ "spec": "gcc@foo",
+ "paths": {
+ "cc": gcc,
+ "cxx": None,
+ "f77": None,
+ "fc": None,
},
- 'flags': {},
- 'operating_system': 'fake',
- 'target': 'fake',
- 'modules': ['turn_on'],
- 'environment': {
- 'set': {'CMP_VER': test_version},
+ "flags": {},
+ "operating_system": "fake",
+ "target": "fake",
+ "modules": ["turn_on"],
+ "environment": {
+ "set": {"CMP_VER": test_version},
},
- 'extra_rpaths': [],
+ "extra_rpaths": [],
}
- compiler_dict = {'compiler': compiler_info}
+ compiler_dict = {"compiler": compiler_info}
# Set module load to turn compiler on
def module(*args):
- if args[0] == 'show':
- return ''
- elif args[0] == 'load':
- os.environ['CMP_ON'] = "1"
- monkeypatch.setattr(spack.util.module_cmd, 'module', module)
+ if args[0] == "show":
+ return ""
+ elif args[0] == "load":
+ os.environ["CMP_ON"] = "1"
+
+ monkeypatch.setattr(spack.util.module_cmd, "module", module)
# Run and confirm output
compilers = spack.compilers.get_compilers([compiler_dict])
@@ -755,50 +753,54 @@ fi
def test_compiler_get_real_version_fails(working_env, monkeypatch, tmpdir):
# Test variables
- test_version = '2.2.2'
+ test_version = "2.2.2"
# Create compiler
- gcc = str(tmpdir.join('gcc'))
- with open(gcc, 'w') as f:
- f.write("""#!/bin/bash
+ gcc = str(tmpdir.join("gcc"))
+ with open(gcc, "w") as f:
+ f.write(
+ """#!/bin/bash
if [[ $CMP_ON == "1" ]]; then
echo "$CMP_VER"
fi
-""")
+"""
+ )
fs.set_executable(gcc)
# Add compiler to config
compiler_info = {
- 'spec': 'gcc@foo',
- 'paths': {
- 'cc': gcc,
- 'cxx': None,
- 'f77': None,
- 'fc': None,
+ "spec": "gcc@foo",
+ "paths": {
+ "cc": gcc,
+ "cxx": None,
+ "f77": None,
+ "fc": None,
},
- 'flags': {},
- 'operating_system': 'fake',
- 'target': 'fake',
- 'modules': ['turn_on'],
- 'environment': {
- 'set': {'CMP_VER': test_version},
+ "flags": {},
+ "operating_system": "fake",
+ "target": "fake",
+ "modules": ["turn_on"],
+ "environment": {
+ "set": {"CMP_VER": test_version},
},
- 'extra_rpaths': [],
+ "extra_rpaths": [],
}
- compiler_dict = {'compiler': compiler_info}
+ compiler_dict = {"compiler": compiler_info}
# Set module load to turn compiler on
def module(*args):
- if args[0] == 'show':
- return ''
- elif args[0] == 'load':
- os.environ['SPACK_TEST_CMP_ON'] = "1"
- monkeypatch.setattr(spack.util.module_cmd, 'module', module)
+ if args[0] == "show":
+ return ""
+ elif args[0] == "load":
+ os.environ["SPACK_TEST_CMP_ON"] = "1"
+
+ monkeypatch.setattr(spack.util.module_cmd, "module", module)
# Make compiler fail when getting implicit rpaths
def _call(*args, **kwargs):
raise ProcessError("Failed intentionally")
- monkeypatch.setattr(spack.util.executable.Executable, '__call__', _call)
+
+ monkeypatch.setattr(spack.util.executable.Executable, "__call__", _call)
# Run and no change to environment
compilers = spack.compilers.get_compilers([compiler_dict])
@@ -809,60 +811,64 @@ fi
assert False
except ProcessError:
# Confirm environment does not change after failed call
- assert 'SPACK_TEST_CMP_ON' not in os.environ
+ assert "SPACK_TEST_CMP_ON" not in os.environ
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Bash scripting unsupported on Windows (for now)")
+@pytest.mark.skipif(
+ sys.platform == "win32", reason="Bash scripting unsupported on Windows (for now)"
+)
def test_compiler_flags_use_real_version(working_env, monkeypatch, tmpdir):
# Create compiler
- gcc = str(tmpdir.join('gcc'))
- with open(gcc, 'w') as f:
- f.write("""#!/bin/bash
+ gcc = str(tmpdir.join("gcc"))
+ with open(gcc, "w") as f:
+ f.write(
+ """#!/bin/bash
echo "4.4.4"
-""") # Version for which c++11 flag is -std=c++0x
+"""
+ ) # Version for which c++11 flag is -std=c++0x
fs.set_executable(gcc)
# Add compiler to config
compiler_info = {
- 'spec': 'gcc@foo',
- 'paths': {
- 'cc': gcc,
- 'cxx': None,
- 'f77': None,
- 'fc': None,
+ "spec": "gcc@foo",
+ "paths": {
+ "cc": gcc,
+ "cxx": None,
+ "f77": None,
+ "fc": None,
},
- 'flags': {},
- 'operating_system': 'fake',
- 'target': 'fake',
- 'modules': ['turn_on'],
- 'environment': {},
- 'extra_rpaths': [],
+ "flags": {},
+ "operating_system": "fake",
+ "target": "fake",
+ "modules": ["turn_on"],
+ "environment": {},
+ "extra_rpaths": [],
}
- compiler_dict = {'compiler': compiler_info}
+ compiler_dict = {"compiler": compiler_info}
# Run and confirm output
compilers = spack.compilers.get_compilers([compiler_dict])
assert len(compilers) == 1
compiler = compilers[0]
flag = compiler.cxx11_flag
- assert flag == '-std=c++0x'
+ assert flag == "-std=c++0x"
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Apple Clang and XCode unsupported on Windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="Apple Clang and XCode unsupported on Windows")
def test_apple_clang_setup_environment(mock_executable, monkeypatch):
"""Test a code path that is taken only if the package uses
Xcode on MacOS.
"""
+
class MockPackage(object):
use_xcode = False
- apple_clang_cls = spack.compilers.class_for_compiler_name('apple-clang')
+ apple_clang_cls = spack.compilers.class_for_compiler_name("apple-clang")
compiler = apple_clang_cls(
- spack.spec.CompilerSpec('apple-clang@11.0.0'), 'catalina', 'x86_64', [
- '/usr/bin/clang', '/usr/bin/clang++', None, None
- ]
+ spack.spec.CompilerSpec("apple-clang@11.0.0"),
+ "catalina",
+ "x86_64",
+ ["/usr/bin/clang", "/usr/bin/clang++", None, None],
)
env = spack.util.environment.EnvironmentModifications()
# Check a package that doesn't use xcode and ensure we don't add changes
@@ -872,19 +878,25 @@ def test_apple_clang_setup_environment(mock_executable, monkeypatch):
assert not env
# Prepare mock executables to fake the Xcode environment
- xcrun = mock_executable('xcrun', """
+ xcrun = mock_executable(
+ "xcrun",
+ """
if [[ "$2" == "clang" ]] ; then
echo "/Library/Developer/CommandLineTools/usr/bin/clang"
fi
if [[ "$2" == "clang++" ]] ; then
echo "/Library/Developer/CommandLineTools/usr/bin/clang++"
fi
-""")
- mock_executable('xcode-select', """
+""",
+ )
+ mock_executable(
+ "xcode-select",
+ """
echo "/Library/Developer"
-""")
+""",
+ )
bin_dir = os.path.dirname(xcrun)
- monkeypatch.setenv('PATH', bin_dir, prepend=os.pathsep)
+ monkeypatch.setenv("PATH", bin_dir, prepend=os.pathsep)
def noop(*args, **kwargs):
pass
@@ -897,49 +909,53 @@ echo "/Library/Developer"
return real_listdir(path)
# Set a few operations to noop
- monkeypatch.setattr(shutil, 'copytree', noop)
- monkeypatch.setattr(os, 'unlink', noop)
- monkeypatch.setattr(os, 'symlink', noop)
- monkeypatch.setattr(os, 'listdir', _listdir)
+ monkeypatch.setattr(shutil, "copytree", noop)
+ monkeypatch.setattr(os, "unlink", noop)
+ monkeypatch.setattr(os, "symlink", noop)
+ monkeypatch.setattr(os, "listdir", _listdir)
# Qt is so far the only package that uses this code path, change
# introduced in https://github.com/spack/spack/pull/1832
pkg.use_xcode = True
compiler.setup_custom_environment(pkg, env)
assert len(env) == 3
- assert env.env_modifications[0].name == 'SPACK_CC'
- assert env.env_modifications[1].name == 'SPACK_CXX'
- assert env.env_modifications[2].name == 'DEVELOPER_DIR'
-
-
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize('xcode_select_output', [
- '', '/Library/Developer/CommandLineTools'
-])
-def test_xcode_not_available(
- xcode_select_output, mock_executable, monkeypatch
-):
+ assert env.env_modifications[0].name == "SPACK_CC"
+ assert env.env_modifications[1].name == "SPACK_CXX"
+ assert env.env_modifications[2].name == "DEVELOPER_DIR"
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.parametrize("xcode_select_output", ["", "/Library/Developer/CommandLineTools"])
+def test_xcode_not_available(xcode_select_output, mock_executable, monkeypatch):
# Prepare mock executables to fake the Xcode environment
- xcrun = mock_executable('xcrun', """
+ xcrun = mock_executable(
+ "xcrun",
+ """
if [[ "$2" == "clang" ]] ; then
echo "/Library/Developer/CommandLineTools/usr/bin/clang"
fi
if [[ "$2" == "clang++" ]] ; then
echo "/Library/Developer/CommandLineTools/usr/bin/clang++"
fi
- """)
- mock_executable('xcode-select', """
+ """,
+ )
+ mock_executable(
+ "xcode-select",
+ """
echo "{0}"
- """.format(xcode_select_output))
+ """.format(
+ xcode_select_output
+ ),
+ )
bin_dir = os.path.dirname(xcrun)
- monkeypatch.setenv('PATH', bin_dir, prepend=os.pathsep)
+ monkeypatch.setenv("PATH", bin_dir, prepend=os.pathsep)
# Prepare compiler
- apple_clang_cls = spack.compilers.class_for_compiler_name('apple-clang')
+ apple_clang_cls = spack.compilers.class_for_compiler_name("apple-clang")
compiler = apple_clang_cls(
- spack.spec.CompilerSpec('apple-clang@11.0.0'), 'catalina', 'x86_64', [
- '/usr/bin/clang', '/usr/bin/clang++', None, None
- ]
+ spack.spec.CompilerSpec("apple-clang@11.0.0"),
+ "catalina",
+ "x86_64",
+ ["/usr/bin/clang", "/usr/bin/clang++", None, None],
)
env = spack.util.environment.EnvironmentModifications()
@@ -954,7 +970,7 @@ def test_xcode_not_available(
@pytest.mark.enable_compiler_verification
def test_compiler_executable_verification_raises(tmpdir):
compiler = MockCompiler()
- compiler.cc = '/this/path/does/not/exist'
+ compiler.cc = "/this/path/does/not/exist"
with pytest.raises(spack.compiler.CompilerAccessError):
compiler.verify_executables()
@@ -963,13 +979,13 @@ def test_compiler_executable_verification_raises(tmpdir):
@pytest.mark.enable_compiler_verification
def test_compiler_executable_verification_success(tmpdir):
def prepare_executable(name):
- real = str(tmpdir.join('cc').ensure())
+ real = str(tmpdir.join("cc").ensure())
fs.set_executable(real)
setattr(compiler, name, real)
# setup mock compiler with real paths
compiler = MockCompiler()
- for name in ('cc', 'cxx', 'f77', 'fc'):
+ for name in ("cc", "cxx", "f77", "fc"):
prepare_executable(name)
# testing that this doesn't raise an error because the paths exist and
diff --git a/lib/spack/spack/test/compilers/detection.py b/lib/spack/spack/test/compilers/detection.py
index a0b81f768f..d4f3c83c10 100644
--- a/lib/spack/spack/test/compilers/detection.py
+++ b/lib/spack/spack/test/compilers/detection.py
@@ -27,294 +27,379 @@ import spack.util.module_cmd
from spack.operating_systems.cray_frontend import CrayFrontend
-@pytest.mark.parametrize('version_str,expected_version', [
- ('Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n'
- 'Target: aarch64--linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir:\n'
- '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n',
- '19.0.0.73'),
- ('Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n'
- 'Target: aarch64--linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir:\n'
- '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n',
- '19.3.1.75')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ (
+ "Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n"
+ "Target: aarch64--linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir:\n"
+ "/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
+ "19.0.0.73",
+ ),
+ (
+ "Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n"
+ "Target: aarch64--linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir:\n"
+ "/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n",
+ "19.3.1.75",
+ ),
+ ],
+)
def test_arm_version_detection(version_str, expected_version):
version = spack.compilers.arm.Arm.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- ('Cray C : Version 8.4.6 Mon Apr 15, 2019 12:13:39\n', '8.4.6'),
- ('Cray C++ : Version 8.4.6 Mon Apr 15, 2019 12:13:45\n', '8.4.6'),
- ('Cray Fortran : Version 8.4.6 Mon Apr 15, 2019 12:13:55\n', '8.4.6')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ ("Cray C : Version 8.4.6 Mon Apr 15, 2019 12:13:39\n", "8.4.6"),
+ ("Cray C++ : Version 8.4.6 Mon Apr 15, 2019 12:13:45\n", "8.4.6"),
+ ("Cray Fortran : Version 8.4.6 Mon Apr 15, 2019 12:13:55\n", "8.4.6"),
+ ],
+)
def test_cce_version_detection(version_str, expected_version):
version = spack.compilers.cce.Cce.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.regression('10191')
-@pytest.mark.parametrize('version_str,expected_version', [
- # macOS clang
- ('Apple clang version 11.0.0 (clang-1100.0.33.8)\n'
- 'Target: x86_64-apple-darwin18.7.0\n'
- 'Thread model: posix\n'
- 'InstalledDir: '
- '/Applications/Xcode.app/Contents/Developer/Toolchains/'
- 'XcodeDefault.xctoolchain/usr/bin\n',
- '11.0.0'),
- ('Apple LLVM version 7.0.2 (clang-700.1.81)\n'
- 'Target: x86_64-apple-darwin15.2.0\n'
- 'Thread model: posix\n', '7.0.2'),
-])
-def test_apple_clang_version_detection(
- version_str, expected_version
-):
- cls = spack.compilers.class_for_compiler_name('apple-clang')
+@pytest.mark.regression("10191")
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ # macOS clang
+ (
+ "Apple clang version 11.0.0 (clang-1100.0.33.8)\n"
+ "Target: x86_64-apple-darwin18.7.0\n"
+ "Thread model: posix\n"
+ "InstalledDir: "
+ "/Applications/Xcode.app/Contents/Developer/Toolchains/"
+ "XcodeDefault.xctoolchain/usr/bin\n",
+ "11.0.0",
+ ),
+ (
+ "Apple LLVM version 7.0.2 (clang-700.1.81)\n"
+ "Target: x86_64-apple-darwin15.2.0\n"
+ "Thread model: posix\n",
+ "7.0.2",
+ ),
+ ],
+)
+def test_apple_clang_version_detection(version_str, expected_version):
+ cls = spack.compilers.class_for_compiler_name("apple-clang")
version = cls.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.regression('10191')
-@pytest.mark.parametrize('version_str,expected_version', [
- # LLVM Clang
- ('clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n'
- 'Target: x86_64-pc-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /usr/bin\n', '6.0.1'),
- ('clang version 3.1 (trunk 149096)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n', '3.1'),
- ('clang version 8.0.0-3~ubuntu18.04.1 (tags/RELEASE_800/final)\n'
- 'Target: x86_64-pc-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /usr/bin\n', '8.0.0'),
- ('clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n'
- 'Target: x86_64-pc-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /usr/bin\n', '9.0.1'),
- ('clang version 8.0.0-3 (tags/RELEASE_800/final)\n'
- 'Target: aarch64-unknown-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /usr/bin\n', '8.0.0'),
- ('clang version 11.0.0\n'
- 'Target: aarch64-unknown-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /usr/bin\n', '11.0.0')
-])
+@pytest.mark.regression("10191")
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ # LLVM Clang
+ (
+ "clang version 6.0.1-svn334776-1~exp1~20181018152737.116 (branches/release_60)\n"
+ "Target: x86_64-pc-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /usr/bin\n",
+ "6.0.1",
+ ),
+ (
+ "clang version 3.1 (trunk 149096)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n",
+ "3.1",
+ ),
+ (
+ "clang version 8.0.0-3~ubuntu18.04.1 (tags/RELEASE_800/final)\n"
+ "Target: x86_64-pc-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /usr/bin\n",
+ "8.0.0",
+ ),
+ (
+ "clang version 9.0.1-+201911131414230800840845a1eea-1~exp1~20191113231141.78\n"
+ "Target: x86_64-pc-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /usr/bin\n",
+ "9.0.1",
+ ),
+ (
+ "clang version 8.0.0-3 (tags/RELEASE_800/final)\n"
+ "Target: aarch64-unknown-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /usr/bin\n",
+ "8.0.0",
+ ),
+ (
+ "clang version 11.0.0\n"
+ "Target: aarch64-unknown-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /usr/bin\n",
+ "11.0.0",
+ ),
+ ],
+)
def test_clang_version_detection(version_str, expected_version):
- version = spack.compilers.clang.Clang.extract_version_from_output(
- version_str
- )
+ version = spack.compilers.clang.Clang.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- # C compiler
- ('fcc (FCC) 4.0.0a 20190314\n'
- 'simulating gcc version 6.1\n'
- 'Copyright FUJITSU LIMITED 2019',
- '4.0.0a'),
- # C++ compiler
- ('FCC (FCC) 4.0.0a 20190314\n'
- 'simulating gcc version 6.1\n'
- 'Copyright FUJITSU LIMITED 2019',
- '4.0.0a'),
- # Fortran compiler
- ('frt (FRT) 4.0.0a 20190314\n'
- 'Copyright FUJITSU LIMITED 2019',
- '4.0.0a')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ # C compiler
+ (
+ "fcc (FCC) 4.0.0a 20190314\n"
+ "simulating gcc version 6.1\n"
+ "Copyright FUJITSU LIMITED 2019",
+ "4.0.0a",
+ ),
+ # C++ compiler
+ (
+ "FCC (FCC) 4.0.0a 20190314\n"
+ "simulating gcc version 6.1\n"
+ "Copyright FUJITSU LIMITED 2019",
+ "4.0.0a",
+ ),
+ # Fortran compiler
+ ("frt (FRT) 4.0.0a 20190314\n" "Copyright FUJITSU LIMITED 2019", "4.0.0a"),
+ ],
+)
def test_fj_version_detection(version_str, expected_version):
version = spack.compilers.fj.Fj.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- # Output of -dumpversion changed to return only major from GCC 7
- ('4.4.7\n', '4.4.7'),
- ('7\n', '7')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ # Output of -dumpversion changed to return only major from GCC 7
+ ("4.4.7\n", "4.4.7"),
+ ("7\n", "7"),
+ ],
+)
def test_gcc_version_detection(version_str, expected_version):
version = spack.compilers.gcc.Gcc.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- ('icpc (ICC) 12.1.5 20120612\n'
- 'Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n',
- '12.1.5'),
- ('ifort (IFORT) 12.1.5 20120612\n'
- 'Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n',
- '12.1.5')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ (
+ "icpc (ICC) 12.1.5 20120612\n"
+ "Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n",
+ "12.1.5",
+ ),
+ (
+ "ifort (IFORT) 12.1.5 20120612\n"
+ "Copyright (C) 1985-2012 Intel Corporation. All rights reserved.\n",
+ "12.1.5",
+ ),
+ ],
+)
def test_intel_version_detection(version_str, expected_version):
- version = spack.compilers.intel.Intel.extract_version_from_output(
- version_str
- )
+ version = spack.compilers.intel.Intel.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- ( # ICX/ICPX
- 'Intel(R) oneAPI DPC++ Compiler 2021.1.2 (2020.10.0.1214)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /made/up/path',
- '2021.1.2'
- ),
- ( # ICX/ICPX
- 'Intel(R) oneAPI DPC++ Compiler 2021.2.0 (2021.2.0.20210317)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /made/up/path',
- '2021.2.0'
- ),
- ( # ICX/ICPX
- 'Intel(R) oneAPI DPC++/C++ Compiler 2021.3.0 (2021.3.0.20210619)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /made/up/path',
- '2021.3.0'
- ),
- ( # ICX/ICPX
- 'Intel(R) oneAPI DPC++/C++ Compiler 2021.4.0 (2021.4.0.20210924)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n'
- 'InstalledDir: /made/up/path',
- '2021.4.0'
- ),
- ( # IFX
- 'ifx (IFORT) 2021.1.2 Beta 20201214\n'
- 'Copyright (C) 1985-2020 Intel Corporation. All rights reserved.',
- '2021.1.2'
- ),
- ( # IFX
- 'ifx (IFORT) 2021.2.0 Beta 20210317\n'
- 'Copyright (C) 1985-2020 Intel Corporation. All rights reserved.',
- '2021.2.0'
- ),
- ( # IFX
- 'ifx (IFORT) 2021.3.0 Beta 20210619\n'
- 'Copyright (C) 1985-2020 Intel Corporation. All rights reserved.',
- '2021.3.0'
- ),
- ( # IFX
- 'ifx (IFORT) 2021.4.0 Beta 20210924\n'
- 'Copyright (C) 1985-2021 Intel Corporation. All rights reserved.',
- '2021.4.0'
- ),
- ( # IFX
- 'ifx (IFORT) 2022.0.0 20211123\n'
- 'Copyright (C) 1985-2021 Intel Corporation. All rights reserved.',
- '2022.0.0'
- ),
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ ( # ICX/ICPX
+ "Intel(R) oneAPI DPC++ Compiler 2021.1.2 (2020.10.0.1214)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /made/up/path",
+ "2021.1.2",
+ ),
+ ( # ICX/ICPX
+ "Intel(R) oneAPI DPC++ Compiler 2021.2.0 (2021.2.0.20210317)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /made/up/path",
+ "2021.2.0",
+ ),
+ ( # ICX/ICPX
+ "Intel(R) oneAPI DPC++/C++ Compiler 2021.3.0 (2021.3.0.20210619)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /made/up/path",
+ "2021.3.0",
+ ),
+ ( # ICX/ICPX
+ "Intel(R) oneAPI DPC++/C++ Compiler 2021.4.0 (2021.4.0.20210924)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n"
+ "InstalledDir: /made/up/path",
+ "2021.4.0",
+ ),
+ ( # IFX
+ "ifx (IFORT) 2021.1.2 Beta 20201214\n"
+ "Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
+ "2021.1.2",
+ ),
+ ( # IFX
+ "ifx (IFORT) 2021.2.0 Beta 20210317\n"
+ "Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
+ "2021.2.0",
+ ),
+ ( # IFX
+ "ifx (IFORT) 2021.3.0 Beta 20210619\n"
+ "Copyright (C) 1985-2020 Intel Corporation. All rights reserved.",
+ "2021.3.0",
+ ),
+ ( # IFX
+ "ifx (IFORT) 2021.4.0 Beta 20210924\n"
+ "Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
+ "2021.4.0",
+ ),
+ ( # IFX
+ "ifx (IFORT) 2022.0.0 20211123\n"
+ "Copyright (C) 1985-2021 Intel Corporation. All rights reserved.",
+ "2022.0.0",
+ ),
+ ],
+)
def test_oneapi_version_detection(version_str, expected_version):
- version = spack.compilers.oneapi.Oneapi.extract_version_from_output(
- version_str
- )
+ version = spack.compilers.oneapi.Oneapi.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- ('NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n'
- 'Product NPL6A60NA for x86-64 Linux\n', '6.0')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ (
+ "NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n"
+ "Product NPL6A60NA for x86-64 Linux\n",
+ "6.0",
+ )
+ ],
+)
def test_nag_version_detection(version_str, expected_version):
version = spack.compilers.nag.Nag.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- # C compiler on x86-64
- ('nvc 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # C++ compiler on x86-64
- ('nvc++ 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # Fortran compiler on x86-64
- ('nvfortran 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # C compiler on Power
- ('nvc 20.9-0 linuxpower target on Linuxpower\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # C++ compiler on Power
- ('nvc++ 20.9-0 linuxpower target on Linuxpower\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # Fortran compiler on Power
- ('nvfortran 20.9-0 linuxpower target on Linuxpower\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # C compiler on Arm
- ('nvc 20.9-0 linuxarm64 target on aarch64 Linux\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # C++ compiler on Arm
- ('nvc++ 20.9-0 linuxarm64 target on aarch64 Linux\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9'),
- # Fortran compiler on Arm
- ('nvfortran 20.9-0 linuxarm64 target on aarch64 Linux\n'
- 'NVIDIA Compilers and Tools\n'
- 'Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.',
- '20.9')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ # C compiler on x86-64
+ (
+ "nvc 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # C++ compiler on x86-64
+ (
+ "nvc++ 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # Fortran compiler on x86-64
+ (
+ "nvfortran 20.9-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # C compiler on Power
+ (
+ "nvc 20.9-0 linuxpower target on Linuxpower\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # C++ compiler on Power
+ (
+ "nvc++ 20.9-0 linuxpower target on Linuxpower\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # Fortran compiler on Power
+ (
+ "nvfortran 20.9-0 linuxpower target on Linuxpower\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # C compiler on Arm
+ (
+ "nvc 20.9-0 linuxarm64 target on aarch64 Linux\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # C++ compiler on Arm
+ (
+ "nvc++ 20.9-0 linuxarm64 target on aarch64 Linux\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ # Fortran compiler on Arm
+ (
+ "nvfortran 20.9-0 linuxarm64 target on aarch64 Linux\n"
+ "NVIDIA Compilers and Tools\n"
+ "Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.",
+ "20.9",
+ ),
+ ],
+)
def test_nvhpc_version_detection(version_str, expected_version):
- version = spack.compilers.nvhpc.Nvhpc.extract_version_from_output(
- version_str
- )
+ version = spack.compilers.nvhpc.Nvhpc.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- # Output on x86-64
- ('pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge\n'
- 'The Portland Group - PGI Compilers and Tools\n'
- 'Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.\n',
- '15.10'),
- # Output on PowerPC
- ('pgcc 17.4-0 linuxpower target on Linuxpower\n'
- 'PGI Compilers and Tools\n'
- 'Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.\n',
- '17.4'),
- # Output when LLVM-enabled
- ('pgcc-llvm 18.4-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n'
- 'PGI Compilers and Tools\n'
- 'Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.\n',
- '18.4')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ # Output on x86-64
+ (
+ "pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge\n"
+ "The Portland Group - PGI Compilers and Tools\n"
+ "Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.\n",
+ "15.10",
+ ),
+ # Output on PowerPC
+ (
+ "pgcc 17.4-0 linuxpower target on Linuxpower\n"
+ "PGI Compilers and Tools\n"
+ "Copyright (c) 2017, NVIDIA CORPORATION. All rights reserved.\n",
+ "17.4",
+ ),
+ # Output when LLVM-enabled
+ (
+ "pgcc-llvm 18.4-0 LLVM 64-bit target on x86-64 Linux -tp haswell\n"
+ "PGI Compilers and Tools\n"
+ "Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.\n",
+ "18.4",
+ ),
+ ],
+)
def test_pgi_version_detection(version_str, expected_version):
version = spack.compilers.pgi.Pgi.extract_version_from_output(version_str)
assert version == expected_version
-@pytest.mark.parametrize('version_str,expected_version', [
- ('IBM XL C/C++ for Linux, V11.1 (5724-X14)\n'
- 'Version: 11.01.0000.0000\n', '11.1'),
- ('IBM XL Fortran for Linux, V13.1 (5724-X16)\n'
- 'Version: 13.01.0000.0000\n', '13.1'),
- ('IBM XL C/C++ for AIX, V11.1 (5724-X13)\n'
- 'Version: 11.01.0000.0009\n', '11.1'),
- ('IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0\n'
- 'Version: 09.00.0000.0017\n', '9.0')
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ ("IBM XL C/C++ for Linux, V11.1 (5724-X14)\n" "Version: 11.01.0000.0000\n", "11.1"),
+ ("IBM XL Fortran for Linux, V13.1 (5724-X16)\n" "Version: 13.01.0000.0000\n", "13.1"),
+ ("IBM XL C/C++ for AIX, V11.1 (5724-X13)\n" "Version: 11.01.0000.0009\n", "11.1"),
+ (
+ "IBM XL C/C++ Advanced Edition for Blue Gene/P, V9.0\n" "Version: 09.00.0000.0017\n",
+ "9.0",
+ ),
+ ],
+)
def test_xl_version_detection(version_str, expected_version):
version = spack.compilers.xl.Xl.extract_version_from_output(version_str)
assert version == expected_version
@@ -323,40 +408,40 @@ def test_xl_version_detection(version_str, expected_version):
assert version == expected_version
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize('compiler,version', [
- ('gcc', '8.1.0'),
- ('gcc', '1.0.0-foo'),
- ('pgi', '19.1'),
- ('pgi', '19.1a'),
- ('intel', '9.0.0'),
- ('intel', '0.0.0-foobar')
- # ('oneapi', '2021.1'),
- # ('oneapi', '2021.1-foobar')
-])
-def test_cray_frontend_compiler_detection(
- compiler, version, tmpdir, monkeypatch, working_env
-):
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.parametrize(
+ "compiler,version",
+ [
+ ("gcc", "8.1.0"),
+ ("gcc", "1.0.0-foo"),
+ ("pgi", "19.1"),
+ ("pgi", "19.1a"),
+ ("intel", "9.0.0"),
+ ("intel", "0.0.0-foobar")
+ # ('oneapi', '2021.1'),
+ # ('oneapi', '2021.1-foobar')
+ ],
+)
+def test_cray_frontend_compiler_detection(compiler, version, tmpdir, monkeypatch, working_env):
"""Test that the Cray frontend properly finds compilers form modules"""
# setup the fake compiler directory
compiler_dir = tmpdir.join(compiler)
- compiler_exe = compiler_dir.join('cc').ensure()
+ compiler_exe = compiler_dir.join("cc").ensure()
fs.set_executable(str(compiler_exe))
# mock modules
def _module(cmd, *args):
- module_name = '%s/%s' % (compiler, version)
- module_contents = 'prepend-path PATH %s' % compiler_dir
- if cmd == 'avail':
- return module_name if compiler in args[0] else ''
- if cmd == 'show':
- return module_contents if module_name in args else ''
- monkeypatch.setattr(spack.operating_systems.cray_frontend, 'module',
- _module)
+ module_name = "%s/%s" % (compiler, version)
+ module_contents = "prepend-path PATH %s" % compiler_dir
+ if cmd == "avail":
+ return module_name if compiler in args[0] else ""
+ if cmd == "show":
+ return module_contents if module_name in args else ""
+
+ monkeypatch.setattr(spack.operating_systems.cray_frontend, "module", _module)
# remove PATH variable
- os.environ.pop('PATH', None)
+ os.environ.pop("PATH", None)
# get a CrayFrontend object
cray_fe_os = CrayFrontend()
@@ -365,31 +450,40 @@ def test_cray_frontend_compiler_detection(
assert paths == [str(compiler_dir)]
-@pytest.mark.parametrize('version_str,expected_version', [
- # This applies to C,C++ and FORTRAN compiler
- ('AMD clang version 12.0.0 (CLANG: AOCC_3_1_0-Build#126 2021_06_07)'
- '(based on LLVM Mirror.Version.12.0.0)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n', '3.1.0'
- ),
- ('AMD clang version 12.0.0 (CLANG: AOCC_3.0.0-Build#78 2020_12_10)'
- '(based on LLVM Mirror.Version.12.0.0)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n', '3.0.0'
- ),
- ('AMD clang version 11.0.0 (CLANG: AOCC_2.3.0-Build#85 2020_11_10)'
- '(based on LLVM Mirror.Version.11.0.0)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n', '2.3.0'
- ),
- ('AMD clang version 10.0.0 (CLANG: AOCC_2.2.0-Build#93 2020_06_25)'
- '(based on LLVM Mirror.Version.10.0.0)\n'
- 'Target: x86_64-unknown-linux-gnu\n'
- 'Thread model: posix\n', '2.2.0'
- )
-])
+@pytest.mark.parametrize(
+ "version_str,expected_version",
+ [
+ # This applies to C,C++ and FORTRAN compiler
+ (
+ "AMD clang version 12.0.0 (CLANG: AOCC_3_1_0-Build#126 2021_06_07)"
+ "(based on LLVM Mirror.Version.12.0.0)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n",
+ "3.1.0",
+ ),
+ (
+ "AMD clang version 12.0.0 (CLANG: AOCC_3.0.0-Build#78 2020_12_10)"
+ "(based on LLVM Mirror.Version.12.0.0)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n",
+ "3.0.0",
+ ),
+ (
+ "AMD clang version 11.0.0 (CLANG: AOCC_2.3.0-Build#85 2020_11_10)"
+ "(based on LLVM Mirror.Version.11.0.0)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n",
+ "2.3.0",
+ ),
+ (
+ "AMD clang version 10.0.0 (CLANG: AOCC_2.2.0-Build#93 2020_06_25)"
+ "(based on LLVM Mirror.Version.10.0.0)\n"
+ "Target: x86_64-unknown-linux-gnu\n"
+ "Thread model: posix\n",
+ "2.2.0",
+ ),
+ ],
+)
def test_aocc_version_detection(version_str, expected_version):
- version = spack.compilers.aocc.Aocc.extract_version_from_output(
- version_str
- )
+ version = spack.compilers.aocc.Aocc.extract_version_from_output(version_str)
assert version == expected_version
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 845504ff0c..d4f417239f 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -24,7 +24,7 @@ from spack.concretize import find_spec
from spack.spec import Spec
from spack.version import ver
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
def check_spec(abstract, concrete):
@@ -68,21 +68,32 @@ def check_concretize(abstract_spec):
@pytest.fixture(
params=[
# no_deps
- 'libelf', 'libelf@0.8.13',
+ "libelf",
+ "libelf@0.8.13",
# dag
- 'callpath', 'mpileaks', 'libelf',
+ "callpath",
+ "mpileaks",
+ "libelf",
# variant
- 'mpich+debug', 'mpich~debug', 'mpich debug=True', 'mpich',
+ "mpich+debug",
+ "mpich~debug",
+ "mpich debug=True",
+ "mpich",
# compiler flags
'mpich cppflags="-O3"',
# with virtual
- 'mpileaks ^mpi', 'mpileaks ^mpi@:1.1', 'mpileaks ^mpi@2:',
- 'mpileaks ^mpi@2.1', 'mpileaks ^mpi@2.2', 'mpileaks ^mpi@2.2',
- 'mpileaks ^mpi@:1', 'mpileaks ^mpi@1.2:2',
+ "mpileaks ^mpi",
+ "mpileaks ^mpi@:1.1",
+ "mpileaks ^mpi@2:",
+ "mpileaks ^mpi@2.1",
+ "mpileaks ^mpi@2.2",
+ "mpileaks ^mpi@2.2",
+ "mpileaks ^mpi@:1",
+ "mpileaks ^mpi@1.2:2",
# conflict not triggered
- 'conflict',
- 'conflict%clang~foo',
- 'conflict-parent%gcc'
+ "conflict",
+ "conflict%clang~foo",
+ "conflict-parent%gcc",
]
)
def spec(request):
@@ -90,39 +101,48 @@ def spec(request):
return request.param
-@pytest.fixture(params=[
- # Mocking the host detection
- 'haswell', 'broadwell', 'skylake', 'icelake',
- # Using preferred targets from packages.yaml
- 'icelake-preference', 'cannonlake-preference'
-])
+@pytest.fixture(
+ params=[
+ # Mocking the host detection
+ "haswell",
+ "broadwell",
+ "skylake",
+ "icelake",
+ # Using preferred targets from packages.yaml
+ "icelake-preference",
+ "cannonlake-preference",
+ ]
+)
def current_host(request, monkeypatch):
# is_preference is not empty if we want to supply the
# preferred target via packages.yaml
- cpu, _, is_preference = request.param.partition('-')
+ cpu, _, is_preference = request.param.partition("-")
target = archspec.cpu.TARGETS[cpu]
- monkeypatch.setattr(spack.platforms.Test, 'default', cpu)
- monkeypatch.setattr(spack.platforms.Test, 'front_end', cpu)
+ monkeypatch.setattr(spack.platforms.Test, "default", cpu)
+ monkeypatch.setattr(spack.platforms.Test, "front_end", cpu)
if not is_preference:
- monkeypatch.setattr(archspec.cpu, 'host', lambda: target)
+ monkeypatch.setattr(archspec.cpu, "host", lambda: target)
yield target
else:
- with spack.config.override('packages:all', {'target': [cpu]}):
+ with spack.config.override("packages:all", {"target": [cpu]}):
yield target
@pytest.fixture()
def repo_with_changing_recipe(tmpdir_factory, mutable_mock_repo):
- repo_namespace = 'changing'
+ repo_namespace = "changing"
repo_dir = tmpdir_factory.mktemp(repo_namespace)
- repo_dir.join('repo.yaml').write("""
+ repo_dir.join("repo.yaml").write(
+ """
repo:
namespace: changing
-""", ensure=True)
+""",
+ ensure=True,
+ )
- packages_dir = repo_dir.ensure('packages', dir=True)
+ packages_dir = repo_dir.ensure("packages", dir=True)
root_pkg_str = """
class Root(Package):
homepage = "http://www.example.com"
@@ -133,9 +153,7 @@ class Root(Package):
conflicts('changing~foo')
"""
- packages_dir.join('root', 'package.py').write(
- root_pkg_str, ensure=True
- )
+ packages_dir.join("root", "package.py").write(root_pkg_str, ensure=True)
changing_template = """
class Changing(Package):
@@ -160,9 +178,9 @@ class Changing(Package):
class _ChangingPackage(object):
default_context = [
- ('delete_version', True),
- ('delete_variant', False),
- ('add_variant', False)
+ ("delete_version", True),
+ ("delete_variant", False),
+ ("add_variant", False),
]
def __init__(self, repo_directory):
@@ -179,28 +197,22 @@ class Changing(Package):
# TODO: this mocks a change in the recipe that should happen in a
# TODO: different process space. Leaving this comment as a hint
# TODO: in case tests using this fixture start failing.
- if sys.modules.get('spack.pkg.changing.changing'):
- del sys.modules['spack.pkg.changing.changing']
- del sys.modules['spack.pkg.changing.root']
- del sys.modules['spack.pkg.changing']
+ if sys.modules.get("spack.pkg.changing.changing"):
+ del sys.modules["spack.pkg.changing.changing"]
+ del sys.modules["spack.pkg.changing.root"]
+ del sys.modules["spack.pkg.changing"]
# Change the recipe
t = jinja2.Template(changing_template)
changing_pkg_str = t.render(**context)
- packages_dir.join('changing', 'package.py').write(
- changing_pkg_str, ensure=True
- )
+ packages_dir.join("changing", "package.py").write(changing_pkg_str, ensure=True)
# Re-add the repository
self.repo = spack.repo.Repo(str(self.repo_dir))
mutable_mock_repo.put_first(self.repo)
_changing_pkg = _ChangingPackage(repo_dir)
- _changing_pkg.change({
- 'delete_version': False,
- 'delete_variant': False,
- 'add_variant': False
- })
+ _changing_pkg.change({"delete_version": False, "delete_variant": False, "add_variant": False})
return _changing_pkg
@@ -208,12 +220,15 @@ class Changing(Package):
@pytest.fixture()
def additional_repo_with_c(tmpdir_factory, mutable_mock_repo):
"""Add a repository with a simple package"""
- repo_dir = tmpdir_factory.mktemp('myrepo')
- repo_dir.join('repo.yaml').write("""
+ repo_dir = tmpdir_factory.mktemp("myrepo")
+ repo_dir.join("repo.yaml").write(
+ """
repo:
namespace: myrepo
-""", ensure=True)
- packages_dir = repo_dir.ensure('packages', dir=True)
+""",
+ ensure=True,
+ )
+ packages_dir = repo_dir.ensure("packages", dir=True)
package_py = """
class C(Package):
homepage = "http://www.example.com"
@@ -221,7 +236,7 @@ class C(Package):
version(1.0, sha256='abcde')
"""
- packages_dir.join('c', 'package.py').write(package_py, ensure=True)
+ packages_dir.join("c", "package.py").write(package_py, ensure=True)
repo = spack.repo.Repo(str(repo_dir))
mutable_mock_repo.put_first(repo)
return repo
@@ -230,133 +245,122 @@ class C(Package):
# This must use the mutable_config fixture because the test
# adjusting_default_target_based_on_compiler uses the current_host fixture,
# which changes the config.
-@pytest.mark.usefixtures('mutable_config', 'mock_packages')
+@pytest.mark.usefixtures("mutable_config", "mock_packages")
class TestConcretize(object):
def test_concretize(self, spec):
check_concretize(spec)
def test_concretize_mention_build_dep(self):
- spec = check_concretize('cmake-client ^cmake@3.4.3')
+ spec = check_concretize("cmake-client ^cmake@3.4.3")
# Check parent's perspective of child
- to_dependencies = spec.edges_to_dependencies(name='cmake')
+ to_dependencies = spec.edges_to_dependencies(name="cmake")
assert len(to_dependencies) == 1
- assert set(to_dependencies[0].deptypes) == set(['build'])
+ assert set(to_dependencies[0].deptypes) == set(["build"])
# Check child's perspective of parent
- cmake = spec['cmake']
- from_dependents = cmake.edges_from_dependents(name='cmake-client')
+ cmake = spec["cmake"]
+ from_dependents = cmake.edges_from_dependents(name="cmake-client")
assert len(from_dependents) == 1
- assert set(from_dependents[0].deptypes) == set(['build'])
+ assert set(from_dependents[0].deptypes) == set(["build"])
def test_concretize_preferred_version(self):
- spec = check_concretize('python')
- assert spec.versions == ver('2.7.11')
- spec = check_concretize('python@3.5.1')
- assert spec.versions == ver('3.5.1')
+ spec = check_concretize("python")
+ assert spec.versions == ver("2.7.11")
+ spec = check_concretize("python@3.5.1")
+ assert spec.versions == ver("3.5.1")
def test_concretize_with_restricted_virtual(self):
- check_concretize('mpileaks ^mpich2')
+ check_concretize("mpileaks ^mpich2")
- concrete = check_concretize('mpileaks ^mpich2@1.1')
- assert concrete['mpich2'].satisfies('mpich2@1.1')
+ concrete = check_concretize("mpileaks ^mpich2@1.1")
+ assert concrete["mpich2"].satisfies("mpich2@1.1")
- concrete = check_concretize('mpileaks ^mpich2@1.2')
- assert concrete['mpich2'].satisfies('mpich2@1.2')
+ concrete = check_concretize("mpileaks ^mpich2@1.2")
+ assert concrete["mpich2"].satisfies("mpich2@1.2")
- concrete = check_concretize('mpileaks ^mpich2@:1.5')
- assert concrete['mpich2'].satisfies('mpich2@:1.5')
+ concrete = check_concretize("mpileaks ^mpich2@:1.5")
+ assert concrete["mpich2"].satisfies("mpich2@:1.5")
- concrete = check_concretize('mpileaks ^mpich2@:1.3')
- assert concrete['mpich2'].satisfies('mpich2@:1.3')
+ concrete = check_concretize("mpileaks ^mpich2@:1.3")
+ assert concrete["mpich2"].satisfies("mpich2@:1.3")
- concrete = check_concretize('mpileaks ^mpich2@:1.2')
- assert concrete['mpich2'].satisfies('mpich2@:1.2')
+ concrete = check_concretize("mpileaks ^mpich2@:1.2")
+ assert concrete["mpich2"].satisfies("mpich2@:1.2")
- concrete = check_concretize('mpileaks ^mpich2@:1.1')
- assert concrete['mpich2'].satisfies('mpich2@:1.1')
+ concrete = check_concretize("mpileaks ^mpich2@:1.1")
+ assert concrete["mpich2"].satisfies("mpich2@:1.1")
- concrete = check_concretize('mpileaks ^mpich2@1.1:')
- assert concrete['mpich2'].satisfies('mpich2@1.1:')
+ concrete = check_concretize("mpileaks ^mpich2@1.1:")
+ assert concrete["mpich2"].satisfies("mpich2@1.1:")
- concrete = check_concretize('mpileaks ^mpich2@1.5:')
- assert concrete['mpich2'].satisfies('mpich2@1.5:')
+ concrete = check_concretize("mpileaks ^mpich2@1.5:")
+ assert concrete["mpich2"].satisfies("mpich2@1.5:")
- concrete = check_concretize('mpileaks ^mpich2@1.3.1:1.4')
- assert concrete['mpich2'].satisfies('mpich2@1.3.1:1.4')
+ concrete = check_concretize("mpileaks ^mpich2@1.3.1:1.4")
+ assert concrete["mpich2"].satisfies("mpich2@1.3.1:1.4")
def test_concretize_enable_disable_compiler_existence_check(self):
with spack.concretize.enable_compiler_existence_check():
- with pytest.raises(
- spack.concretize.UnavailableCompilerVersionError):
- check_concretize('dttop %gcc@100.100')
+ with pytest.raises(spack.concretize.UnavailableCompilerVersionError):
+ check_concretize("dttop %gcc@100.100")
with spack.concretize.disable_compiler_existence_check():
- spec = check_concretize('dttop %gcc@100.100')
- assert spec.satisfies('%gcc@100.100')
- assert spec['dtlink3'].satisfies('%gcc@100.100')
+ spec = check_concretize("dttop %gcc@100.100")
+ assert spec.satisfies("%gcc@100.100")
+ assert spec["dtlink3"].satisfies("%gcc@100.100")
def test_concretize_with_provides_when(self):
"""Make sure insufficient versions of MPI are not in providers list when
we ask for some advanced version.
"""
repo = spack.repo.path
- assert not any(
- s.satisfies('mpich2@:1.0') for s in repo.providers_for('mpi@2.1')
- )
- assert not any(
- s.satisfies('mpich2@:1.1') for s in repo.providers_for('mpi@2.2')
- )
- assert not any(
- s.satisfies('mpich@:1') for s in repo.providers_for('mpi@2')
- )
- assert not any(
- s.satisfies('mpich@:1') for s in repo.providers_for('mpi@3')
- )
- assert not any(
- s.satisfies('mpich2') for s in repo.providers_for('mpi@3')
- )
+ assert not any(s.satisfies("mpich2@:1.0") for s in repo.providers_for("mpi@2.1"))
+ assert not any(s.satisfies("mpich2@:1.1") for s in repo.providers_for("mpi@2.2"))
+ assert not any(s.satisfies("mpich@:1") for s in repo.providers_for("mpi@2"))
+ assert not any(s.satisfies("mpich@:1") for s in repo.providers_for("mpi@3"))
+ assert not any(s.satisfies("mpich2") for s in repo.providers_for("mpi@3"))
def test_provides_handles_multiple_providers_of_same_version(self):
- """
- """
- providers = spack.repo.path.providers_for('mpi@3.0')
+ """ """
+ providers = spack.repo.path.providers_for("mpi@3.0")
# Note that providers are repo-specific, so we don't misinterpret
# providers, but vdeps are not namespace-specific, so we can
# associate vdeps across repos.
- assert Spec('builtin.mock.multi-provider-mpi@1.10.3') in providers
- assert Spec('builtin.mock.multi-provider-mpi@1.10.2') in providers
- assert Spec('builtin.mock.multi-provider-mpi@1.10.1') in providers
- assert Spec('builtin.mock.multi-provider-mpi@1.10.0') in providers
- assert Spec('builtin.mock.multi-provider-mpi@1.8.8') in providers
+ assert Spec("builtin.mock.multi-provider-mpi@1.10.3") in providers
+ assert Spec("builtin.mock.multi-provider-mpi@1.10.2") in providers
+ assert Spec("builtin.mock.multi-provider-mpi@1.10.1") in providers
+ assert Spec("builtin.mock.multi-provider-mpi@1.10.0") in providers
+ assert Spec("builtin.mock.multi-provider-mpi@1.8.8") in providers
def test_different_compilers_get_different_flags(self):
- client = Spec('cmake-client %gcc@4.7.2 platform=test os=fe target=fe' +
- ' ^cmake %clang@3.5 platform=test os=fe target=fe')
+ client = Spec(
+ "cmake-client %gcc@4.7.2 platform=test os=fe target=fe"
+ + " ^cmake %clang@3.5 platform=test os=fe target=fe"
+ )
client.concretize()
- cmake = client['cmake']
- assert set(client.compiler_flags['cflags']) == set(['-O0', '-g'])
- assert set(cmake.compiler_flags['cflags']) == set(['-O3'])
- assert set(client.compiler_flags['fflags']) == set(['-O0', '-g'])
- assert not set(cmake.compiler_flags['fflags'])
+ cmake = client["cmake"]
+ assert set(client.compiler_flags["cflags"]) == set(["-O0", "-g"])
+ assert set(cmake.compiler_flags["cflags"]) == set(["-O3"])
+ assert set(client.compiler_flags["fflags"]) == set(["-O0", "-g"])
+ assert not set(cmake.compiler_flags["fflags"])
def test_architecture_inheritance(self):
"""test_architecture_inheritance is likely to fail with an
UnavailableCompilerVersionError if the architecture is concretized
incorrectly.
"""
- spec = Spec('cmake-client %gcc@4.7.2 os=fe ^ cmake')
+ spec = Spec("cmake-client %gcc@4.7.2 os=fe ^ cmake")
spec.concretize()
- assert spec['cmake'].architecture == spec.architecture
+ assert spec["cmake"].architecture == spec.architecture
def test_architecture_deep_inheritance(self, mock_targets):
"""Make sure that indirect dependencies receive architecture
information from the root even when partial architecture information
is provided by an intermediate dependency.
"""
- spec_str = ('mpileaks %gcc@4.5.0 os=CNL target=nocona'
- ' ^dyninst os=CNL ^callpath os=CNL')
+ spec_str = "mpileaks %gcc@4.5.0 os=CNL target=nocona" " ^dyninst os=CNL ^callpath os=CNL"
spec = Spec(spec_str).concretized()
for s in spec.traverse(root=False):
assert s.architecture.target == spec.architecture.target
@@ -364,260 +368,199 @@ class TestConcretize(object):
def test_compiler_flags_from_user_are_grouped(self):
spec = Spec('a%gcc cflags="-O -foo-flag foo-val" platform=test')
spec.concretize()
- cflags = spec.compiler_flags['cflags']
- assert any(x == '-foo-flag foo-val' for x in cflags)
+ cflags = spec.compiler_flags["cflags"]
+ assert any(x == "-foo-flag foo-val" for x in cflags)
def concretize_multi_provider(self):
- s = Spec('mpileaks ^multi-provider-mpi@3.0')
+ s = Spec("mpileaks ^multi-provider-mpi@3.0")
s.concretize()
- assert s['mpi'].version == ver('1.10.3')
+ assert s["mpi"].version == ver("1.10.3")
def test_concretize_dependent_with_singlevalued_variant_type(self):
- s = Spec('singlevalue-variant-dependent-type')
+ s = Spec("singlevalue-variant-dependent-type")
s.concretize()
- @pytest.mark.parametrize("spec,version", [
- ('dealii', 'develop'),
- ('xsdk', '0.4.0'),
- ])
+ @pytest.mark.parametrize(
+ "spec,version",
+ [
+ ("dealii", "develop"),
+ ("xsdk", "0.4.0"),
+ ],
+ )
def concretize_difficult_packages(self, a, b):
"""Test a couple of large packages that are often broken due
to current limitations in the concretizer"""
- s = Spec(a + '@' + b)
+ s = Spec(a + "@" + b)
s.concretize()
assert s[a].version == ver(b)
def test_concretize_two_virtuals(self):
"""Test a package with multiple virtual dependencies."""
- Spec('hypre').concretize()
+ Spec("hypre").concretize()
- def test_concretize_two_virtuals_with_one_bound(
- self, mutable_mock_repo
- ):
+ def test_concretize_two_virtuals_with_one_bound(self, mutable_mock_repo):
"""Test a package with multiple virtual dependencies and one preset."""
- Spec('hypre ^openblas').concretize()
+ Spec("hypre ^openblas").concretize()
def test_concretize_two_virtuals_with_two_bound(self):
"""Test a package with multiple virtual deps and two of them preset."""
- Spec('hypre ^openblas ^netlib-lapack').concretize()
+ Spec("hypre ^openblas ^netlib-lapack").concretize()
def test_concretize_two_virtuals_with_dual_provider(self):
"""Test a package with multiple virtual dependencies and force a provider
that provides both.
"""
- Spec('hypre ^openblas-with-lapack').concretize()
+ Spec("hypre ^openblas-with-lapack").concretize()
- def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(
- self
- ):
+ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
"""Test a package with multiple virtual dependencies and force a
provider that provides both, and another conflicting package that
provides one.
"""
- s = Spec('hypre ^openblas-with-lapack ^netlib-lapack')
+ s = Spec("hypre ^openblas-with-lapack ^netlib-lapack")
with pytest.raises(spack.error.SpackError):
s.concretize()
- @pytest.mark.skipif(sys.platform == 'win32', reason='No Compiler for Arch on Win')
+ @pytest.mark.skipif(sys.platform == "win32", reason="No Compiler for Arch on Win")
def test_no_matching_compiler_specs(self, mock_low_high_config):
# only relevant when not building compilers as needed
with spack.concretize.enable_compiler_existence_check():
- s = Spec('a %gcc@0.0.0')
- with pytest.raises(
- spack.concretize.UnavailableCompilerVersionError):
+ s = Spec("a %gcc@0.0.0")
+ with pytest.raises(spack.concretize.UnavailableCompilerVersionError):
s.concretize()
def test_no_compilers_for_arch(self):
- s = Spec('a arch=linux-rhel0-x86_64')
+ s = Spec("a arch=linux-rhel0-x86_64")
with pytest.raises(spack.error.SpackError):
s.concretize()
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
- spec = Spec('callpath ^mpi@10.0')
- assert len(spec.dependencies(name='mpi')) == 1
- assert 'fake' not in spec
+ spec = Spec("callpath ^mpi@10.0")
+ assert len(spec.dependencies(name="mpi")) == 1
+ assert "fake" not in spec
spec.concretize()
- assert len(spec.dependencies(name='zmpi')) == 1
- assert all(not d.dependencies(name='mpi') for d in spec.traverse())
- assert all(x in spec for x in ('zmpi', 'mpi'))
+ assert len(spec.dependencies(name="zmpi")) == 1
+ assert all(not d.dependencies(name="mpi") for d in spec.traverse())
+ assert all(x in spec for x in ("zmpi", "mpi"))
- edges_to_zmpi = spec.edges_to_dependencies(name='zmpi')
+ edges_to_zmpi = spec.edges_to_dependencies(name="zmpi")
assert len(edges_to_zmpi) == 1
- assert 'fake' in edges_to_zmpi[0].spec
+ assert "fake" in edges_to_zmpi[0].spec
- def test_virtual_is_fully_expanded_for_mpileaks(
- self
- ):
- spec = Spec('mpileaks ^mpi@10.0')
- assert len(spec.dependencies(name='mpi')) == 1
- assert 'fake' not in spec
+ def test_virtual_is_fully_expanded_for_mpileaks(self):
+ spec = Spec("mpileaks ^mpi@10.0")
+ assert len(spec.dependencies(name="mpi")) == 1
+ assert "fake" not in spec
spec.concretize()
- assert len(spec.dependencies(name='zmpi')) == 1
- assert len(spec.dependencies(name='callpath')) == 1
+ assert len(spec.dependencies(name="zmpi")) == 1
+ assert len(spec.dependencies(name="callpath")) == 1
- callpath = spec.dependencies(name='callpath')[0]
- assert len(callpath.dependencies(name='zmpi')) == 1
+ callpath = spec.dependencies(name="callpath")[0]
+ assert len(callpath.dependencies(name="zmpi")) == 1
- zmpi = callpath.dependencies(name='zmpi')[0]
- assert len(zmpi.dependencies(name='fake')) == 1
+ zmpi = callpath.dependencies(name="zmpi")[0]
+ assert len(zmpi.dependencies(name="fake")) == 1
- assert all(not d.dependencies(name='mpi') for d in spec.traverse())
- assert all(x in spec for x in ('zmpi', 'mpi'))
+ assert all(not d.dependencies(name="mpi") for d in spec.traverse())
+ assert all(x in spec for x in ("zmpi", "mpi"))
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
- spec = Spec('indirect-mpich')
+ spec = Spec("indirect-mpich")
spec.normalize()
spec.concretize()
- @pytest.mark.parametrize('compiler_str', [
- 'clang', 'gcc', 'gcc@4.5.0', 'clang@:3.3.0'
- ])
+ @pytest.mark.parametrize("compiler_str", ["clang", "gcc", "gcc@4.5.0", "clang@:3.3.0"])
def test_compiler_inheritance(self, compiler_str):
- spec_str = 'mpileaks %{0}'.format(compiler_str)
+ spec_str = "mpileaks %{0}".format(compiler_str)
spec = Spec(spec_str).concretized()
- assert spec['libdwarf'].compiler.satisfies(compiler_str)
- assert spec['libelf'].compiler.satisfies(compiler_str)
+ assert spec["libdwarf"].compiler.satisfies(compiler_str)
+ assert spec["libelf"].compiler.satisfies(compiler_str)
def test_external_package(self):
- spec = Spec('externaltool%gcc')
+ spec = Spec("externaltool%gcc")
spec.concretize()
- assert spec['externaltool'].external_path == \
- os.path.sep + os.path.join('path', 'to', 'external_tool')
- assert 'externalprereq' not in spec
- assert spec['externaltool'].compiler.satisfies('gcc')
+ assert spec["externaltool"].external_path == os.path.sep + os.path.join(
+ "path", "to", "external_tool"
+ )
+ assert "externalprereq" not in spec
+ assert spec["externaltool"].compiler.satisfies("gcc")
def test_external_package_module(self):
# No tcl modules on darwin/linux machines
# and Windows does not (currently) allow for bash calls
# TODO: improved way to check for this.
platform = spack.platforms.real_host().name
- if platform == 'darwin' or platform == 'linux' or platform == 'windows':
+ if platform == "darwin" or platform == "linux" or platform == "windows":
return
- spec = Spec('externalmodule')
+ spec = Spec("externalmodule")
spec.concretize()
- assert spec['externalmodule'].external_modules == ['external-module']
- assert 'externalprereq' not in spec
- assert spec['externalmodule'].compiler.satisfies('gcc')
+ assert spec["externalmodule"].external_modules == ["external-module"]
+ assert "externalprereq" not in spec
+ assert spec["externalmodule"].compiler.satisfies("gcc")
def test_nobuild_package(self):
"""Test that a non-buildable package raise an error if no specs
in packages.yaml are compatible with the request.
"""
- spec = Spec('externaltool%clang')
+ spec = Spec("externaltool%clang")
with pytest.raises(spack.error.SpecError):
spec.concretize()
def test_external_and_virtual(self):
- spec = Spec('externaltest')
+ spec = Spec("externaltest")
spec.concretize()
- assert spec['externaltool'].external_path == \
- os.path.sep + os.path.join('path', 'to', 'external_tool')
- assert spec['stuff'].external_path == \
- os.path.sep + os.path.join('path', 'to', 'external_virtual_gcc')
- assert spec['externaltool'].compiler.satisfies('gcc')
- assert spec['stuff'].compiler.satisfies('gcc')
+ assert spec["externaltool"].external_path == os.path.sep + os.path.join(
+ "path", "to", "external_tool"
+ )
+ assert spec["stuff"].external_path == os.path.sep + os.path.join(
+ "path", "to", "external_virtual_gcc"
+ )
+ assert spec["externaltool"].compiler.satisfies("gcc")
+ assert spec["stuff"].compiler.satisfies("gcc")
def test_find_spec_parents(self):
- """Tests the spec finding logic used by concretization. """
- s = Spec.from_literal({
- 'a +foo': {
- 'b +foo': {
- 'c': None,
- 'd+foo': None
- },
- 'e +foo': None
- }
- })
+ """Tests the spec finding logic used by concretization."""
+ s = Spec.from_literal({"a +foo": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
- assert 'a' == find_spec(s['b'], lambda s: '+foo' in s).name
+ assert "a" == find_spec(s["b"], lambda s: "+foo" in s).name
def test_find_spec_children(self):
- s = Spec.from_literal({
- 'a': {
- 'b +foo': {
- 'c': None,
- 'd+foo': None
- },
- 'e +foo': None
- }
- })
+ s = Spec.from_literal({"a": {"b +foo": {"c": None, "d+foo": None}, "e +foo": None}})
- assert 'd' == find_spec(s['b'], lambda s: '+foo' in s).name
+ assert "d" == find_spec(s["b"], lambda s: "+foo" in s).name
- s = Spec.from_literal({
- 'a': {
- 'b +foo': {
- 'c+foo': None,
- 'd': None
- },
- 'e +foo': None
- }
- })
+ s = Spec.from_literal({"a": {"b +foo": {"c+foo": None, "d": None}, "e +foo": None}})
- assert 'c' == find_spec(s['b'], lambda s: '+foo' in s).name
+ assert "c" == find_spec(s["b"], lambda s: "+foo" in s).name
def test_find_spec_sibling(self):
- s = Spec.from_literal({
- 'a': {
- 'b +foo': {
- 'c': None,
- 'd': None
- },
- 'e +foo': None
- }
- })
-
- assert 'e' == find_spec(s['b'], lambda s: '+foo' in s).name
- assert 'b' == find_spec(s['e'], lambda s: '+foo' in s).name
-
- s = Spec.from_literal({
- 'a': {
- 'b +foo': {
- 'c': None,
- 'd': None
- },
- 'e': {
- 'f +foo': None
- }
- }
- })
+ s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e +foo": None}})
+
+ assert "e" == find_spec(s["b"], lambda s: "+foo" in s).name
+ assert "b" == find_spec(s["e"], lambda s: "+foo" in s).name
- assert 'f' == find_spec(s['b'], lambda s: '+foo' in s).name
+ s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": {"f +foo": None}}})
+
+ assert "f" == find_spec(s["b"], lambda s: "+foo" in s).name
def test_find_spec_self(self):
- s = Spec.from_literal({
- 'a': {
- 'b +foo': {
- 'c': None,
- 'd': None
- },
- 'e': None
- }
- })
- assert 'b' == find_spec(s['b'], lambda s: '+foo' in s).name
+ s = Spec.from_literal({"a": {"b +foo": {"c": None, "d": None}, "e": None}})
+ assert "b" == find_spec(s["b"], lambda s: "+foo" in s).name
def test_find_spec_none(self):
- s = Spec.from_literal({
- 'a': {
- 'b': {
- 'c': None,
- 'd': None
- },
- 'e': None
- }
- })
- assert find_spec(s['b'], lambda s: '+foo' in s) is None
+ s = Spec.from_literal({"a": {"b": {"c": None, "d": None}, "e": None}})
+ assert find_spec(s["b"], lambda s: "+foo" in s) is None
def test_compiler_child(self):
- s = Spec('mpileaks%clang target=x86_64 ^dyninst%gcc')
+ s = Spec("mpileaks%clang target=x86_64 ^dyninst%gcc")
s.concretize()
- assert s['mpileaks'].satisfies('%clang')
- assert s['dyninst'].satisfies('%gcc')
+ assert s["mpileaks"].satisfies("%clang")
+ assert s["dyninst"].satisfies("%gcc")
def test_conflicts_in_spec(self, conflict_spec):
s = Spec(conflict_spec)
@@ -625,8 +568,8 @@ class TestConcretize(object):
s.concretize()
def test_conflicts_show_cores(self, conflict_spec, monkeypatch):
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip('Testing debug statements specific to new concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Testing debug statements specific to new concretizer")
s = Spec(conflict_spec)
with pytest.raises(spack.error.SpackError) as e:
@@ -635,23 +578,16 @@ class TestConcretize(object):
assert "conflict" in e.value.message
def test_conflict_in_all_directives_true(self):
- s = Spec('when-directives-true')
+ s = Spec("when-directives-true")
with pytest.raises(spack.error.SpackError):
s.concretize()
- @pytest.mark.parametrize('spec_str', [
- 'conflict@10.0%clang+foo'
- ])
+ @pytest.mark.parametrize("spec_str", ["conflict@10.0%clang+foo"])
def test_no_conflict_in_external_specs(self, spec_str):
# Modify the configuration to have the spec with conflict
# registered as an external
ext = Spec(spec_str)
- data = {
- 'externals': [
- {'spec': spec_str,
- 'prefix': '/fake/path'}
- ]
- }
+ data = {"externals": [{"spec": spec_str, "prefix": "/fake/path"}]}
spack.config.set("packages::{0}".format(ext.name), data)
ext.concretize() # failure raises exception
@@ -661,7 +597,7 @@ class TestConcretize(object):
# a spec, and then modify it to have no dependency and reset the
# cache values.
- s = Spec('mpileaks')
+ s = Spec("mpileaks")
s.concretize()
# Check that now the Spec is concrete, store the hash
@@ -673,40 +609,40 @@ class TestConcretize(object):
assert not s.concrete
- @pytest.mark.regression('7239')
+ @pytest.mark.regression("7239")
def test_regression_issue_7239(self):
# Constructing a SpecBuildInterface from another SpecBuildInterface
# results in an inconsistent MRO
# Normal Spec
- s = Spec('mpileaks')
+ s = Spec("mpileaks")
s.concretize()
assert llnl.util.lang.ObjectWrapper not in type(s).__mro__
# Spec wrapped in a build interface
- build_interface = s['mpileaks']
+ build_interface = s["mpileaks"]
assert llnl.util.lang.ObjectWrapper in type(build_interface).__mro__
# Mimics asking the build interface from a build interface
- build_interface = s['mpileaks']['mpileaks']
+ build_interface = s["mpileaks"]["mpileaks"]
assert llnl.util.lang.ObjectWrapper in type(build_interface).__mro__
- @pytest.mark.regression('7705')
+ @pytest.mark.regression("7705")
def test_regression_issue_7705(self):
# spec.package.provides(name) doesn't account for conditional
# constraints in the concretized spec
- s = Spec('simple-inheritance~openblas')
+ s = Spec("simple-inheritance~openblas")
s.concretize()
- assert not s.package.provides('lapack')
+ assert not s.package.provides("lapack")
- @pytest.mark.regression('7941')
+ @pytest.mark.regression("7941")
def test_regression_issue_7941(self):
# The string representation of a spec containing
# an explicit multi-valued variant and a dependency
# might be parsed differently than the originating spec
- s = Spec('a foobar=bar ^b')
+ s = Spec("a foobar=bar ^b")
t = Spec(str(s))
s.concretize()
@@ -714,32 +650,30 @@ class TestConcretize(object):
assert s.dag_hash() == t.dag_hash()
- @pytest.mark.parametrize('abstract_specs', [
- # Establish a baseline - concretize a single spec
- ('mpileaks',),
- # When concretized together with older version of callpath
- # and dyninst it uses those older versions
- ('mpileaks', 'callpath@0.9', 'dyninst@8.1.1'),
- # Handle recursive syntax within specs
- ('mpileaks', 'callpath@0.9 ^dyninst@8.1.1', 'dyninst'),
- # Test specs that have overlapping dependencies but are not
- # one a dependency of the other
- ('mpileaks', 'direct-mpich')
- ])
+ @pytest.mark.parametrize(
+ "abstract_specs",
+ [
+ # Establish a baseline - concretize a single spec
+ ("mpileaks",),
+            # When concretized together with older versions of callpath
+            # and dyninst, it uses those older versions
+ ("mpileaks", "callpath@0.9", "dyninst@8.1.1"),
+ # Handle recursive syntax within specs
+ ("mpileaks", "callpath@0.9 ^dyninst@8.1.1", "dyninst"),
+            # Test specs that have overlapping dependencies but where
+            # neither is a dependency of the other
+ ("mpileaks", "direct-mpich"),
+ ],
+ )
def test_simultaneous_concretization_of_specs(self, abstract_specs):
abstract_specs = [Spec(x) for x in abstract_specs]
- concrete_specs = spack.concretize.concretize_specs_together(
- *abstract_specs)
+ concrete_specs = spack.concretize.concretize_specs_together(*abstract_specs)
# Check there's only one configuration of each package in the DAG
- names = set(
- dep.name for spec in concrete_specs for dep in spec.traverse()
- )
+ names = set(dep.name for spec in concrete_specs for dep in spec.traverse())
for name in names:
- name_specs = set(
- spec[name] for spec in concrete_specs if name in spec
- )
+ name_specs = set(spec[name] for spec in concrete_specs if name in spec)
assert len(name_specs) == 1
# Check that there's at least one Spec that satisfies the
@@ -751,118 +685,124 @@ class TestConcretize(object):
for spec in concrete_specs:
assert not spec.dependents()
- @pytest.mark.parametrize('spec', ['noversion', 'noversion-bundle'])
+ @pytest.mark.parametrize("spec", ["noversion", "noversion-bundle"])
def test_noversion_pkg(self, spec):
"""Test concretization failures for no-version packages."""
with pytest.raises(spack.error.SpackError):
Spec(spec).concretized()
- @pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+ @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
# Include targets to prevent regression on 20537
- @pytest.mark.parametrize('spec, best_achievable', [
- ('mpileaks%gcc@4.4.7 ^dyninst@10.2.1 target=x86_64:', 'core2'),
- ('mpileaks%gcc@4.8 target=x86_64:', 'haswell'),
- ('mpileaks%gcc@5.3.0 target=x86_64:', 'broadwell'),
- ('mpileaks%apple-clang@5.1.0 target=x86_64:', 'x86_64')
- ])
- @pytest.mark.regression('13361', '20537')
+ @pytest.mark.parametrize(
+ "spec, best_achievable",
+ [
+ ("mpileaks%gcc@4.4.7 ^dyninst@10.2.1 target=x86_64:", "core2"),
+ ("mpileaks%gcc@4.8 target=x86_64:", "haswell"),
+ ("mpileaks%gcc@5.3.0 target=x86_64:", "broadwell"),
+ ("mpileaks%apple-clang@5.1.0 target=x86_64:", "x86_64"),
+ ],
+ )
+ @pytest.mark.regression("13361", "20537")
def test_adjusting_default_target_based_on_compiler(
- self, spec, best_achievable, current_host, mock_targets
+ self, spec, best_achievable, current_host, mock_targets
):
best_achievable = archspec.cpu.TARGETS[best_achievable]
- expected = best_achievable if best_achievable < current_host \
- else current_host
+ expected = best_achievable if best_achievable < current_host else current_host
with spack.concretize.disable_compiler_existence_check():
s = Spec(spec).concretized()
assert str(s.architecture.target) == str(expected)
- @pytest.mark.regression('8735,14730')
+ @pytest.mark.regression("8735,14730")
def test_compiler_version_matches_any_entry_in_compilers_yaml(self):
# Ensure that a concrete compiler with different compiler version
# doesn't match (here it's 4.5 vs. 4.5.0)
with pytest.raises(spack.concretize.UnavailableCompilerVersionError):
- s = Spec('mpileaks %gcc@4.5')
+ s = Spec("mpileaks %gcc@4.5")
s.concretize()
# An abstract compiler with a version list could resolve to 4.5.0
- s = Spec('mpileaks %gcc@4.5:')
+ s = Spec("mpileaks %gcc@4.5:")
s.concretize()
- assert str(s.compiler.version) == '4.5.0'
+ assert str(s.compiler.version) == "4.5.0"
def test_concretize_anonymous(self):
with pytest.raises(spack.error.SpackError):
- s = Spec('+variant')
+ s = Spec("+variant")
s.concretize()
- @pytest.mark.parametrize('spec_str', [
- 'mpileaks ^%gcc', 'mpileaks ^cflags=-g'
- ])
+ @pytest.mark.parametrize("spec_str", ["mpileaks ^%gcc", "mpileaks ^cflags=-g"])
def test_concretize_anonymous_dep(self, spec_str):
with pytest.raises(spack.error.SpackError):
s = Spec(spec_str)
s.concretize()
- @pytest.mark.parametrize('spec_str,expected_str', [
- # Unconstrained versions select default compiler (gcc@4.5.0)
- ('bowtie@1.3.0', '%gcc@4.5.0'),
- # Version with conflicts and no valid gcc select another compiler
- ('bowtie@1.2.2', '%clang@3.3'),
- # If a higher gcc is available still prefer that
- ('bowtie@1.2.2 os=redhat6', '%gcc@4.7.2'),
- ])
+ @pytest.mark.parametrize(
+ "spec_str,expected_str",
+ [
+ # Unconstrained versions select default compiler (gcc@4.5.0)
+ ("bowtie@1.3.0", "%gcc@4.5.0"),
+            # A version with conflicts and no valid gcc selects another compiler
+ ("bowtie@1.2.2", "%clang@3.3"),
+ # If a higher gcc is available still prefer that
+ ("bowtie@1.2.2 os=redhat6", "%gcc@4.7.2"),
+ ],
+ )
def test_compiler_conflicts_in_package_py(self, spec_str, expected_str):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Original concretizer cannot work around conflicts')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Original concretizer cannot work around conflicts")
s = Spec(spec_str).concretized()
assert s.satisfies(expected_str)
- @pytest.mark.parametrize('spec_str,expected,unexpected', [
- ('conditional-variant-pkg@1.0',
- ['two_whens'],
- ['version_based', 'variant_based']),
- ('conditional-variant-pkg@2.0',
- ['version_based', 'variant_based'],
- ['two_whens']),
- ('conditional-variant-pkg@2.0~version_based',
- ['version_based'],
- ['variant_based', 'two_whens']),
- ('conditional-variant-pkg@2.0+version_based+variant_based',
- ['version_based', 'variant_based', 'two_whens'],
- [])
- ])
+ @pytest.mark.parametrize(
+ "spec_str,expected,unexpected",
+ [
+ ("conditional-variant-pkg@1.0", ["two_whens"], ["version_based", "variant_based"]),
+ ("conditional-variant-pkg@2.0", ["version_based", "variant_based"], ["two_whens"]),
+ (
+ "conditional-variant-pkg@2.0~version_based",
+ ["version_based"],
+ ["variant_based", "two_whens"],
+ ),
+ (
+ "conditional-variant-pkg@2.0+version_based+variant_based",
+ ["version_based", "variant_based", "two_whens"],
+ [],
+ ),
+ ],
+ )
def test_conditional_variants(self, spec_str, expected, unexpected):
s = Spec(spec_str).concretized()
for var in expected:
- assert s.satisfies('%s=*' % var)
+ assert s.satisfies("%s=*" % var)
for var in unexpected:
- assert not s.satisfies('%s=*' % var)
-
- @pytest.mark.parametrize('bad_spec', [
- '@1.0~version_based',
- '@1.0+version_based',
- '@2.0~version_based+variant_based',
- '@2.0+version_based~variant_based+two_whens',
- ])
+ assert not s.satisfies("%s=*" % var)
+
+ @pytest.mark.parametrize(
+ "bad_spec",
+ [
+ "@1.0~version_based",
+ "@1.0+version_based",
+ "@2.0~version_based+variant_based",
+ "@2.0+version_based~variant_based+two_whens",
+ ],
+ )
def test_conditional_variants_fail(self, bad_spec):
- with pytest.raises(
- (spack.error.UnsatisfiableSpecError,
- vt.InvalidVariantForSpecError)
- ):
- _ = Spec('conditional-variant-pkg' + bad_spec).concretized()
-
- @pytest.mark.parametrize('spec_str,expected,unexpected', [
- ('py-extension3 ^python@3.5.1', [], ['py-extension1']),
- ('py-extension3 ^python@2.7.11', ['py-extension1'], []),
- ('py-extension3@1.0 ^python@2.7.11', ['patchelf@0.9'], []),
- ('py-extension3@1.1 ^python@2.7.11', ['patchelf@0.9'], []),
- ('py-extension3@1.0 ^python@3.5.1', ['patchelf@0.10'], []),
- ])
- @pytest.mark.skipif(
- sys.version_info[:2] == (3, 5), reason='Known failure with Python3.5'
+ with pytest.raises((spack.error.UnsatisfiableSpecError, vt.InvalidVariantForSpecError)):
+ _ = Spec("conditional-variant-pkg" + bad_spec).concretized()
+
+ @pytest.mark.parametrize(
+ "spec_str,expected,unexpected",
+ [
+ ("py-extension3 ^python@3.5.1", [], ["py-extension1"]),
+ ("py-extension3 ^python@2.7.11", ["py-extension1"], []),
+ ("py-extension3@1.0 ^python@2.7.11", ["patchelf@0.9"], []),
+ ("py-extension3@1.1 ^python@2.7.11", ["patchelf@0.9"], []),
+ ("py-extension3@1.0 ^python@3.5.1", ["patchelf@0.10"], []),
+ ],
)
+ @pytest.mark.skipif(sys.version_info[:2] == (3, 5), reason="Known failure with Python3.5")
def test_conditional_dependencies(self, spec_str, expected, unexpected):
s = Spec(spec_str).concretized()
@@ -874,46 +814,50 @@ class TestConcretize(object):
msg = '"{0}" is in "{1}" but was unexpected'
assert dep not in s, msg.format(dep, spec_str)
- @pytest.mark.parametrize('spec_str,patched_deps', [
- ('patch-several-dependencies', [('libelf', 1), ('fake', 2)]),
- ('patch-several-dependencies@1.0',
- [('libelf', 1), ('fake', 2), ('libdwarf', 1)]),
- ('patch-several-dependencies@1.0 ^libdwarf@20111030',
- [('libelf', 1), ('fake', 2), ('libdwarf', 2)]),
- ('patch-several-dependencies ^libelf@0.8.10',
- [('libelf', 2), ('fake', 2)]),
- ('patch-several-dependencies +foo', [('libelf', 2), ('fake', 2)])
- ])
+ @pytest.mark.parametrize(
+ "spec_str,patched_deps",
+ [
+ ("patch-several-dependencies", [("libelf", 1), ("fake", 2)]),
+ ("patch-several-dependencies@1.0", [("libelf", 1), ("fake", 2), ("libdwarf", 1)]),
+ (
+ "patch-several-dependencies@1.0 ^libdwarf@20111030",
+ [("libelf", 1), ("fake", 2), ("libdwarf", 2)],
+ ),
+ ("patch-several-dependencies ^libelf@0.8.10", [("libelf", 2), ("fake", 2)]),
+ ("patch-several-dependencies +foo", [("libelf", 2), ("fake", 2)]),
+ ],
+ )
def test_patching_dependencies(self, spec_str, patched_deps):
s = Spec(spec_str).concretized()
for dep, num_patches in patched_deps:
- assert s[dep].satisfies('patches=*')
- assert len(s[dep].variants['patches'].value) == num_patches
-
- @pytest.mark.regression(
- '267,303,1781,2310,2632,3628'
+ assert s[dep].satisfies("patches=*")
+ assert len(s[dep].variants["patches"].value) == num_patches
+
+ @pytest.mark.regression("267,303,1781,2310,2632,3628")
+ @pytest.mark.parametrize(
+ "spec_str, expected",
+ [
+ # Need to understand that this configuration is possible
+ # only if we use the +mpi variant, which is not the default
+ ("fftw ^mpich", ["+mpi"]),
+ # This spec imposes two orthogonal constraints on a dependency,
+            # one of which is conditional. The original concretizer fails since,
+ # when it applies the first constraint, it sets the unknown variants
+ # of the dependency to their default values
+ ("quantum-espresso", ["^fftw@1.0+mpi"]),
+ # This triggers a conditional dependency on ^fftw@1.0
+ ("quantum-espresso", ["^openblas"]),
+            # This constructs a constraint for a dependency of the type
+ # @x.y:x.z where the lower bound is unconditional, the upper bound
+            # is conditional on having a variant set
+ ("quantum-espresso", ["^libelf@0.8.12"]),
+ ("quantum-espresso~veritas", ["^libelf@0.8.13"]),
+ ],
)
- @pytest.mark.parametrize('spec_str, expected', [
- # Need to understand that this configuration is possible
- # only if we use the +mpi variant, which is not the default
- ('fftw ^mpich', ['+mpi']),
- # This spec imposes two orthogonal constraints on a dependency,
- # one of which is conditional. The original concretizer fail since
- # when it applies the first constraint, it sets the unknown variants
- # of the dependency to their default values
- ('quantum-espresso', ['^fftw@1.0+mpi']),
- # This triggers a conditional dependency on ^fftw@1.0
- ('quantum-espresso', ['^openblas']),
- # This constructs a constraint for a dependency og the type
- # @x.y:x.z where the lower bound is unconditional, the upper bound
- # is conditional to having a variant set
- ('quantum-espresso', ['^libelf@0.8.12']),
- ('quantum-espresso~veritas', ['^libelf@0.8.13'])
- ])
def test_working_around_conflicting_defaults(self, spec_str, expected):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
s = Spec(spec_str).concretized()
@@ -921,299 +865,295 @@ class TestConcretize(object):
for constraint in expected:
assert s.satisfies(constraint)
- @pytest.mark.regression('4635')
- @pytest.mark.parametrize('spec_str,expected', [
- ('cmake', ['%clang']),
- ('cmake %gcc', ['%gcc']),
- ('cmake %clang', ['%clang'])
- ])
- def test_external_package_and_compiler_preferences(
- self, spec_str, expected
- ):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ @pytest.mark.regression("4635")
+ @pytest.mark.parametrize(
+ "spec_str,expected",
+ [("cmake", ["%clang"]), ("cmake %gcc", ["%gcc"]), ("cmake %clang", ["%clang"])],
+ )
+ def test_external_package_and_compiler_preferences(self, spec_str, expected):
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
packages_yaml = {
- 'all': {
- 'compiler': ['clang', 'gcc'],
+ "all": {
+ "compiler": ["clang", "gcc"],
+ },
+ "cmake": {
+ "externals": [{"spec": "cmake@3.4.3", "prefix": "/usr"}],
+ "buildable": False,
},
- 'cmake': {
- 'externals': [
- {'spec': 'cmake@3.4.3', 'prefix': '/usr'}
- ],
- 'buildable': False
- }
}
- spack.config.set('packages', packages_yaml)
+ spack.config.set("packages", packages_yaml)
s = Spec(spec_str).concretized()
assert s.external
for condition in expected:
assert s.satisfies(condition)
- @pytest.mark.regression('5651')
- def test_package_with_constraint_not_met_by_external(
- self
- ):
+ @pytest.mark.regression("5651")
+ def test_package_with_constraint_not_met_by_external(self):
"""Check that if we have an external package A at version X.Y in
packages.yaml, but our spec doesn't allow X.Y as a version, then
a new version of A is built that meets the requirements.
"""
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
- packages_yaml = {
- 'libelf': {
- 'externals': [
- {'spec': 'libelf@0.8.13', 'prefix': '/usr'}
- ]
- }
- }
- spack.config.set('packages', packages_yaml)
+ packages_yaml = {"libelf": {"externals": [{"spec": "libelf@0.8.13", "prefix": "/usr"}]}}
+ spack.config.set("packages", packages_yaml)
# quantum-espresso+veritas requires libelf@:0.8.12
- s = Spec('quantum-espresso+veritas').concretized()
- assert s.satisfies('^libelf@0.8.12')
- assert not s['libelf'].external
+ s = Spec("quantum-espresso+veritas").concretized()
+ assert s.satisfies("^libelf@0.8.12")
+ assert not s["libelf"].external
- @pytest.mark.regression('9744')
+ @pytest.mark.regression("9744")
def test_cumulative_version_ranges_with_different_length(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
- s = Spec('cumulative-vrange-root').concretized()
+ s = Spec("cumulative-vrange-root").concretized()
assert s.concrete
- assert s.satisfies('^cumulative-vrange-bottom@2.2')
+ assert s.satisfies("^cumulative-vrange-bottom@2.2")
- @pytest.mark.regression('9937')
- @pytest.mark.skipif(
- sys.version_info[:2] == (3, 5), reason='Known failure with Python3.5'
- )
+ @pytest.mark.regression("9937")
+ @pytest.mark.skipif(sys.version_info[:2] == (3, 5), reason="Known failure with Python3.5")
def test_dependency_conditional_on_another_dependency_state(self):
- root_str = 'variant-on-dependency-condition-root'
- dep_str = 'variant-on-dependency-condition-a'
- spec_str = '{0} ^{1}'.format(root_str, dep_str)
+ root_str = "variant-on-dependency-condition-root"
+ dep_str = "variant-on-dependency-condition-a"
+ spec_str = "{0} ^{1}".format(root_str, dep_str)
s = Spec(spec_str).concretized()
assert s.concrete
- assert s.satisfies('^variant-on-dependency-condition-b')
+ assert s.satisfies("^variant-on-dependency-condition-b")
- s = Spec(spec_str + '+x').concretized()
+ s = Spec(spec_str + "+x").concretized()
assert s.concrete
- assert s.satisfies('^variant-on-dependency-condition-b')
+ assert s.satisfies("^variant-on-dependency-condition-b")
- s = Spec(spec_str + '~x').concretized()
+ s = Spec(spec_str + "~x").concretized()
assert s.concrete
- assert not s.satisfies('^variant-on-dependency-condition-b')
-
- @pytest.mark.regression('8082')
- @pytest.mark.parametrize('spec_str,expected', [
- ('cmake %gcc', '%gcc'),
- ('cmake %clang', '%clang')
- ])
- def test_compiler_constraint_with_external_package(
- self, spec_str, expected
- ):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ assert not s.satisfies("^variant-on-dependency-condition-b")
+
+ @pytest.mark.regression("8082")
+ @pytest.mark.parametrize(
+ "spec_str,expected", [("cmake %gcc", "%gcc"), ("cmake %clang", "%clang")]
+ )
+ def test_compiler_constraint_with_external_package(self, spec_str, expected):
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
packages_yaml = {
- 'cmake': {
- 'externals': [
- {'spec': 'cmake@3.4.3', 'prefix': '/usr'}
- ],
- 'buildable': False
- }
+ "cmake": {"externals": [{"spec": "cmake@3.4.3", "prefix": "/usr"}], "buildable": False}
}
- spack.config.set('packages', packages_yaml)
+ spack.config.set("packages", packages_yaml)
s = Spec(spec_str).concretized()
assert s.external
assert s.satisfies(expected)
- @pytest.mark.regression('20976')
- @pytest.mark.parametrize('compiler,spec_str,expected,xfailold', [
- ('gcc', 'external-common-python %clang',
- '%clang ^external-common-openssl%gcc ^external-common-gdbm%clang', False),
- ('clang', 'external-common-python',
- '%clang ^external-common-openssl%clang ^external-common-gdbm%clang', True)
- ])
+ @pytest.mark.regression("20976")
+ @pytest.mark.parametrize(
+ "compiler,spec_str,expected,xfailold",
+ [
+ (
+ "gcc",
+ "external-common-python %clang",
+ "%clang ^external-common-openssl%gcc ^external-common-gdbm%clang",
+ False,
+ ),
+ (
+ "clang",
+ "external-common-python",
+ "%clang ^external-common-openssl%clang ^external-common-gdbm%clang",
+ True,
+ ),
+ ],
+ )
def test_compiler_in_nonbuildable_external_package(
- self, compiler, spec_str, expected, xfailold
+ self, compiler, spec_str, expected, xfailold
):
"""Check that the compiler of a non-buildable external package does not
- spread to other dependencies, unless no other commpiler is specified."""
+        spread to other dependencies, unless no other compiler is specified."""
packages_yaml = {
- 'external-common-openssl': {
- 'externals': [
- {'spec': 'external-common-openssl@1.1.1i%' + compiler,
- 'prefix': '/usr'}
+ "external-common-openssl": {
+ "externals": [
+ {"spec": "external-common-openssl@1.1.1i%" + compiler, "prefix": "/usr"}
],
- 'buildable': False
+ "buildable": False,
}
}
- spack.config.set('packages', packages_yaml)
+ spack.config.set("packages", packages_yaml)
s = Spec(spec_str).concretized()
- if xfailold and spack.config.get('config:concretizer') == 'original':
- pytest.xfail('This only works on the ASP-based concretizer')
+ if xfailold and spack.config.get("config:concretizer") == "original":
+ pytest.xfail("This only works on the ASP-based concretizer")
assert s.satisfies(expected)
- assert 'external-common-perl' not in [d.name for d in s.dependencies()]
+ assert "external-common-perl" not in [d.name for d in s.dependencies()]
def test_external_packages_have_consistent_hash(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip('This tests needs the ASP-based concretizer')
+ if spack.config.get("config:concretizer") == "original":
+            pytest.skip("This test needs the ASP-based concretizer")
- s, t = Spec('externaltool'), Spec('externaltool')
+ s, t = Spec("externaltool"), Spec("externaltool")
s._old_concretize(), t._new_concretize()
assert s.dag_hash() == t.dag_hash()
def test_external_that_would_require_a_virtual_dependency(self):
- s = Spec('requires-virtual').concretized()
+ s = Spec("requires-virtual").concretized()
assert s.external
- assert 'stuff' not in s
+ assert "stuff" not in s
def test_transitive_conditional_virtual_dependency(self):
- s = Spec('transitive-conditional-virtual-dependency').concretized()
+ s = Spec("transitive-conditional-virtual-dependency").concretized()
# The default for conditional-virtual-dependency is to have
# +stuff~mpi, so check that these defaults are respected
- assert '+stuff' in s['conditional-virtual-dependency']
- assert '~mpi' in s['conditional-virtual-dependency']
+ assert "+stuff" in s["conditional-virtual-dependency"]
+ assert "~mpi" in s["conditional-virtual-dependency"]
# 'stuff' is provided by an external package, so check it's present
- assert 'externalvirtual' in s
+ assert "externalvirtual" in s
- @pytest.mark.regression('20040')
+ @pytest.mark.regression("20040")
def test_conditional_provides_or_depends_on(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
# Check that we can concretize correctly a spec that can either
# provide a virtual or depend on it based on the value of a variant
- s = Spec('conditional-provider +disable-v1').concretized()
- assert 'v1-provider' in s
- assert s['v1'].name == 'v1-provider'
- assert s['v2'].name == 'conditional-provider'
-
- @pytest.mark.regression('20079')
- @pytest.mark.parametrize('spec_str,tests_arg,with_dep,without_dep', [
- # Check that True is treated correctly and attaches test deps
- # to all nodes in the DAG
- ('a', True, ['a'], []),
- ('a foobar=bar', True, ['a', 'b'], []),
- # Check that a list of names activates the dependency only for
- # packages in that list
- ('a foobar=bar', ['a'], ['a'], ['b']),
- ('a foobar=bar', ['b'], ['b'], ['a']),
- # Check that False disregard test dependencies
- ('a foobar=bar', False, [], ['a', 'b']),
- ])
- def test_activating_test_dependencies(
- self, spec_str, tests_arg, with_dep, without_dep
- ):
+ s = Spec("conditional-provider +disable-v1").concretized()
+ assert "v1-provider" in s
+ assert s["v1"].name == "v1-provider"
+ assert s["v2"].name == "conditional-provider"
+
+ @pytest.mark.regression("20079")
+ @pytest.mark.parametrize(
+ "spec_str,tests_arg,with_dep,without_dep",
+ [
+ # Check that True is treated correctly and attaches test deps
+ # to all nodes in the DAG
+ ("a", True, ["a"], []),
+ ("a foobar=bar", True, ["a", "b"], []),
+ # Check that a list of names activates the dependency only for
+ # packages in that list
+ ("a foobar=bar", ["a"], ["a"], ["b"]),
+ ("a foobar=bar", ["b"], ["b"], ["a"]),
+            # Check that False disregards test dependencies
+ ("a foobar=bar", False, [], ["a", "b"]),
+ ],
+ )
+ def test_activating_test_dependencies(self, spec_str, tests_arg, with_dep, without_dep):
s = Spec(spec_str).concretized(tests=tests_arg)
for pkg_name in with_dep:
msg = "Cannot find test dependency in package '{0}'"
node = s[pkg_name]
- assert node.dependencies(deptype='test'), msg.format(pkg_name)
+ assert node.dependencies(deptype="test"), msg.format(pkg_name)
for pkg_name in without_dep:
msg = "Test dependency in package '{0}' is unexpected"
node = s[pkg_name]
- assert not node.dependencies(deptype='test'), msg.format(pkg_name)
+ assert not node.dependencies(deptype="test"), msg.format(pkg_name)
- @pytest.mark.regression('20019')
+ @pytest.mark.regression("20019")
def test_compiler_match_is_preferred_to_newer_version(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
# This spec depends on openblas. Openblas has a conflict
# that doesn't allow newer versions with gcc@4.4.0. Check
# that an old version of openblas is selected, rather than
# a different compiler for just that node.
- spec_str = 'simple-inheritance+openblas %gcc@4.4.0 os=redhat6'
+ spec_str = "simple-inheritance+openblas %gcc@4.4.0 os=redhat6"
s = Spec(spec_str).concretized()
- assert 'openblas@0.2.13' in s
- assert s['openblas'].satisfies('%gcc@4.4.0')
+ assert "openblas@0.2.13" in s
+ assert s["openblas"].satisfies("%gcc@4.4.0")
- @pytest.mark.regression('19981')
+ @pytest.mark.regression("19981")
def test_target_ranges_in_conflicts(self):
with pytest.raises(spack.error.SpackError):
- Spec('impossible-concretization').concretized()
+ Spec("impossible-concretization").concretized()
def test_target_compatibility(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
with pytest.raises(spack.error.SpackError):
- Spec('libdwarf target=x86_64 ^libelf target=x86_64_v2').concretized()
+ Spec("libdwarf target=x86_64 ^libelf target=x86_64_v2").concretized()
- @pytest.mark.regression('20040')
+ @pytest.mark.regression("20040")
def test_variant_not_default(self):
- s = Spec('ecp-viz-sdk').concretized()
+ s = Spec("ecp-viz-sdk").concretized()
# Check default variant value for the package
- assert '+dep' in s['conditional-constrained-dependencies']
+ assert "+dep" in s["conditional-constrained-dependencies"]
# Check that non-default variant values are forced on the dependency
- d = s['dep-with-variants']
- assert '+foo+bar+baz' in d
+ d = s["dep-with-variants"]
+ assert "+foo+bar+baz" in d
- @pytest.mark.regression('20055')
+ @pytest.mark.regression("20055")
def test_custom_compiler_version(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
- s = Spec('a %gcc@foo os=redhat6').concretized()
- assert '%gcc@foo' in s
+ s = Spec("a %gcc@foo os=redhat6").concretized()
+ assert "%gcc@foo" in s
def test_all_patches_applied(self):
- uuidpatch = 'a60a42b73e03f207433c5579de207c6ed61d58e4d12dd3b5142eb525728d89ea' if not is_windows else 'd0df7988457ec999c148a4a2af25ce831bfaad13954ba18a4446374cb0aef55e'
- localpatch = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
- spec = spack.spec.Spec('conditionally-patch-dependency+jasper')
+ uuidpatch = (
+ "a60a42b73e03f207433c5579de207c6ed61d58e4d12dd3b5142eb525728d89ea"
+ if not is_windows
+ else "d0df7988457ec999c148a4a2af25ce831bfaad13954ba18a4446374cb0aef55e"
+ )
+ localpatch = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+ spec = spack.spec.Spec("conditionally-patch-dependency+jasper")
spec.concretize()
- assert ((uuidpatch, localpatch) ==
- spec['libelf'].variants['patches'].value)
+ assert (uuidpatch, localpatch) == spec["libelf"].variants["patches"].value
def test_dont_select_version_that_brings_more_variants_in(self):
- s = Spec('dep-with-variants-if-develop-root').concretized()
- assert s['dep-with-variants-if-develop'].satisfies('@1.0')
-
- @pytest.mark.regression('20244,20736')
- @pytest.mark.parametrize('spec_str,is_external,expected', [
- # These are all externals, and 0_8 is a version not in package.py
- ('externaltool@1.0', True, '@1.0'),
- ('externaltool@0.9', True, '@0.9'),
- ('externaltool@0_8', True, '@0_8'),
- # This external package is buildable, has a custom version
- # in packages.yaml that is greater than the ones in package.py
- # and specifies a variant
- ('external-buildable-with-variant +baz', True, '@1.1.special +baz'),
- ('external-buildable-with-variant ~baz', False, '@1.0 ~baz'),
- ('external-buildable-with-variant@1.0: ~baz', False, '@1.0 ~baz'),
- # This uses an external version that meets the condition for
- # having an additional dependency, but the dependency shouldn't
- # appear in the answer set
- ('external-buildable-with-variant@0.9 +baz', True, '@0.9'),
- # This package has an external version declared that would be
- # the least preferred if Spack had to build it
- ('old-external', True, '@1.0.0'),
- ])
+ s = Spec("dep-with-variants-if-develop-root").concretized()
+ assert s["dep-with-variants-if-develop"].satisfies("@1.0")
+
+ @pytest.mark.regression("20244,20736")
+ @pytest.mark.parametrize(
+ "spec_str,is_external,expected",
+ [
+ # These are all externals, and 0_8 is a version not in package.py
+ ("externaltool@1.0", True, "@1.0"),
+ ("externaltool@0.9", True, "@0.9"),
+ ("externaltool@0_8", True, "@0_8"),
+ # This external package is buildable, has a custom version
+ # in packages.yaml that is greater than the ones in package.py
+ # and specifies a variant
+ ("external-buildable-with-variant +baz", True, "@1.1.special +baz"),
+ ("external-buildable-with-variant ~baz", False, "@1.0 ~baz"),
+ ("external-buildable-with-variant@1.0: ~baz", False, "@1.0 ~baz"),
+ # This uses an external version that meets the condition for
+ # having an additional dependency, but the dependency shouldn't
+ # appear in the answer set
+ ("external-buildable-with-variant@0.9 +baz", True, "@0.9"),
+ # This package has an external version declared that would be
+ # the least preferred if Spack had to build it
+ ("old-external", True, "@1.0.0"),
+ ],
+ )
def test_external_package_versions(self, spec_str, is_external, expected):
s = Spec(spec_str).concretized()
assert s.external == is_external
assert s.satisfies(expected)
- @pytest.mark.parametrize('dev_first', [True, False])
- @pytest.mark.parametrize('spec', [
- 'dev-build-test-install', 'dev-build-test-dependent ^dev-build-test-install'])
- @pytest.mark.parametrize('mock_db', [True, False])
+ @pytest.mark.parametrize("dev_first", [True, False])
+ @pytest.mark.parametrize(
+ "spec", ["dev-build-test-install", "dev-build-test-dependent ^dev-build-test-install"]
+ )
+ @pytest.mark.parametrize("mock_db", [True, False])
def test_reuse_does_not_overwrite_dev_specs(
- self, dev_first, spec, mock_db, tmpdir, monkeypatch):
+ self, dev_first, spec, mock_db, tmpdir, monkeypatch
+ ):
"""Test that reuse does not mix dev specs with non-dev specs.
Tests for either order (dev specs are not reused for non-dev, and
@@ -1224,8 +1164,8 @@ class TestConcretize(object):
# dev and non-dev specs that are otherwise identical
spec = Spec(spec)
dev_spec = spec.copy()
- dev_constraint = 'dev_path=%s' % tmpdir.strpath
- dev_spec['dev-build-test-install'].constrain(dev_constraint)
+ dev_constraint = "dev_path=%s" % tmpdir.strpath
+ dev_spec["dev-build-test-install"].constrain(dev_constraint)
# run the test in both orders
first_spec = dev_spec if dev_first else spec
@@ -1238,42 +1178,42 @@ class TestConcretize(object):
return [first_spec]
if mock_db:
- monkeypatch.setattr(spack.store.db, 'query', mock_fn)
+ monkeypatch.setattr(spack.store.db, "query", mock_fn)
else:
- monkeypatch.setattr(
- spack.binary_distribution, 'update_cache_and_get_specs', mock_fn)
+ monkeypatch.setattr(spack.binary_distribution, "update_cache_and_get_specs", mock_fn)
# concretize and ensure we did not reuse
with spack.config.override("concretizer:reuse", True):
second_spec.concretize()
assert first_spec.dag_hash() != second_spec.dag_hash()
- @pytest.mark.regression('20292')
- @pytest.mark.parametrize('context', [
- {'add_variant': True, 'delete_variant': False},
- {'add_variant': False, 'delete_variant': True},
- {'add_variant': True, 'delete_variant': True}
- ])
+ @pytest.mark.regression("20292")
+ @pytest.mark.parametrize(
+ "context",
+ [
+ {"add_variant": True, "delete_variant": False},
+ {"add_variant": False, "delete_variant": True},
+ {"add_variant": True, "delete_variant": True},
+ ],
+ )
def test_reuse_installed_packages_when_package_def_changes(
- self, context, mutable_database, repo_with_changing_recipe
+ self, context, mutable_database, repo_with_changing_recipe
):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
# Install a spec
- root = Spec('root').concretized()
- dependency = root['changing'].copy()
+ root = Spec("root").concretized()
+ dependency = root["changing"].copy()
root.package.do_install(fake=True, explicit=True)
# Modify package.py
repo_with_changing_recipe.change(context)
# Try to concretize with the spec installed previously
- new_root_with_reuse = Spec('root ^/{0}'.format(
- dependency.dag_hash())
- ).concretized()
+ new_root_with_reuse = Spec("root ^/{0}".format(dependency.dag_hash())).concretized()
- new_root_without_reuse = Spec('root').concretized()
+ new_root_without_reuse = Spec("root").concretized()
# validate that the graphs are the same with reuse, but not without
assert ht.build_hash(root) == ht.build_hash(new_root_with_reuse)
@@ -1285,147 +1225,144 @@ class TestConcretize(object):
# Structure and package hash will be different without reuse
assert root.dag_hash() != new_root_without_reuse.dag_hash()
- @pytest.mark.regression('20784')
+ @pytest.mark.regression("20784")
def test_concretization_of_test_dependencies(self):
# With clingo we emit dependency_conditions regardless of the type
# of the dependency. We need to ensure that there's at least one
# dependency type declared to infer that the dependency holds.
- s = Spec('test-dep-with-imposed-conditions').concretized()
- assert 'c' not in s
+ s = Spec("test-dep-with-imposed-conditions").concretized()
+ assert "c" not in s
- @pytest.mark.parametrize('spec_str', [
- 'wrong-variant-in-conflicts',
- 'wrong-variant-in-depends-on'
- ])
+ @pytest.mark.parametrize(
+ "spec_str", ["wrong-variant-in-conflicts", "wrong-variant-in-depends-on"]
+ )
def test_error_message_for_inconsistent_variants(self, spec_str):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
s = Spec(spec_str)
- with pytest.raises(RuntimeError, match='not found in package'):
+ with pytest.raises(RuntimeError, match="not found in package"):
s.concretize()
- @pytest.mark.regression('22533')
- @pytest.mark.parametrize('spec_str,variant_name,expected_values', [
- # Test the default value 'auto'
- ('mvapich2', 'file_systems', ('auto',)),
- # Test setting a single value from the disjoint set
- ('mvapich2 file_systems=lustre', 'file_systems', ('lustre',)),
- # Test setting multiple values from the disjoint set
- ('mvapich2 file_systems=lustre,gpfs', 'file_systems',
- ('lustre', 'gpfs')),
- ])
- def test_mv_variants_disjoint_sets_from_spec(
- self, spec_str, variant_name, expected_values
- ):
+ @pytest.mark.regression("22533")
+ @pytest.mark.parametrize(
+ "spec_str,variant_name,expected_values",
+ [
+ # Test the default value 'auto'
+ ("mvapich2", "file_systems", ("auto",)),
+ # Test setting a single value from the disjoint set
+ ("mvapich2 file_systems=lustre", "file_systems", ("lustre",)),
+ # Test setting multiple values from the disjoint set
+ ("mvapich2 file_systems=lustre,gpfs", "file_systems", ("lustre", "gpfs")),
+ ],
+ )
+ def test_mv_variants_disjoint_sets_from_spec(self, spec_str, variant_name, expected_values):
s = Spec(spec_str).concretized()
assert set(expected_values) == set(s.variants[variant_name].value)
- @pytest.mark.regression('22533')
+ @pytest.mark.regression("22533")
def test_mv_variants_disjoint_sets_from_packages_yaml(self):
external_mvapich2 = {
- 'mvapich2': {
- 'buildable': False,
- 'externals': [{
- 'spec': 'mvapich2@2.3.1 file_systems=nfs,ufs',
- 'prefix': '/usr'
- }]
+ "mvapich2": {
+ "buildable": False,
+ "externals": [{"spec": "mvapich2@2.3.1 file_systems=nfs,ufs", "prefix": "/usr"}],
}
}
- spack.config.set('packages', external_mvapich2)
+ spack.config.set("packages", external_mvapich2)
- s = Spec('mvapich2').concretized()
- assert set(s.variants['file_systems'].value) == set(['ufs', 'nfs'])
+ s = Spec("mvapich2").concretized()
+ assert set(s.variants["file_systems"].value) == set(["ufs", "nfs"])
- @pytest.mark.regression('22596')
+ @pytest.mark.regression("22596")
def test_external_with_non_default_variant_as_dependency(self):
# This package depends on another that is registered as an external
# with 'buildable: true' and a variant with a non-default value set
- s = Spec('trigger-external-non-default-variant').concretized()
-
- assert '~foo' in s['external-non-default-variant']
- assert '~bar' in s['external-non-default-variant']
- assert s['external-non-default-variant'].external
-
- @pytest.mark.regression('22871')
- @pytest.mark.parametrize('spec_str,expected_os', [
- ('mpileaks', 'os=debian6'),
- # To trigger the bug in 22871 we need to have the same compiler
- # spec available on both operating systems
- ('mpileaks%gcc@4.5.0 platform=test os=debian6', 'os=debian6'),
- ('mpileaks%gcc@4.5.0 platform=test os=redhat6', 'os=redhat6')
- ])
- def test_os_selection_when_multiple_choices_are_possible(
- self, spec_str, expected_os
- ):
+ s = Spec("trigger-external-non-default-variant").concretized()
+
+ assert "~foo" in s["external-non-default-variant"]
+ assert "~bar" in s["external-non-default-variant"]
+ assert s["external-non-default-variant"].external
+
+ @pytest.mark.regression("22871")
+ @pytest.mark.parametrize(
+ "spec_str,expected_os",
+ [
+ ("mpileaks", "os=debian6"),
+ # To trigger the bug in 22871 we need to have the same compiler
+ # spec available on both operating systems
+ ("mpileaks%gcc@4.5.0 platform=test os=debian6", "os=debian6"),
+ ("mpileaks%gcc@4.5.0 platform=test os=redhat6", "os=redhat6"),
+ ],
+ )
+ def test_os_selection_when_multiple_choices_are_possible(self, spec_str, expected_os):
s = Spec(spec_str).concretized()
for node in s.traverse():
assert node.satisfies(expected_os)
- @pytest.mark.regression('22718')
- @pytest.mark.parametrize('spec_str,expected_compiler', [
- ('mpileaks', '%gcc@4.5.0'),
- ('mpileaks ^mpich%clang@3.3', '%clang@3.3')
- ])
+ @pytest.mark.regression("22718")
+ @pytest.mark.parametrize(
+ "spec_str,expected_compiler",
+ [("mpileaks", "%gcc@4.5.0"), ("mpileaks ^mpich%clang@3.3", "%clang@3.3")],
+ )
def test_compiler_is_unique(self, spec_str, expected_compiler):
s = Spec(spec_str).concretized()
for node in s.traverse():
assert node.satisfies(expected_compiler)
- @pytest.mark.parametrize('spec_str,expected_dict', [
- # Check the defaults from the package (libs=shared)
- ('multivalue-variant', {
- 'libs=shared': True,
- 'libs=static': False
- }),
- # Check that libs=static doesn't extend the default
- ('multivalue-variant libs=static', {
- 'libs=shared': False,
- 'libs=static': True
- }),
- ])
+ @pytest.mark.parametrize(
+ "spec_str,expected_dict",
+ [
+ # Check the defaults from the package (libs=shared)
+ ("multivalue-variant", {"libs=shared": True, "libs=static": False}),
+ # Check that libs=static doesn't extend the default
+ ("multivalue-variant libs=static", {"libs=shared": False, "libs=static": True}),
+ ],
+ )
def test_multivalued_variants_from_cli(self, spec_str, expected_dict):
s = Spec(spec_str).concretized()
for constraint, value in expected_dict.items():
assert s.satisfies(constraint) == value
- @pytest.mark.regression('22351')
- @pytest.mark.parametrize('spec_str,expected', [
- # Version 1.1.0 is deprecated and should not be selected, unless we
- # explicitly asked for that
- ('deprecated-versions', ['deprecated-versions@1.0.0']),
- ('deprecated-versions@1.1.0', ['deprecated-versions@1.1.0']),
- ])
+ @pytest.mark.regression("22351")
+ @pytest.mark.parametrize(
+ "spec_str,expected",
+ [
+ # Version 1.1.0 is deprecated and should not be selected, unless we
+ # explicitly asked for that
+ ("deprecated-versions", ["deprecated-versions@1.0.0"]),
+ ("deprecated-versions@1.1.0", ["deprecated-versions@1.1.0"]),
+ ],
+ )
def test_deprecated_versions_not_selected(self, spec_str, expected):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
s = Spec(spec_str).concretized()
for abstract_spec in expected:
assert abstract_spec in s
- @pytest.mark.regression('24196')
+ @pytest.mark.regression("24196")
def test_version_badness_more_important_than_default_mv_variants(self):
# If a dependency had an old version that for some reason pulls in
# a transitive dependency with a multi-valued variant, that old
# version was preferred because of the order of our optimization
# criteria.
- s = spack.spec.Spec('root').concretized()
- assert s['gmt'].satisfies('@2.0')
+ s = spack.spec.Spec("root").concretized()
+ assert s["gmt"].satisfies("@2.0")
- @pytest.mark.regression('24205')
+ @pytest.mark.regression("24205")
def test_provider_must_meet_requirements(self):
# A package can be a provider of a virtual only if the underlying
# requirements are met.
- s = spack.spec.Spec('unsat-virtual-dependency')
+ s = spack.spec.Spec("unsat-virtual-dependency")
with pytest.raises((RuntimeError, spack.error.UnsatisfiableSpecError)):
s.concretize()
- @pytest.mark.regression('23951')
+ @pytest.mark.regression("23951")
def test_newer_dependency_adds_a_transitive_virtual(self):
# Ensure that a package doesn't concretize any of its transitive
# dependencies to an old version because newer versions pull in
@@ -1435,42 +1372,39 @@ class TestConcretize(object):
# root@1.0 <- middle@1.0 <- leaf@1.0
#
# and "blas" is pulled in only by newer versions of "leaf"
- s = spack.spec.Spec('root-adds-virtual').concretized()
- assert s['leaf-adds-virtual'].satisfies('@2.0')
- assert 'blas' in s
+ s = spack.spec.Spec("root-adds-virtual").concretized()
+ assert s["leaf-adds-virtual"].satisfies("@2.0")
+ assert "blas" in s
- @pytest.mark.regression('26718')
+ @pytest.mark.regression("26718")
def test_versions_in_virtual_dependencies(self):
# Ensure that a package that needs a given version of a virtual
# package doesn't end up using a later implementation
- s = spack.spec.Spec('hpcviewer@2019.02').concretized()
- assert s['java'].satisfies('virtual-with-versions@1.8.0')
+ s = spack.spec.Spec("hpcviewer@2019.02").concretized()
+ assert s["java"].satisfies("virtual-with-versions@1.8.0")
- @pytest.mark.regression('26866')
+ @pytest.mark.regression("26866")
def test_non_default_provider_of_multiple_virtuals(self):
- s = spack.spec.Spec(
- 'many-virtual-consumer ^low-priority-provider'
- ).concretized()
- assert s['mpi'].name == 'low-priority-provider'
- assert s['lapack'].name == 'low-priority-provider'
+ s = spack.spec.Spec("many-virtual-consumer ^low-priority-provider").concretized()
+ assert s["mpi"].name == "low-priority-provider"
+ assert s["lapack"].name == "low-priority-provider"
- for virtual_pkg in ('mpi', 'lapack'):
+ for virtual_pkg in ("mpi", "lapack"):
for pkg in spack.repo.path.providers_for(virtual_pkg):
- if pkg.name == 'low-priority-provider':
+ if pkg.name == "low-priority-provider":
continue
assert pkg not in s
- @pytest.mark.regression('27237')
- @pytest.mark.parametrize('spec_str,expect_installed', [
- ('mpich', True),
- ('mpich+debug', False),
- ('mpich~debug', True)
- ])
+ @pytest.mark.regression("27237")
+ @pytest.mark.parametrize(
+ "spec_str,expect_installed",
+ [("mpich", True), ("mpich+debug", False), ("mpich~debug", True)],
+ )
def test_concrete_specs_are_not_modified_on_reuse(
- self, mutable_database, spec_str, expect_installed, config
+ self, mutable_database, spec_str, expect_installed, config
):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Original concretizer cannot reuse specs')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Original concretizer cannot reuse specs")
# Test the internal consistency of solve + DAG reconstruction
# when reused specs are added to the mix. This prevents things
@@ -1481,29 +1415,27 @@ class TestConcretize(object):
assert s.installed is expect_installed
assert s.satisfies(spec_str, strict=True)
- @pytest.mark.regression('26721,19736')
+ @pytest.mark.regression("26721,19736")
def test_sticky_variant_in_package(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Original concretizer cannot use sticky variants')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Original concretizer cannot use sticky variants")
# Here we test that a sticky variant cannot be changed from its default value
# by the ASP solver if not set explicitly. The package used in the test needs
# to have +allow-gcc set to be concretized with %gcc and clingo is not allowed
# to change the default ~allow-gcc
with pytest.raises(spack.error.SpackError):
- spack.spec.Spec('sticky-variant %gcc').concretized()
+ spack.spec.Spec("sticky-variant %gcc").concretized()
- s = spack.spec.Spec('sticky-variant+allow-gcc %gcc').concretized()
- assert s.satisfies('%gcc') and s.satisfies('+allow-gcc')
+ s = spack.spec.Spec("sticky-variant+allow-gcc %gcc").concretized()
+ assert s.satisfies("%gcc") and s.satisfies("+allow-gcc")
- s = spack.spec.Spec('sticky-variant %clang').concretized()
- assert s.satisfies('%clang') and s.satisfies('~allow-gcc')
+ s = spack.spec.Spec("sticky-variant %clang").concretized()
+ assert s.satisfies("%clang") and s.satisfies("~allow-gcc")
def test_do_not_invent_new_concrete_versions_unless_necessary(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail(
- "Original concretizer doesn't resolve concrete versions to known ones"
- )
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Original concretizer doesn't resolve concrete versions to known ones")
# ensure we select a known satisfying version rather than creating
# a new '2.7' version.
@@ -1512,18 +1444,19 @@ class TestConcretize(object):
# Here there is no known satisfying version - use the one on the spec.
assert ver("2.7.21") == Spec("python@2.7.21").concretized().version
- @pytest.mark.parametrize('spec_str', [
- 'conditional-values-in-variant@1.62.0 cxxstd=17',
- 'conditional-values-in-variant@1.62.0 cxxstd=2a',
- 'conditional-values-in-variant@1.72.0 cxxstd=2a',
- # Ensure disjoint set of values work too
- 'conditional-values-in-variant@1.72.0 staging=flexpath',
- ])
+ @pytest.mark.parametrize(
+ "spec_str",
+ [
+ "conditional-values-in-variant@1.62.0 cxxstd=17",
+ "conditional-values-in-variant@1.62.0 cxxstd=2a",
+ "conditional-values-in-variant@1.72.0 cxxstd=2a",
+ # Ensure disjoint set of values work too
+ "conditional-values-in-variant@1.72.0 staging=flexpath",
+ ],
+ )
def test_conditional_values_in_variants(self, spec_str):
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip(
- "Original concretizer doesn't resolve conditional values in variants"
- )
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer doesn't resolve conditional values in variants")
s = Spec(spec_str)
with pytest.raises((RuntimeError, spack.error.UnsatisfiableSpecError)):
@@ -1531,105 +1464,95 @@ class TestConcretize(object):
def test_conditional_values_in_conditional_variant(self):
"""Test that conditional variants play well with conditional possible values"""
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip(
- "Original concretizer doesn't resolve conditional values in variants"
- )
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer doesn't resolve conditional values in variants")
- s = Spec('conditional-values-in-variant@1.50.0').concretized()
- assert 'cxxstd' not in s.variants
+ s = Spec("conditional-values-in-variant@1.50.0").concretized()
+ assert "cxxstd" not in s.variants
- s = Spec('conditional-values-in-variant@1.60.0').concretized()
- assert 'cxxstd' in s.variants
+ s = Spec("conditional-values-in-variant@1.60.0").concretized()
+ assert "cxxstd" in s.variants
def test_target_granularity(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip(
- 'Original concretizer cannot account for target granularity'
- )
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot account for target granularity")
# The test architecture uses core2 as the default target. Check that when
# we configure Spack for "generic" granularity we concretize for x86_64
- s = Spec('python')
- assert s.concretized().satisfies('target=core2')
- with spack.config.override('concretizer:targets', {'granularity': 'generic'}):
- assert s.concretized().satisfies('target=x86_64')
+ s = Spec("python")
+ assert s.concretized().satisfies("target=core2")
+ with spack.config.override("concretizer:targets", {"granularity": "generic"}):
+ assert s.concretized().satisfies("target=x86_64")
def test_host_compatible_concretization(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip(
- 'Original concretizer cannot account for host compatibility'
- )
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot account for host compatibility")
# Check that after setting "host_compatible" to false we cannot concretize.
# Here we use "k10" to set a target non-compatible with the current host
# to avoid a lot of boilerplate when mocking the test platform. The issue
# is that the defaults for the test platform are very old, so there's no
# compiler supporting e.g. icelake etc.
- s = Spec('python target=k10')
+ s = Spec("python target=k10")
assert s.concretized()
- with spack.config.override('concretizer:targets', {'host_compatible': True}):
+ with spack.config.override("concretizer:targets", {"host_compatible": True}):
with pytest.raises(spack.error.SpackError):
s.concretized()
def test_add_microarchitectures_on_explicit_request(self):
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip(
- 'Original concretizer cannot account for host compatibility'
- )
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot account for host compatibility")
# Check that if we consider only "generic" targets, we can still solve for
# specific microarchitectures on explicit requests
- with spack.config.override('concretizer:targets', {'granularity': 'generic'}):
- s = Spec('python target=k10').concretized()
- assert s.satisfies('target=k10')
+ with spack.config.override("concretizer:targets", {"granularity": "generic"}):
+ s = Spec("python target=k10").concretized()
+ assert s.satisfies("target=k10")
- @pytest.mark.regression('29201')
- def test_delete_version_and_reuse(
- self, mutable_database, repo_with_changing_recipe
- ):
+ @pytest.mark.regression("29201")
+ def test_delete_version_and_reuse(self, mutable_database, repo_with_changing_recipe):
"""Test that we can reuse installed specs with versions not
declared in package.py
"""
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
- root = Spec('root').concretized()
+ root = Spec("root").concretized()
root.package.do_install(fake=True, explicit=True)
- repo_with_changing_recipe.change({'delete_version': True})
+ repo_with_changing_recipe.change({"delete_version": True})
with spack.config.override("concretizer:reuse", True):
- new_root = Spec('root').concretized()
+ new_root = Spec("root").concretized()
assert root.dag_hash() == new_root.dag_hash()
- @pytest.mark.regression('29201')
+ @pytest.mark.regression("29201")
def test_installed_version_is_selected_only_for_reuse(
- self, mutable_database, repo_with_changing_recipe
+ self, mutable_database, repo_with_changing_recipe
):
"""Test that a version coming from an installed spec is a possible
version only for reuse
"""
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known failure of the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known failure of the original concretizer")
# Install a dependency that cannot be reused with "root"
        # because of a conflict on a variant, then delete its version
- dependency = Spec('changing@1.0~foo').concretized()
+ dependency = Spec("changing@1.0~foo").concretized()
dependency.package.do_install(fake=True, explicit=True)
- repo_with_changing_recipe.change({'delete_version': True})
+ repo_with_changing_recipe.change({"delete_version": True})
with spack.config.override("concretizer:reuse", True):
- new_root = Spec('root').concretized()
+ new_root = Spec("root").concretized()
- assert not new_root['changing'].satisfies('@1.0')
+ assert not new_root["changing"].satisfies("@1.0")
- @pytest.mark.regression('28259')
+ @pytest.mark.regression("28259")
def test_reuse_with_unknown_namespace_dont_raise(
- self, additional_repo_with_c, mutable_mock_repo
+ self, additional_repo_with_c, mutable_mock_repo
):
- s = Spec('c').concretized()
- assert s.namespace == 'myrepo'
+ s = Spec("c").concretized()
+ assert s.namespace == "myrepo"
s.package.do_install(fake=True, explicit=True)
# TODO: To mock repo removal we need to recreate the RepoPath
@@ -1637,43 +1560,47 @@ class TestConcretize(object):
spack.repo.path = spack.repo.RepoPath(*spack.repo.path.repos)
with spack.config.override("concretizer:reuse", True):
- s = Spec('c').concretized()
- assert s.namespace == 'builtin.mock'
+ s = Spec("c").concretized()
+ assert s.namespace == "builtin.mock"
- @pytest.mark.regression('28259')
+ @pytest.mark.regression("28259")
def test_reuse_with_unknown_package_dont_raise(
- self, additional_repo_with_c, mutable_mock_repo, monkeypatch
+ self, additional_repo_with_c, mutable_mock_repo, monkeypatch
):
- s = Spec('c').concretized()
- assert s.namespace == 'myrepo'
+ s = Spec("c").concretized()
+ assert s.namespace == "myrepo"
s.package.do_install(fake=True, explicit=True)
# Here we delete the package.py instead of removing the repo and we
# make it such that "c" doesn't exist in myrepo
- del sys.modules['spack.pkg.myrepo.c']
- c_dir = os.path.join(additional_repo_with_c.root, 'packages', 'c')
+ del sys.modules["spack.pkg.myrepo.c"]
+ c_dir = os.path.join(additional_repo_with_c.root, "packages", "c")
shutil.rmtree(c_dir)
- monkeypatch.setattr(additional_repo_with_c, 'exists', lambda x: False)
+ monkeypatch.setattr(additional_repo_with_c, "exists", lambda x: False)
with spack.config.override("concretizer:reuse", True):
- s = Spec('c').concretized()
- assert s.namespace == 'builtin.mock'
-
- @pytest.mark.parametrize('specs,expected', [
- (['libelf', 'libelf@0.8.10'], 1),
- (['libdwarf%gcc', 'libelf%clang'], 2),
- (['libdwarf%gcc', 'libdwarf%clang'], 4),
- (['libdwarf^libelf@0.8.12', 'libdwarf^libelf@0.8.13'], 4),
- (['hdf5', 'zmpi'], 3),
- (['hdf5', 'mpich'], 2),
- (['hdf5^zmpi', 'mpich'], 4),
- (['mpi', 'zmpi'], 2),
- (['mpi', 'mpich'], 1),
- ])
+ s = Spec("c").concretized()
+ assert s.namespace == "builtin.mock"
+
+ @pytest.mark.parametrize(
+ "specs,expected",
+ [
+ (["libelf", "libelf@0.8.10"], 1),
+ (["libdwarf%gcc", "libelf%clang"], 2),
+ (["libdwarf%gcc", "libdwarf%clang"], 4),
+ (["libdwarf^libelf@0.8.12", "libdwarf^libelf@0.8.13"], 4),
+ (["hdf5", "zmpi"], 3),
+ (["hdf5", "mpich"], 2),
+ (["hdf5^zmpi", "mpich"], 4),
+ (["mpi", "zmpi"], 2),
+ (["mpi", "mpich"], 1),
+ ],
+ )
def test_best_effort_coconcretize(self, specs, expected):
import spack.solver.asp
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip('Original concretizer cannot concretize in rounds')
+
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot concretize in rounds")
specs = [spack.spec.Spec(s) for s in specs]
solver = spack.solver.asp.Solver()
@@ -1685,30 +1612,42 @@ class TestConcretize(object):
assert len(concrete_specs) == expected
- @pytest.mark.parametrize('specs,expected_spec,occurances', [
- # The algorithm is greedy, and it might decide to solve the "best"
- # spec early in which case reuse is suboptimal. In this case the most
- # recent version of libdwarf is selected and concretized to libelf@0.8.13
- (['libdwarf@20111030^libelf@0.8.10',
- 'libdwarf@20130207^libelf@0.8.12',
- 'libdwarf@20130729'], 'libelf@0.8.12', 1),
- # Check we reuse the best libelf in the environment
- (['libdwarf@20130729^libelf@0.8.10',
- 'libdwarf@20130207^libelf@0.8.12',
- 'libdwarf@20111030'], 'libelf@0.8.12', 2),
- (['libdwarf@20130729',
- 'libdwarf@20130207',
- 'libdwarf@20111030'], 'libelf@0.8.13', 3),
- # We need to solve in 2 rounds and we expect mpich to be preferred to zmpi
- (['hdf5+mpi', 'zmpi', 'mpich'], 'mpich', 2)
- ])
- def test_best_effort_coconcretize_preferences(
- self, specs, expected_spec, occurances
- ):
+ @pytest.mark.parametrize(
+ "specs,expected_spec,occurances",
+ [
+ # The algorithm is greedy, and it might decide to solve the "best"
+ # spec early in which case reuse is suboptimal. In this case the most
+ # recent version of libdwarf is selected and concretized to libelf@0.8.13
+ (
+ [
+ "libdwarf@20111030^libelf@0.8.10",
+ "libdwarf@20130207^libelf@0.8.12",
+ "libdwarf@20130729",
+ ],
+ "libelf@0.8.12",
+ 1,
+ ),
+ # Check we reuse the best libelf in the environment
+ (
+ [
+ "libdwarf@20130729^libelf@0.8.10",
+ "libdwarf@20130207^libelf@0.8.12",
+ "libdwarf@20111030",
+ ],
+ "libelf@0.8.12",
+ 2,
+ ),
+ (["libdwarf@20130729", "libdwarf@20130207", "libdwarf@20111030"], "libelf@0.8.13", 3),
+ # We need to solve in 2 rounds and we expect mpich to be preferred to zmpi
+ (["hdf5+mpi", "zmpi", "mpich"], "mpich", 2),
+ ],
+ )
+ def test_best_effort_coconcretize_preferences(self, specs, expected_spec, occurances):
"""Test package preferences during coconcretization."""
import spack.solver.asp
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip('Original concretizer cannot concretize in rounds')
+
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot concretize in rounds")
specs = [spack.spec.Spec(s) for s in specs]
solver = spack.solver.asp.Solver()
@@ -1723,46 +1662,45 @@ class TestConcretize(object):
counter += 1
assert counter == occurances, concrete_specs
- @pytest.mark.regression('30864')
+ @pytest.mark.regression("30864")
def test_misleading_error_message_on_version(self, mutable_database):
# For this bug to be triggered we need a reusable dependency
# that is not optimal in terms of optimization scores.
# We pick an old version of "b"
import spack.solver.asp
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip('Original concretizer cannot reuse')
- reusable_specs = [
- spack.spec.Spec('non-existing-conditional-dep@1.0').concretized()
- ]
- root_spec = spack.spec.Spec('non-existing-conditional-dep@2.0')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot reuse")
+
+ reusable_specs = [spack.spec.Spec("non-existing-conditional-dep@1.0").concretized()]
+ root_spec = spack.spec.Spec("non-existing-conditional-dep@2.0")
with spack.config.override("concretizer:reuse", True):
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
- with pytest.raises(spack.solver.asp.UnsatisfiableSpecError,
- match="'dep-with-variants' satisfies '@999'"):
+ with pytest.raises(
+ spack.solver.asp.UnsatisfiableSpecError,
+ match="'dep-with-variants' satisfies '@999'",
+ ):
solver.driver.solve(setup, [root_spec], reuse=reusable_specs)
- @pytest.mark.regression('31148')
+ @pytest.mark.regression("31148")
def test_version_weight_and_provenance(self):
"""Test package preferences during coconcretization."""
import spack.solver.asp
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip('Original concretizer cannot reuse')
+
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot reuse")
reusable_specs = [
- spack.spec.Spec(spec_str).concretized()
- for spec_str in ('b@0.9', 'b@1.0')
+ spack.spec.Spec(spec_str).concretized() for spec_str in ("b@0.9", "b@1.0")
]
- root_spec = spack.spec.Spec('a foobar=bar')
+ root_spec = spack.spec.Spec("a foobar=bar")
with spack.config.override("concretizer:reuse", True):
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
- result = solver.driver.solve(
- setup, [root_spec], reuse=reusable_specs, out=sys.stdout
- )
+ result = solver.driver.solve(setup, [root_spec], reuse=reusable_specs, out=sys.stdout)
# The result here should have a single spec to build ('a')
# and it should be using b@1.0 with a version badness of 2
# The provenance is:
@@ -1771,30 +1709,29 @@ class TestConcretize(object):
# version_declared("b","1.0",2,"installed").
# version_declared("b","0.9",3,"installed").
for criterion in [
- (1, None, 'number of packages to build (vs. reuse)'),
- (2, 0, 'version badness')
+ (1, None, "number of packages to build (vs. reuse)"),
+ (2, 0, "version badness"),
]:
assert criterion in result.criteria
- assert result.specs[0].satisfies('^b@1.0')
+ assert result.specs[0].satisfies("^b@1.0")
- @pytest.mark.regression('31169')
+ @pytest.mark.regression("31169")
def test_not_reusing_incompatible_os_or_compiler(self):
import spack.solver.asp
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip('Original concretizer cannot reuse')
- root_spec = spack.spec.Spec('b')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer cannot reuse")
+
+ root_spec = spack.spec.Spec("b")
s = root_spec.concretized()
wrong_compiler, wrong_os = s.copy(), s.copy()
- wrong_compiler.compiler = spack.spec.CompilerSpec('gcc@12.1.0')
- wrong_os.architecture = spack.spec.ArchSpec('test-ubuntu2204-x86_64')
+ wrong_compiler.compiler = spack.spec.CompilerSpec("gcc@12.1.0")
+ wrong_os.architecture = spack.spec.ArchSpec("test-ubuntu2204-x86_64")
reusable_specs = [wrong_compiler, wrong_os]
with spack.config.override("concretizer:reuse", True):
solver = spack.solver.asp.Solver()
setup = spack.solver.asp.SpackSolverSetup()
- result = solver.driver.solve(
- setup, [root_spec], reuse=reusable_specs, out=sys.stdout
- )
+ result = solver.driver.solve(setup, [root_spec], reuse=reusable_specs, out=sys.stdout)
concrete_spec = result.specs[0]
- assert concrete_spec.satisfies('%gcc@4.5.0')
- assert concrete_spec.satisfies('os=debian6')
+ assert concrete_spec.satisfies("%gcc@4.5.0")
+ assert concrete_spec.satisfies("os=debian6")
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
index 28bb13ee22..379e5447d0 100644
--- a/lib/spack/spack/test/concretize_preferences.py
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -20,9 +20,8 @@ from spack.version import Version
@pytest.fixture()
def concretize_scope(mutable_config, tmpdir):
"""Adds a scope for concretization preferences"""
- tmpdir.ensure_dir('concretize')
- mutable_config.push_scope(
- ConfigScope('concretize', str(tmpdir.join('concretize'))))
+ tmpdir.ensure_dir("concretize")
+ mutable_config.push_scope(ConfigScope("concretize", str(tmpdir.join("concretize"))))
yield
@@ -32,7 +31,8 @@ def concretize_scope(mutable_config, tmpdir):
@pytest.fixture()
def configure_permissions():
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
permissions:
read: group
@@ -49,8 +49,9 @@ mpileaks:
callpath:
permissions:
write: world
-""")
- spack.config.set('packages', conf, scope='concretize')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
yield
@@ -62,7 +63,7 @@ def concretize(abstract_spec):
def update_packages(pkgname, section, value):
"""Update config and reread package list"""
conf = {pkgname: {section: value}}
- spack.config.set('packages', conf, scope='concretize')
+ spack.config.set("packages", conf, scope="concretize")
def assert_variant_values(spec, **variants):
@@ -71,152 +72,161 @@ def assert_variant_values(spec, **variants):
assert concrete.variants[variant].value == value
-@pytest.mark.usefixtures('concretize_scope', 'mock_packages')
+@pytest.mark.usefixtures("concretize_scope", "mock_packages")
class TestConcretizePreferences(object):
- @pytest.mark.parametrize('package_name,variant_value,expected_results', [
- ('mpileaks', '~debug~opt+shared+static',
- {'debug': False, 'opt': False, 'shared': True, 'static': True}),
- # Check that using a list of variants instead of a single string works
- ('mpileaks', ['~debug', '~opt', '+shared', '+static'],
- {'debug': False, 'opt': False, 'shared': True, 'static': True}),
- # Use different values for the variants and check them again
- ('mpileaks', ['+debug', '+opt', '~shared', '-static'],
- {'debug': True, 'opt': True, 'shared': False, 'static': False}),
- # Check a multivalued variant with multiple values set
- ('multivalue-variant', ['foo=bar,baz', 'fee=bar'],
- {'foo': ('bar', 'baz'), 'fee': 'bar'}),
- ('singlevalue-variant', ['fum=why'],
- {'fum': 'why'})
- ])
- def test_preferred_variants(
- self, package_name, variant_value, expected_results
- ):
+ @pytest.mark.parametrize(
+ "package_name,variant_value,expected_results",
+ [
+ (
+ "mpileaks",
+ "~debug~opt+shared+static",
+ {"debug": False, "opt": False, "shared": True, "static": True},
+ ),
+ # Check that using a list of variants instead of a single string works
+ (
+ "mpileaks",
+ ["~debug", "~opt", "+shared", "+static"],
+ {"debug": False, "opt": False, "shared": True, "static": True},
+ ),
+ # Use different values for the variants and check them again
+ (
+ "mpileaks",
+ ["+debug", "+opt", "~shared", "-static"],
+ {"debug": True, "opt": True, "shared": False, "static": False},
+ ),
+ # Check a multivalued variant with multiple values set
+ (
+ "multivalue-variant",
+ ["foo=bar,baz", "fee=bar"],
+ {"foo": ("bar", "baz"), "fee": "bar"},
+ ),
+ ("singlevalue-variant", ["fum=why"], {"fum": "why"}),
+ ],
+ )
+ def test_preferred_variants(self, package_name, variant_value, expected_results):
"""Test preferred variants are applied correctly"""
- update_packages(package_name, 'variants', variant_value)
+ update_packages(package_name, "variants", variant_value)
assert_variant_values(package_name, **expected_results)
def test_preferred_variants_from_wildcard(self):
"""
Test that 'foo=*' concretizes to any value
"""
- update_packages('multivalue-variant', 'variants', 'foo=bar')
- assert_variant_values(
- 'multivalue-variant foo=*', foo=('bar',)
- )
+ update_packages("multivalue-variant", "variants", "foo=bar")
+ assert_variant_values("multivalue-variant foo=*", foo=("bar",))
def test_preferred_compilers(self):
- """Test preferred compilers are applied correctly
- """
+ """Test preferred compilers are applied correctly"""
# Need to make sure the test uses an available compiler
compiler_list = spack.compilers.all_compiler_specs()
assert compiler_list
# Try the first available compiler
compiler = str(compiler_list[0])
- update_packages('mpileaks', 'compiler', [compiler])
- spec = concretize('mpileaks')
+ update_packages("mpileaks", "compiler", [compiler])
+ spec = concretize("mpileaks")
assert spec.compiler == spack.spec.CompilerSpec(compiler)
# Try the last available compiler
compiler = str(compiler_list[-1])
- update_packages('mpileaks', 'compiler', [compiler])
- spec = concretize('mpileaks os=redhat6')
+ update_packages("mpileaks", "compiler", [compiler])
+ spec = concretize("mpileaks os=redhat6")
assert spec.compiler == spack.spec.CompilerSpec(compiler)
def test_preferred_target(self, mutable_mock_repo):
"""Test preferred targets are applied correctly"""
# FIXME: This test was a false negative, since the default and
# FIXME: the preferred target were the same
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('Known bug in the original concretizer')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("Known bug in the original concretizer")
- spec = concretize('mpich')
+ spec = concretize("mpich")
default = str(spec.target)
preferred = str(spec.target.family)
- update_packages('mpich', 'target', [preferred])
- spec = concretize('mpich')
+ update_packages("mpich", "target", [preferred])
+ spec = concretize("mpich")
assert str(spec.target) == preferred
- spec = concretize('mpileaks')
- assert str(spec['mpileaks'].target) == preferred
- assert str(spec['mpich'].target) == preferred
+ spec = concretize("mpileaks")
+ assert str(spec["mpileaks"].target) == preferred
+ assert str(spec["mpich"].target) == preferred
- update_packages('mpileaks', 'target', [default])
- spec = concretize('mpileaks')
- assert str(spec['mpileaks'].target) == default
- assert str(spec['mpich'].target) == default
+ update_packages("mpileaks", "target", [default])
+ spec = concretize("mpileaks")
+ assert str(spec["mpileaks"].target) == default
+ assert str(spec["mpich"].target) == default
def test_preferred_versions(self):
- """Test preferred package versions are applied correctly
- """
- update_packages('mpileaks', 'version', ['2.3'])
- spec = concretize('mpileaks')
- assert spec.version == Version('2.3')
+ """Test preferred package versions are applied correctly"""
+ update_packages("mpileaks", "version", ["2.3"])
+ spec = concretize("mpileaks")
+ assert spec.version == Version("2.3")
- update_packages('mpileaks', 'version', ['2.2'])
- spec = concretize('mpileaks')
- assert spec.version == Version('2.2')
+ update_packages("mpileaks", "version", ["2.2"])
+ spec = concretize("mpileaks")
+ assert spec.version == Version("2.2")
def test_preferred_versions_mixed_version_types(self):
- update_packages('mixedversions', 'version', ['2.0'])
- spec = concretize('mixedversions')
- assert spec.version == Version('2.0')
+ update_packages("mixedversions", "version", ["2.0"])
+ spec = concretize("mixedversions")
+ assert spec.version == Version("2.0")
def test_preferred_providers(self):
"""Test preferred providers of virtual packages are
applied correctly
"""
- update_packages('all', 'providers', {'mpi': ['mpich']})
- spec = concretize('mpileaks')
- assert 'mpich' in spec
+ update_packages("all", "providers", {"mpi": ["mpich"]})
+ spec = concretize("mpileaks")
+ assert "mpich" in spec
- update_packages('all', 'providers', {'mpi': ['zmpi']})
- spec = concretize('mpileaks')
- assert 'zmpi' in spec
+ update_packages("all", "providers", {"mpi": ["zmpi"]})
+ spec = concretize("mpileaks")
+ assert "zmpi" in spec
def test_preferred(self):
- """"Test packages with some version marked as preferred=True"""
- spec = Spec('python')
+ """ "Test packages with some version marked as preferred=True"""
+ spec = Spec("python")
spec.concretize()
- assert spec.version == Version('2.7.11')
+ assert spec.version == Version("2.7.11")
# now add packages.yaml with versions other than preferred
# ensure that once config is in place, non-preferred version is used
- update_packages('python', 'version', ['3.5.0'])
- spec = Spec('python')
+ update_packages("python", "version", ["3.5.0"])
+ spec = Spec("python")
spec.concretize()
- assert spec.version == Version('3.5.0')
+ assert spec.version == Version("3.5.0")
def test_develop(self):
"""Test concretization with develop-like versions"""
- spec = Spec('develop-test')
+ spec = Spec("develop-test")
spec.concretize()
- assert spec.version == Version('0.2.15')
- spec = Spec('develop-test2')
+ assert spec.version == Version("0.2.15")
+ spec = Spec("develop-test2")
spec.concretize()
- assert spec.version == Version('0.2.15')
+ assert spec.version == Version("0.2.15")
# now add packages.yaml with develop-like versions
# ensure that once config is in place, develop-like version is used
- update_packages('develop-test', 'version', ['develop'])
- spec = Spec('develop-test')
+ update_packages("develop-test", "version", ["develop"])
+ spec = Spec("develop-test")
spec.concretize()
- assert spec.version == Version('develop')
+ assert spec.version == Version("develop")
- update_packages('develop-test2', 'version', ['0.2.15.develop'])
- spec = Spec('develop-test2')
+ update_packages("develop-test2", "version", ["0.2.15.develop"])
+ spec = Spec("develop-test2")
spec.concretize()
- assert spec.version == Version('0.2.15.develop')
+ assert spec.version == Version("0.2.15.develop")
def test_external_mpi(self):
# make sure this doesn't give us an external first.
- spec = Spec('mpi')
+ spec = Spec("mpi")
spec.concretize()
- assert not spec['mpi'].external
+ assert not spec["mpi"].external
# load config
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
providers:
mpi: [mpich]
@@ -225,13 +235,14 @@ mpich:
externals:
- spec: mpich@3.0.4
prefix: /dummy/path
-""")
- spack.config.set('packages', conf, scope='concretize')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
# ensure that once config is in place, external is used
- spec = Spec('mpi')
+ spec = Spec("mpi")
spec.concretize()
- assert spec['mpich'].external_path == os.sep + os.path.join('dummy', 'path')
+ assert spec["mpich"].external_path == os.sep + os.path.join("dummy", "path")
def test_external_module(self, monkeypatch):
"""Test that packages can find externals specified by module
@@ -240,15 +251,17 @@ mpich:
This just tests that the preference is accounted for"""
# make sure this doesn't give us an external first.
def mock_module(cmd, module):
- return 'prepend-path PATH /dummy/path'
- monkeypatch.setattr(spack.util.module_cmd, 'module', mock_module)
+ return "prepend-path PATH /dummy/path"
- spec = Spec('mpi')
+ monkeypatch.setattr(spack.util.module_cmd, "module", mock_module)
+
+ spec = Spec("mpi")
spec.concretize()
- assert not spec['mpi'].external
+ assert not spec["mpi"].external
# load config
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
providers:
mpi: [mpich]
@@ -257,76 +270,87 @@ mpi:
externals:
- spec: mpich@3.0.4
modules: [dummy]
-""")
- spack.config.set('packages', conf, scope='concretize')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
# ensure that once config is in place, external is used
- spec = Spec('mpi')
+ spec = Spec("mpi")
spec.concretize()
- assert spec['mpich'].external_path == '/dummy/path'
+ assert spec["mpich"].external_path == "/dummy/path"
def test_buildable_false(self):
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
libelf:
buildable: false
-""")
- spack.config.set('packages', conf, scope='concretize')
- spec = Spec('libelf')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
+ spec = Spec("libelf")
assert not spack.package_prefs.is_spec_buildable(spec)
- spec = Spec('mpich')
+ spec = Spec("mpich")
assert spack.package_prefs.is_spec_buildable(spec)
def test_buildable_false_virtual(self):
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
mpi:
buildable: false
-""")
- spack.config.set('packages', conf, scope='concretize')
- spec = Spec('libelf')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
+ spec = Spec("libelf")
assert spack.package_prefs.is_spec_buildable(spec)
- spec = Spec('mpich')
+ spec = Spec("mpich")
assert not spack.package_prefs.is_spec_buildable(spec)
def test_buildable_false_all(self):
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
buildable: false
-""")
- spack.config.set('packages', conf, scope='concretize')
- spec = Spec('libelf')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
+ spec = Spec("libelf")
assert not spack.package_prefs.is_spec_buildable(spec)
- spec = Spec('mpich')
+ spec = Spec("mpich")
assert not spack.package_prefs.is_spec_buildable(spec)
def test_buildable_false_all_true_package(self):
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
buildable: false
libelf:
buildable: true
-""")
- spack.config.set('packages', conf, scope='concretize')
- spec = Spec('libelf')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
+ spec = Spec("libelf")
assert spack.package_prefs.is_spec_buildable(spec)
- spec = Spec('mpich')
+ spec = Spec("mpich")
assert not spack.package_prefs.is_spec_buildable(spec)
def test_buildable_false_all_true_virtual(self):
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
buildable: false
mpi:
buildable: true
-""")
- spack.config.set('packages', conf, scope='concretize')
- spec = Spec('libelf')
+"""
+ )
+ spack.config.set("packages", conf, scope="concretize")
+ spec = Spec("libelf")
assert not spack.package_prefs.is_spec_buildable(spec)
- spec = Spec('mpich')
+ spec = Spec("mpich")
assert spack.package_prefs.is_spec_buildable(spec)
def test_config_permissions_from_all(self, configure_permissions):
@@ -335,7 +359,7 @@ mpi:
# Make sure we can configure readable and writable
# Test inheriting from 'all'
- spec = Spec('zmpi')
+ spec = Spec("zmpi")
perms = spack.package_prefs.get_package_permissions(spec)
assert perms == stat.S_IRWXU | stat.S_IRWXG
@@ -343,11 +367,11 @@ mpi:
assert dir_perms == stat.S_IRWXU | stat.S_IRWXG | stat.S_ISGID
group = spack.package_prefs.get_package_group(spec)
- assert group == 'all'
+ assert group == "all"
def test_config_permissions_from_package(self, configure_permissions):
# Test overriding 'all'
- spec = Spec('mpich')
+ spec = Spec("mpich")
perms = spack.package_prefs.get_package_permissions(spec)
assert perms == stat.S_IRWXU
@@ -355,11 +379,11 @@ mpi:
assert dir_perms == stat.S_IRWXU
group = spack.package_prefs.get_package_group(spec)
- assert group == 'all'
+ assert group == "all"
def test_config_permissions_differ_read_write(self, configure_permissions):
# Test overriding group from 'all' and different readable/writable
- spec = Spec('mpileaks')
+ spec = Spec("mpileaks")
perms = spack.package_prefs.get_package_permissions(spec)
assert perms == stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP
@@ -368,37 +392,37 @@ mpi:
assert dir_perms == expected
group = spack.package_prefs.get_package_group(spec)
- assert group == 'mpileaks'
+ assert group == "mpileaks"
def test_config_perms_fail_write_gt_read(self, configure_permissions):
# Test failure for writable more permissive than readable
- spec = Spec('callpath')
+ spec = Spec("callpath")
with pytest.raises(ConfigError):
spack.package_prefs.get_package_permissions(spec)
- @pytest.mark.regression('20040')
+ @pytest.mark.regression("20040")
def test_variant_not_flipped_to_pull_externals(self):
"""Test that a package doesn't prefer pulling in an
external to using the default value of a variant.
"""
- s = Spec('vdefault-or-external-root').concretized()
+ s = Spec("vdefault-or-external-root").concretized()
- assert '~external' in s['vdefault-or-external']
- assert 'externaltool' not in s
+ assert "~external" in s["vdefault-or-external"]
+ assert "externaltool" not in s
- @pytest.mark.regression('25585')
+ @pytest.mark.regression("25585")
def test_dependencies_cant_make_version_parent_score_better(self):
"""Test that a package can't select a worse version for a
        dependent because doing so pulls in a dependency that makes
        the overall version score even or better and may score better
        on some lower-priority criteria.
"""
- s = Spec('version-test-root').concretized()
+ s = Spec("version-test-root").concretized()
- assert s.satisfies('^version-test-pkg@2.4.6')
- assert 'version-test-dependency-preferred' not in s
+ assert s.satisfies("^version-test-pkg@2.4.6")
+ assert "version-test-dependency-preferred" not in s
- @pytest.mark.regression('26598')
+ @pytest.mark.regression("26598")
def test_multivalued_variants_are_lower_priority_than_providers(self):
"""Test that the rule to maximize the number of values for multivalued
variants is considered at lower priority than selecting the default
@@ -409,21 +433,13 @@ mpi:
more fabrics by default.
"""
with spack.config.override(
- 'packages:all', {
- 'providers': {
- 'somevirtual': ['some-virtual-preferred']
- }
- }
+ "packages:all", {"providers": {"somevirtual": ["some-virtual-preferred"]}}
):
- s = Spec('somevirtual').concretized()
- assert s.name == 'some-virtual-preferred'
+ s = Spec("somevirtual").concretized()
+ assert s.name == "some-virtual-preferred"
- @pytest.mark.regression('26721,19736')
+ @pytest.mark.regression("26721,19736")
def test_sticky_variant_accounts_for_packages_yaml(self):
- with spack.config.override(
- 'packages:sticky-variant', {
- 'variants': '+allow-gcc'
- }
- ):
- s = Spec('sticky-variant %gcc').concretized()
- assert s.satisfies('%gcc') and s.satisfies('+allow-gcc')
+ with spack.config.override("packages:sticky-variant", {"variants": "+allow-gcc"}):
+ s = Spec("sticky-variant %gcc").concretized()
+ assert s.satisfies("%gcc") and s.satisfies("+allow-gcc")
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index e014941e94..5ecd761e24 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -30,47 +30,35 @@ import spack.util.spack_yaml as syaml
# sample config data
config_low = {
- 'config': {
- 'install_tree': {'root': 'install_tree_path'},
- 'build_stage': ['path1', 'path2', 'path3']}}
+ "config": {
+ "install_tree": {"root": "install_tree_path"},
+ "build_stage": ["path1", "path2", "path3"],
+ }
+}
-config_override_all = {
- 'config:': {
- 'install_tree:': {'root': 'override_all'}}}
+config_override_all = {"config:": {"install_tree:": {"root": "override_all"}}}
-config_override_key = {
- 'config': {
- 'install_tree:': {'root': 'override_key'}}}
+config_override_key = {"config": {"install_tree:": {"root": "override_key"}}}
-config_merge_list = {
- 'config': {
- 'build_stage': ['patha', 'pathb']}}
+config_merge_list = {"config": {"build_stage": ["patha", "pathb"]}}
-config_override_list = {
- 'config': {
- 'build_stage:': ['pathd', 'pathe']}}
+config_override_list = {"config": {"build_stage:": ["pathd", "pathe"]}}
-config_merge_dict = {
- 'config': {
- 'info': {
- 'a': 3,
- 'b': 4}}}
+config_merge_dict = {"config": {"info": {"a": 3, "b": 4}}}
-config_override_dict = {
- 'config': {
- 'info:': {
- 'a': 7,
- 'c': 9}}}
+config_override_dict = {"config": {"info:": {"a": 7, "c": 9}}}
@pytest.fixture()
def write_config_file(tmpdir):
"""Returns a function that writes a config file."""
+
def _write(config, data, scope):
- config_yaml = tmpdir.join(scope, config + '.yaml')
+ config_yaml = tmpdir.join(scope, config + ".yaml")
config_yaml.ensure()
- with config_yaml.open('w') as f:
+ with config_yaml.open("w") as f:
syaml.dump_config(data, f)
+
return _write
@@ -78,8 +66,9 @@ def write_config_file(tmpdir):
def env_yaml(tmpdir):
"""Return a sample env.yaml for test purposes"""
env_yaml = str(tmpdir.join("env.yaml"))
- with open(env_yaml, 'w') as f:
- f.write("""\
+ with open(env_yaml, "w") as f:
+ f.write(
+ """\
env:
config:
verify_ssl: False
@@ -89,7 +78,8 @@ env:
compiler: [ 'gcc@4.5.3' ]
repos:
- /x/y/z
-""")
+"""
+ )
return env_yaml
@@ -102,27 +92,27 @@ def cross_plat_join(*pths):
def check_compiler_config(comps, *compiler_names):
"""Check that named compilers in comps match Spack's config."""
- config = spack.config.get('compilers')
- compiler_list = ['cc', 'cxx', 'f77', 'fc']
- flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags',
- 'ldflags', 'ldlibs']
- param_list = ['modules', 'paths', 'spec', 'operating_system']
+ config = spack.config.get("compilers")
+ compiler_list = ["cc", "cxx", "f77", "fc"]
+ flag_list = ["cflags", "cxxflags", "fflags", "cppflags", "ldflags", "ldlibs"]
+ param_list = ["modules", "paths", "spec", "operating_system"]
for compiler in config:
- conf = compiler['compiler']
- if conf['spec'] in compiler_names:
- comp = next((c['compiler'] for c in comps if
- c['compiler']['spec'] == conf['spec']), None)
+ conf = compiler["compiler"]
+ if conf["spec"] in compiler_names:
+ comp = next(
+ (c["compiler"] for c in comps if c["compiler"]["spec"] == conf["spec"]), None
+ )
if not comp:
- raise ValueError('Bad config spec')
+ raise ValueError("Bad config spec")
for p in param_list:
assert conf[p] == comp[p]
for f in flag_list:
- expected = comp.get('flags', {}).get(f, None)
- actual = conf.get('flags', {}).get(f, None)
+ expected = comp.get("flags", {}).get(f, None)
+ actual = conf.get("flags", {}).get(f, None)
assert expected == actual
for c in compiler_list:
- expected = comp['paths'][c]
- actual = conf['paths'][c]
+ expected = comp["paths"][c]
+ actual = conf["paths"][c]
assert expected == actual
@@ -130,106 +120,94 @@ def check_compiler_config(comps, *compiler_names):
# Some sample compiler config data and tests.
#
a_comps = {
- 'compilers': [
- {'compiler': {
- 'paths': {
- "cc": "/gcc473",
- "cxx": "/g++473",
- "f77": None,
- "fc": None
- },
- 'modules': None,
- 'spec': 'gcc@4.7.3',
- 'operating_system': 'CNL10'
- }},
- {'compiler': {
- 'paths': {
- "cc": "/gcc450",
- "cxx": "/g++450",
- "f77": 'gfortran',
- "fc": 'gfortran'
- },
- 'modules': None,
- 'spec': 'gcc@4.5.0',
- 'operating_system': 'CNL10'
- }},
- {'compiler': {
- 'paths': {
- "cc": "/gcc422",
- "cxx": "/g++422",
- "f77": 'gfortran',
- "fc": 'gfortran'
- },
- 'flags': {
- "cppflags": "-O0 -fpic",
- "fflags": "-f77",
- },
- 'modules': None,
- 'spec': 'gcc@4.2.2',
- 'operating_system': 'CNL10'
- }},
- {'compiler': {
- 'paths': {
- "cc": "<overwritten>",
- "cxx": "<overwritten>",
- "f77": '<overwritten>',
- "fc": '<overwritten>'},
- 'modules': None,
- 'spec': 'clang@3.3',
- 'operating_system': 'CNL10'
- }}
+ "compilers": [
+ {
+ "compiler": {
+ "paths": {"cc": "/gcc473", "cxx": "/g++473", "f77": None, "fc": None},
+ "modules": None,
+ "spec": "gcc@4.7.3",
+ "operating_system": "CNL10",
+ }
+ },
+ {
+ "compiler": {
+ "paths": {"cc": "/gcc450", "cxx": "/g++450", "f77": "gfortran", "fc": "gfortran"},
+ "modules": None,
+ "spec": "gcc@4.5.0",
+ "operating_system": "CNL10",
+ }
+ },
+ {
+ "compiler": {
+ "paths": {"cc": "/gcc422", "cxx": "/g++422", "f77": "gfortran", "fc": "gfortran"},
+ "flags": {
+ "cppflags": "-O0 -fpic",
+ "fflags": "-f77",
+ },
+ "modules": None,
+ "spec": "gcc@4.2.2",
+ "operating_system": "CNL10",
+ }
+ },
+ {
+ "compiler": {
+ "paths": {
+ "cc": "<overwritten>",
+ "cxx": "<overwritten>",
+ "f77": "<overwritten>",
+ "fc": "<overwritten>",
+ },
+ "modules": None,
+ "spec": "clang@3.3",
+ "operating_system": "CNL10",
+ }
+ },
]
}
b_comps = {
- 'compilers': [
- {'compiler': {
- 'paths': {
- "cc": "/icc100",
- "cxx": "/icp100",
- "f77": None,
- "fc": None
- },
- 'modules': None,
- 'spec': 'icc@10.0',
- 'operating_system': 'CNL10'
- }},
- {'compiler': {
- 'paths': {
- "cc": "/icc111",
- "cxx": "/icp111",
- "f77": 'ifort',
- "fc": 'ifort'
- },
- 'modules': None,
- 'spec': 'icc@11.1',
- 'operating_system': 'CNL10'
- }},
- {'compiler': {
- 'paths': {
- "cc": "/icc123",
- "cxx": "/icp123",
- "f77": 'ifort',
- "fc": 'ifort'
- },
- 'flags': {
- "cppflags": "-O3",
- "fflags": "-f77rtl",
- },
- 'modules': None,
- 'spec': 'icc@12.3',
- 'operating_system': 'CNL10'
- }},
- {'compiler': {
- 'paths': {
- "cc": "<overwritten>",
- "cxx": "<overwritten>",
- "f77": '<overwritten>',
- "fc": '<overwritten>'},
- 'modules': None,
- 'spec': 'clang@3.3',
- 'operating_system': 'CNL10'
- }}
+ "compilers": [
+ {
+ "compiler": {
+ "paths": {"cc": "/icc100", "cxx": "/icp100", "f77": None, "fc": None},
+ "modules": None,
+ "spec": "icc@10.0",
+ "operating_system": "CNL10",
+ }
+ },
+ {
+ "compiler": {
+ "paths": {"cc": "/icc111", "cxx": "/icp111", "f77": "ifort", "fc": "ifort"},
+ "modules": None,
+ "spec": "icc@11.1",
+ "operating_system": "CNL10",
+ }
+ },
+ {
+ "compiler": {
+ "paths": {"cc": "/icc123", "cxx": "/icp123", "f77": "ifort", "fc": "ifort"},
+ "flags": {
+ "cppflags": "-O3",
+ "fflags": "-f77rtl",
+ },
+ "modules": None,
+ "spec": "icc@12.3",
+ "operating_system": "CNL10",
+ }
+ },
+ {
+ "compiler": {
+ "paths": {
+ "cc": "<overwritten>",
+ "cxx": "<overwritten>",
+ "f77": "<overwritten>",
+ "fc": "<overwritten>",
+ },
+ "modules": None,
+ "spec": "clang@3.3",
+ "operating_system": "CNL10",
+ }
+ },
]
}
@@ -237,53 +215,53 @@ b_comps = {
@pytest.fixture()
def compiler_specs():
"""Returns a couple of compiler specs needed for the tests"""
- a = [ac['compiler']['spec'] for ac in a_comps['compilers']]
- b = [bc['compiler']['spec'] for bc in b_comps['compilers']]
- CompilerSpecs = collections.namedtuple('CompilerSpecs', ['a', 'b'])
+ a = [ac["compiler"]["spec"] for ac in a_comps["compilers"]]
+ b = [bc["compiler"]["spec"] for bc in b_comps["compilers"]]
+ CompilerSpecs = collections.namedtuple("CompilerSpecs", ["a", "b"])
return CompilerSpecs(a=a, b=b)
def test_write_key_in_memory(mock_low_high_config, compiler_specs):
# Write b_comps "on top of" a_comps.
- spack.config.set('compilers', a_comps['compilers'], scope='low')
- spack.config.set('compilers', b_comps['compilers'], scope='high')
+ spack.config.set("compilers", a_comps["compilers"], scope="low")
+ spack.config.set("compilers", b_comps["compilers"], scope="high")
# Make sure the config looks how we expect.
- check_compiler_config(a_comps['compilers'], *compiler_specs.a)
- check_compiler_config(b_comps['compilers'], *compiler_specs.b)
+ check_compiler_config(a_comps["compilers"], *compiler_specs.a)
+ check_compiler_config(b_comps["compilers"], *compiler_specs.b)
def test_write_key_to_disk(mock_low_high_config, compiler_specs):
# Write b_comps "on top of" a_comps.
- spack.config.set('compilers', a_comps['compilers'], scope='low')
- spack.config.set('compilers', b_comps['compilers'], scope='high')
+ spack.config.set("compilers", a_comps["compilers"], scope="low")
+ spack.config.set("compilers", b_comps["compilers"], scope="high")
# Clear caches so we're forced to read from disk.
spack.config.config.clear_caches()
# Same check again, to ensure consistency.
- check_compiler_config(a_comps['compilers'], *compiler_specs.a)
- check_compiler_config(b_comps['compilers'], *compiler_specs.b)
+ check_compiler_config(a_comps["compilers"], *compiler_specs.a)
+ check_compiler_config(b_comps["compilers"], *compiler_specs.b)
def test_write_to_same_priority_file(mock_low_high_config, compiler_specs):
# Write b_comps in the same file as a_comps.
- spack.config.set('compilers', a_comps['compilers'], scope='low')
- spack.config.set('compilers', b_comps['compilers'], scope='low')
+ spack.config.set("compilers", a_comps["compilers"], scope="low")
+ spack.config.set("compilers", b_comps["compilers"], scope="low")
# Clear caches so we're forced to read from disk.
spack.config.config.clear_caches()
# Same check again, to ensure consistency.
- check_compiler_config(a_comps['compilers'], *compiler_specs.a)
- check_compiler_config(b_comps['compilers'], *compiler_specs.b)
+ check_compiler_config(a_comps["compilers"], *compiler_specs.a)
+ check_compiler_config(b_comps["compilers"], *compiler_specs.b)
#
# Sample repo data and tests
#
-repos_low = {'repos': ["/some/path"]}
-repos_high = {'repos': ["/some/other/path"]}
+repos_low = {"repos": ["/some/path"]}
+repos_high = {"repos": ["/some/other/path"]}
# Test setting config values via path in filename
@@ -292,23 +270,23 @@ def test_add_config_path(mutable_config):
# Try setting a new install tree root
path = "config:install_tree:root:/path/to/config.yaml"
spack.config.add(path)
- set_value = spack.config.get('config')['install_tree']['root']
- assert set_value == '/path/to/config.yaml'
+ set_value = spack.config.get("config")["install_tree"]["root"]
+ assert set_value == "/path/to/config.yaml"
# Now a package:all setting
path = "packages:all:compiler:[gcc]"
spack.config.add(path)
- compilers = spack.config.get('packages')['all']['compiler']
+ compilers = spack.config.get("packages")["all"]["compiler"]
assert "gcc" in compilers
-@pytest.mark.regression('17543,23259')
+@pytest.mark.regression("17543,23259")
def test_add_config_path_with_enumerated_type(mutable_config):
spack.config.add("config:concretizer:clingo")
- assert spack.config.get('config')['concretizer'] == "clingo"
+ assert spack.config.get("config")["concretizer"] == "clingo"
spack.config.add("config:concretizer:original")
- assert spack.config.get('config')['concretizer'] == "original"
+ assert spack.config.get("config")["concretizer"] == "original"
with pytest.raises(spack.config.ConfigError):
spack.config.add("config:concretizer:foo")
@@ -316,25 +294,25 @@ def test_add_config_path_with_enumerated_type(mutable_config):
def test_add_config_filename(mock_low_high_config, tmpdir):
- config_yaml = tmpdir.join('config-filename.yaml')
+ config_yaml = tmpdir.join("config-filename.yaml")
config_yaml.ensure()
- with config_yaml.open('w') as f:
+ with config_yaml.open("w") as f:
syaml.dump_config(config_low, f)
spack.config.add_from_file(str(config_yaml), scope="low")
- assert "build_stage" in spack.config.get('config')
- build_stages = spack.config.get('config')['build_stage']
- for stage in config_low['config']['build_stage']:
+ assert "build_stage" in spack.config.get("config")
+ build_stages = spack.config.get("config")["build_stage"]
+ for stage in config_low["config"]["build_stage"]:
assert stage in build_stages
# repos
def test_write_list_in_memory(mock_low_high_config):
- spack.config.set('repos', repos_low['repos'], scope='low')
- spack.config.set('repos', repos_high['repos'], scope='high')
+ spack.config.set("repos", repos_low["repos"], scope="low")
+ spack.config.set("repos", repos_high["repos"], scope="high")
- config = spack.config.get('repos')
- assert config == repos_high['repos'] + repos_low['repos']
+ config = spack.config.get("repos")
+ assert config == repos_high["repos"] + repos_low["repos"]
class MockEnv(object):
@@ -343,153 +321,135 @@ class MockEnv(object):
def test_substitute_config_variables(mock_low_high_config, monkeypatch):
- prefix = spack.paths.prefix.lstrip('/')
+ prefix = spack.paths.prefix.lstrip("/")
assert cross_plat_join(
- os.sep + os.path.join('foo', 'bar', 'baz'), prefix
- ) == spack_path.canonicalize_path('/foo/bar/baz/$spack')
+ os.sep + os.path.join("foo", "bar", "baz"), prefix
+ ) == spack_path.canonicalize_path("/foo/bar/baz/$spack")
assert cross_plat_join(
- spack.paths.prefix, os.path.join('foo', 'bar', 'baz')
- ) == spack_path.canonicalize_path('$spack/foo/bar/baz/')
+ spack.paths.prefix, os.path.join("foo", "bar", "baz")
+ ) == spack_path.canonicalize_path("$spack/foo/bar/baz/")
assert cross_plat_join(
- os.sep + os.path.join('foo', 'bar', 'baz'),
- prefix, os.path.join('foo', 'bar', 'baz')
- ) == spack_path.canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/')
+ os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
+ ) == spack_path.canonicalize_path("/foo/bar/baz/$spack/foo/bar/baz/")
assert cross_plat_join(
- os.sep + os.path.join('foo', 'bar', 'baz'), prefix
- ) == spack_path.canonicalize_path('/foo/bar/baz/${spack}')
+ os.sep + os.path.join("foo", "bar", "baz"), prefix
+ ) == spack_path.canonicalize_path("/foo/bar/baz/${spack}")
assert cross_plat_join(
- spack.paths.prefix, os.path.join('foo', 'bar', 'baz')
- ) == spack_path.canonicalize_path('${spack}/foo/bar/baz/')
+ spack.paths.prefix, os.path.join("foo", "bar", "baz")
+ ) == spack_path.canonicalize_path("${spack}/foo/bar/baz/")
assert cross_plat_join(
- os.sep + os.path.join('foo', 'bar', 'baz'),
- prefix, os.path.join('foo', 'bar', 'baz')
- ) == spack_path.canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/')
+ os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
+ ) == spack_path.canonicalize_path("/foo/bar/baz/${spack}/foo/bar/baz/")
assert cross_plat_join(
- os.sep + os.path.join('foo', 'bar', 'baz'),
- prefix, os.path.join('foo', 'bar', 'baz')
- ) != spack_path.canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/')
+ os.sep + os.path.join("foo", "bar", "baz"), prefix, os.path.join("foo", "bar", "baz")
+ ) != spack_path.canonicalize_path("/foo/bar/baz/${spack/foo/bar/baz/")
# $env replacement is a no-op when no environment is active
assert spack_path.canonicalize_path(
- os.sep + os.path.join('foo', 'bar', 'baz', '$env')
- ) == os.sep + os.path.join('foo', 'bar', 'baz', '$env')
+ os.sep + os.path.join("foo", "bar", "baz", "$env")
+ ) == os.sep + os.path.join("foo", "bar", "baz", "$env")
# Fake an active environment and check that $env is replaced properly
- fake_env_path = os.sep + os.path.join('quux', 'quuux')
- monkeypatch.setattr(ev, 'active_environment',
- lambda: MockEnv(fake_env_path))
- assert spack_path.canonicalize_path(
- '$env/foo/bar/baz'
- ) == os.path.join(fake_env_path, os.path.join('foo', 'bar', 'baz'))
+ fake_env_path = os.sep + os.path.join("quux", "quuux")
+ monkeypatch.setattr(ev, "active_environment", lambda: MockEnv(fake_env_path))
+ assert spack_path.canonicalize_path("$env/foo/bar/baz") == os.path.join(
+ fake_env_path, os.path.join("foo", "bar", "baz")
+ )
# relative paths without source information are relative to cwd
- assert spack_path.canonicalize_path(
- os.path.join('foo', 'bar', 'baz')
- ) == os.path.abspath(os.path.join('foo', 'bar', 'baz'))
+ assert spack_path.canonicalize_path(os.path.join("foo", "bar", "baz")) == os.path.abspath(
+ os.path.join("foo", "bar", "baz")
+ )
# relative paths with source information are relative to the file
spack.config.set(
- 'modules:default',
- {'roots': {'lmod': os.path.join('foo', 'bar', 'baz')}}, scope='low')
+ "modules:default", {"roots": {"lmod": os.path.join("foo", "bar", "baz")}}, scope="low"
+ )
spack.config.config.clear_caches()
- path = spack.config.get('modules:default:roots:lmod')
+ path = spack.config.get("modules:default:roots:lmod")
assert spack_path.canonicalize_path(path) == os.path.normpath(
- os.path.join(mock_low_high_config.scopes['low'].path,
- os.path.join('foo', 'bar', 'baz')))
+ os.path.join(mock_low_high_config.scopes["low"].path, os.path.join("foo", "bar", "baz"))
+ )
-packages_merge_low = {
- 'packages': {
- 'foo': {
- 'variants': ['+v1']
- },
- 'bar': {
- 'variants': ['+v2']
- }
- }
-}
+packages_merge_low = {"packages": {"foo": {"variants": ["+v1"]}, "bar": {"variants": ["+v2"]}}}
packages_merge_high = {
- 'packages': {
- 'foo': {
- 'version': ['a']
- },
- 'bar': {
- 'version': ['b'],
- 'variants': ['+v3']
- },
- 'baz': {
- 'version': ['c']
- }
+ "packages": {
+ "foo": {"version": ["a"]},
+ "bar": {"version": ["b"], "variants": ["+v3"]},
+ "baz": {"version": ["c"]},
}
}
-@pytest.mark.regression('7924')
+@pytest.mark.regression("7924")
def test_merge_with_defaults(mock_low_high_config, write_config_file):
"""This ensures that specified preferences merge with defaults as
- expected. Originally all defaults were initialized with the
- exact same object, which led to aliasing problems. Therefore
- the test configs used here leave 'version' blank for multiple
- packages in 'packages_merge_low'.
+ expected. Originally all defaults were initialized with the
+ exact same object, which led to aliasing problems. Therefore
+ the test configs used here leave 'version' blank for multiple
+ packages in 'packages_merge_low'.
"""
- write_config_file('packages', packages_merge_low, 'low')
- write_config_file('packages', packages_merge_high, 'high')
- cfg = spack.config.get('packages')
+ write_config_file("packages", packages_merge_low, "low")
+ write_config_file("packages", packages_merge_high, "high")
+ cfg = spack.config.get("packages")
- assert cfg['foo']['version'] == ['a']
- assert cfg['bar']['version'] == ['b']
- assert cfg['baz']['version'] == ['c']
+ assert cfg["foo"]["version"] == ["a"]
+ assert cfg["bar"]["version"] == ["b"]
+ assert cfg["baz"]["version"] == ["c"]
def test_substitute_user(mock_low_high_config):
user = getpass.getuser()
- assert os.sep + os.path.join('foo', 'bar') + os.sep \
- + user + os.sep \
- + 'baz' == spack_path.canonicalize_path(
- os.sep + os.path.join('foo', 'bar', '$user', 'baz')
+ assert os.sep + os.path.join(
+ "foo", "bar"
+ ) + os.sep + user + os.sep + "baz" == spack_path.canonicalize_path(
+ os.sep + os.path.join("foo", "bar", "$user", "baz")
)
def test_substitute_user_cache(mock_low_high_config):
user_cache_path = spack.paths.user_cache_path
- assert user_cache_path + os.sep + 'baz' == spack_path.canonicalize_path(
- os.path.join('$user_cache_path', 'baz')
+ assert user_cache_path + os.sep + "baz" == spack_path.canonicalize_path(
+ os.path.join("$user_cache_path", "baz")
)
def test_substitute_tempdir(mock_low_high_config):
tempdir = tempfile.gettempdir()
- assert tempdir == spack_path.canonicalize_path('$tempdir')
- assert tempdir + os.sep + \
- os.path.join('foo', 'bar', 'baz') == spack_path.canonicalize_path(
- os.path.join('$tempdir', 'foo', 'bar', 'baz')
- )
+ assert tempdir == spack_path.canonicalize_path("$tempdir")
+ assert tempdir + os.sep + os.path.join("foo", "bar", "baz") == spack_path.canonicalize_path(
+ os.path.join("$tempdir", "foo", "bar", "baz")
+ )
PAD_STRING = spack.util.path.SPACK_PATH_PADDING_CHARS
MAX_PATH_LEN = spack.util.path.get_system_path_max()
MAX_PADDED_LEN = MAX_PATH_LEN - spack.util.path.SPACK_MAX_INSTALL_PATH_LENGTH
reps = [PAD_STRING for _ in range((MAX_PADDED_LEN // len(PAD_STRING) + 1) + 2)]
-full_padded_string = os.path.join(
- os.sep + 'path', os.sep.join(reps))[:MAX_PADDED_LEN]
-
-
-@pytest.mark.parametrize('config_settings,expected', [
- ([], [None, None, None]),
- ([['config:install_tree:root', os.sep + 'path']], [os.sep + 'path', None, None]),
- ([['config:install_tree', os.sep + 'path']], [os.sep + 'path', None, None]),
- ([['config:install_tree:projections', {'all': '{name}'}]],
- [None, None, {'all': '{name}'}]),
- ([['config:install_path_scheme', '{name}']],
- [None, None, {'all': '{name}'}]),
-])
+full_padded_string = os.path.join(os.sep + "path", os.sep.join(reps))[:MAX_PADDED_LEN]
+
+
+@pytest.mark.parametrize(
+ "config_settings,expected",
+ [
+ ([], [None, None, None]),
+ ([["config:install_tree:root", os.sep + "path"]], [os.sep + "path", None, None]),
+ ([["config:install_tree", os.sep + "path"]], [os.sep + "path", None, None]),
+ (
+ [["config:install_tree:projections", {"all": "{name}"}]],
+ [None, None, {"all": "{name}"}],
+ ),
+ ([["config:install_path_scheme", "{name}"]], [None, None, {"all": "{name}"}]),
+ ],
+)
def test_parse_install_tree(config_settings, expected, mutable_config):
expected_root = expected[0] or spack.store.default_install_tree_root
expected_unpadded_root = expected[1] or expected_root
@@ -501,33 +461,50 @@ def test_parse_install_tree(config_settings, expected, mutable_config):
for config_setting in config_settings:
mutable_config.set(*config_setting)
- config_dict = mutable_config.get('config')
- root, unpadded_root, projections = spack.store.parse_install_tree(
- config_dict)
+ config_dict = mutable_config.get("config")
+ root, unpadded_root, projections = spack.store.parse_install_tree(config_dict)
assert root == expected_root
assert unpadded_root == expected_unpadded_root
assert projections == expected_proj
-@pytest.mark.skipif(sys.platform == 'win32',
- reason='Padding unsupported on Windows')
-@pytest.mark.parametrize('config_settings,expected', [
- ([['config:install_tree:root', os.sep + 'path'],
- ['config:install_tree:padded_length', 11]],
- [os.path.join(os.sep + 'path', PAD_STRING[:5]), os.sep + 'path', None]),
- ([['config:install_tree:root', '/path/$padding:11']],
- [os.path.join(os.sep + 'path', PAD_STRING[:5]), os.sep + 'path', None]),
- ([['config:install_tree', '/path/${padding:11}']],
- [os.path.join(os.sep + 'path', PAD_STRING[:5]), os.sep + 'path', None]),
- ([['config:install_tree:padded_length', False]], [None, None, None]),
- ([['config:install_tree:padded_length', True],
- ['config:install_tree:root', os.sep + 'path']],
- [full_padded_string, os.sep + 'path', None]),
- ([['config:install_tree:', os.sep + 'path$padding']],
- [full_padded_string, os.sep + 'path', None]),
- ([['config:install_tree:', os.sep + 'path' + os.sep + '${padding}']],
- [full_padded_string, os.sep + 'path', None]),
-])
+@pytest.mark.skipif(sys.platform == "win32", reason="Padding unsupported on Windows")
+@pytest.mark.parametrize(
+ "config_settings,expected",
+ [
+ (
+ [
+ ["config:install_tree:root", os.sep + "path"],
+ ["config:install_tree:padded_length", 11],
+ ],
+ [os.path.join(os.sep + "path", PAD_STRING[:5]), os.sep + "path", None],
+ ),
+ (
+ [["config:install_tree:root", "/path/$padding:11"]],
+ [os.path.join(os.sep + "path", PAD_STRING[:5]), os.sep + "path", None],
+ ),
+ (
+ [["config:install_tree", "/path/${padding:11}"]],
+ [os.path.join(os.sep + "path", PAD_STRING[:5]), os.sep + "path", None],
+ ),
+ ([["config:install_tree:padded_length", False]], [None, None, None]),
+ (
+ [
+ ["config:install_tree:padded_length", True],
+ ["config:install_tree:root", os.sep + "path"],
+ ],
+ [full_padded_string, os.sep + "path", None],
+ ),
+ (
+ [["config:install_tree:", os.sep + "path$padding"]],
+ [full_padded_string, os.sep + "path", None],
+ ),
+ (
+ [["config:install_tree:", os.sep + "path" + os.sep + "${padding}"]],
+ [full_padded_string, os.sep + "path", None],
+ ),
+ ],
+)
def test_parse_install_tree_padded(config_settings, expected, mutable_config):
expected_root = expected[0] or spack.store.default_install_tree_root
expected_unpadded_root = expected[1] or expected_root
@@ -539,89 +516,80 @@ def test_parse_install_tree_padded(config_settings, expected, mutable_config):
for config_setting in config_settings:
mutable_config.set(*config_setting)
- config_dict = mutable_config.get('config')
- root, unpadded_root, projections = spack.store.parse_install_tree(
- config_dict)
+ config_dict = mutable_config.get("config")
+ root, unpadded_root, projections = spack.store.parse_install_tree(config_dict)
assert root == expected_root
assert unpadded_root == expected_unpadded_root
assert projections == expected_proj
def test_read_config(mock_low_high_config, write_config_file):
- write_config_file('config', config_low, 'low')
- assert spack.config.get('config') == config_low['config']
+ write_config_file("config", config_low, "low")
+ assert spack.config.get("config") == config_low["config"]
def test_read_config_override_all(mock_low_high_config, write_config_file):
- write_config_file('config', config_low, 'low')
- write_config_file('config', config_override_all, 'high')
- assert spack.config.get('config') == {
- 'install_tree': {
- 'root': 'override_all'
- }
- }
+ write_config_file("config", config_low, "low")
+ write_config_file("config", config_override_all, "high")
+ assert spack.config.get("config") == {"install_tree": {"root": "override_all"}}
def test_read_config_override_key(mock_low_high_config, write_config_file):
- write_config_file('config', config_low, 'low')
- write_config_file('config', config_override_key, 'high')
- assert spack.config.get('config') == {
- 'install_tree': {
- 'root': 'override_key'
- },
- 'build_stage': ['path1', 'path2', 'path3']
+ write_config_file("config", config_low, "low")
+ write_config_file("config", config_override_key, "high")
+ assert spack.config.get("config") == {
+ "install_tree": {"root": "override_key"},
+ "build_stage": ["path1", "path2", "path3"],
}
def test_read_config_merge_list(mock_low_high_config, write_config_file):
- write_config_file('config', config_low, 'low')
- write_config_file('config', config_merge_list, 'high')
- assert spack.config.get('config') == {
- 'install_tree': {
- 'root': 'install_tree_path'
- },
- 'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3']
+ write_config_file("config", config_low, "low")
+ write_config_file("config", config_merge_list, "high")
+ assert spack.config.get("config") == {
+ "install_tree": {"root": "install_tree_path"},
+ "build_stage": ["patha", "pathb", "path1", "path2", "path3"],
}
def test_read_config_override_list(mock_low_high_config, write_config_file):
- write_config_file('config', config_low, 'low')
- write_config_file('config', config_override_list, 'high')
- assert spack.config.get('config') == {
- 'install_tree': {
- 'root': 'install_tree_path'
- },
- 'build_stage': config_override_list['config']['build_stage:']
+ write_config_file("config", config_low, "low")
+ write_config_file("config", config_override_list, "high")
+ assert spack.config.get("config") == {
+ "install_tree": {"root": "install_tree_path"},
+ "build_stage": config_override_list["config"]["build_stage:"],
}
def test_ordereddict_merge_order():
- """"Test that source keys come before dest keys in merge_yaml results."""
- source = syaml.syaml_dict([
- ("k1", "v1"),
- ("k2", "v2"),
- ("k3", "v3"),
- ])
-
- dest = syaml.syaml_dict([
- ("k4", "v4"),
- ("k3", "WRONG"),
- ("k5", "v5"),
- ])
+ """ "Test that source keys come before dest keys in merge_yaml results."""
+ source = syaml.syaml_dict(
+ [
+ ("k1", "v1"),
+ ("k2", "v2"),
+ ("k3", "v3"),
+ ]
+ )
+
+ dest = syaml.syaml_dict(
+ [
+ ("k4", "v4"),
+ ("k3", "WRONG"),
+ ("k5", "v5"),
+ ]
+ )
result = spack.config.merge_yaml(dest, source)
assert "WRONG" not in result.values()
expected_keys = ["k1", "k2", "k3", "k4", "k5"]
- expected_items = [
- ("k1", "v1"), ("k2", "v2"), ("k3", "v3"), ("k4", "v4"), ("k5", "v5")
- ]
+ expected_items = [("k1", "v1"), ("k2", "v2"), ("k3", "v3"), ("k4", "v4"), ("k5", "v5")]
assert expected_keys == list(result.keys())
assert expected_items == list(result.items())
def test_list_merge_order():
- """"Test that source lists are prepended to dest."""
+ """ "Test that source lists are prepended to dest."""
source = ["a", "b", "c"]
dest = ["d", "e", "f"]
@@ -631,51 +599,50 @@ def test_list_merge_order():
def test_internal_config_update(mock_low_high_config, write_config_file):
- write_config_file('config', config_low, 'low')
+ write_config_file("config", config_low, "low")
- before = mock_low_high_config.get('config')
- assert before['install_tree']['root'] == 'install_tree_path'
+ before = mock_low_high_config.get("config")
+ assert before["install_tree"]["root"] == "install_tree_path"
# add an internal configuration scope
- scope = spack.config.InternalConfigScope('command_line')
- assert 'InternalConfigScope' in repr(scope)
+ scope = spack.config.InternalConfigScope("command_line")
+ assert "InternalConfigScope" in repr(scope)
mock_low_high_config.push_scope(scope)
- command_config = mock_low_high_config.get('config', scope='command_line')
- command_config['install_tree'] = {'root': 'foo/bar'}
+ command_config = mock_low_high_config.get("config", scope="command_line")
+ command_config["install_tree"] = {"root": "foo/bar"}
- mock_low_high_config.set('config', command_config, scope='command_line')
+ mock_low_high_config.set("config", command_config, scope="command_line")
- after = mock_low_high_config.get('config')
- assert after['install_tree']['root'] == 'foo/bar'
+ after = mock_low_high_config.get("config")
+ assert after["install_tree"]["root"] == "foo/bar"
def test_internal_config_filename(mock_low_high_config, write_config_file):
- write_config_file('config', config_low, 'low')
- mock_low_high_config.push_scope(
- spack.config.InternalConfigScope('command_line'))
+ write_config_file("config", config_low, "low")
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope("command_line"))
with pytest.raises(NotImplementedError):
- mock_low_high_config.get_config_filename('command_line', 'config')
+ mock_low_high_config.get_config_filename("command_line", "config")
def test_mark_internal():
data = {
- 'config': {
- 'bool': False,
- 'int': 6,
- 'numbers': [1, 2, 3],
- 'string': 'foo',
- 'dict': {
- 'more_numbers': [1, 2, 3],
- 'another_string': 'foo',
- 'another_int': 7,
- }
+ "config": {
+ "bool": False,
+ "int": 6,
+ "numbers": [1, 2, 3],
+ "string": "foo",
+ "dict": {
+ "more_numbers": [1, 2, 3],
+ "another_string": "foo",
+ "another_int": 7,
+ },
}
}
- marked = spack.config._mark_internal(data, 'x')
+ marked = spack.config._mark_internal(data, "x")
# marked version should be equal to the original
assert data == marked
@@ -684,16 +651,20 @@ def test_mark_internal():
if type(obj) is bool:
return # can't subclass bool, so can't mark it
- assert hasattr(obj, '_start_mark') and obj._start_mark.name == 'x'
- assert hasattr(obj, '_end_mark') and obj._end_mark.name == 'x'
+ assert hasattr(obj, "_start_mark") and obj._start_mark.name == "x"
+ assert hasattr(obj, "_end_mark") and obj._end_mark.name == "x"
# everything in the marked version should have marks
- checks = (marked.keys(), marked.values(),
- marked['config'].keys(), marked['config'].values(),
- marked['config']['numbers'],
- marked['config']['dict'].keys(),
- marked['config']['dict'].values(),
- marked['config']['dict']['more_numbers'])
+ checks = (
+ marked.keys(),
+ marked.values(),
+ marked["config"].keys(),
+ marked["config"].values(),
+ marked["config"]["numbers"],
+ marked["config"]["dict"].keys(),
+ marked["config"]["dict"].values(),
+ marked["config"]["dict"]["more_numbers"],
+ )
for seq in checks:
for obj in seq:
@@ -704,65 +675,74 @@ def test_internal_config_from_data():
config = spack.config.Configuration()
# add an internal config initialized from an inline dict
- config.push_scope(spack.config.InternalConfigScope('_builtin', {
- 'config': {
- 'verify_ssl': False,
- 'build_jobs': 6,
- }
- }))
+ config.push_scope(
+ spack.config.InternalConfigScope(
+ "_builtin",
+ {
+ "config": {
+ "verify_ssl": False,
+ "build_jobs": 6,
+ }
+ },
+ )
+ )
- assert config.get('config:verify_ssl', scope='_builtin') is False
- assert config.get('config:build_jobs', scope='_builtin') == 6
+ assert config.get("config:verify_ssl", scope="_builtin") is False
+ assert config.get("config:build_jobs", scope="_builtin") == 6
- assert config.get('config:verify_ssl') is False
- assert config.get('config:build_jobs') == 6
+ assert config.get("config:verify_ssl") is False
+ assert config.get("config:build_jobs") == 6
# push one on top and see what happens.
- config.push_scope(spack.config.InternalConfigScope('higher', {
- 'config': {
- 'checksum': True,
- 'verify_ssl': True,
- }
- }))
+ config.push_scope(
+ spack.config.InternalConfigScope(
+ "higher",
+ {
+ "config": {
+ "checksum": True,
+ "verify_ssl": True,
+ }
+ },
+ )
+ )
- assert config.get('config:verify_ssl', scope='_builtin') is False
- assert config.get('config:build_jobs', scope='_builtin') == 6
+ assert config.get("config:verify_ssl", scope="_builtin") is False
+ assert config.get("config:build_jobs", scope="_builtin") == 6
- assert config.get('config:verify_ssl', scope='higher') is True
- assert config.get('config:build_jobs', scope='higher') is None
+ assert config.get("config:verify_ssl", scope="higher") is True
+ assert config.get("config:build_jobs", scope="higher") is None
- assert config.get('config:verify_ssl') is True
- assert config.get('config:build_jobs') == 6
- assert config.get('config:checksum') is True
+ assert config.get("config:verify_ssl") is True
+ assert config.get("config:build_jobs") == 6
+ assert config.get("config:checksum") is True
- assert config.get('config:checksum', scope='_builtin') is None
- assert config.get('config:checksum', scope='higher') is True
+ assert config.get("config:checksum", scope="_builtin") is None
+ assert config.get("config:checksum", scope="higher") is True
def test_keys_are_ordered():
"""Test that keys in Spack YAML files retain their order from the file."""
expected_order = (
- 'bin',
- 'man',
- 'share/man',
- 'share/aclocal',
- 'lib',
- 'lib64',
- 'include',
- 'lib/pkgconfig',
- 'lib64/pkgconfig',
- 'share/pkgconfig',
- ''
+ "bin",
+ "man",
+ "share/man",
+ "share/aclocal",
+ "lib",
+ "lib64",
+ "include",
+ "lib/pkgconfig",
+ "lib64/pkgconfig",
+ "share/pkgconfig",
+ "",
)
config_scope = spack.config.ConfigScope(
- 'modules',
- os.path.join(spack.paths.test_path, 'data', 'config')
+ "modules", os.path.join(spack.paths.test_path, "data", "config")
)
- data = config_scope.get_section('modules')
+ data = config_scope.get_section("modules")
- prefix_inspections = data['modules']['prefix_inspections']
+ prefix_inspections = data["modules"]["prefix_inspections"]
for actual, expected in zip(prefix_inspections, expected_order):
assert actual == expected
@@ -771,7 +751,7 @@ def test_keys_are_ordered():
def test_config_format_error(mutable_config):
"""This is raised when we try to write a bad configuration."""
with pytest.raises(spack.config.ConfigFormatError):
- spack.config.set('compilers', {'bad': 'data'}, scope='site')
+ spack.config.set("compilers", {"bad": "data"}, scope="site")
def get_config_error(filename, schema, yaml_string):
@@ -779,7 +759,7 @@ def get_config_error(filename, schema, yaml_string):
Fail if there is no ConfigFormatError
"""
- with open(filename, 'w') as f:
+ with open(filename, "w") as f:
f.write(yaml_string)
# parse and return error, or fail.
@@ -788,88 +768,98 @@ def get_config_error(filename, schema, yaml_string):
except spack.config.ConfigFormatError as e:
return e
else:
- pytest.fail('ConfigFormatError was not raised!')
+ pytest.fail("ConfigFormatError was not raised!")
def test_config_parse_dict_in_list(tmpdir):
with tmpdir.as_cwd():
e = get_config_error(
- 'repos.yaml', spack.schema.repos.schema, """\
+ "repos.yaml",
+ spack.schema.repos.schema,
+ """\
repos:
- https://foobar.com/foo
- https://foobar.com/bar
- error:
- abcdef
- https://foobar.com/baz
-""")
+""",
+ )
assert "repos.yaml:4" in str(e)
def test_config_parse_str_not_bool(tmpdir):
with tmpdir.as_cwd():
e = get_config_error(
- 'config.yaml', spack.schema.config.schema, """\
+ "config.yaml",
+ spack.schema.config.schema,
+ """\
config:
verify_ssl: False
checksum: foobar
dirty: True
-""")
+""",
+ )
assert "config.yaml:3" in str(e)
def test_config_parse_list_in_dict(tmpdir):
with tmpdir.as_cwd():
e = get_config_error(
- 'mirrors.yaml', spack.schema.mirrors.schema, """\
+ "mirrors.yaml",
+ spack.schema.mirrors.schema,
+ """\
mirrors:
foo: http://foobar.com/baz
bar: http://barbaz.com/foo
baz: http://bazfoo.com/bar
travis: [1, 2, 3]
-""")
+""",
+ )
assert "mirrors.yaml:5" in str(e)
def test_bad_config_section(mock_low_high_config):
"""Test that getting or setting a bad section gives an error."""
with pytest.raises(spack.config.ConfigSectionError):
- spack.config.set('foobar', 'foobar')
+ spack.config.set("foobar", "foobar")
with pytest.raises(spack.config.ConfigSectionError):
- spack.config.get('foobar')
+ spack.config.get("foobar")
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.skipif(getuid() == 0, reason='user is root')
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_bad_command_line_scopes(tmpdir, mock_low_high_config):
cfg = spack.config.Configuration()
with tmpdir.as_cwd():
with pytest.raises(spack.config.ConfigError):
- spack.config._add_command_line_scopes(cfg, ['bad_path'])
+ spack.config._add_command_line_scopes(cfg, ["bad_path"])
- touch('unreadable_file')
+ touch("unreadable_file")
with pytest.raises(spack.config.ConfigError):
- spack.config._add_command_line_scopes(cfg, ['unreadable_file'])
+ spack.config._add_command_line_scopes(cfg, ["unreadable_file"])
- mkdirp('unreadable_dir')
+ mkdirp("unreadable_dir")
with pytest.raises(spack.config.ConfigError):
try:
- os.chmod('unreadable_dir', 0)
- spack.config._add_command_line_scopes(cfg, ['unreadable_dir'])
+ os.chmod("unreadable_dir", 0)
+ spack.config._add_command_line_scopes(cfg, ["unreadable_dir"])
finally:
- os.chmod('unreadable_dir', 0o700) # so tmpdir can be removed
+ os.chmod("unreadable_dir", 0o700) # so tmpdir can be removed
def test_add_command_line_scopes(tmpdir, mutable_config):
- config_yaml = str(tmpdir.join('config.yaml'))
- with open(config_yaml, 'w') as f:
- f.write("""\
+ config_yaml = str(tmpdir.join("config.yaml"))
+ with open(config_yaml, "w") as f:
+ f.write(
+ """\
config:
verify_ssl: False
dirty: False
-""")
+"""
+ )
spack.config._add_command_line_scopes(mutable_config, [str(tmpdir)])
@@ -879,19 +869,20 @@ def test_nested_override():
base_name = spack.config.overrides_base_name
def _check_scopes(num_expected, debug_values):
- scope_names = [s.name for s in spack.config.config.scopes.values() if
- s.name.startswith(base_name)]
+ scope_names = [
+ s.name for s in spack.config.config.scopes.values() if s.name.startswith(base_name)
+ ]
for i in range(num_expected):
- name = '{0}{1}'.format(base_name, i)
+ name = "{0}{1}".format(base_name, i)
assert name in scope_names
- data = spack.config.config.get_config('config', name)
- assert data['debug'] == debug_values[i]
+ data = spack.config.config.get_config("config", name)
+ assert data["debug"] == debug_values[i]
# Check results from single and nested override
- with spack.config.override('config:debug', True):
- with spack.config.override('config:debug', False):
+ with spack.config.override("config:debug", True):
+ with spack.config.override("config:debug", False):
_check_scopes(2, [True, False])
_check_scopes(1, [True])
@@ -902,57 +893,56 @@ def test_alternate_override(monkeypatch):
base_name = spack.config.overrides_base_name
def _matching_scopes(regexpr):
- return [spack.config.InternalConfigScope('{0}1'.format(base_name))]
+ return [spack.config.InternalConfigScope("{0}1".format(base_name))]
# Check that the alternate naming works
- monkeypatch.setattr(spack.config.config, 'matching_scopes',
- _matching_scopes)
+ monkeypatch.setattr(spack.config.config, "matching_scopes", _matching_scopes)
- with spack.config.override('config:debug', False):
- name = '{0}2'.format(base_name)
+ with spack.config.override("config:debug", False):
+ name = "{0}2".format(base_name)
- scope_names = [s.name for s in spack.config.config.scopes.values() if
- s.name.startswith(base_name)]
+ scope_names = [
+ s.name for s in spack.config.config.scopes.values() if s.name.startswith(base_name)
+ ]
assert name in scope_names
- data = spack.config.config.get_config('config', name)
- assert data['debug'] is False
+ data = spack.config.config.get_config("config", name)
+ assert data["debug"] is False
def test_immutable_scope(tmpdir):
- config_yaml = str(tmpdir.join('config.yaml'))
- with open(config_yaml, 'w') as f:
- f.write("""\
+ config_yaml = str(tmpdir.join("config.yaml"))
+ with open(config_yaml, "w") as f:
+ f.write(
+ """\
config:
install_tree:
root: dummy_tree_value
-""")
- scope = spack.config.ImmutableConfigScope('test', str(tmpdir))
+"""
+ )
+ scope = spack.config.ImmutableConfigScope("test", str(tmpdir))
- data = scope.get_section('config')
- assert data['config']['install_tree'] == {'root': 'dummy_tree_value'}
+ data = scope.get_section("config")
+ assert data["config"]["install_tree"] == {"root": "dummy_tree_value"}
with pytest.raises(spack.config.ConfigError):
- scope._write_section('config')
+ scope._write_section("config")
def test_single_file_scope(config, env_yaml):
- scope = spack.config.SingleFileScope(
- 'env', env_yaml, spack.schema.env.schema, ['env']
- )
+ scope = spack.config.SingleFileScope("env", env_yaml, spack.schema.env.schema, ["env"])
with spack.config.override(scope):
# from the single-file config
- assert spack.config.get('config:verify_ssl') is False
- assert spack.config.get('config:dirty') is False
- assert spack.config.get('packages:libelf:compiler') == ['gcc@4.5.3']
+ assert spack.config.get("config:verify_ssl") is False
+ assert spack.config.get("config:dirty") is False
+ assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"]
# from the lower config scopes
- assert spack.config.get('config:checksum') is True
- assert spack.config.get('config:checksum') is True
- assert spack.config.get('packages:externalmodule:buildable') is False
- assert spack.config.get('repos') == [
- '/x/y/z', '$spack/var/spack/repos/builtin']
+ assert spack.config.get("config:checksum") is True
+ assert spack.config.get("config:checksum") is True
+ assert spack.config.get("packages:externalmodule:buildable") is False
+ assert spack.config.get("repos") == ["/x/y/z", "$spack/var/spack/repos/builtin"]
def test_single_file_scope_section_override(tmpdir, config):
@@ -962,8 +952,9 @@ def test_single_file_scope_section_override(tmpdir, config):
"::" syntax).
"""
env_yaml = str(tmpdir.join("env.yaml"))
- with open(env_yaml, 'w') as f:
- f.write("""\
+ with open(env_yaml, "w") as f:
+ f.write(
+ """\
env:
config:
verify_ssl: False
@@ -972,30 +963,30 @@ env:
compiler: [ 'gcc@4.5.3' ]
repos:
- /x/y/z
-""")
+"""
+ )
- scope = spack.config.SingleFileScope(
- 'env', env_yaml, spack.schema.env.schema, ['env'])
+ scope = spack.config.SingleFileScope("env", env_yaml, spack.schema.env.schema, ["env"])
with spack.config.override(scope):
# from the single-file config
- assert spack.config.get('config:verify_ssl') is False
- assert spack.config.get('packages:libelf:compiler') == ['gcc@4.5.3']
+ assert spack.config.get("config:verify_ssl") is False
+ assert spack.config.get("packages:libelf:compiler") == ["gcc@4.5.3"]
# from the lower config scopes
- assert spack.config.get('config:checksum') is True
- assert not spack.config.get('packages:externalmodule')
- assert spack.config.get('repos') == [
- '/x/y/z', '$spack/var/spack/repos/builtin']
+ assert spack.config.get("config:checksum") is True
+ assert not spack.config.get("packages:externalmodule")
+ assert spack.config.get("repos") == ["/x/y/z", "$spack/var/spack/repos/builtin"]
def test_write_empty_single_file_scope(tmpdir):
env_schema = spack.schema.env.schema
scope = spack.config.SingleFileScope(
- 'test', str(tmpdir.ensure('config.yaml')), env_schema, ['spack'])
- scope._write_section('config')
+ "test", str(tmpdir.ensure("config.yaml")), env_schema, ["spack"]
+ )
+ scope._write_section("config")
# confirm we can write empty config
- assert not scope.get_section('config')
+ assert not scope.get_section("config")
def check_schema(name, file_contents):
@@ -1006,7 +997,9 @@ def check_schema(name, file_contents):
def test_good_env_yaml(tmpdir):
- check_schema(spack.schema.env.schema, """\
+ check_schema(
+ spack.schema.env.schema,
+ """\
spack:
config:
verify_ssl: False
@@ -1025,136 +1018,150 @@ spack:
cxx: /path/to/cxx
fc: /path/to/fc
f77: /path/to/f77
-""")
+""",
+ )
def test_bad_env_yaml(tmpdir):
with pytest.raises(spack.config.ConfigFormatError):
- check_schema(spack.schema.env.schema, """\
+ check_schema(
+ spack.schema.env.schema,
+ """\
env:
foobar:
verify_ssl: False
dirty: False
-""")
+""",
+ )
def test_bad_config_yaml(tmpdir):
with pytest.raises(spack.config.ConfigFormatError):
- check_schema(spack.schema.config.schema, """\
+ check_schema(
+ spack.schema.config.schema,
+ """\
config:
verify_ssl: False
install_tree:
root:
extra_level: foo
-""")
+""",
+ )
def test_bad_mirrors_yaml(tmpdir):
with pytest.raises(spack.config.ConfigFormatError):
- check_schema(spack.schema.mirrors.schema, """\
+ check_schema(
+ spack.schema.mirrors.schema,
+ """\
mirrors:
local: True
-""")
+""",
+ )
def test_bad_repos_yaml(tmpdir):
with pytest.raises(spack.config.ConfigFormatError):
- check_schema(spack.schema.repos.schema, """\
+ check_schema(
+ spack.schema.repos.schema,
+ """\
repos:
True
-""")
+""",
+ )
def test_bad_compilers_yaml(tmpdir):
with pytest.raises(spack.config.ConfigFormatError):
- check_schema(spack.schema.compilers.schema, """\
+ check_schema(
+ spack.schema.compilers.schema,
+ """\
compilers:
key_instead_of_list: 'value'
-""")
+""",
+ )
with pytest.raises(spack.config.ConfigFormatError):
- check_schema(spack.schema.compilers.schema, """\
+ check_schema(
+ spack.schema.compilers.schema,
+ """\
compilers:
- shmompiler:
environment: /bad/value
-""")
+""",
+ )
with pytest.raises(spack.config.ConfigFormatError):
- check_schema(spack.schema.compilers.schema, """\
+ check_schema(
+ spack.schema.compilers.schema,
+ """\
compilers:
- compiler:
fenfironfent: /bad/value
-""")
+""",
+ )
-def test_internal_config_section_override(mock_low_high_config,
- write_config_file):
- write_config_file('config', config_merge_list, 'low')
- wanted_list = config_override_list['config']['build_stage:']
- mock_low_high_config.push_scope(spack.config.InternalConfigScope
- ('high', {
- 'config:': {
- 'build_stage': wanted_list
- }
- }))
- assert mock_low_high_config.get('config:build_stage') == wanted_list
+def test_internal_config_section_override(mock_low_high_config, write_config_file):
+ write_config_file("config", config_merge_list, "low")
+ wanted_list = config_override_list["config"]["build_stage:"]
+ mock_low_high_config.push_scope(
+ spack.config.InternalConfigScope("high", {"config:": {"build_stage": wanted_list}})
+ )
+ assert mock_low_high_config.get("config:build_stage") == wanted_list
-def test_internal_config_dict_override(mock_low_high_config,
- write_config_file):
- write_config_file('config', config_merge_dict, 'low')
- wanted_dict = config_override_dict['config']['info:']
- mock_low_high_config.push_scope(spack.config.InternalConfigScope
- ('high', config_override_dict))
- assert mock_low_high_config.get('config:info') == wanted_dict
+def test_internal_config_dict_override(mock_low_high_config, write_config_file):
+ write_config_file("config", config_merge_dict, "low")
+ wanted_dict = config_override_dict["config"]["info:"]
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope("high", config_override_dict))
+ assert mock_low_high_config.get("config:info") == wanted_dict
-def test_internal_config_list_override(mock_low_high_config,
- write_config_file):
- write_config_file('config', config_merge_list, 'low')
- wanted_list = config_override_list['config']['build_stage:']
- mock_low_high_config.push_scope(spack.config.InternalConfigScope
- ('high', config_override_list))
- assert mock_low_high_config.get('config:build_stage') == wanted_list
+def test_internal_config_list_override(mock_low_high_config, write_config_file):
+ write_config_file("config", config_merge_list, "low")
+ wanted_list = config_override_list["config"]["build_stage:"]
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope("high", config_override_list))
+ assert mock_low_high_config.get("config:build_stage") == wanted_list
def test_set_section_override(mock_low_high_config, write_config_file):
- write_config_file('config', config_merge_list, 'low')
- wanted_list = config_override_list['config']['build_stage:']
- with spack.config.override('config::build_stage', wanted_list):
- assert mock_low_high_config.get('config:build_stage') == wanted_list
- assert config_merge_list['config']['build_stage'] == \
- mock_low_high_config.get('config:build_stage')
+ write_config_file("config", config_merge_list, "low")
+ wanted_list = config_override_list["config"]["build_stage:"]
+ with spack.config.override("config::build_stage", wanted_list):
+ assert mock_low_high_config.get("config:build_stage") == wanted_list
+ assert config_merge_list["config"]["build_stage"] == mock_low_high_config.get(
+ "config:build_stage"
+ )
def test_set_list_override(mock_low_high_config, write_config_file):
- write_config_file('config', config_merge_list, 'low')
- wanted_list = config_override_list['config']['build_stage:']
- with spack.config.override('config:build_stage:', wanted_list):
- assert wanted_list == mock_low_high_config.get('config:build_stage')
- assert config_merge_list['config']['build_stage'] == \
- mock_low_high_config.get('config:build_stage')
+ write_config_file("config", config_merge_list, "low")
+ wanted_list = config_override_list["config"]["build_stage:"]
+ with spack.config.override("config:build_stage:", wanted_list):
+ assert wanted_list == mock_low_high_config.get("config:build_stage")
+ assert config_merge_list["config"]["build_stage"] == mock_low_high_config.get(
+ "config:build_stage"
+ )
def test_set_dict_override(mock_low_high_config, write_config_file):
- write_config_file('config', config_merge_dict, 'low')
- wanted_dict = config_override_dict['config']['info:']
- with spack.config.override('config:info:', wanted_dict):
- assert wanted_dict == mock_low_high_config.get('config:info')
- assert config_merge_dict['config']['info'] == \
- mock_low_high_config.get('config:info')
+ write_config_file("config", config_merge_dict, "low")
+ wanted_dict = config_override_dict["config"]["info:"]
+ with spack.config.override("config:info:", wanted_dict):
+ assert wanted_dict == mock_low_high_config.get("config:info")
+ assert config_merge_dict["config"]["info"] == mock_low_high_config.get("config:info")
def test_set_bad_path(config):
- with pytest.raises(syaml.SpackYAMLError, match='Illegal leading'):
- with spack.config.override(':bad:path', ''):
+ with pytest.raises(syaml.SpackYAMLError, match="Illegal leading"):
+ with spack.config.override(":bad:path", ""):
pass
def test_bad_path_double_override(config):
- with pytest.raises(syaml.SpackYAMLError,
- match='Meaningless second override'):
- with spack.config.override('bad::double:override::directive', ''):
+ with pytest.raises(syaml.SpackYAMLError, match="Meaningless second override"):
+ with spack.config.override("bad::double:override::directive", ""):
pass
@@ -1172,84 +1179,76 @@ def test_license_dir_config(mutable_config, mock_packages):
assert spack.repo.path.get_pkg_class("a").global_license_dir == rel_path
-@pytest.mark.regression('22547')
+@pytest.mark.regression("22547")
def test_single_file_scope_cache_clearing(env_yaml):
- scope = spack.config.SingleFileScope(
- 'env', env_yaml, spack.schema.env.schema, ['env']
- )
+ scope = spack.config.SingleFileScope("env", env_yaml, spack.schema.env.schema, ["env"])
# Check that we can retrieve data from the single file scope
- before = scope.get_section('config')
+ before = scope.get_section("config")
assert before
# Clear the cache of the Single file scope
scope.clear()
# Check that the section can be retrieved again and it's
# the same as before
- after = scope.get_section('config')
+ after = scope.get_section("config")
assert after
assert before == after
-@pytest.mark.regression('22611')
+@pytest.mark.regression("22611")
def test_internal_config_scope_cache_clearing():
"""
An InternalConfigScope object is constructed from data that is already
in memory, therefore it doesn't have any cache to clear. Here we ensure
that calling the clear method is consistent with that.
"""
- data = {
- 'config': {
- 'build_jobs': 10
- }
- }
- internal_scope = spack.config.InternalConfigScope('internal', data)
+ data = {"config": {"build_jobs": 10}}
+ internal_scope = spack.config.InternalConfigScope("internal", data)
# Ensure that the initial object is properly set
- assert internal_scope.sections['config'] == data
+ assert internal_scope.sections["config"] == data
# Call the clear method
internal_scope.clear()
# Check that this didn't affect the scope object
- assert internal_scope.sections['config'] == data
+ assert internal_scope.sections["config"] == data
def test_system_config_path_is_overridable(working_env):
p = "/some/path"
- os.environ['SPACK_SYSTEM_CONFIG_PATH'] = p
+ os.environ["SPACK_SYSTEM_CONFIG_PATH"] = p
assert spack.paths._get_system_config_path() == p
def test_system_config_path_is_default_when_env_var_is_empty(working_env):
- os.environ['SPACK_SYSTEM_CONFIG_PATH'] = ''
- assert os.sep + os.path.join('etc', 'spack') == \
- spack.paths._get_system_config_path()
+ os.environ["SPACK_SYSTEM_CONFIG_PATH"] = ""
+ assert os.sep + os.path.join("etc", "spack") == spack.paths._get_system_config_path()
def test_user_config_path_is_overridable(working_env):
p = "/some/path"
- os.environ['SPACK_USER_CONFIG_PATH'] = p
+ os.environ["SPACK_USER_CONFIG_PATH"] = p
assert p == spack.paths._get_user_config_path()
def test_user_config_path_is_default_when_env_var_is_empty(working_env):
- os.environ['SPACK_USER_CONFIG_PATH'] = ''
- assert os.path.expanduser("~%s.spack" % os.sep) == \
- spack.paths._get_user_config_path()
+ os.environ["SPACK_USER_CONFIG_PATH"] = ""
+ assert os.path.expanduser("~%s.spack" % os.sep) == spack.paths._get_user_config_path()
def test_local_config_can_be_disabled(working_env):
- os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = 'true'
+ os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = "true"
cfg = spack.config._config()
assert "defaults" in cfg.scopes
assert "system" not in cfg.scopes
assert "site" in cfg.scopes
assert "user" not in cfg.scopes
- os.environ['SPACK_DISABLE_LOCAL_CONFIG'] = ''
+ os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = ""
cfg = spack.config._config()
assert "defaults" in cfg.scopes
assert "system" not in cfg.scopes
assert "site" in cfg.scopes
assert "user" not in cfg.scopes
- del os.environ['SPACK_DISABLE_LOCAL_CONFIG']
+ del os.environ["SPACK_DISABLE_LOCAL_CONFIG"]
cfg = spack.config._config()
assert "defaults" in cfg.scopes
assert "system" in cfg.scopes
@@ -1259,11 +1258,10 @@ def test_local_config_can_be_disabled(working_env):
def test_user_cache_path_is_overridable(working_env):
p = "/some/path"
- os.environ['SPACK_USER_CACHE_PATH'] = p
+ os.environ["SPACK_USER_CACHE_PATH"] = p
assert spack.paths._get_user_cache_path() == p
def test_user_cache_path_is_default_when_env_var_is_empty(working_env):
- os.environ['SPACK_USER_CACHE_PATH'] = ''
- assert os.path.expanduser("~%s.spack" % os.sep) == \
- spack.paths._get_user_cache_path()
+ os.environ["SPACK_USER_CACHE_PATH"] = ""
+ assert os.path.expanduser("~%s.spack" % os.sep) == spack.paths._get_user_cache_path()
diff --git a/lib/spack/spack/test/config_values.py b/lib/spack/spack/test/config_values.py
index 6600545610..f631a4aa82 100644
--- a/lib/spack/spack/test/config_values.py
+++ b/lib/spack/spack/test/config_values.py
@@ -9,44 +9,37 @@ import pytest
import spack.spec
import spack.store
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-@pytest.mark.parametrize('hash_length', [1, 2, 3, 4, 5, 9])
-@pytest.mark.usefixtures('mock_packages')
+@pytest.mark.parametrize("hash_length", [1, 2, 3, 4, 5, 9])
+@pytest.mark.usefixtures("mock_packages")
def test_set_install_hash_length(hash_length, mutable_config, tmpdir):
- mutable_config.set('config:install_hash_length', hash_length)
- mutable_config.set('config:install_tree', {'root': str(tmpdir)})
+ mutable_config.set("config:install_hash_length", hash_length)
+ mutable_config.set("config:install_tree", {"root": str(tmpdir)})
# The call below is to reinitialize the directory layout associated
# with the store according to the configuration changes above (i.e.
# with the shortened hash)
store = spack.store._store()
with spack.store.use_store(store):
- spec = spack.spec.Spec('libelf').concretized()
+ spec = spack.spec.Spec("libelf").concretized()
prefix = spec.prefix
- hash_str = prefix.rsplit('-')[-1]
+ hash_str = prefix.rsplit("-")[-1]
assert len(hash_str) == hash_length
-@pytest.mark.use_fixtures('mock_packages')
+@pytest.mark.usefixtures("mock_packages")
def test_set_install_hash_length_upper_case(mutable_config, tmpdir):
- mutable_config.set('config:install_hash_length', 5)
+ mutable_config.set("config:install_hash_length", 5)
mutable_config.set(
- 'config:install_tree',
- {
- 'root': str(tmpdir),
- 'projections': {
- 'all': '{name}-{HASH}'
- }
- }
+ "config:install_tree", {"root": str(tmpdir), "projections": {"all": "{name}-{HASH}"}}
)
# The call below is to reinitialize the directory layout associated
# with the store according to the configuration changes above (i.e.
# with the shortened hash and projection)
store = spack.store._store()
with spack.store.use_store(store):
- spec = spack.spec.Spec('libelf').concretized()
+ spec = spack.spec.Spec("libelf").concretized()
prefix = spec.prefix
- hash_str = prefix.rsplit('-')[-1]
+ hash_str = prefix.rsplit("-")[-1]
assert len(hash_str) == 5
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 2e0062a194..3440351e93 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -50,25 +50,25 @@ import spack.util.spack_yaml as syaml
from spack.fetch_strategy import FetchError, FetchStrategyComposite, URLFetchStrategy
from spack.util.pattern import Bunch
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
#
# Return list of shas for latest two git commits in local spack repo
#
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def last_two_git_commits():
- git = spack.util.executable.which('git', required=True)
+ git = spack.util.executable.which("git", required=True)
spack_git_path = spack.paths.prefix
with working_dir(spack_git_path):
- git_log_out = git('log', '-n', '2', output=str, error=os.devnull)
+ git_log_out = git("log", "-n", "2", output=str, error=os.devnull)
regex = re.compile(r"^commit\s([^\s]+$)", re.MULTILINE)
yield regex.findall(git_log_out)
def write_file(filename, contents):
- with open(filename, 'w') as f:
+ with open(filename, "w") as f:
f.write(contents)
@@ -78,15 +78,14 @@ commit_counter = 0
@pytest.fixture
def override_git_repos_cache_path(tmpdir):
saved = spack.paths.user_repos_cache_path
- tmp_path = tmpdir.mkdir('git-repo-cache-path-for-tests')
+ tmp_path = tmpdir.mkdir("git-repo-cache-path-for-tests")
spack.paths.user_repos_cache_path = str(tmp_path)
yield
spack.paths.user_repos_cache_path = saved
@pytest.fixture
-def mock_git_version_info(tmpdir, override_git_repos_cache_path,
- scope="function"):
+def mock_git_version_info(tmpdir, override_git_repos_cache_path, scope="function"):
"""Create a mock git repo with known structure
The structure of commits in this repo is as follows::
@@ -111,72 +110,71 @@ def mock_git_version_info(tmpdir, override_git_repos_cache_path,
version tags on multiple branches, and version order is not equal to time
order or topological order.
"""
- git = spack.util.executable.which('git', required=True)
- repo_path = str(tmpdir.mkdir('git_repo'))
- filename = 'file.txt'
+ git = spack.util.executable.which("git", required=True)
+ repo_path = str(tmpdir.mkdir("git_repo"))
+ filename = "file.txt"
def commit(message):
global commit_counter
- git('commit', '--date', '2020-01-%02d 12:0:00 +0300' % commit_counter,
- '-am', message)
+ git("commit", "--date", "2020-01-%02d 12:0:00 +0300" % commit_counter, "-am", message)
commit_counter += 1
with working_dir(repo_path):
git("init")
- git('config', 'user.name', 'Spack')
- git('config', 'user.email', 'spack@spack.io')
+ git("config", "user.name", "Spack")
+ git("config", "user.email", "spack@spack.io")
commits = []
def latest_commit():
- return git('rev-list', '-n1', 'HEAD', output=str, error=str).strip()
+ return git("rev-list", "-n1", "HEAD", output=str, error=str).strip()
# Add two commits on main branch
- write_file(filename, '[]')
- git('add', filename)
- commit('first commit')
+ write_file(filename, "[]")
+ git("add", filename)
+ commit("first commit")
commits.append(latest_commit())
# Get name of default branch (differs by git version)
- main = git('rev-parse', '--abbrev-ref', 'HEAD', output=str, error=str).strip()
+ main = git("rev-parse", "--abbrev-ref", "HEAD", output=str, error=str).strip()
# Tag second commit as v1.0
write_file(filename, "[1, 0]")
- commit('second commit')
+ commit("second commit")
commits.append(latest_commit())
- git('tag', 'v1.0')
+ git("tag", "v1.0")
# Add two commits and a tag on 1.x branch
- git('checkout', '-b', '1.x')
+ git("checkout", "-b", "1.x")
write_file(filename, "[1, 0, '', 1]")
- commit('first 1.x commit')
+ commit("first 1.x commit")
commits.append(latest_commit())
write_file(filename, "[1, 1]")
- commit('second 1.x commit')
+ commit("second 1.x commit")
commits.append(latest_commit())
- git('tag', 'v1.1')
+ git("tag", "v1.1")
# Add two commits and a tag on main branch
- git('checkout', main)
+ git("checkout", main)
write_file(filename, "[1, 0, '', 1]")
- commit('third main commit')
+ commit("third main commit")
commits.append(latest_commit())
write_file(filename, "[2, 0]")
- commit('fourth main commit')
+ commit("fourth main commit")
commits.append(latest_commit())
- git('tag', 'v2.0')
+ git("tag", "v2.0")
# Add two more commits on 1.x branch to ensure we aren't cheating by using time
- git('checkout', '1.x')
+ git("checkout", "1.x")
write_file(filename, "[1, 1, '', 1]")
- commit('third 1.x commit')
+ commit("third 1.x commit")
commits.append(latest_commit())
write_file(filename, "[1, 2]")
- commit('fourth 1.x commit')
+ commit("fourth 1.x commit")
commits.append(latest_commit())
- git('tag', '1.2') # test robust parsing to different syntax, no v
+ git("tag", "1.2") # test robust parsing to different syntax, no v
# The commits are ordered with the last commit first in the list
commits = list(reversed(commits))
@@ -191,9 +189,10 @@ def clear_recorded_monkeypatches():
spack.subprocess_context.clear_patches()
-@pytest.fixture(scope='session', autouse=True)
+@pytest.fixture(scope="session", autouse=True)
def record_monkeypatch_setattr():
import _pytest
+
saved_setattr = _pytest.monkeypatch.MonkeyPatch.setattr
def record_setattr(cls, target, name, value, *args, **kwargs):
@@ -213,13 +212,13 @@ def _can_access(path, perms):
@pytest.fixture
def no_path_access(monkeypatch):
- monkeypatch.setattr(os, 'access', _can_access)
+ monkeypatch.setattr(os, "access", _can_access)
#
# Disable any active Spack environment BEFORE all tests
#
-@pytest.fixture(scope='session', autouse=True)
+@pytest.fixture(scope="session", autouse=True)
def clean_user_environment():
spack_env_value = os.environ.pop(ev.spack_env_var, None)
with ev.no_active_environment():
@@ -231,7 +230,7 @@ def clean_user_environment():
#
# Make sure global state of active env does not leak between tests.
#
-@pytest.fixture(scope='function', autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def clean_test_environment():
yield
ev.deactivate()
@@ -244,7 +243,7 @@ def _verify_executables_noop(*args):
#
# Disable checks on compiler executable existence
#
-@pytest.fixture(scope='function', autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def mock_compiler_executable_verification(request, monkeypatch):
"""Mock the compiler executable verification to allow missing executables.
@@ -254,10 +253,10 @@ def mock_compiler_executable_verification(request, monkeypatch):
@pytest.mark.enable_compiler_verification
If a test is marked in that way, this is a no-op."""
- if 'enable_compiler_verification' not in request.keywords:
- monkeypatch.setattr(spack.compiler.Compiler,
- 'verify_executables',
- _verify_executables_noop)
+ if "enable_compiler_verification" not in request.keywords:
+ monkeypatch.setattr(
+ spack.compiler.Compiler, "verify_executables", _verify_executables_noop
+ )
# Hooks to add command line options or set other custom behaviors.
@@ -268,19 +267,20 @@ def mock_compiler_executable_verification(request, monkeypatch):
def pytest_addoption(parser):
group = parser.getgroup("Spack specific command line options")
group.addoption(
- '--fast', action='store_true', default=False,
- help='runs only "fast" unit tests, instead of the whole suite')
+ "--fast",
+ action="store_true",
+ default=False,
+ help='runs only "fast" unit tests, instead of the whole suite',
+ )
def pytest_collection_modifyitems(config, items):
- if not config.getoption('--fast'):
+ if not config.getoption("--fast"):
# --fast not given, run all the tests
return
- slow_tests = ['db', 'network', 'maybeslow']
- skip_as_slow = pytest.mark.skip(
- reason='skipped slow test [--fast command line option given]'
- )
+ slow_tests = ["db", "network", "maybeslow"]
+ skip_as_slow = pytest.mark.skip(reason="skipped slow test [--fast command line option given]")
for item in items:
if any(x in item.keywords for x in slow_tests):
item.add_marker(skip_as_slow)
@@ -289,7 +289,7 @@ def pytest_collection_modifyitems(config, items):
#
# These fixtures are applied to all tests
#
-@pytest.fixture(scope='function', autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def no_chdir():
"""Ensure that no test changes Spack's working dirctory.
@@ -308,7 +308,7 @@ def no_chdir():
assert os.getcwd() == original_wd
-@pytest.fixture(scope='function', autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def reset_compiler_cache():
"""Ensure that the compiler cache is not shared across Spack tests
@@ -328,24 +328,23 @@ def onerror(func, path, error_info):
func(path)
-@pytest.fixture(scope='function', autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def mock_stage(tmpdir_factory, monkeypatch, request):
"""Establish the temporary build_stage for the mock archive."""
# The approach with this autouse fixture is to set the stage root
# instead of using spack.config.override() to avoid configuration
# conflicts with dozens of tests that rely on other configuration
# fixtures, such as config.
- if 'nomockstage' not in request.keywords:
+ if "nomockstage" not in request.keywords:
# Set the build stage to the requested path
- new_stage = tmpdir_factory.mktemp('mock-stage')
+ new_stage = tmpdir_factory.mktemp("mock-stage")
new_stage_path = str(new_stage)
# Ensure the source directory exists within the new stage path
- source_path = os.path.join(new_stage_path,
- spack.stage._source_path_subdir)
+ source_path = os.path.join(new_stage_path, spack.stage._source_path_subdir)
mkdirp(source_path)
- monkeypatch.setattr(spack.stage, '_stage_root', new_stage_path)
+ monkeypatch.setattr(spack.stage, "_stage_root", new_stage_path)
yield new_stage_path
@@ -357,14 +356,14 @@ def mock_stage(tmpdir_factory, monkeypatch, request):
yield str(tmpdir_factory)
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def ignore_stage_files():
"""Session-scoped helper for check_for_leftover_stage_files.
Used to track which leftover files in the stage have been seen.
"""
# to start with, ignore the .lock file at the stage root.
- return set(['.lock', spack.stage._source_path_subdir, 'build_cache'])
+ return set([".lock", spack.stage._source_path_subdir, "build_cache"])
def remove_whatever_it_is(path):
@@ -388,7 +387,7 @@ def working_env():
os.environ.update(saved_env)
-@pytest.fixture(scope='function', autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def check_for_leftover_stage_files(request, mock_stage, ignore_stage_files):
"""
Ensure that each (mock_stage) test leaves a clean stage when done.
@@ -414,7 +413,7 @@ def check_for_leftover_stage_files(request, mock_stage, ignore_stage_files):
else:
raise
- if 'disable_clean_stage_check' in request.keywords:
+ if "disable_clean_stage_check" in request.keywords:
# clean up after tests that are expected to be dirty
for f in files_in_stage:
path = os.path.join(stage_path, f)
@@ -434,7 +433,7 @@ class MockCache(object):
class MockCacheFetcher(object):
def fetch(self):
- raise FetchError('Mock cache always fails for tests')
+ raise FetchError("Mock cache always fails for tests")
def __str__(self):
return "[mock fetch cache]"
@@ -445,7 +444,7 @@ def mock_fetch_cache(monkeypatch):
"""Substitutes spack.paths.fetch_cache with a mock object that does nothing
and raises on fetch.
"""
- monkeypatch.setattr(spack.caches, 'fetch_cache', MockCache())
+ monkeypatch.setattr(spack.caches, "fetch_cache", MockCache())
@pytest.fixture()
@@ -453,10 +452,10 @@ def mock_binary_index(monkeypatch, tmpdir_factory):
"""Changes the directory for the binary index and creates binary index for
every test. Clears its own index when it's done.
"""
- tmpdir = tmpdir_factory.mktemp('mock_binary_index')
- index_path = tmpdir.join('binary_index').strpath
+ tmpdir = tmpdir_factory.mktemp("mock_binary_index")
+ index_path = tmpdir.join("binary_index").strpath
mock_index = spack.binary_distribution.BinaryCacheIndex(index_path)
- monkeypatch.setattr(spack.binary_distribution, 'binary_index', mock_index)
+ monkeypatch.setattr(spack.binary_distribution, "binary_index", mock_index)
yield
@@ -465,28 +464,26 @@ def _skip_if_missing_executables(request):
"""Permits to mark tests with 'require_executables' and skip the
tests if the executables passed as arguments are not found.
"""
- if hasattr(request.node, 'get_marker'):
+ if hasattr(request.node, "get_marker"):
# TODO: Remove the deprecated API as soon as we drop support for Python 2.6
- marker = request.node.get_marker('requires_executables')
+ marker = request.node.get_marker("requires_executables")
else:
- marker = request.node.get_closest_marker('requires_executables')
+ marker = request.node.get_closest_marker("requires_executables")
if marker:
required_execs = marker.args
- missing_execs = [
- x for x in required_execs if spack.util.executable.which(x) is None
- ]
+ missing_execs = [x for x in required_execs if spack.util.executable.which(x) is None]
if missing_execs:
- msg = 'could not find executables: {0}'
- pytest.skip(msg.format(', '.join(missing_execs)))
+ msg = "could not find executables: {0}"
+ pytest.skip(msg.format(", ".join(missing_execs)))
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def test_platform():
return spack.platforms.Test()
-@pytest.fixture(autouse=True, scope='session')
+@pytest.fixture(autouse=True, scope="session")
def _use_test_platform(test_platform):
# This is the only context manager used at session scope (see note
# below for more insight) since we want to use the test platform as
@@ -494,6 +491,7 @@ def _use_test_platform(test_platform):
with spack.platforms.use_platform(test_platform):
yield
+
#
# Note on context managers used by fixtures
#
@@ -520,7 +518,7 @@ def _use_test_platform(test_platform):
#
# Test-specific fixtures
#
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_repo_path():
yield spack.repo.Repo(spack.paths.mock_packages_path)
@@ -532,18 +530,17 @@ def _pkg_install_fn(pkg, spec, prefix):
@pytest.fixture
def mock_pkg_install(monkeypatch):
- monkeypatch.setattr(spack.package_base.PackageBase, 'install',
- _pkg_install_fn, raising=False)
+ monkeypatch.setattr(spack.package_base.PackageBase, "install", _pkg_install_fn, raising=False)
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def mock_packages(mock_repo_path, mock_pkg_install):
"""Use the 'builtin.mock' repository instead of 'builtin'"""
with spack.repo.use_repositories(mock_repo_path) as mock_repo:
yield mock_repo
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def mutable_mock_repo(mock_repo_path):
"""Function-scoped mock packages, for tests that need to modify them."""
mock_repo = spack.repo.Repo(spack.paths.mock_packages_path)
@@ -551,52 +548,54 @@ def mutable_mock_repo(mock_repo_path):
yield mock_repo_path
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def linux_os():
"""Returns a named tuple with attributes 'name' and 'version'
representing the OS.
"""
platform = spack.platforms.host()
- name, version = 'debian', '6'
- if platform.name == 'linux':
- current_os = platform.operating_system('default_os')
+ name, version = "debian", "6"
+ if platform.name == "linux":
+ current_os = platform.operating_system("default_os")
name, version = current_os.name, current_os.version
- LinuxOS = collections.namedtuple('LinuxOS', ['name', 'version'])
+ LinuxOS = collections.namedtuple("LinuxOS", ["name", "version"])
return LinuxOS(name=name, version=version)
-@pytest.fixture(autouse=is_windows, scope='session')
+@pytest.fixture(autouse=is_windows, scope="session")
def platform_config():
spack.config.add_default_platform_scope(spack.platforms.real_host().name)
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def default_config():
"""Isolates the default configuration from the user configs.
This ensures we can test the real default configuration without having
tests fail when the user overrides the defaults that we test against."""
- defaults_path = os.path.join(spack.paths.etc_path, 'defaults')
+ defaults_path = os.path.join(spack.paths.etc_path, "defaults")
if is_windows:
defaults_path = os.path.join(defaults_path, "windows")
with spack.config.use_configuration(defaults_path) as defaults_config:
yield defaults_config
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_uarch_json(tmpdir_factory):
"""Mock microarchitectures.json with test architecture descriptions."""
- tmpdir = tmpdir_factory.mktemp('microarchitectures')
+ tmpdir = tmpdir_factory.mktemp("microarchitectures")
uarch_json = py.path.local(spack.paths.test_path).join(
- "data", "microarchitectures", "microarchitectures.json")
+ "data", "microarchitectures", "microarchitectures.json"
+ )
uarch_json.copy(tmpdir)
yield str(tmpdir.join("microarchitectures.json"))
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_uarch_configuration(mock_uarch_json):
"""Create mock dictionaries for the archspec.cpu."""
+
def load_json():
with open(mock_uarch_json) as f:
return json.load(f)
@@ -607,7 +606,7 @@ def mock_uarch_configuration(mock_uarch_json):
yield targets_json, targets
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def mock_targets(mock_uarch_configuration, monkeypatch):
"""Use this fixture to enable mock uarch targets for testing."""
targets_json, targets = mock_uarch_configuration
@@ -616,42 +615,40 @@ def mock_targets(mock_uarch_configuration, monkeypatch):
monkeypatch.setattr(archspec.cpu.microarchitecture, "TARGETS", targets)
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def configuration_dir(tmpdir_factory, linux_os):
"""Copies mock configuration files in a temporary directory. Returns the
directory path.
"""
- tmpdir = tmpdir_factory.mktemp('configurations')
+ tmpdir = tmpdir_factory.mktemp("configurations")
# <test_path>/data/config has mock config yaml files in it
# copy these to the site config.
- test_config = py.path.local(spack.paths.test_path).join('data', 'config')
- test_config.copy(tmpdir.join('site'))
+ test_config = py.path.local(spack.paths.test_path).join("data", "config")
+ test_config.copy(tmpdir.join("site"))
# Create temporary 'defaults', 'site' and 'user' folders
- tmpdir.ensure('user', dir=True)
+ tmpdir.ensure("user", dir=True)
# Slightly modify config.yaml and compilers.yaml
if is_windows:
- solver = 'original'
+ solver = "original"
locks = False
else:
- solver = os.environ.get('SPACK_TEST_SOLVER', 'clingo')
+ solver = os.environ.get("SPACK_TEST_SOLVER", "clingo")
locks = True
- config_yaml = test_config.join('config.yaml')
- modules_root = tmpdir_factory.mktemp('share')
- tcl_root = modules_root.ensure('modules', dir=True)
- lmod_root = modules_root.ensure('lmod', dir=True)
- content = ''.join(config_yaml.read()).format(
- solver, locks, str(tcl_root), str(lmod_root)
- )
- t = tmpdir.join('site', 'config.yaml')
+ config_yaml = test_config.join("config.yaml")
+ modules_root = tmpdir_factory.mktemp("share")
+ tcl_root = modules_root.ensure("modules", dir=True)
+ lmod_root = modules_root.ensure("lmod", dir=True)
+ content = "".join(config_yaml.read()).format(solver, locks, str(tcl_root), str(lmod_root))
+ t = tmpdir.join("site", "config.yaml")
t.write(content)
- compilers_yaml = test_config.join('compilers.yaml')
- content = ''.join(compilers_yaml.read()).format(linux_os)
- t = tmpdir.join('site', 'compilers.yaml')
+ compilers_yaml = test_config.join("compilers.yaml")
+ content = "".join(compilers_yaml.read()).format(linux_os)
+ t = tmpdir.join("site", "compilers.yaml")
t.write(content)
yield tmpdir
@@ -662,33 +659,33 @@ def configuration_dir(tmpdir_factory, linux_os):
def _create_mock_configuration_scopes(configuration_dir):
"""Create the configuration scopes used in `config` and `mutable_config`."""
scopes = [
- spack.config.InternalConfigScope('_builtin', spack.config.config_defaults),
+ spack.config.InternalConfigScope("_builtin", spack.config.config_defaults),
]
scopes += [
spack.config.ConfigScope(name, str(configuration_dir.join(name)))
- for name in ['site', 'system', 'user']
+ for name in ["site", "system", "user"]
]
- scopes += [spack.config.InternalConfigScope('command_line')]
+ scopes += [spack.config.InternalConfigScope("command_line")]
return scopes
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_configuration_scopes(configuration_dir):
"""Create a persistent Configuration object from the configuration_dir."""
yield _create_mock_configuration_scopes(configuration_dir)
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def config(mock_configuration_scopes):
"""This fixture activates/deactivates the mock configuration."""
with spack.config.use_configuration(*mock_configuration_scopes) as config:
yield config
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def mutable_config(tmpdir_factory, configuration_dir):
"""Like config, but tests can modify the configuration."""
- mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
+ mutable_dir = tmpdir_factory.mktemp("mutable_config").join("tmp")
configuration_dir.copy(mutable_dir)
scopes = _create_mock_configuration_scopes(mutable_dir)
@@ -696,12 +693,14 @@ def mutable_config(tmpdir_factory, configuration_dir):
yield cfg
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def mutable_empty_config(tmpdir_factory, configuration_dir):
"""Empty configuration that can be modified by the tests."""
- mutable_dir = tmpdir_factory.mktemp('mutable_config').join('tmp')
- scopes = [spack.config.ConfigScope(name, str(mutable_dir.join(name)))
- for name in ['site', 'system', 'user']]
+ mutable_dir = tmpdir_factory.mktemp("mutable_config").join("tmp")
+ scopes = [
+ spack.config.ConfigScope(name, str(mutable_dir.join(name)))
+ for name in ["site", "system", "user"]
+ ]
with spack.config.use_configuration(*scopes) as cfg:
yield cfg
@@ -713,7 +712,7 @@ def no_compilers_yaml(mutable_config):
for scope, local_config in mutable_config.scopes.items():
if not local_config.path: # skip internal scopes
continue
- compilers_yaml = os.path.join(local_config.path, 'compilers.yaml')
+ compilers_yaml = os.path.join(local_config.path, "compilers.yaml")
if os.path.exists(compilers_yaml):
os.remove(compilers_yaml)
@@ -721,8 +720,7 @@ def no_compilers_yaml(mutable_config):
@pytest.fixture()
def mock_low_high_config(tmpdir):
"""Mocks two configuration scopes: 'low' and 'high'."""
- scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name)))
- for name in ['low', 'high']]
+ scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name))) for name in ["low", "high"]]
with spack.config.use_configuration(*scopes) as config:
yield config
@@ -749,30 +747,30 @@ def _populate(mock_db):
|/
o libelf
"""
+
def _install(spec):
s = spack.spec.Spec(spec).concretized()
s.package.do_install(fake=True, explicit=True)
- _install('mpileaks ^mpich')
- _install('mpileaks ^mpich2')
- _install('mpileaks ^zmpi')
- _install('externaltest')
- _install('trivial-smoke-test')
+ _install("mpileaks ^mpich")
+ _install("mpileaks ^mpich2")
+ _install("mpileaks ^zmpi")
+ _install("externaltest")
+ _install("trivial-smoke-test")
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def _store_dir_and_cache(tmpdir_factory):
"""Returns the directory where to build the mock database and
where to cache it.
"""
- store = tmpdir_factory.mktemp('mock_store')
- cache = tmpdir_factory.mktemp('mock_store_cache')
+ store = tmpdir_factory.mktemp("mock_store")
+ cache = tmpdir_factory.mktemp("mock_store_cache")
return store, cache
-@pytest.fixture(scope='session')
-def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,
- _store_dir_and_cache):
+@pytest.fixture(scope="session")
+def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes, _store_dir_and_cache):
"""Creates a read-only mock database with some packages installed note
that the ref count for dyninst here will be 3, as it's recycled
across each install.
@@ -784,7 +782,7 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,
store_path, store_cache = _store_dir_and_cache
# If the cache does not exist populate the store and create it
- if not os.path.exists(str(store_cache.join('.spack-db'))):
+ if not os.path.exists(str(store_cache.join(".spack-db"))):
with spack.config.use_configuration(*mock_configuration_scopes):
with spack.store.use_store(str(store_path)) as store:
with spack.repo.use_repositories(mock_repo_path):
@@ -792,39 +790,38 @@ def mock_store(tmpdir_factory, mock_repo_path, mock_configuration_scopes,
store_path.copy(store_cache, mode=True, stat=True)
# Make the DB filesystem read-only to ensure we can't modify entries
- store_path.join('.spack-db').chmod(mode=0o555, rec=1)
+ store_path.join(".spack-db").chmod(mode=0o555, rec=1)
yield store_path
- store_path.join('.spack-db').chmod(mode=0o755, rec=1)
+ store_path.join(".spack-db").chmod(mode=0o755, rec=1)
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def database(mock_store, mock_packages, config):
"""This activates the mock store, packages, AND config."""
with spack.store.use_store(str(mock_store)) as store:
yield store.db
# Force reading the database again between tests
- store.db.last_seen_verifier = ''
+ store.db.last_seen_verifier = ""
-@pytest.fixture(scope='function')
-def database_mutable_config(mock_store, mock_packages, mutable_config,
- monkeypatch):
+@pytest.fixture(scope="function")
+def database_mutable_config(mock_store, mock_packages, mutable_config, monkeypatch):
"""This activates the mock store, packages, AND config."""
with spack.store.use_store(str(mock_store)) as store:
yield store.db
- store.db.last_seen_verifier = ''
+ store.db.last_seen_verifier = ""
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def mutable_database(database_mutable_config, _store_dir_and_cache):
"""Writeable version of the fixture, restored to its initial state
after each test.
"""
# Make the database writeable, as we are going to modify it
store_path, store_cache = _store_dir_and_cache
- store_path.join('.spack-db').chmod(mode=0o755, rec=1)
+ store_path.join(".spack-db").chmod(mode=0o755, rec=1)
yield database_mutable_config
@@ -832,18 +829,18 @@ def mutable_database(database_mutable_config, _store_dir_and_cache):
# the store and making the database read-only
store_path.remove(rec=1)
store_cache.copy(store_path, mode=True, stat=True)
- store_path.join('.spack-db').chmod(mode=0o555, rec=1)
+ store_path.join(".spack-db").chmod(mode=0o555, rec=1)
@pytest.fixture()
def dirs_with_libfiles(tmpdir_factory):
lib_to_libfiles = {
- 'libstdc++': ['libstdc++.so', 'libstdc++.tbd'],
- 'libgfortran': ['libgfortran.a', 'libgfortran.dylib'],
- 'libirc': ['libirc.a', 'libirc.so']
+ "libstdc++": ["libstdc++.so", "libstdc++.tbd"],
+ "libgfortran": ["libgfortran.a", "libgfortran.dylib"],
+ "libirc": ["libirc.a", "libirc.so"],
}
- root = tmpdir_factory.mktemp('root')
+ root = tmpdir_factory.mktemp("root")
lib_to_dirs = {}
i = 0
for lib, libfiles in lib_to_libfiles.items():
@@ -864,7 +861,7 @@ def _compiler_link_paths_noop(*args):
return []
-@pytest.fixture(scope='function', autouse=True)
+@pytest.fixture(scope="function", autouse=True)
def disable_compiler_execution(monkeypatch, request):
"""
This fixture can be disabled for tests of the compiler link path
@@ -873,22 +870,20 @@ def disable_compiler_execution(monkeypatch, request):
@pytest.mark.enable_compiler_link_paths
If a test is marked in that way, this is a no-op."""
- if 'enable_compiler_link_paths' not in request.keywords:
+ if "enable_compiler_link_paths" not in request.keywords:
# Compiler.determine_implicit_rpaths actually runs the compiler. So
# replace that function with a noop that simulates finding no implicit
# RPATHs
monkeypatch.setattr(
- spack.compiler.Compiler,
- '_get_compiler_link_paths',
- _compiler_link_paths_noop
+ spack.compiler.Compiler, "_get_compiler_link_paths", _compiler_link_paths_noop
)
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def install_mockery(temporary_store, config, mock_packages):
"""Hooks a fake install directory, DB, and stage directory into Spack."""
# We use a fake package, so temporarily disable checksumming
- with spack.config.override('config:checksum', False):
+ with spack.config.override("config:checksum", False):
yield
# Also wipe out any cached prefix failure locks (associated with
@@ -902,19 +897,17 @@ def install_mockery(temporary_store, config, mock_packages):
pass
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def temporary_store(tmpdir):
"""Hooks a temporary empty store for the test function."""
- temporary_store_path = tmpdir.join('opt')
+ temporary_store_path = tmpdir.join("opt")
with spack.store.use_store(str(temporary_store_path)) as s:
yield s
temporary_store_path.remove()
-@pytest.fixture(scope='function')
-def install_mockery_mutable_config(
- temporary_store, mutable_config, mock_packages
-):
+@pytest.fixture(scope="function")
+def install_mockery_mutable_config(temporary_store, mutable_config, mock_packages):
"""Hooks a fake install directory, DB, and stage directory into Spack.
This is specifically for tests which want to use 'install_mockery' but
@@ -922,7 +915,7 @@ def install_mockery_mutable_config(
'mutable config'): 'install_mockery' does not support this.
"""
# We use a fake package, so temporarily disable checksumming
- with spack.config.override('config:checksum', False):
+ with spack.config.override("config:checksum", False):
yield
@@ -932,8 +925,7 @@ def mock_fetch(mock_archive, monkeypatch):
mock_fetcher = FetchStrategyComposite()
mock_fetcher.append(URLFetchStrategy(mock_archive.url))
- monkeypatch.setattr(
- spack.package_base.PackageBase, 'fetcher', mock_fetcher)
+ monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", mock_fetcher)
class MockLayout(object):
@@ -941,7 +933,7 @@ class MockLayout(object):
self.root = root
def path_for_spec(self, spec):
- return os.path.sep.join([self.root, spec.name + '-' + spec.dag_hash()])
+ return os.path.sep.join([self.root, spec.name + "-" + spec.dag_hash()])
def ensure_installed(self, spec):
pass
@@ -979,27 +971,17 @@ class ConfigUpdate(object):
self.monkeypatch = monkeypatch
def __call__(self, filename):
- file = os.path.join(self.root_for_conf, filename + '.yaml')
+ file = os.path.join(self.root_for_conf, filename + ".yaml")
with open(file) as f:
config_settings = syaml.load_config(f)
- spack.config.set('modules:default', config_settings)
+ spack.config.set("modules:default", config_settings)
mock_config = MockConfig(config_settings, self.writer_key)
+ self.monkeypatch.setattr(spack.modules.common, "configuration", mock_config.configuration)
self.monkeypatch.setattr(
- spack.modules.common,
- 'configuration',
- mock_config.configuration
- )
- self.monkeypatch.setattr(
- self.writer_mod,
- 'configuration',
- mock_config.writer_configuration
- )
- self.monkeypatch.setattr(
- self.writer_mod,
- 'configuration_registry',
- {}
+ self.writer_mod, "configuration", mock_config.writer_configuration
)
+ self.monkeypatch.setattr(self.writer_mod, "configuration_registry", {})
@pytest.fixture()
@@ -1008,15 +990,13 @@ def module_configuration(monkeypatch, request, mutable_config):
for tests and monkeypatches the right classes to hook it in.
"""
# Class of the module file writer
- writer_cls = getattr(request.module, 'writer_cls')
+ writer_cls = getattr(request.module, "writer_cls")
# Module where the module file writer is defined
writer_mod = inspect.getmodule(writer_cls)
# Key for specific settings relative to this module type
- writer_key = str(writer_mod.__name__).split('.')[-1]
+ writer_key = str(writer_mod.__name__).split(".")[-1]
# Root folder for configuration
- root_for_conf = os.path.join(
- spack.paths.test_path, 'data', 'modules', writer_key
- )
+ root_for_conf = os.path.join(spack.paths.test_path, "data", "modules", writer_key)
# ConfigUpdate, when called, will modify configuration, so we need to use
# the mutable_config fixture
@@ -1033,7 +1013,7 @@ def mock_gnupghome(monkeypatch):
spack.util.gpg.init()
except spack.util.gpg.SpackGPGError:
if not spack.util.gpg.GPG:
- pytest.skip('This test requires gpg')
+ pytest.skip("This test requires gpg")
short_name_tmpdir = tempfile.mkdtemp()
with spack.util.gpg.gnupghome_override(short_name_tmpdir):
@@ -1045,26 +1025,26 @@ def mock_gnupghome(monkeypatch):
# file or directory: 'S.gpg-agent.extra').
shutil.rmtree(short_name_tmpdir, ignore_errors=True)
+
##########
# Fake archives and repositories
##########
-@pytest.fixture(scope='session', params=[('.tar.gz', 'z')])
+@pytest.fixture(scope="session", params=[(".tar.gz", "z")])
def mock_archive(request, tmpdir_factory):
"""Creates a very simple archive directory with a configure script and a
makefile that installs to a prefix. Tars it up into an archive.
"""
- tar = spack.util.executable.which('tar', required=True)
+ tar = spack.util.executable.which("tar", required=True)
- tmpdir = tmpdir_factory.mktemp('mock-archive-dir')
+ tmpdir = tmpdir_factory.mktemp("mock-archive-dir")
tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
repodir = tmpdir.join(spack.stage._source_path_subdir)
# Create the configure script
- configure_path = str(tmpdir.join(spack.stage._source_path_subdir,
- 'configure'))
- with open(configure_path, 'w') as f:
+ configure_path = str(tmpdir.join(spack.stage._source_path_subdir, "configure"))
+ with open(configure_path, "w") as f:
f.write(
"#!/bin/sh\n"
"prefix=$(echo $1 | sed 's/--prefix=//')\n"
@@ -1080,23 +1060,22 @@ def mock_archive(request, tmpdir_factory):
# Archive it
with tmpdir.as_cwd():
- archive_name = '{0}{1}'.format(spack.stage._source_path_subdir,
- request.param[0])
- tar('-c{0}f'.format(request.param[1]), archive_name,
- spack.stage._source_path_subdir)
-
- Archive = collections.namedtuple('Archive',
- ['url', 'path', 'archive_file',
- 'expanded_archive_basedir'])
+ archive_name = "{0}{1}".format(spack.stage._source_path_subdir, request.param[0])
+ tar("-c{0}f".format(request.param[1]), archive_name, spack.stage._source_path_subdir)
+
+ Archive = collections.namedtuple(
+ "Archive", ["url", "path", "archive_file", "expanded_archive_basedir"]
+ )
archive_file = str(tmpdir.join(archive_name))
- url = ('file://' + archive_file)
+ url = "file://" + archive_file
# Return the url
yield Archive(
url=url,
archive_file=archive_file,
path=str(repodir),
- expanded_archive_basedir=spack.stage._source_path_subdir)
+ expanded_archive_basedir=spack.stage._source_path_subdir,
+ )
def _parse_cvs_date(line):
@@ -1105,25 +1084,25 @@ def _parse_cvs_date(line):
# date: 2021-07-07 02:43:33 -0700; ...
# date: 2021-07-07 02:43:33; ...
# date: 2021/07/07 02:43:33; ...
- m = re.search(r'date:\s+(\d+)[/-](\d+)[/-](\d+)\s+(\d+):(\d+):(\d+)', line)
+ m = re.search(r"date:\s+(\d+)[/-](\d+)[/-](\d+)\s+(\d+):(\d+):(\d+)", line)
if not m:
return None
year, month, day, hour, minute, second = [int(g) for g in m.groups()]
return datetime.datetime(year, month, day, hour, minute, second)
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_cvs_repository(tmpdir_factory):
"""Creates a very simple CVS repository with two commits and a branch."""
- cvs = spack.util.executable.which('cvs', required=True)
+ cvs = spack.util.executable.which("cvs", required=True)
- tmpdir = tmpdir_factory.mktemp('mock-cvs-repo-dir')
+ tmpdir = tmpdir_factory.mktemp("mock-cvs-repo-dir")
tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
repodir = tmpdir.join(spack.stage._source_path_subdir)
cvsroot = str(repodir)
# The CVS repository and source tree need to live in different directories
- sourcedirparent = tmpdir_factory.mktemp('mock-cvs-source-dir')
+ sourcedirparent = tmpdir_factory.mktemp("mock-cvs-source-dir")
module = spack.stage._source_path_subdir
url = cvsroot + "%module=" + module
sourcedirparent.ensure(module, dir=True)
@@ -1132,7 +1111,7 @@ def mock_cvs_repository(tmpdir_factory):
def format_date(date):
if date is None:
return None
- return date.strftime('%Y-%m-%d %H:%M:%S')
+ return date.strftime("%Y-%m-%d %H:%M:%S")
def get_cvs_timestamp(output):
"""Find the most recent CVS time stamp in a `cvs log` output"""
@@ -1155,41 +1134,49 @@ def mock_cvs_repository(tmpdir_factory):
# Initialize the repository
with sourcedir.as_cwd():
- cvs('-d', cvsroot, 'init')
- cvs('-d', cvsroot, 'import', '-m', 'initial mock repo commit',
- module, 'mockvendor', 'mockrelease')
+ cvs("-d", cvsroot, "init")
+ cvs(
+ "-d",
+ cvsroot,
+ "import",
+ "-m",
+ "initial mock repo commit",
+ module,
+ "mockvendor",
+ "mockrelease",
+ )
with sourcedirparent.as_cwd():
- cvs('-d', cvsroot, 'checkout', module)
+ cvs("-d", cvsroot, "checkout", module)
# Commit file r0
- r0_file = 'r0_file'
+ r0_file = "r0_file"
sourcedir.ensure(r0_file)
- cvs('-d', cvsroot, 'add', r0_file)
- cvs('-d', cvsroot, 'commit', '-m', 'revision 0', r0_file)
- output = cvs('log', '-N', r0_file, output=str)
- revision_date['1.1'] = format_date(get_cvs_timestamp(output))
+ cvs("-d", cvsroot, "add", r0_file)
+ cvs("-d", cvsroot, "commit", "-m", "revision 0", r0_file)
+ output = cvs("log", "-N", r0_file, output=str)
+ revision_date["1.1"] = format_date(get_cvs_timestamp(output))
# Commit file r1
- r1_file = 'r1_file'
+ r1_file = "r1_file"
sourcedir.ensure(r1_file)
- cvs('-d', cvsroot, 'add', r1_file)
- cvs('-d', cvsroot, 'commit', '-m' 'revision 1', r1_file)
- output = cvs('log', '-N', r0_file, output=str)
- revision_date['1.2'] = format_date(get_cvs_timestamp(output))
+ cvs("-d", cvsroot, "add", r1_file)
+ cvs("-d", cvsroot, "commit", "-m" "revision 1", r1_file)
+ output = cvs("log", "-N", r0_file, output=str)
+ revision_date["1.2"] = format_date(get_cvs_timestamp(output))
# Create branch 'mock-branch'
- cvs('-d', cvsroot, 'tag', 'mock-branch-root')
- cvs('-d', cvsroot, 'tag', '-b', 'mock-branch')
+ cvs("-d", cvsroot, "tag", "mock-branch-root")
+ cvs("-d", cvsroot, "tag", "-b", "mock-branch")
# CVS does not have the notion of a unique branch; branches and revisions
# are managed separately for every file
def get_branch():
"""Return the branch name if all files are on the same branch, else
return None. Also return None if all files are on the trunk."""
- lines = cvs('-d', cvsroot, 'status', '-v', output=str).splitlines()
+ lines = cvs("-d", cvsroot, "status", "-v", output=str).splitlines()
branch = None
for line in lines:
- m = re.search(r'(\S+)\s+[(]branch:', line)
+ m = re.search(r"(\S+)\s+[(]branch:", line)
if m:
tag = m.group(1)
if branch is None:
@@ -1209,31 +1196,30 @@ def mock_cvs_repository(tmpdir_factory):
# commit dates instead
def get_date():
"""Return latest date of the revisions of all files"""
- output = cvs('log', '-N', r0_file, output=str)
+ output = cvs("log", "-N", r0_file, output=str)
timestamp = get_cvs_timestamp(output)
if timestamp is None:
return None
return format_date(timestamp)
checks = {
- 'default': Bunch(
+ "default": Bunch(
file=r1_file,
branch=None,
date=None,
- args={'cvs': url},
+ args={"cvs": url},
),
- 'branch': Bunch(
+ "branch": Bunch(
file=r1_file,
- branch='mock-branch',
+ branch="mock-branch",
date=None,
- args={'cvs': url, 'branch': 'mock-branch'},
+ args={"cvs": url, "branch": "mock-branch"},
),
- 'date': Bunch(
+ "date": Bunch(
file=r0_file,
branch=None,
- date=revision_date['1.1'],
- args={'cvs': url,
- 'date': revision_date['1.1']},
+ date=revision_date["1.1"],
+ args={"cvs": url, "date": revision_date["1.1"]},
),
}
@@ -1248,7 +1234,7 @@ def mock_cvs_repository(tmpdir_factory):
yield test
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_git_repository(tmpdir_factory):
"""Creates a git repository multiple commits, branches, submodules, and
a tag. Visual representation of the commit history (starting with the
@@ -1273,90 +1259,93 @@ def mock_git_repository(tmpdir_factory):
associated builtin.mock package 'git-test'. c3 is a commit in the
repository but does not have an associated explicit package version.
"""
- git = spack.util.executable.which('git', required=True)
+ git = spack.util.executable.which("git", required=True)
suburls = []
# Create two git repositories which will be used as submodules in the
# main repository
for submodule_count in range(2):
- tmpdir = tmpdir_factory.mktemp('mock-git-repo-submodule-dir-{0}'
- .format(submodule_count))
+ tmpdir = tmpdir_factory.mktemp("mock-git-repo-submodule-dir-{0}".format(submodule_count))
tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
repodir = tmpdir.join(spack.stage._source_path_subdir)
- suburls.append((submodule_count, 'file://' + str(repodir)))
+ suburls.append((submodule_count, "file://" + str(repodir)))
with repodir.as_cwd():
- git('init')
- git('config', 'user.name', 'Spack')
- git('config', 'user.email', 'spack@spack.io')
+ git("init")
+ git("config", "user.name", "Spack")
+ git("config", "user.email", "spack@spack.io")
# r0 is just the first commit
- submodule_file = 'r0_file_{0}'.format(submodule_count)
+ submodule_file = "r0_file_{0}".format(submodule_count)
repodir.ensure(submodule_file)
- git('add', submodule_file)
- git('-c', 'commit.gpgsign=false', 'commit',
- '-m', 'mock-git-repo r0 {0}'.format(submodule_count))
-
- tmpdir = tmpdir_factory.mktemp('mock-git-repo-dir')
+ git("add", submodule_file)
+ git(
+ "-c",
+ "commit.gpgsign=false",
+ "commit",
+ "-m",
+ "mock-git-repo r0 {0}".format(submodule_count),
+ )
+
+ tmpdir = tmpdir_factory.mktemp("mock-git-repo-dir")
tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
repodir = tmpdir.join(spack.stage._source_path_subdir)
# Create the main repository
with repodir.as_cwd():
- git('init')
- git('config', 'user.name', 'Spack')
- git('config', 'user.email', 'spack@spack.io')
- url = 'file://' + str(repodir)
+ git("init")
+ git("config", "user.name", "Spack")
+ git("config", "user.email", "spack@spack.io")
+ url = "file://" + str(repodir)
for number, suburl in suburls:
- git('submodule', 'add', suburl,
- 'third_party/submodule{0}'.format(number))
+ git("submodule", "add", suburl, "third_party/submodule{0}".format(number))
# r0 is the first commit: it consists of one file and two submodules
- r0_file = 'r0_file'
+ r0_file = "r0_file"
repodir.ensure(r0_file)
- git('add', r0_file)
- git('-c', 'commit.gpgsign=false', 'commit', '-m', 'mock-git-repo r0')
+ git("add", r0_file)
+ git("-c", "commit.gpgsign=false", "commit", "-m", "mock-git-repo r0")
- branch = 'test-branch'
- branch_file = 'branch_file'
- git('branch', branch)
+ branch = "test-branch"
+ branch_file = "branch_file"
+ git("branch", branch)
- tag_branch = 'tag-branch'
- tag_file = 'tag_file'
- git('branch', tag_branch)
+ tag_branch = "tag-branch"
+ tag_file = "tag_file"
+ git("branch", tag_branch)
# Check out test branch and add one commit
- git('checkout', branch)
+ git("checkout", branch)
repodir.ensure(branch_file)
- git('add', branch_file)
- git('-c', 'commit.gpgsign=false', 'commit', '-m' 'r1 test branch')
+ git("add", branch_file)
+ git("-c", "commit.gpgsign=false", "commit", "-m" "r1 test branch")
# Check out the tag branch, add one commit, and then add a tag for it
- git('checkout', tag_branch)
+ git("checkout", tag_branch)
repodir.ensure(tag_file)
- git('add', tag_file)
- git('-c', 'commit.gpgsign=false', 'commit', '-m' 'tag test branch')
+ git("add", tag_file)
+ git("-c", "commit.gpgsign=false", "commit", "-m" "tag test branch")
- tag = 'test-tag'
- git('tag', tag)
+ tag = "test-tag"
+ git("tag", tag)
try:
default_branch = git(
- 'config',
- '--get',
- 'init.defaultBranch',
+ "config",
+ "--get",
+ "init.defaultBranch",
output=str,
).strip()
except Exception:
- default_branch = 'master'
- git('checkout', default_branch)
+ default_branch = "master"
+ git("checkout", default_branch)
- r2_file = 'r2_file'
+ r2_file = "r2_file"
repodir.ensure(r2_file)
- git('add', r2_file)
- git('-c', 'commit.gpgsign=false', 'commit', '-m', 'mock-git-repo r2')
+ git("add", r2_file)
+ git("-c", "commit.gpgsign=false", "commit", "-m", "mock-git-repo r2")
- rev_hash = lambda x: git('rev-parse', x, output=str).strip()
+ rev_hash = lambda x: git("rev-parse", x, output=str).strip()
r2 = rev_hash(default_branch)
# Record the commit hash of the (only) commit from test-branch and
@@ -1370,135 +1359,116 @@ def mock_git_repository(tmpdir_factory):
# revision for the version; a file associated with (and particular to)
# that revision/branch.
checks = {
- 'default': Bunch(
- revision=default_branch, file=r0_file, args={'git': url}
- ),
- 'branch': Bunch(
- revision=branch, file=branch_file, args={
- 'git': url, 'branch': branch
- }
- ),
- 'tag-branch': Bunch(
- revision=tag_branch, file=tag_file, args={
- 'git': url, 'branch': tag_branch
- }
- ),
- 'tag': Bunch(
- revision=tag, file=tag_file, args={'git': url, 'tag': tag}
- ),
- 'commit': Bunch(
- revision=r1, file=r1_file, args={'git': url, 'commit': r1}
+ "default": Bunch(revision=default_branch, file=r0_file, args={"git": url}),
+ "branch": Bunch(revision=branch, file=branch_file, args={"git": url, "branch": branch}),
+ "tag-branch": Bunch(
+ revision=tag_branch, file=tag_file, args={"git": url, "branch": tag_branch}
),
+ "tag": Bunch(revision=tag, file=tag_file, args={"git": url, "tag": tag}),
+ "commit": Bunch(revision=r1, file=r1_file, args={"git": url, "commit": r1}),
# In this case, the version() args do not include a 'git' key:
# this is the norm for packages, so this tests how the fetching logic
# would most-commonly assemble a Git fetcher
- 'default-no-per-version-git': Bunch(
- revision=default_branch, file=r0_file, args={'branch': default_branch}
- )
+ "default-no-per-version-git": Bunch(
+ revision=default_branch, file=r0_file, args={"branch": default_branch}
+ ),
}
- t = Bunch(checks=checks, url=url, hash=rev_hash,
- path=str(repodir), git_exe=git, unversioned_commit=r2)
+ t = Bunch(
+ checks=checks,
+ url=url,
+ hash=rev_hash,
+ path=str(repodir),
+ git_exe=git,
+ unversioned_commit=r2,
+ )
yield t
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_hg_repository(tmpdir_factory):
"""Creates a very simple hg repository with two commits."""
- hg = spack.util.executable.which('hg', required=True)
+ hg = spack.util.executable.which("hg", required=True)
- tmpdir = tmpdir_factory.mktemp('mock-hg-repo-dir')
+ tmpdir = tmpdir_factory.mktemp("mock-hg-repo-dir")
tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
repodir = tmpdir.join(spack.stage._source_path_subdir)
- get_rev = lambda: hg('id', '-i', output=str).strip()
+ get_rev = lambda: hg("id", "-i", output=str).strip()
# Initialize the repository
with repodir.as_cwd():
- url = 'file://' + str(repodir)
- hg('init')
+ url = "file://" + str(repodir)
+ hg("init")
# Commit file r0
- r0_file = 'r0_file'
+ r0_file = "r0_file"
repodir.ensure(r0_file)
- hg('add', r0_file)
- hg('commit', '-m', 'revision 0', '-u', 'test')
+ hg("add", r0_file)
+ hg("commit", "-m", "revision 0", "-u", "test")
r0 = get_rev()
# Commit file r1
- r1_file = 'r1_file'
+ r1_file = "r1_file"
repodir.ensure(r1_file)
- hg('add', r1_file)
- hg('commit', '-m' 'revision 1', '-u', 'test')
+ hg("add", r1_file)
+ hg("commit", "-m" "revision 1", "-u", "test")
r1 = get_rev()
checks = {
- 'default': Bunch(
- revision=r1, file=r1_file, args={'hg': str(repodir)}
- ),
- 'rev0': Bunch(
- revision=r0, file=r0_file, args={
- 'hg': str(repodir), 'revision': r0
- }
- )
+ "default": Bunch(revision=r1, file=r1_file, args={"hg": str(repodir)}),
+ "rev0": Bunch(revision=r0, file=r0_file, args={"hg": str(repodir), "revision": r0}),
}
t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
yield t
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def mock_svn_repository(tmpdir_factory):
"""Creates a very simple svn repository with two commits."""
- svn = spack.util.executable.which('svn', required=True)
- svnadmin = spack.util.executable.which('svnadmin', required=True)
+ svn = spack.util.executable.which("svn", required=True)
+ svnadmin = spack.util.executable.which("svnadmin", required=True)
- tmpdir = tmpdir_factory.mktemp('mock-svn-stage')
+ tmpdir = tmpdir_factory.mktemp("mock-svn-stage")
tmpdir.ensure(spack.stage._source_path_subdir, dir=True)
repodir = tmpdir.join(spack.stage._source_path_subdir)
- url = 'file://' + str(repodir)
+ url = "file://" + str(repodir)
# Initialize the repository
with repodir.as_cwd():
# NOTE: Adding --pre-1.5-compatible works for NERSC
# Unknown if this is also an issue at other sites.
- svnadmin('create', '--pre-1.5-compatible', str(repodir))
+ svnadmin("create", "--pre-1.5-compatible", str(repodir))
# Import a structure (first commit)
- r0_file = 'r0_file'
- tmpdir.ensure('tmp-path', r0_file)
- tmp_path = tmpdir.join('tmp-path')
- svn('import',
- str(tmp_path),
- url,
- '-m',
- 'Initial import r0')
+ r0_file = "r0_file"
+ tmpdir.ensure("tmp-path", r0_file)
+ tmp_path = tmpdir.join("tmp-path")
+ svn("import", str(tmp_path), url, "-m", "Initial import r0")
tmp_path.remove()
# Second commit
- r1_file = 'r1_file'
- svn('checkout', url, str(tmp_path))
- tmpdir.ensure('tmp-path', r1_file)
+ r1_file = "r1_file"
+ svn("checkout", url, str(tmp_path))
+ tmpdir.ensure("tmp-path", r1_file)
with tmp_path.as_cwd():
- svn('add', str(tmpdir.ensure('tmp-path', r1_file)))
- svn('ci', '-m', 'second revision r1')
+ svn("add", str(tmpdir.ensure("tmp-path", r1_file)))
+ svn("ci", "-m", "second revision r1")
tmp_path.remove()
- r0 = '1'
- r1 = '2'
+ r0 = "1"
+ r1 = "2"
checks = {
- 'default': Bunch(
- revision=r1, file=r1_file, args={'svn': url}),
- 'rev0': Bunch(
- revision=r0, file=r0_file, args={
- 'svn': url, 'revision': r0})
+ "default": Bunch(revision=r1, file=r1_file, args={"svn": url}),
+ "rev0": Bunch(revision=r0, file=r0_file, args={"svn": url, "revision": r0}),
}
def get_rev():
- output = svn('info', '--xml', output=str)
+ output = svn("info", "--xml", output=str)
info = xml.etree.ElementTree.fromstring(output)
- return info.find('entry/commit').get('revision')
+ return info.find("entry/commit").get("revision")
t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
yield t
@@ -1508,7 +1478,7 @@ def mock_svn_repository(tmpdir_factory):
def mutable_mock_env_path(tmpdir_factory):
"""Fixture for mocking the internal spack environments directory."""
saved_path = ev.environment.env_path
- mock_path = tmpdir_factory.mktemp('mock-env-path')
+ mock_path = tmpdir_factory.mktemp("mock-env-path")
ev.environment.env_path = str(mock_path)
yield mock_path
ev.environment.env_path = saved_path
@@ -1520,7 +1490,7 @@ def installation_dir_with_headers(tmpdir_factory):
subdirectories. Shouldn't be modified by tests as it is session
scoped.
"""
- root = tmpdir_factory.mktemp('prefix')
+ root = tmpdir_factory.mktemp("prefix")
# Create a few header files:
#
@@ -1535,10 +1505,10 @@ def installation_dir_with_headers(tmpdir_factory):
# |-- subdir
# |-- ex2.h
#
- root.ensure('include', 'boost', 'ex3.h')
- root.ensure('include', 'ex3.h')
- root.ensure('path', 'to', 'ex1.h')
- root.ensure('path', 'to', 'subdir', 'ex2.h')
+ root.ensure("include", "boost", "ex3.h")
+ root.ensure("include", "ex3.h")
+ root.ensure("path", "to", "ex1.h")
+ root.ensure("path", "to", "subdir", "ex2.h")
return root
@@ -1548,12 +1518,7 @@ def installation_dir_with_headers(tmpdir_factory):
##########
-@pytest.fixture(
- params=[
- 'conflict%clang+foo',
- 'conflict-parent@0.9^conflict~foo'
- ]
-)
+@pytest.fixture(params=["conflict%clang+foo", "conflict-parent@0.9^conflict~foo"])
def conflict_spec(request):
"""Specs which violate constraints specified with the "conflicts"
directive in the "conflict" package.
@@ -1561,28 +1526,25 @@ def conflict_spec(request):
return request.param
-@pytest.fixture(
- params=[
- 'conflict%~'
- ]
-)
+@pytest.fixture(params=["conflict%~"])
def invalid_spec(request):
- """Specs that do not parse cleanly due to invalid formatting.
- """
+ """Specs that do not parse cleanly due to invalid formatting."""
return request.param
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def mock_test_repo(tmpdir_factory):
"""Create an empty repository."""
- repo_namespace = 'mock_test_repo'
+ repo_namespace = "mock_test_repo"
repodir = tmpdir_factory.mktemp(repo_namespace)
repodir.ensure(spack.repo.packages_dir_name, dir=True)
- yaml = repodir.join('repo.yaml')
- yaml.write("""
+ yaml = repodir.join("repo.yaml")
+ yaml.write(
+ """
repo:
namespace: mock_test_repo
-""")
+"""
+ )
with spack.repo.use_repositories(str(repodir)) as repo:
yield repo, repodir
@@ -1598,9 +1560,10 @@ repo:
# At this point only version and patch directive handling has been addressed.
##########
+
class MockBundle(object):
has_code = False
- name = 'mock-bundle'
+ name = "mock-bundle"
versions = {} # type: Dict
@@ -1627,13 +1590,14 @@ def mock_executable(tmpdir):
output a custom string when run.
"""
import jinja2
- shebang = '#!/bin/bash\n' if not is_windows else '@ECHO OFF'
- def _factory(name, output, subdir=('bin',)):
+ shebang = "#!/bin/bash\n" if not is_windows else "@ECHO OFF"
+
+ def _factory(name, output, subdir=("bin",)):
f = tmpdir.ensure(*subdir, dir=True).join(name)
if is_windows:
- f += '.bat'
- t = jinja2.Template('{{ shebang }}{{ output }}\n')
+ f += ".bat"
+ t = jinja2.Template("{{ shebang }}{{ output }}\n")
f.write(t.render(shebang=shebang, output=output))
f.chmod(0o755)
return str(f)
@@ -1646,8 +1610,8 @@ def mock_test_stage(mutable_config, tmpdir):
# NOTE: This fixture MUST be applied after any fixture that uses
# the config fixture under the hood
# No need to unset because we use mutable_config
- tmp_stage = str(tmpdir.join('test_stage'))
- mutable_config.set('config:test_stage', tmp_stage)
+ tmp_stage = str(tmpdir.join("test_stage"))
+ mutable_config.set("config:test_stage", tmp_stage)
yield tmp_stage
@@ -1656,18 +1620,17 @@ def mock_test_stage(mutable_config, tmpdir):
def brand_new_binary_cache():
yield
spack.binary_distribution.binary_index = llnl.util.lang.Singleton(
- spack.binary_distribution._binary_index)
+ spack.binary_distribution._binary_index
+ )
@pytest.fixture
def directory_with_manifest(tmpdir):
- """Create a manifest file in a directory. Used by 'spack external'.
- """
+ """Create a manifest file in a directory. Used by 'spack external'."""
with tmpdir.as_cwd():
- test_db_fname = 'external-db.json'
- with open(test_db_fname, 'w') as db_file:
- json.dump(spack.test.cray_manifest.create_manifest_content(),
- db_file)
+ test_db_fname = "external-db.json"
+ with open(test_db_fname, "w") as db_file:
+ json.dump(spack.test.cray_manifest.create_manifest_content(), db_file)
yield str(tmpdir)
@@ -1690,20 +1653,20 @@ def noncyclical_dir_structure(tmpdir):
| `-- file_2
`-- file_3
"""
- d, j = tmpdir.mkdir('nontrivial-dir'), os.path.join
+ d, j = tmpdir.mkdir("nontrivial-dir"), os.path.join
with d.as_cwd():
- os.mkdir(j('a'))
- os.mkdir(j('a', 'd'))
- with open(j('a', 'file_1'), 'wb'):
+ os.mkdir(j("a"))
+ os.mkdir(j("a", "d"))
+ with open(j("a", "file_1"), "wb"):
pass
- os.symlink(j('file_1'), j('a', 'to_file_1'))
- os.symlink(j('..', 'c'), j('a', 'to_c'))
- os.symlink(j('a'), j('b'))
- os.mkdir(j('c'))
- os.symlink(j('nowhere'), j('c', 'dangling_link'))
- with open(j('c', 'file_2'), 'wb'):
+ os.symlink(j("file_1"), j("a", "to_file_1"))
+ os.symlink(j("..", "c"), j("a", "to_c"))
+ os.symlink(j("a"), j("b"))
+ os.mkdir(j("c"))
+ os.symlink(j("nowhere"), j("c", "dangling_link"))
+ with open(j("c", "file_2"), "wb"):
pass
- with open(j('file_3'), 'wb'):
+ with open(j("file_3"), "wb"):
pass
yield d
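The conftest.py hunks above are mechanical style normalization: single-quoted strings become double-quoted, long calls are re-wrapped, and trailing commas are added, without changing fixture behavior. As a minimal sketch of the target style (not part of this patch; the fixture name and environment variable are made up for illustration):

import pytest


@pytest.fixture(scope="function", autouse=True)
def example_isolated_home(monkeypatch, tmp_path):
    """Hypothetical fixture, present only to illustrate the target style."""
    # Redirect HOME to a per-test directory so tests cannot touch user files.
    monkeypatch.setenv("HOME", str(tmp_path))
    yield str(tmp_path)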
diff --git a/lib/spack/spack/test/container/cli.py b/lib/spack/spack/test/container/cli.py
index 4199aa51b7..793af9fbbf 100644
--- a/lib/spack/spack/test/container/cli.py
+++ b/lib/spack/spack/test/container/cli.py
@@ -10,33 +10,30 @@ import spack.container.images
import spack.main
import spack.platforms
-containerize = spack.main.SpackCommand('containerize')
+containerize = spack.main.SpackCommand("containerize")
def test_command(default_config, container_config_dir, capsys):
with capsys.disabled():
with fs.working_dir(container_config_dir):
output = containerize()
- assert 'FROM spack/ubuntu-bionic' in output
+ assert "FROM spack/ubuntu-bionic" in output
def test_listing_possible_os():
- output = containerize('--list-os')
+ output = containerize("--list-os")
for expected_os in spack.container.images.all_bootstrap_os():
assert expected_os in output
-@pytest.mark.skipif(str(spack.platforms.host()) == "windows",
- reason="test unsupported on Windows")
+@pytest.mark.skipif(str(spack.platforms.host()) == "windows", reason="test unsupported on Windows")
@pytest.mark.maybeslow
-@pytest.mark.requires_executables('git')
+@pytest.mark.requires_executables("git")
def test_bootstrap_phase(minimal_configuration, config_dumper, capsys):
- minimal_configuration['spack']['container']['images'] = {
- 'os': 'amazonlinux:2',
- 'spack': {
- 'resolve_sha': True
- }
+ minimal_configuration["spack"]["container"]["images"] = {
+ "os": "amazonlinux:2",
+ "spack": {"resolve_sha": True},
}
spack_yaml_dir = config_dumper(minimal_configuration)
@@ -45,4 +42,4 @@ def test_bootstrap_phase(minimal_configuration, config_dumper, capsys):
output = containerize()
# Check for the presence of the clone command
- assert 'git clone' in output
+ assert "git clone" in output
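The cli.py tests above drive the containerize command through the spack.main.SpackCommand wrapper created at the top of the file. A hedged usage sketch, reusing only the calls visible in this diff (the test name and the asserted substring are illustrative):

import spack.main

containerize = spack.main.SpackCommand("containerize")


def test_list_os_mentions_ubuntu():
    # Illustrative only: checks a single OS name rather than the full list.
    output = containerize("--list-os")
    assert "ubuntu" in output.lower()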
diff --git a/lib/spack/spack/test/container/conftest.py b/lib/spack/spack/test/container/conftest.py
index 52b67782b0..d968e4db72 100644
--- a/lib/spack/spack/test/container/conftest.py
+++ b/lib/spack/spack/test/container/conftest.py
@@ -10,19 +10,12 @@ import spack.util.spack_yaml as syaml
@pytest.fixture()
def minimal_configuration():
return {
- 'spack': {
- 'specs': [
- 'gromacs',
- 'mpich',
- 'fftw precision=float'
- ],
- 'container': {
- 'format': 'docker',
- 'images': {
- 'os': 'ubuntu:18.04',
- 'spack': 'develop'
- }
- }
+ "spack": {
+ "specs": ["gromacs", "mpich", "fftw precision=float"],
+ "container": {
+ "format": "docker",
+ "images": {"os": "ubuntu:18.04", "spack": "develop"},
+ },
}
}
@@ -30,11 +23,13 @@ def minimal_configuration():
@pytest.fixture()
def config_dumper(tmpdir):
"""Function that dumps an environment config in a temporary folder."""
+
def dumper(configuration):
content = syaml.dump(configuration, default_flow_style=False)
- config_file = tmpdir / 'spack.yaml'
+ config_file = tmpdir / "spack.yaml"
config_file.write(content)
return str(tmpdir)
+
return dumper
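config_dumper above is a factory fixture: instead of yielding a value, it returns a function that the test calls with a configuration dict. A minimal sketch of the same pattern with generic names (json_dumper and its helper are made up, not Spack APIs):

import json

import pytest


@pytest.fixture()
def json_dumper(tmp_path):
    """Illustrative factory fixture that writes a dict to a JSON file."""

    def dump(data, filename="data.json"):
        path = tmp_path / filename
        path.write_text(json.dumps(data))
        return str(path)

    return dump


def test_json_dumper(json_dumper):
    path = json_dumper({"format": "docker"})
    assert path.endswith("data.json")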
diff --git a/lib/spack/spack/test/container/docker.py b/lib/spack/spack/test/container/docker.py
index 4dd483aa16..f191cc4671 100644
--- a/lib/spack/spack/test/container/docker.py
+++ b/lib/spack/spack/test/container/docker.py
@@ -10,8 +10,8 @@ import spack.container.writers as writers
def test_manifest(minimal_configuration):
writer = writers.create(minimal_configuration)
manifest_str = writer.manifest
- for line in manifest_str.split('\n'):
- assert 'echo' in line
+ for line in manifest_str.split("\n"):
+ assert "echo" in line
def test_build_and_run_images(minimal_configuration):
@@ -19,11 +19,11 @@ def test_build_and_run_images(minimal_configuration):
# Test the output of run property
run = writer.run
- assert run.image == 'ubuntu:18.04'
+ assert run.image == "ubuntu:18.04"
# Test the output of the build property
build = writer.build
- assert build.image == 'spack/ubuntu-bionic:latest'
+ assert build.image == "spack/ubuntu-bionic:latest"
def test_packages(minimal_configuration):
@@ -33,10 +33,8 @@ def test_packages(minimal_configuration):
assert writer.os_packages_final is None
# If we add them a list should be returned
- pkgs = ['libgomp1']
- minimal_configuration['spack']['container']['os_packages'] = {
- 'final': pkgs
- }
+ pkgs = ["libgomp1"]
+ minimal_configuration["spack"]["container"]["os_packages"] = {"final": pkgs}
writer = writers.create(minimal_configuration)
p = writer.os_packages_final
assert p.update
@@ -55,7 +53,7 @@ def test_strip_is_set_from_config(minimal_configuration):
writer = writers.create(minimal_configuration)
assert writer.strip is True
- minimal_configuration['spack']['container']['strip'] = False
+ minimal_configuration["spack"]["container"]["strip"] = False
writer = writers.create(minimal_configuration)
assert writer.strip is False
@@ -64,46 +62,50 @@ def test_extra_instructions_is_set_from_config(minimal_configuration):
writer = writers.create(minimal_configuration)
assert writer.extra_instructions == (None, None)
- test_line = 'RUN echo Hello world!'
- e = minimal_configuration['spack']['container']
- e['extra_instructions'] = {}
- e['extra_instructions']['build'] = test_line
+ test_line = "RUN echo Hello world!"
+ e = minimal_configuration["spack"]["container"]
+ e["extra_instructions"] = {}
+ e["extra_instructions"]["build"] = test_line
writer = writers.create(minimal_configuration)
assert writer.extra_instructions == (test_line, None)
- e['extra_instructions']['final'] = test_line
- del e['extra_instructions']['build']
+ e["extra_instructions"]["final"] = test_line
+ del e["extra_instructions"]["build"]
writer = writers.create(minimal_configuration)
assert writer.extra_instructions == (None, test_line)
def test_custom_base_images(minimal_configuration):
"""Test setting custom base images from configuration file"""
- minimal_configuration['spack']['container']['images'] = {
- 'build': 'custom-build:latest',
- 'final': 'custom-final:latest'
+ minimal_configuration["spack"]["container"]["images"] = {
+ "build": "custom-build:latest",
+ "final": "custom-final:latest",
}
writer = writers.create(minimal_configuration)
assert writer.bootstrap.image is None
- assert writer.build.image == 'custom-build:latest'
- assert writer.run.image == 'custom-final:latest'
-
-
-@pytest.mark.parametrize('images_cfg,expected', [
- ({'os': 'amazonlinux:2', 'spack': 'develop'}, {
- 'bootstrap_image': 'amazonlinux:2',
- 'build_image': 'bootstrap',
- 'final_image': 'amazonlinux:2'
- })
-])
-def test_base_images_with_bootstrap(
- minimal_configuration, images_cfg, expected
-):
+ assert writer.build.image == "custom-build:latest"
+ assert writer.run.image == "custom-final:latest"
+
+
+@pytest.mark.parametrize(
+ "images_cfg,expected",
+ [
+ (
+ {"os": "amazonlinux:2", "spack": "develop"},
+ {
+ "bootstrap_image": "amazonlinux:2",
+ "build_image": "bootstrap",
+ "final_image": "amazonlinux:2",
+ },
+ )
+ ],
+)
+def test_base_images_with_bootstrap(minimal_configuration, images_cfg, expected):
"""Check that base images are computed correctly when a
bootstrap phase is present
"""
- minimal_configuration['spack']['container']['images'] = images_cfg
+ minimal_configuration["spack"]["container"]["images"] = images_cfg
writer = writers.create(minimal_configuration)
for property_name, value in expected.items():
@@ -111,6 +113,6 @@ def test_base_images_with_bootstrap(
def test_error_message_invalid_os(minimal_configuration):
- minimal_configuration['spack']['container']['images']['os'] = 'invalid:1'
- with pytest.raises(ValueError, match='invalid operating system'):
+ minimal_configuration["spack"]["container"]["images"]["os"] = "invalid:1"
+ with pytest.raises(ValueError, match="invalid operating system"):
writers.create(minimal_configuration)
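The parametrize decorators in docker.py are rewritten into black's multi-line form: the argument-name string on its own line, one parameter tuple per line, and trailing commas throughout. A standalone sketch of that decorator shape (the image/repository pairs are illustrative, not taken from Spack's data):

import pytest


@pytest.mark.parametrize(
    "image,expected_repo",
    [
        ("ubuntu:18.04", "spack/ubuntu-bionic"),
        ("amazonlinux:2", "spack/amazon-linux"),
    ],
)
def test_expected_repo_shape(image, expected_repo):
    # Illustrative assertion; it only checks the naming-convention shape.
    assert expected_repo.startswith("spack/")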
diff --git a/lib/spack/spack/test/container/images.py b/lib/spack/spack/test/container/images.py
index 0c993921e9..3a07beb464 100644
--- a/lib/spack/spack/test/container/images.py
+++ b/lib/spack/spack/test/container/images.py
@@ -9,18 +9,19 @@ import pytest
import spack.container
-@pytest.mark.parametrize('image,spack_version,expected', [
- ('ubuntu:18.04', 'develop', ('spack/ubuntu-bionic', 'latest')),
- ('ubuntu:18.04', '0.14.0', ('spack/ubuntu-bionic', '0.14.0')),
-])
+@pytest.mark.parametrize(
+ "image,spack_version,expected",
+ [
+ ("ubuntu:18.04", "develop", ("spack/ubuntu-bionic", "latest")),
+ ("ubuntu:18.04", "0.14.0", ("spack/ubuntu-bionic", "0.14.0")),
+ ],
+)
def test_build_info(image, spack_version, expected):
output = spack.container.images.build_info(image, spack_version)
assert output == expected
-@pytest.mark.parametrize('image', [
- 'ubuntu:18.04'
-])
+@pytest.mark.parametrize("image", ["ubuntu:18.04"])
def test_package_info(image):
pkg_manager = spack.container.images.os_package_manager_for(image)
update, install, clean = spack.container.images.commands_for(pkg_manager)
@@ -29,19 +30,22 @@ def test_package_info(image):
assert clean
-@pytest.mark.parametrize('extra_config,expected_msg', [
- ({'modules': {'enable': ['tcl']}}, 'the subsection "modules" in'),
- ({'concretizer': {'unify': False}}, '"concretizer:unify" is not set to "true"'),
- ({'config': {'install_tree': '/some/dir'}},
- 'the "config:install_tree" attribute has been set'),
- ({'view': '/some/dir'}, 'the "view" attribute has been set')
-])
-def test_validate(
- extra_config, expected_msg, minimal_configuration, config_dumper
-):
- minimal_configuration['spack'].update(extra_config)
+@pytest.mark.parametrize(
+ "extra_config,expected_msg",
+ [
+ ({"modules": {"enable": ["tcl"]}}, 'the subsection "modules" in'),
+ ({"concretizer": {"unify": False}}, '"concretizer:unify" is not set to "true"'),
+ (
+ {"config": {"install_tree": "/some/dir"}},
+ 'the "config:install_tree" attribute has been set',
+ ),
+ ({"view": "/some/dir"}, 'the "view" attribute has been set'),
+ ],
+)
+def test_validate(extra_config, expected_msg, minimal_configuration, config_dumper):
+ minimal_configuration["spack"].update(extra_config)
spack_yaml_dir = config_dumper(minimal_configuration)
- spack_yaml = os.path.join(spack_yaml_dir, 'spack.yaml')
+ spack_yaml = os.path.join(spack_yaml_dir, "spack.yaml")
with pytest.warns(UserWarning) as w:
spack.container.validate(spack_yaml)
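Editor's note: test_validate above collects expected warnings with pytest.warns while the configuration is validated. A minimal sketch of that pattern with a plain warnings.warn call (validate() here is a hypothetical stand-in, not spack.container.validate):

import warnings

import pytest


def validate(path):
    # stand-in that always warns, so the test below has something to match
    warnings.warn('the "view" attribute has been set', UserWarning)


def test_validate_warns(tmp_path):
    with pytest.warns(UserWarning, match="has been set"):
        validate(tmp_path / "spack.yaml")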
diff --git a/lib/spack/spack/test/container/singularity.py b/lib/spack/spack/test/container/singularity.py
index 87a0dfea3a..18172d4bd2 100644
--- a/lib/spack/spack/test/container/singularity.py
+++ b/lib/spack/spack/test/container/singularity.py
@@ -9,32 +9,32 @@ import spack.container.writers as writers
@pytest.fixture
def singularity_configuration(minimal_configuration):
- minimal_configuration['spack']['container']['format'] = 'singularity'
+ minimal_configuration["spack"]["container"]["format"] = "singularity"
return minimal_configuration
def test_ensure_render_works(default_config, singularity_configuration):
- container_config = singularity_configuration['spack']['container']
- assert container_config['format'] == 'singularity'
+ container_config = singularity_configuration["spack"]["container"]
+ assert container_config["format"] == "singularity"
# Here we just want to ensure that nothing is raised
writer = writers.create(singularity_configuration)
writer()
-@pytest.mark.parametrize('properties,expected', [
- ({'runscript': '/opt/view/bin/h5ls'},
- {'runscript': '/opt/view/bin/h5ls',
- 'startscript': '',
- 'test': '',
- 'help': ''})
-])
-def test_singularity_specific_properties(
- properties, expected, singularity_configuration
-):
+@pytest.mark.parametrize(
+ "properties,expected",
+ [
+ (
+ {"runscript": "/opt/view/bin/h5ls"},
+ {"runscript": "/opt/view/bin/h5ls", "startscript": "", "test": "", "help": ""},
+ )
+ ],
+)
+def test_singularity_specific_properties(properties, expected, singularity_configuration):
# Set the property in the configuration
- container_config = singularity_configuration['spack']['container']
+ container_config = singularity_configuration["spack"]["container"]
for name, value in properties.items():
- container_config.setdefault('singularity', {})[name] = value
+ container_config.setdefault("singularity", {})[name] = value
# Assert the properties return the expected values
writer = writers.create(singularity_configuration)
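Editor's note: the singularity test above relies on dict.setdefault to create the nested "singularity" section on demand before assigning properties into it. The same idiom with a plain dictionary, as a minimal sketch:

config = {"container": {"format": "singularity"}}
for name, value in {"runscript": "/opt/view/bin/h5ls"}.items():
    # setdefault returns the existing sub-dict, or inserts {} first and returns it
    config["container"].setdefault("singularity", {})[name] = value

assert config["container"]["singularity"]["runscript"] == "/opt/view/bin/h5ls"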
diff --git a/lib/spack/spack/test/cray_manifest.py b/lib/spack/spack/test/cray_manifest.py
index f8e86607f9..addf4e5287 100644
--- a/lib/spack/spack/test/cray_manifest.py
+++ b/lib/spack/spack/test/cray_manifest.py
@@ -67,8 +67,7 @@ example_compiler_entry = """\
class JsonSpecEntry(object):
- def __init__(self, name, hash, prefix, version, arch, compiler,
- dependencies, parameters):
+ def __init__(self, name, hash, prefix, version, arch, compiler, dependencies, parameters):
self.name = name
self.hash = hash
self.prefix = prefix
@@ -80,20 +79,18 @@ class JsonSpecEntry(object):
def to_dict(self):
return {
- 'name': self.name,
- 'hash': self.hash,
- 'prefix': self.prefix,
- 'version': self.version,
- 'arch': self.arch,
- 'compiler': self.compiler,
- 'dependencies': self.dependencies,
- 'parameters': self.parameters
+ "name": self.name,
+ "hash": self.hash,
+ "prefix": self.prefix,
+ "version": self.version,
+ "arch": self.arch,
+ "compiler": self.compiler,
+ "dependencies": self.dependencies,
+ "parameters": self.parameters,
}
def as_dependency(self, deptypes):
- return (self.name,
- {'hash': self.hash,
- 'type': list(deptypes)})
+ return (self.name, {"hash": self.hash, "type": list(deptypes)})
class JsonArchEntry(object):
@@ -103,13 +100,7 @@ class JsonArchEntry(object):
self.target = target
def to_dict(self):
- return {
- 'platform': self.platform,
- 'platform_os': self.os,
- 'target': {
- 'name': self.target
- }
- }
+ return {"platform": self.platform, "platform_os": self.os, "target": {"name": self.target}}
class JsonCompilerEntry(object):
@@ -117,85 +108,75 @@ class JsonCompilerEntry(object):
self.name = name
self.version = version
if not arch:
- arch = {
- "os": "centos8",
- "target": "x86_64"
- }
+ arch = {"os": "centos8", "target": "x86_64"}
if not executables:
executables = {
"cc": "/path/to/compiler/cc",
"cxx": "/path/to/compiler/cxx",
- "fc": "/path/to/compiler/fc"
+ "fc": "/path/to/compiler/fc",
}
self.arch = arch
self.executables = executables
def compiler_json(self):
return {
- 'name': self.name,
- 'version': self.version,
- 'arch': self.arch,
- 'executables': self.executables,
+ "name": self.name,
+ "version": self.version,
+ "arch": self.arch,
+ "executables": self.executables,
}
def spec_json(self):
"""The compiler spec only lists the name/version, not
- arch/executables.
+ arch/executables.
"""
return {
- 'name': self.name,
- 'version': self.version,
+ "name": self.name,
+ "version": self.version,
}
-_common_arch = JsonArchEntry(
- platform='linux',
- os='centos8',
- target='haswell'
-).to_dict()
+_common_arch = JsonArchEntry(platform="linux", os="centos8", target="haswell").to_dict()
# Intended to match example_compiler_entry above
_common_compiler = JsonCompilerEntry(
- name='gcc',
- version='10.2.0.cray',
- arch={
- "os": "centos8",
- "target": "x86_64"
- },
+ name="gcc",
+ version="10.2.0.cray",
+ arch={"os": "centos8", "target": "x86_64"},
executables={
"cc": "/path/to/compiler/cc",
"cxx": "/path/to/compiler/cxx",
- "fc": "/path/to/compiler/fc"
- }
+ "fc": "/path/to/compiler/fc",
+ },
)
def test_compatibility():
"""Make sure that JsonSpecEntry outputs the expected JSON structure
- by comparing it with JSON parsed from an example string. This
- ensures that the testing objects like JsonSpecEntry produce the
- same JSON structure as the expected file format.
+ by comparing it with JSON parsed from an example string. This
+ ensures that the testing objects like JsonSpecEntry produce the
+ same JSON structure as the expected file format.
"""
y = JsonSpecEntry(
- name='packagey',
- hash='hash-of-y',
- prefix='/path/to/packagey-install/',
- version='1.0',
+ name="packagey",
+ hash="hash-of-y",
+ prefix="/path/to/packagey-install/",
+ version="1.0",
arch=_common_arch,
compiler=_common_compiler.spec_json(),
dependencies={},
- parameters={}
+ parameters={},
)
x = JsonSpecEntry(
- name='packagex',
- hash='hash-of-x',
- prefix='/path/to/packagex-install/',
- version='1.0',
+ name="packagex",
+ hash="hash-of-x",
+ prefix="/path/to/packagex-install/",
+ version="1.0",
arch=_common_arch,
compiler=_common_compiler.spec_json(),
- dependencies=dict([y.as_dependency(deptypes=['link'])]),
- parameters={'precision': ['double', 'float']}
+ dependencies=dict([y.as_dependency(deptypes=["link"])]),
+ parameters={"precision": ["double", "float"]},
)
x_from_entry = x.to_dict()
@@ -210,37 +191,33 @@ def test_compiler_from_entry():
def generate_openmpi_entries():
"""Generate two example JSON entries that refer to an OpenMPI
- installation and a hwloc dependency.
+ installation and a hwloc dependency.
"""
# The hashes need to be padded with 'a' at the end to align with 8-byte
# boundaries (for base-32 decoding)
hwloc = JsonSpecEntry(
- name='hwloc',
- hash='hwlocfakehashaaa',
- prefix='/path/to/hwloc-install/',
- version='2.0.3',
+ name="hwloc",
+ hash="hwlocfakehashaaa",
+ prefix="/path/to/hwloc-install/",
+ version="2.0.3",
arch=_common_arch,
compiler=_common_compiler.spec_json(),
dependencies={},
- parameters={}
+ parameters={},
)
# This includes a variant which is guaranteed not to appear in the
# OpenMPI package: we need to make sure we can use such package
# descriptions.
openmpi = JsonSpecEntry(
- name='openmpi',
- hash='openmpifakehasha',
- prefix='/path/to/openmpi-install/',
- version='4.1.0',
+ name="openmpi",
+ hash="openmpifakehasha",
+ prefix="/path/to/openmpi-install/",
+ version="4.1.0",
arch=_common_arch,
compiler=_common_compiler.spec_json(),
- dependencies=dict([hwloc.as_dependency(deptypes=['link'])]),
- parameters={
- 'internal-hwloc': False,
- 'fabrics': ['psm'],
- 'missing_variant': True
- }
+ dependencies=dict([hwloc.as_dependency(deptypes=["link"])]),
+ parameters={"internal-hwloc": False, "fabrics": ["psm"], "missing_variant": True},
)
return [openmpi, hwloc]
@@ -248,60 +225,57 @@ def generate_openmpi_entries():
def test_generate_specs_from_manifest():
"""Given JSON entries, check that we can form a set of Specs
- including dependency references.
+ including dependency references.
"""
entries = list(x.to_dict() for x in generate_openmpi_entries())
specs = entries_to_specs(entries)
- openmpi_spec, = list(x for x in specs.values() if x.name == 'openmpi')
- assert openmpi_spec['hwloc']
+ (openmpi_spec,) = list(x for x in specs.values() if x.name == "openmpi")
+ assert openmpi_spec["hwloc"]
def test_translate_compiler_name():
nvidia_compiler = JsonCompilerEntry(
- name='nvidia',
- version='19.1',
+ name="nvidia",
+ version="19.1",
executables={
"cc": "/path/to/compiler/nvc",
"cxx": "/path/to/compiler/nvc++",
- }
+ },
)
compiler = compiler_from_entry(nvidia_compiler.compiler_json())
- assert compiler.name == 'nvhpc'
+ assert compiler.name == "nvhpc"
spec_json = JsonSpecEntry(
- name='hwloc',
- hash='hwlocfakehashaaa',
- prefix='/path/to/hwloc-install/',
- version='2.0.3',
+ name="hwloc",
+ hash="hwlocfakehashaaa",
+ prefix="/path/to/hwloc-install/",
+ version="2.0.3",
arch=_common_arch,
compiler=nvidia_compiler.spec_json(),
dependencies={},
- parameters={}
+ parameters={},
).to_dict()
- spec, = entries_to_specs([spec_json]).values()
- assert spec.compiler.name == 'nvhpc'
+ (spec,) = entries_to_specs([spec_json]).values()
+ assert spec.compiler.name == "nvhpc"
def test_failed_translate_compiler_name():
- unknown_compiler = JsonCompilerEntry(
- name='unknown',
- version='1.0'
- )
+ unknown_compiler = JsonCompilerEntry(name="unknown", version="1.0")
with pytest.raises(spack.compilers.UnknownCompilerError):
compiler_from_entry(unknown_compiler.compiler_json())
spec_json = JsonSpecEntry(
- name='packagey',
- hash='hash-of-y',
- prefix='/path/to/packagey-install/',
- version='1.0',
+ name="packagey",
+ hash="hash-of-y",
+ prefix="/path/to/packagey-install/",
+ version="1.0",
arch=_common_arch,
compiler=unknown_compiler.spec_json(),
dependencies={},
- parameters={}
+ parameters={},
).to_dict()
with pytest.raises(spack.compilers.UnknownCompilerError):
@@ -312,50 +286,52 @@ def create_manifest_content():
return {
# Note: the cray_manifest module doesn't use the _meta section right
# now, but it is anticipated to be useful
- '_meta': {
+ "_meta": {
"file-type": "cray-pe-json",
"system-type": "test",
"schema-version": "1.3",
- "cpe-version": "22.06"
+ "cpe-version": "22.06",
},
- 'specs': list(x.to_dict() for x in generate_openmpi_entries()),
- 'compilers': [_common_compiler.compiler_json()]
+ "specs": list(x.to_dict() for x in generate_openmpi_entries()),
+ "compilers": [_common_compiler.compiler_json()],
}
-def test_read_cray_manifest(
- tmpdir, mutable_config, mock_packages, mutable_database):
+def test_read_cray_manifest(tmpdir, mutable_config, mock_packages, mutable_database):
"""Check that (a) we can read the cray manifest and add it to the Spack
- Database and (b) we can concretize specs based on that.
+ Database and (b) we can concretize specs based on that.
"""
- if spack.config.get('config:concretizer') == 'clingo':
- pytest.skip("The ASP-based concretizer is currently picky about "
- " OS matching and will fail.")
+ if spack.config.get("config:concretizer") == "clingo":
+ pytest.skip(
+ "The ASP-based concretizer is currently picky about " " OS matching and will fail."
+ )
with tmpdir.as_cwd():
- test_db_fname = 'external-db.json'
- with open(test_db_fname, 'w') as db_file:
+ test_db_fname = "external-db.json"
+ with open(test_db_fname, "w") as db_file:
json.dump(create_manifest_content(), db_file)
cray_manifest.read(test_db_fname, True)
- query_specs = spack.store.db.query('openmpi')
- assert any(x.dag_hash() == 'openmpifakehasha' for x in query_specs)
+ query_specs = spack.store.db.query("openmpi")
+ assert any(x.dag_hash() == "openmpifakehasha" for x in query_specs)
concretized_specs = spack.cmd.parse_specs(
- 'depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64'
- ' ^/openmpifakehasha'.split(),
- concretize=True)
- assert concretized_specs[0]['hwloc'].dag_hash() == 'hwlocfakehashaaa'
+ "depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64" " ^/openmpifakehasha".split(),
+ concretize=True,
+ )
+ assert concretized_specs[0]["hwloc"].dag_hash() == "hwlocfakehashaaa"
def test_read_cray_manifest_twice_no_compiler_duplicates(
- tmpdir, mutable_config, mock_packages, mutable_database):
- if spack.config.get('config:concretizer') == 'clingo':
- pytest.skip("The ASP-based concretizer is currently picky about "
- " OS matching and will fail.")
+ tmpdir, mutable_config, mock_packages, mutable_database
+):
+ if spack.config.get("config:concretizer") == "clingo":
+ pytest.skip(
+ "The ASP-based concretizer is currently picky about " " OS matching and will fail."
+ )
with tmpdir.as_cwd():
- test_db_fname = 'external-db.json'
- with open(test_db_fname, 'w') as db_file:
+ test_db_fname = "external-db.json"
+ with open(test_db_fname, "w") as db_file:
json.dump(create_manifest_content(), db_file)
# Read the manifest twice
@@ -363,20 +339,21 @@ def test_read_cray_manifest_twice_no_compiler_duplicates(
cray_manifest.read(test_db_fname, True)
compilers = spack.compilers.all_compilers()
- filtered = list(c for c in compilers if
- c.spec == spack.spec.CompilerSpec('gcc@10.2.0.cray'))
- assert(len(filtered) == 1)
+ filtered = list(
+ c for c in compilers if c.spec == spack.spec.CompilerSpec("gcc@10.2.0.cray")
+ )
+ assert len(filtered) == 1
-def test_read_old_manifest_v1_2(
- tmpdir, mutable_config, mock_packages, mutable_database):
+def test_read_old_manifest_v1_2(tmpdir, mutable_config, mock_packages, mutable_database):
"""Test reading a file using the older format
('version' instead of 'schema-version').
"""
- manifest_dir = str(tmpdir.mkdir('manifest_dir'))
- manifest_file_path = os.path.join(manifest_dir, 'test.json')
- with open(manifest_file_path, 'w') as manifest_file:
- manifest_file.write("""\
+ manifest_dir = str(tmpdir.mkdir("manifest_dir"))
+ manifest_file_path = os.path.join(manifest_dir, "test.json")
+ with open(manifest_file_path, "w") as manifest_file:
+ manifest_file.write(
+ """\
{
"_meta": {
"file-type": "cray-pe-json",
@@ -385,5 +362,6 @@ def test_read_old_manifest_v1_2(
},
"specs": []
}
-""")
+"""
+ )
cray_manifest.read(manifest_file_path, True)
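Editor's note: the cray_manifest test helpers above compose manifest JSON by nesting to_dict() output and (name, {"hash": ..., "type": [...]}) dependency tuples. A trimmed-down sketch of that composition with a simplified stand-in class (not the Spack JsonSpecEntry):

class Entry:
    def __init__(self, name, hash, dependencies):
        self.name, self.hash, self.dependencies = name, hash, dependencies

    def to_dict(self):
        return {"name": self.name, "hash": self.hash, "dependencies": self.dependencies}

    def as_dependency(self, deptypes):
        # same shape the real helper produces: (name, {"hash": ..., "type": [...]})
        return (self.name, {"hash": self.hash, "type": list(deptypes)})


hwloc = Entry("hwloc", "hwlocfakehashaaa", {})
openmpi = Entry("openmpi", "openmpifakehasha", dict([hwloc.as_dependency(["link"])]))
manifest = {"specs": [e.to_dict() for e in (openmpi, hwloc)]}
assert manifest["specs"][0]["dependencies"]["hwloc"]["type"] == ["link"]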
diff --git a/lib/spack/spack/test/cvs_fetch.py b/lib/spack/spack/test/cvs_fetch.py
index ac2f57c341..3c8159efd2 100644
--- a/lib/spack/spack/test/cvs_fetch.py
+++ b/lib/spack/spack/test/cvs_fetch.py
@@ -15,18 +15,11 @@ from spack.stage import Stage
from spack.util.executable import which
from spack.version import ver
-pytestmark = pytest.mark.skipif(
- not which('cvs'),
- reason='requires CVS to be installed')
-
-
-@pytest.mark.parametrize("type_of_test", ['default', 'branch', 'date'])
-def test_fetch(
- type_of_test,
- mock_cvs_repository,
- config,
- mutable_mock_repo
-):
+pytestmark = pytest.mark.skipif(not which("cvs"), reason="requires CVS to be installed")
+
+
+@pytest.mark.parametrize("type_of_test", ["default", "branch", "date"])
+def test_fetch(type_of_test, mock_cvs_repository, config, mutable_mock_repo):
"""Tries to:
1. Fetch the repo using a fetch strategy constructed with
@@ -45,8 +38,8 @@ def test_fetch(
get_date = mock_cvs_repository.get_date
# Construct the package under test
- spec = Spec('cvs-test').concretized()
- spec.package.versions[ver('cvs')] = test.args
+ spec = Spec("cvs-test").concretized()
+ spec.package.versions[ver("cvs")] = test.args
# Enter the stage directory and check some properties
with spec.package.stage:
@@ -68,7 +61,7 @@ def test_fetch(
os.unlink(file_path)
assert not os.path.isfile(file_path)
- untracked_file = 'foobarbaz'
+ untracked_file = "foobarbaz"
touch(untracked_file)
assert os.path.isfile(untracked_file)
spec.package.do_restage()
@@ -82,8 +75,7 @@ def test_cvs_extra_fetch(tmpdir):
"""Ensure a fetch after downloading is effectively a no-op."""
testpath = str(tmpdir)
- fetcher = CvsFetchStrategy(
- cvs=':pserver:not-a-real-cvs-repo%module=not-a-real-module')
+ fetcher = CvsFetchStrategy(cvs=":pserver:not-a-real-cvs-repo%module=not-a-real-module")
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 4c5c0539df..21bc676298 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -18,6 +18,7 @@ import pytest
try:
import uuid
+
_use_uuid = True
except ImportError:
_use_uuid = False
@@ -37,43 +38,44 @@ from spack.schema.database_index import schema
from spack.util.executable import Executable
from spack.util.mock_package import MockPackageMultiRepo
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
pytestmark = pytest.mark.db
@pytest.fixture()
def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
- mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))
+ mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
upstream_write_db = spack.database.Database(mock_db_root)
upstream_db = spack.database.Database(mock_db_root, is_upstream=True)
# Generate initial DB file to avoid reindex
- with open(upstream_write_db._index_path, 'w') as db_file:
+ with open(upstream_write_db._index_path, "w") as db_file:
upstream_write_db._write_to_file(db_file)
- upstream_layout = gen_mock_layout('/a/')
+ upstream_layout = gen_mock_layout("/a/")
- downstream_db_root = str(
- tmpdir_factory.mktemp('mock_downstream_db_root'))
- downstream_db = spack.database.Database(
- downstream_db_root, upstream_dbs=[upstream_db])
- with open(downstream_db._index_path, 'w') as db_file:
+ downstream_db_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
+ downstream_db = spack.database.Database(downstream_db_root, upstream_dbs=[upstream_db])
+ with open(downstream_db._index_path, "w") as db_file:
downstream_db._write_to_file(db_file)
- downstream_layout = gen_mock_layout('/b/')
+ downstream_layout = gen_mock_layout("/b/")
- yield upstream_write_db, upstream_db, upstream_layout,\
- downstream_db, downstream_layout
+ yield upstream_write_db, upstream_db, upstream_layout, downstream_db, downstream_layout
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Upstreams currently unsupported on Windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
def test_spec_installed_upstream(upstream_and_downstream_db, config, monkeypatch):
"""Test whether Spec.installed_upstream() works."""
- upstream_write_db, upstream_db, upstream_layout, \
- downstream_db, downstream_layout = upstream_and_downstream_db
+ (
+ upstream_write_db,
+ upstream_db,
+ upstream_layout,
+ downstream_db,
+ downstream_layout,
+ ) = upstream_and_downstream_db
# a known installed spec should say that it's installed
mock_repo = MockPackageMultiRepo()
- mock_repo.add_package('x', [], [])
+ mock_repo.add_package("x", [], [])
with spack.repo.use_repositories(mock_repo):
spec = spack.spec.Spec("x").concretized()
@@ -94,22 +96,26 @@ def test_spec_installed_upstream(upstream_and_downstream_db, config, monkeypatch
assert not spec.installed_upstream
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Upstreams currently unsupported on Windows")
-@pytest.mark.usefixtures('config')
+@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
+@pytest.mark.usefixtures("config")
def test_installed_upstream(upstream_and_downstream_db):
- upstream_write_db, upstream_db, upstream_layout,\
- downstream_db, downstream_layout = (upstream_and_downstream_db)
-
- default = ('build', 'link')
+ (
+ upstream_write_db,
+ upstream_db,
+ upstream_layout,
+ downstream_db,
+ downstream_layout,
+ ) = upstream_and_downstream_db
+
+ default = ("build", "link")
mock_repo = MockPackageMultiRepo()
- x = mock_repo.add_package('x', [], [])
- z = mock_repo.add_package('z', [], [])
- y = mock_repo.add_package('y', [z], [default])
- mock_repo.add_package('w', [x, y], [default, default])
+ x = mock_repo.add_package("x", [], [])
+ z = mock_repo.add_package("z", [], [])
+ y = mock_repo.add_package("y", [z], [default])
+ mock_repo.add_package("w", [x, y], [default, default])
with spack.repo.use_repositories(mock_repo):
- spec = spack.spec.Spec('w')
+ spec = spack.spec.Spec("w")
spec.concretize()
for dep in spec.traverse(root=False):
@@ -122,16 +128,14 @@ def test_installed_upstream(upstream_and_downstream_db):
with pytest.raises(spack.database.ForbiddenLockError):
record = upstream_db.get_by_hash(dep.dag_hash())
- new_spec = spack.spec.Spec('w')
+ new_spec = spack.spec.Spec("w")
new_spec.concretize()
downstream_db.add(new_spec, downstream_layout)
for dep in new_spec.traverse(root=False):
- upstream, record = downstream_db.query_by_spec_hash(
- dep.dag_hash())
+ upstream, record = downstream_db.query_by_spec_hash(dep.dag_hash())
assert upstream
assert record.path == upstream_layout.path_for_spec(dep)
- upstream, record = downstream_db.query_by_spec_hash(
- new_spec.dag_hash())
+ upstream, record = downstream_db.query_by_spec_hash(new_spec.dag_hash())
assert not upstream
assert record.installed
@@ -139,55 +143,62 @@ def test_installed_upstream(upstream_and_downstream_db):
downstream_db._check_ref_counts()
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Upstreams currently unsupported on Windows")
-@pytest.mark.usefixtures('config')
+@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
+@pytest.mark.usefixtures("config")
def test_removed_upstream_dep(upstream_and_downstream_db):
- upstream_write_db, upstream_db, upstream_layout,\
- downstream_db, downstream_layout = (upstream_and_downstream_db)
-
- default = ('build', 'link')
+ (
+ upstream_write_db,
+ upstream_db,
+ upstream_layout,
+ downstream_db,
+ downstream_layout,
+ ) = upstream_and_downstream_db
+
+ default = ("build", "link")
mock_repo = MockPackageMultiRepo()
- z = mock_repo.add_package('z', [], [])
- mock_repo.add_package('y', [z], [default])
+ z = mock_repo.add_package("z", [], [])
+ mock_repo.add_package("y", [z], [default])
with spack.repo.use_repositories(mock_repo):
- spec = spack.spec.Spec('y')
+ spec = spack.spec.Spec("y")
spec.concretize()
- upstream_write_db.add(spec['z'], upstream_layout)
+ upstream_write_db.add(spec["z"], upstream_layout)
upstream_db._read()
- new_spec = spack.spec.Spec('y')
+ new_spec = spack.spec.Spec("y")
new_spec.concretize()
downstream_db.add(new_spec, downstream_layout)
- upstream_write_db.remove(new_spec['z'])
+ upstream_write_db.remove(new_spec["z"])
upstream_db._read()
- new_downstream = spack.database.Database(
- downstream_db.root, upstream_dbs=[upstream_db])
+ new_downstream = spack.database.Database(downstream_db.root, upstream_dbs=[upstream_db])
new_downstream._fail_when_missing_deps = True
with pytest.raises(spack.database.MissingDependenciesError):
new_downstream._read()
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Upstreams currently unsupported on Windows")
-@pytest.mark.usefixtures('config')
+@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
+@pytest.mark.usefixtures("config")
def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
"""An upstream DB can add a package after it is installed in the downstream
DB. When a package is recorded as installed in both, the results should
refer to the downstream DB.
"""
- upstream_write_db, upstream_db, upstream_layout,\
- downstream_db, downstream_layout = (upstream_and_downstream_db)
+ (
+ upstream_write_db,
+ upstream_db,
+ upstream_layout,
+ downstream_db,
+ downstream_layout,
+ ) = upstream_and_downstream_db
mock_repo = MockPackageMultiRepo()
- mock_repo.add_package('x', [], [])
+ mock_repo.add_package("x", [], [])
with spack.repo.use_repositories(mock_repo):
- spec = spack.spec.Spec('x')
+ spec = spack.spec.Spec("x")
spec.concretize()
downstream_db.add(spec, downstream_layout)
@@ -200,9 +211,9 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
# we prefer the locally-installed instance
assert not upstream
- qresults = downstream_db.query('x')
+ qresults = downstream_db.query("x")
assert len(qresults) == 1
- queried_spec, = qresults
+ (queried_spec,) = qresults
try:
orig_db = spack.store.db
spack.store.db = downstream_db
@@ -211,15 +222,14 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
spack.store.db = orig_db
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Upstreams currently unsupported on Windows")
-@pytest.mark.usefixtures('config', 'temporary_store')
+@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
+@pytest.mark.usefixtures("config", "temporary_store")
def test_cannot_write_upstream(tmpdir_factory, gen_mock_layout):
- roots = [str(tmpdir_factory.mktemp(x)) for x in ['a', 'b']]
- layouts = [gen_mock_layout(x) for x in ['/ra/', '/rb/']]
+ roots = [str(tmpdir_factory.mktemp(x)) for x in ["a", "b"]]
+ layouts = [gen_mock_layout(x) for x in ["/ra/", "/rb/"]]
mock_repo = MockPackageMultiRepo()
- mock_repo.add_package('x', [], [])
+ mock_repo.add_package("x", [], [])
# Instantiate the database that will be used as the upstream DB and make
# sure it has an index file
@@ -227,63 +237,63 @@ def test_cannot_write_upstream(tmpdir_factory, gen_mock_layout):
with upstream_db_independent.write_transaction():
pass
- upstream_dbs = spack.store._construct_upstream_dbs_from_install_roots(
- [roots[1]], _test=True)
+ upstream_dbs = spack.store._construct_upstream_dbs_from_install_roots([roots[1]], _test=True)
with spack.repo.use_repositories(mock_repo):
- spec = spack.spec.Spec('x')
+ spec = spack.spec.Spec("x")
spec.concretize()
with pytest.raises(spack.database.ForbiddenLockError):
upstream_dbs[0].add(spec, layouts[1])
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Upstreams currently unsupported on Windows")
-@pytest.mark.usefixtures('config', 'temporary_store')
+@pytest.mark.skipif(sys.platform == "win32", reason="Upstreams currently unsupported on Windows")
+@pytest.mark.usefixtures("config", "temporary_store")
def test_recursive_upstream_dbs(tmpdir_factory, gen_mock_layout):
- roots = [str(tmpdir_factory.mktemp(x)) for x in ['a', 'b', 'c']]
- layouts = [gen_mock_layout(x) for x in ['/ra/', '/rb/', '/rc/']]
+ roots = [str(tmpdir_factory.mktemp(x)) for x in ["a", "b", "c"]]
+ layouts = [gen_mock_layout(x) for x in ["/ra/", "/rb/", "/rc/"]]
- default = ('build', 'link')
+ default = ("build", "link")
mock_repo = MockPackageMultiRepo()
- z = mock_repo.add_package('z', [], [])
- y = mock_repo.add_package('y', [z], [default])
- mock_repo.add_package('x', [y], [default])
+ z = mock_repo.add_package("z", [], [])
+ y = mock_repo.add_package("y", [z], [default])
+ mock_repo.add_package("x", [y], [default])
with spack.repo.use_repositories(mock_repo):
- spec = spack.spec.Spec('x')
+ spec = spack.spec.Spec("x")
spec.concretize()
db_c = spack.database.Database(roots[2])
- db_c.add(spec['z'], layouts[2])
+ db_c.add(spec["z"], layouts[2])
db_b = spack.database.Database(roots[1], upstream_dbs=[db_c])
- db_b.add(spec['y'], layouts[1])
+ db_b.add(spec["y"], layouts[1])
db_a = spack.database.Database(roots[0], upstream_dbs=[db_b, db_c])
- db_a.add(spec['x'], layouts[0])
+ db_a.add(spec["x"], layouts[0])
- upstream_dbs_from_scratch = (
- spack.store._construct_upstream_dbs_from_install_roots(
- [roots[1], roots[2]], _test=True))
+ upstream_dbs_from_scratch = spack.store._construct_upstream_dbs_from_install_roots(
+ [roots[1], roots[2]], _test=True
+ )
db_a_from_scratch = spack.database.Database(
- roots[0], upstream_dbs=upstream_dbs_from_scratch)
+ roots[0], upstream_dbs=upstream_dbs_from_scratch
+ )
- assert db_a_from_scratch.db_for_spec_hash(spec.dag_hash()) == (
- db_a_from_scratch)
- assert db_a_from_scratch.db_for_spec_hash(spec['y'].dag_hash()) == (
- upstream_dbs_from_scratch[0])
- assert db_a_from_scratch.db_for_spec_hash(spec['z'].dag_hash()) == (
- upstream_dbs_from_scratch[1])
+ assert db_a_from_scratch.db_for_spec_hash(spec.dag_hash()) == (db_a_from_scratch)
+ assert db_a_from_scratch.db_for_spec_hash(spec["y"].dag_hash()) == (
+ upstream_dbs_from_scratch[0]
+ )
+ assert db_a_from_scratch.db_for_spec_hash(spec["z"].dag_hash()) == (
+ upstream_dbs_from_scratch[1]
+ )
db_a_from_scratch._check_ref_counts()
upstream_dbs_from_scratch[0]._check_ref_counts()
upstream_dbs_from_scratch[1]._check_ref_counts()
- assert (db_a_from_scratch.installed_relatives(spec) ==
- set(spec.traverse(root=False)))
- assert (db_a_from_scratch.installed_relatives(
- spec['z'], direction='parents') == set([spec, spec['y']]))
+ assert db_a_from_scratch.installed_relatives(spec) == set(spec.traverse(root=False))
+ assert db_a_from_scratch.installed_relatives(spec["z"], direction="parents") == set(
+ [spec, spec["y"]]
+ )
@pytest.fixture()
@@ -296,11 +306,11 @@ def usr_folder_exists(monkeypatch):
@functools.wraps(os.path.isdir)
def mock_isdir(path):
- if path == '/usr':
+ if path == "/usr":
return True
return isdir(path)
- monkeypatch.setattr(os.path, 'isdir', mock_isdir)
+ monkeypatch.setattr(os.path, "isdir", mock_isdir)
def _print_ref_counts():
@@ -311,29 +321,29 @@ def _print_ref_counts():
cspecs = spack.store.db.query(spec, installed=any)
if not cspecs:
- recs.append("[ %-7s ] %-20s-" % ('', spec))
+ recs.append("[ %-7s ] %-20s-" % ("", spec))
else:
key = cspecs[0].dag_hash()
rec = spack.store.db.get_record(cspecs[0])
recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count))
with spack.store.db.read_transaction():
- add_rec('mpileaks ^mpich')
- add_rec('callpath ^mpich')
- add_rec('mpich')
+ add_rec("mpileaks ^mpich")
+ add_rec("callpath ^mpich")
+ add_rec("mpich")
- add_rec('mpileaks ^mpich2')
- add_rec('callpath ^mpich2')
- add_rec('mpich2')
+ add_rec("mpileaks ^mpich2")
+ add_rec("callpath ^mpich2")
+ add_rec("mpich2")
- add_rec('mpileaks ^zmpi')
- add_rec('callpath ^zmpi')
- add_rec('zmpi')
- add_rec('fake')
+ add_rec("mpileaks ^zmpi")
+ add_rec("callpath ^zmpi")
+ add_rec("zmpi")
+ add_rec("fake")
- add_rec('dyninst')
- add_rec('libdwarf')
- add_rec('libelf')
+ add_rec("dyninst")
+ add_rec("libdwarf")
+ add_rec("libelf")
colify(recs, cols=3)
@@ -415,47 +425,47 @@ def _mock_remove(spec):
def test_default_queries(database):
# Testing a package whose name *doesn't* start with 'lib'
# to ensure the library has 'lib' prepended to the name
- rec = database.get_record('zmpi')
+ rec = database.get_record("zmpi")
spec = rec.spec
- libraries = spec['zmpi'].libs
+ libraries = spec["zmpi"].libs
assert len(libraries) == 1
- assert libraries.names[0] == 'zmpi'
+ assert libraries.names[0] == "zmpi"
- headers = spec['zmpi'].headers
+ headers = spec["zmpi"].headers
assert len(headers) == 1
- assert headers.names[0] == 'zmpi'
+ assert headers.names[0] == "zmpi"
- command = spec['zmpi'].command
+ command = spec["zmpi"].command
assert isinstance(command, Executable)
- assert command.name == 'zmpi'
+ assert command.name == "zmpi"
assert os.path.exists(command.path)
# Testing a package whose name *does* start with 'lib'
# to ensure the library doesn't have a double 'lib' prefix
- rec = database.get_record('libelf')
+ rec = database.get_record("libelf")
spec = rec.spec
- libraries = spec['libelf'].libs
+ libraries = spec["libelf"].libs
assert len(libraries) == 1
- assert libraries.names[0] == 'elf'
+ assert libraries.names[0] == "elf"
- headers = spec['libelf'].headers
+ headers = spec["libelf"].headers
assert len(headers) == 1
- assert headers.names[0] == 'libelf'
+ assert headers.names[0] == "libelf"
- command = spec['libelf'].command
+ command = spec["libelf"].command
assert isinstance(command, Executable)
- assert command.name == 'libelf'
+ assert command.name == "libelf"
assert os.path.exists(command.path)
def test_005_db_exists(database):
"""Make sure db cache file exists after creating."""
- index_file = os.path.join(database.root, '.spack-db', 'index.json')
- lock_file = os.path.join(database.root, '.spack-db', 'lock')
+ index_file = os.path.join(database.root, ".spack-db", "index.json")
+ lock_file = os.path.join(database.root, ".spack-db", "lock")
assert os.path.exists(str(index_file))
# Lockfiles not currently supported on Windows
if not is_windows:
@@ -472,33 +482,27 @@ def test_010_all_install_sanity(database):
assert len(all_specs) == 15
# Query specs with multiple configurations
- mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
- callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
- mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+ mpileaks_specs = [s for s in all_specs if s.satisfies("mpileaks")]
+ callpath_specs = [s for s in all_specs if s.satisfies("callpath")]
+ mpi_specs = [s for s in all_specs if s.satisfies("mpi")]
assert len(mpileaks_specs) == 3
assert len(callpath_specs) == 3
assert len(mpi_specs) == 3
# Query specs with single configurations
- dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
- libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')]
- libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
+ dyninst_specs = [s for s in all_specs if s.satisfies("dyninst")]
+ libdwarf_specs = [s for s in all_specs if s.satisfies("libdwarf")]
+ libelf_specs = [s for s in all_specs if s.satisfies("libelf")]
assert len(dyninst_specs) == 1
assert len(libdwarf_specs) == 1
assert len(libelf_specs) == 1
# Query by dependency
- assert len(
- [s for s in all_specs if s.satisfies('mpileaks ^mpich')]
- ) == 1
- assert len(
- [s for s in all_specs if s.satisfies('mpileaks ^mpich2')]
- ) == 1
- assert len(
- [s for s in all_specs if s.satisfies('mpileaks ^zmpi')]
- ) == 1
+ assert len([s for s in all_specs if s.satisfies("mpileaks ^mpich")]) == 1
+ assert len([s for s in all_specs if s.satisfies("mpileaks ^mpich2")]) == 1
+ assert len([s for s in all_specs if s.satisfies("mpileaks ^zmpi")]) == 1
def test_015_write_and_read(mutable_database):
@@ -516,7 +520,7 @@ def test_015_write_and_read(mutable_database):
def test_017_write_and_read_without_uuid(mutable_database, monkeypatch):
- monkeypatch.setattr(spack.database, '_use_uuid', False)
+ monkeypatch.setattr(spack.database, "_use_uuid", False)
# write and read DB
with spack.store.db.write_transaction():
specs = spack.store.db.query()
@@ -543,8 +547,8 @@ def test_025_reindex(mutable_database):
def test_026_reindex_after_deprecate(mutable_database):
"""Make sure reindex works and ref counts are valid after deprecation."""
- mpich = mutable_database.query_one('mpich')
- zmpi = mutable_database.query_one('zmpi')
+ mpich = mutable_database.query_one("mpich")
+ zmpi = mutable_database.query_one("zmpi")
mutable_database.deprecate(mpich, zmpi)
spack.store.store.reindex()
@@ -555,11 +559,12 @@ class ReadModify(object):
"""Provide a function which can execute in a separate process that removes
a spec from the database.
"""
+
def __call__(self):
# check that other process can read DB
_check_db_sanity(spack.store.db)
with spack.store.db.write_transaction():
- _mock_remove('mpileaks ^zmpi')
+ _mock_remove("mpileaks ^zmpi")
def test_030_db_sanity_from_another_process(mutable_database):
@@ -570,7 +575,7 @@ def test_030_db_sanity_from_another_process(mutable_database):
# ensure child process change is visible in parent process
with mutable_database.read_transaction():
- assert len(mutable_database.query('mpileaks ^zmpi')) == 0
+ assert len(mutable_database.query("mpileaks ^zmpi")) == 0
def test_040_ref_counts(database):
@@ -580,8 +585,8 @@ def test_040_ref_counts(database):
def test_041_ref_counts_deprecate(mutable_database):
"""Ensure that we have appropriate ref counts after deprecating"""
- mpich = mutable_database.query_one('mpich')
- zmpi = mutable_database.query_one('zmpi')
+ mpich = mutable_database.query_one("mpich")
+ zmpi = mutable_database.query_one("zmpi")
mutable_database.deprecate(mpich, zmpi)
mutable_database._check_ref_counts()
@@ -594,27 +599,27 @@ def test_050_basic_query(database):
assert total_specs == 17
# query specs with multiple configurations
- mpileaks_specs = database.query('mpileaks')
- callpath_specs = database.query('callpath')
- mpi_specs = database.query('mpi')
+ mpileaks_specs = database.query("mpileaks")
+ callpath_specs = database.query("callpath")
+ mpi_specs = database.query("mpi")
assert len(mpileaks_specs) == 3
assert len(callpath_specs) == 3
assert len(mpi_specs) == 3
# query specs with single configurations
- dyninst_specs = database.query('dyninst')
- libdwarf_specs = database.query('libdwarf')
- libelf_specs = database.query('libelf')
+ dyninst_specs = database.query("dyninst")
+ libdwarf_specs = database.query("libdwarf")
+ libelf_specs = database.query("libelf")
assert len(dyninst_specs) == 1
assert len(libdwarf_specs) == 1
assert len(libelf_specs) == 1
# Query by dependency
- assert len(database.query('mpileaks ^mpich')) == 1
- assert len(database.query('mpileaks ^mpich2')) == 1
- assert len(database.query('mpileaks ^zmpi')) == 1
+ assert len(database.query("mpileaks ^mpich")) == 1
+ assert len(database.query("mpileaks ^mpich2")) == 1
+ assert len(database.query("mpileaks ^zmpi")) == 1
# Query by date
assert len(database.query(start_date=datetime.datetime.min)) == total_specs
@@ -624,98 +629,96 @@ def test_050_basic_query(database):
def test_060_remove_and_add_root_package(mutable_database):
- _check_remove_and_add_package(mutable_database, 'mpileaks ^mpich')
+ _check_remove_and_add_package(mutable_database, "mpileaks ^mpich")
def test_070_remove_and_add_dependency_package(mutable_database):
- _check_remove_and_add_package(mutable_database, 'dyninst')
+ _check_remove_and_add_package(mutable_database, "dyninst")
def test_080_root_ref_counts(mutable_database):
- rec = mutable_database.get_record('mpileaks ^mpich')
+ rec = mutable_database.get_record("mpileaks ^mpich")
# Remove a top-level spec from the DB
- mutable_database.remove('mpileaks ^mpich')
+ mutable_database.remove("mpileaks ^mpich")
# record no longer in DB
- assert mutable_database.query('mpileaks ^mpich', installed=any) == []
+ assert mutable_database.query("mpileaks ^mpich", installed=any) == []
# record's deps have updated ref_counts
- assert mutable_database.get_record('callpath ^mpich').ref_count == 0
- assert mutable_database.get_record('mpich').ref_count == 1
+ assert mutable_database.get_record("callpath ^mpich").ref_count == 0
+ assert mutable_database.get_record("mpich").ref_count == 1
# Put the spec back
mutable_database.add(rec.spec, spack.store.layout)
# record is present again
- assert len(mutable_database.query('mpileaks ^mpich', installed=any)) == 1
+ assert len(mutable_database.query("mpileaks ^mpich", installed=any)) == 1
# dependencies have ref counts updated
- assert mutable_database.get_record('callpath ^mpich').ref_count == 1
- assert mutable_database.get_record('mpich').ref_count == 2
+ assert mutable_database.get_record("callpath ^mpich").ref_count == 1
+ assert mutable_database.get_record("mpich").ref_count == 2
def test_090_non_root_ref_counts(mutable_database):
- mutable_database.get_record('mpileaks ^mpich')
- mutable_database.get_record('callpath ^mpich')
+ mutable_database.get_record("mpileaks ^mpich")
+ mutable_database.get_record("callpath ^mpich")
# "force remove" a non-root spec from the DB
- mutable_database.remove('callpath ^mpich')
+ mutable_database.remove("callpath ^mpich")
# record still in DB but marked uninstalled
- assert mutable_database.query('callpath ^mpich', installed=True) == []
- assert len(mutable_database.query('callpath ^mpich', installed=any)) == 1
+ assert mutable_database.query("callpath ^mpich", installed=True) == []
+ assert len(mutable_database.query("callpath ^mpich", installed=any)) == 1
# record and its deps have same ref_counts
- assert mutable_database.get_record(
- 'callpath ^mpich', installed=any
- ).ref_count == 1
- assert mutable_database.get_record('mpich').ref_count == 2
+ assert mutable_database.get_record("callpath ^mpich", installed=any).ref_count == 1
+ assert mutable_database.get_record("mpich").ref_count == 2
# remove only dependent of uninstalled callpath record
- mutable_database.remove('mpileaks ^mpich')
+ mutable_database.remove("mpileaks ^mpich")
# record and parent are completely gone.
- assert mutable_database.query('mpileaks ^mpich', installed=any) == []
- assert mutable_database.query('callpath ^mpich', installed=any) == []
+ assert mutable_database.query("mpileaks ^mpich", installed=any) == []
+ assert mutable_database.query("callpath ^mpich", installed=any) == []
# mpich ref count updated properly.
- mpich_rec = mutable_database.get_record('mpich')
+ mpich_rec = mutable_database.get_record("mpich")
assert mpich_rec.ref_count == 0
def test_100_no_write_with_exception_on_remove(database):
def fail_while_writing():
with database.write_transaction():
- _mock_remove('mpileaks ^zmpi')
+ _mock_remove("mpileaks ^zmpi")
raise Exception()
with database.read_transaction():
- assert len(database.query('mpileaks ^zmpi', installed=any)) == 1
+ assert len(database.query("mpileaks ^zmpi", installed=any)) == 1
with pytest.raises(Exception):
fail_while_writing()
# reload DB and make sure zmpi is still there.
with database.read_transaction():
- assert len(database.query('mpileaks ^zmpi', installed=any)) == 1
+ assert len(database.query("mpileaks ^zmpi", installed=any)) == 1
def test_110_no_write_with_exception_on_install(database):
def fail_while_writing():
with database.write_transaction():
- _mock_install('cmake')
+ _mock_install("cmake")
raise Exception()
with database.read_transaction():
- assert database.query('cmake', installed=any) == []
+ assert database.query("cmake", installed=any) == []
with pytest.raises(Exception):
fail_while_writing()
# reload DB and make sure cmake was not written.
with database.read_transaction():
- assert database.query('cmake', installed=any) == []
+ assert database.query("cmake", installed=any) == []
def test_115_reindex_with_packages_not_in_repo(mutable_database):
@@ -728,31 +731,29 @@ def test_115_reindex_with_packages_not_in_repo(mutable_database):
def test_external_entries_in_db(mutable_database):
- rec = mutable_database.get_record('mpileaks ^zmpi')
+ rec = mutable_database.get_record("mpileaks ^zmpi")
assert rec.spec.external_path is None
assert not rec.spec.external_modules
- rec = mutable_database.get_record('externaltool')
- assert rec.spec.external_path == os.sep + \
- os.path.join('path', 'to', 'external_tool')
+ rec = mutable_database.get_record("externaltool")
+ assert rec.spec.external_path == os.sep + os.path.join("path", "to", "external_tool")
assert not rec.spec.external_modules
assert rec.explicit is False
rec.spec.package.do_install(fake=True, explicit=True)
- rec = mutable_database.get_record('externaltool')
- assert rec.spec.external_path == os.sep + \
- os.path.join('path', 'to', 'external_tool')
+ rec = mutable_database.get_record("externaltool")
+ assert rec.spec.external_path == os.sep + os.path.join("path", "to", "external_tool")
assert not rec.spec.external_modules
assert rec.explicit is True
-@pytest.mark.regression('8036')
+@pytest.mark.regression("8036")
def test_regression_issue_8036(mutable_database, usr_folder_exists):
# The test ensures that the external package prefix is treated as
# existing. Even when the package prefix exists, the package should
# not be considered installed until it is added to the database with
# do_install.
- s = spack.spec.Spec('externaltool@0.9')
+ s = spack.spec.Spec("externaltool@0.9")
s.concretize()
assert not s.installed
@@ -761,22 +762,22 @@ def test_regression_issue_8036(mutable_database, usr_folder_exists):
assert s.installed
-@pytest.mark.regression('11118')
+@pytest.mark.regression("11118")
def test_old_external_entries_prefix(mutable_database):
- with open(spack.store.db._index_path, 'r') as f:
+ with open(spack.store.db._index_path, "r") as f:
db_obj = json.loads(f.read())
validate(db_obj, schema)
- s = spack.spec.Spec('externaltool')
+ s = spack.spec.Spec("externaltool")
s.concretize()
- db_obj['database']['installs'][s.dag_hash()]['path'] = 'None'
+ db_obj["database"]["installs"][s.dag_hash()]["path"] = "None"
- with open(spack.store.db._index_path, 'w') as f:
+ with open(spack.store.db._index_path, "w") as f:
f.write(json.dumps(db_obj))
if _use_uuid:
- with open(spack.store.db._verifier_path, 'w') as f:
+ with open(spack.store.db._verifier_path, "w") as f:
f.write(str(uuid.uuid4()))
record = spack.store.db.get_record(s)
@@ -798,90 +799,93 @@ def test_uninstall_by_spec(mutable_database):
def test_query_unused_specs(mutable_database):
# This spec installs a fake cmake as a build only dependency
- s = spack.spec.Spec('simple-inheritance')
+ s = spack.spec.Spec("simple-inheritance")
s.concretize()
s.package.do_install(fake=True, explicit=True)
unused = spack.store.db.unused_specs
assert len(unused) == 1
- assert unused[0].name == 'cmake'
+ assert unused[0].name == "cmake"
-@pytest.mark.regression('10019')
+@pytest.mark.regression("10019")
def test_query_spec_with_conditional_dependency(mutable_database):
# The issue is triggered by having dependencies that are
# conditional on a Boolean variant
- s = spack.spec.Spec('hdf5~mpi')
+ s = spack.spec.Spec("hdf5~mpi")
s.concretize()
s.package.do_install(fake=True, explicit=True)
- results = spack.store.db.query_local('hdf5 ^mpich')
+ results = spack.store.db.query_local("hdf5 ^mpich")
assert not results
-@pytest.mark.regression('10019')
+@pytest.mark.regression("10019")
def test_query_spec_with_non_conditional_virtual_dependency(database):
# Ensure the same issue doesn't come up for virtual
# dependency that are not conditional on variants
- results = spack.store.db.query_local('mpileaks ^mpich')
+ results = spack.store.db.query_local("mpileaks ^mpich")
assert len(results) == 1
def test_failed_spec_path_error(database):
"""Ensure spec not concrete check is covered."""
- s = spack.spec.Spec('a')
- with pytest.raises(ValueError, match='Concrete spec required'):
+ s = spack.spec.Spec("a")
+ with pytest.raises(ValueError, match="Concrete spec required"):
spack.store.db._failed_spec_path(s)
@pytest.mark.db
def test_clear_failure_keep(mutable_database, monkeypatch, capfd):
"""Add test coverage for clear_failure operation when to be retained."""
+
def _is(db, spec):
return True
# Pretend the spec has been failure locked
- monkeypatch.setattr(spack.database.Database, 'prefix_failure_locked', _is)
+ monkeypatch.setattr(spack.database.Database, "prefix_failure_locked", _is)
- s = spack.spec.Spec('a')
+ s = spack.spec.Spec("a")
spack.store.db.clear_failure(s)
out = capfd.readouterr()[0]
- assert 'Retaining failure marking' in out
+ assert "Retaining failure marking" in out
@pytest.mark.db
def test_clear_failure_forced(mutable_database, monkeypatch, capfd):
"""Add test coverage for clear_failure operation when force."""
+
def _is(db, spec):
return True
# Pretend the spec has been failure locked
- monkeypatch.setattr(spack.database.Database, 'prefix_failure_locked', _is)
+ monkeypatch.setattr(spack.database.Database, "prefix_failure_locked", _is)
# Ensure raise OSError when try to remove the non-existent marking
- monkeypatch.setattr(spack.database.Database, 'prefix_failure_marked', _is)
+ monkeypatch.setattr(spack.database.Database, "prefix_failure_marked", _is)
- s = spack.spec.Spec('a').concretized()
+ s = spack.spec.Spec("a").concretized()
spack.store.db.clear_failure(s, force=True)
out = capfd.readouterr()[1]
- assert 'Removing failure marking despite lock' in out
- assert 'Unable to remove failure marking' in out
+ assert "Removing failure marking despite lock" in out
+ assert "Unable to remove failure marking" in out
@pytest.mark.db
def test_mark_failed(mutable_database, monkeypatch, tmpdir, capsys):
"""Add coverage to mark_failed."""
+
def _raise_exc(lock):
- raise lk.LockTimeoutError('Mock acquire_write failure')
+ raise lk.LockTimeoutError("Mock acquire_write failure")
# Ensure attempt to acquire write lock on the mark raises the exception
- monkeypatch.setattr(lk.Lock, 'acquire_write', _raise_exc)
+ monkeypatch.setattr(lk.Lock, "acquire_write", _raise_exc)
with tmpdir.as_cwd():
- s = spack.spec.Spec('a').concretized()
+ s = spack.spec.Spec("a").concretized()
spack.store.db.mark_failed(s)
out = str(capsys.readouterr()[1])
- assert 'Unable to mark a as failed' in out
+ assert "Unable to mark a as failed" in out
# Clean up the failure mark to ensure it does not interfere with other
# tests using the same spec.
@@ -891,10 +895,11 @@ def test_mark_failed(mutable_database, monkeypatch, tmpdir, capsys):
@pytest.mark.db
def test_prefix_failed(mutable_database, monkeypatch):
"""Add coverage to prefix_failed operation."""
+
def _is(db, spec):
return True
- s = spack.spec.Spec('a').concretized()
+ s = spack.spec.Spec("a").concretized()
# Confirm the spec is not already marked as failed
assert not spack.store.db.prefix_failed(s)
@@ -908,19 +913,20 @@ def test_prefix_failed(mutable_database, monkeypatch):
assert not spack.store.db.prefix_failed(s)
# Now pretend that the prefix failure is locked
- monkeypatch.setattr(spack.database.Database, 'prefix_failure_locked', _is)
+ monkeypatch.setattr(spack.database.Database, "prefix_failure_locked", _is)
assert spack.store.db.prefix_failed(s)
def test_prefix_read_lock_error(mutable_database, monkeypatch):
"""Cover the prefix read lock exception."""
+
def _raise(db, spec):
- raise lk.LockError('Mock lock error')
+ raise lk.LockError("Mock lock error")
- s = spack.spec.Spec('a').concretized()
+ s = spack.spec.Spec("a").concretized()
# Ensure subsequent lock operations fail
- monkeypatch.setattr(lk.Lock, 'acquire_read', _raise)
+ monkeypatch.setattr(lk.Lock, "acquire_read", _raise)
with pytest.raises(Exception):
with spack.store.db.prefix_read_lock(s):
@@ -929,26 +935,27 @@ def test_prefix_read_lock_error(mutable_database, monkeypatch):
def test_prefix_write_lock_error(mutable_database, monkeypatch):
"""Cover the prefix write lock exception."""
+
def _raise(db, spec):
- raise lk.LockError('Mock lock error')
+ raise lk.LockError("Mock lock error")
- s = spack.spec.Spec('a').concretized()
+ s = spack.spec.Spec("a").concretized()
# Ensure subsequent lock operations fail
- monkeypatch.setattr(lk.Lock, 'acquire_write', _raise)
+ monkeypatch.setattr(lk.Lock, "acquire_write", _raise)
with pytest.raises(Exception):
with spack.store.db.prefix_write_lock(s):
assert False
-@pytest.mark.regression('26600')
+@pytest.mark.regression("26600")
def test_database_works_with_empty_dir(tmpdir):
# Create the lockfile and failures directory otherwise
# we'll get a permission error on Database creation
- db_dir = tmpdir.ensure_dir('.spack-db')
- db_dir.ensure('lock')
- db_dir.ensure_dir('failures')
+ db_dir = tmpdir.ensure_dir(".spack-db")
+ db_dir.ensure("lock")
+ db_dir.ensure_dir("failures")
tmpdir.chmod(mode=0o555, rec=1)
db = spack.database.Database(str(tmpdir))
with db.read_transaction():
@@ -957,10 +964,13 @@ def test_database_works_with_empty_dir(tmpdir):
assert not os.path.exists(db._index_path)
-@pytest.mark.parametrize('query_arg,exc_type,msg_str', [
- (['callpath'], spack.store.MatchError, 'matches multiple packages'),
- (['tensorflow'], spack.store.MatchError, 'does not match any')
-])
+@pytest.mark.parametrize(
+ "query_arg,exc_type,msg_str",
+ [
+ (["callpath"], spack.store.MatchError, "matches multiple packages"),
+ (["tensorflow"], spack.store.MatchError, "does not match any"),
+ ],
+)
def test_store_find_failures(database, query_arg, exc_type, msg_str):
with pytest.raises(exc_type) as exc_info:
spack.store.find(query_arg, multiple=False)
@@ -968,7 +978,7 @@ def test_store_find_failures(database, query_arg, exc_type, msg_str):
def test_store_find_accept_string(database):
- result = spack.store.find('callpath', multiple=True)
+ result = spack.store.find("callpath", multiple=True)
assert len(result) == 3
@@ -978,7 +988,7 @@ def test_reindex_removed_prefix_is_not_installed(mutable_database, mock_store, c
not installed."""
# Remove libelf from the filesystem
- prefix = mutable_database.query_one('libelf').prefix
+ prefix = mutable_database.query_one("libelf").prefix
assert prefix.startswith(str(mock_store))
shutil.rmtree(prefix)
@@ -987,11 +997,11 @@ def test_reindex_removed_prefix_is_not_installed(mutable_database, mock_store, c
# Reindexing should warn about libelf not being found on the filesystem
err = capfd.readouterr()[1]
- assert 'this directory does not contain an installation of the spec' in err
+ assert "this directory does not contain an installation of the spec" in err
# And we should still have libelf in the database, but not installed.
- assert not mutable_database.query_one('libelf', installed=True)
- assert mutable_database.query_one('libelf', installed=False)
+ assert not mutable_database.query_one("libelf", installed=True)
+ assert mutable_database.query_one("libelf", installed=False)
def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
@@ -1020,12 +1030,11 @@ def test_reindex_when_all_prefixes_are_removed(mutable_database, mock_store):
assert len(mutable_database.query_local(installed=False, explicit=True)) == 0
-@pytest.mark.parametrize('spec_str,parent_name,expected_nparents', [
- ('dyninst', 'callpath', 3),
- ('libelf', 'dyninst', 1),
- ('libelf', 'libdwarf', 1)
-])
-@pytest.mark.regression('11983')
+@pytest.mark.parametrize(
+ "spec_str,parent_name,expected_nparents",
+ [("dyninst", "callpath", 3), ("libelf", "dyninst", 1), ("libelf", "libdwarf", 1)],
+)
+@pytest.mark.regression("11983")
def test_check_parents(spec_str, parent_name, expected_nparents, database):
"""Check that a spec returns the correct number of parents."""
s = database.query_one(spec_str)
@@ -1039,27 +1048,27 @@ def test_check_parents(spec_str, parent_name, expected_nparents, database):
def test_consistency_of_dependents_upon_remove(mutable_database):
# Check the initial state
- s = mutable_database.query_one('dyninst')
- parents = s.dependents(name='callpath')
+ s = mutable_database.query_one("dyninst")
+ parents = s.dependents(name="callpath")
assert len(parents) == 3
# Remove a dependent (and all its dependents)
- mutable_database.remove('mpileaks ^callpath ^mpich2')
- mutable_database.remove('callpath ^mpich2')
+ mutable_database.remove("mpileaks ^callpath ^mpich2")
+ mutable_database.remove("callpath ^mpich2")
# Check the final state
- s = mutable_database.query_one('dyninst')
- parents = s.dependents(name='callpath')
+ s = mutable_database.query_one("dyninst")
+ parents = s.dependents(name="callpath")
assert len(parents) == 2
-@pytest.mark.regression('30187')
+@pytest.mark.regression("30187")
def test_query_installed_when_package_unknown(database):
"""Test that we can query the installation status of a spec
when we don't know its package.py
"""
with spack.repo.use_repositories(MockPackageMultiRepo()):
- specs = database.query('mpileaks')
+ specs = database.query("mpileaks")
for s in specs:
# Assert that we can query the installation methods even though we
# don't have the package.py available
diff --git a/lib/spack/spack/test/directives.py b/lib/spack/spack/test/directives.py
index 5eb7bf8164..616d7ef5ee 100644
--- a/lib/spack/spack/test/directives.py
+++ b/lib/spack/spack/test/directives.py
@@ -10,9 +10,9 @@ import spack.spec
def test_false_directives_do_not_exist(mock_packages):
"""Ensure directives that evaluate to False at import time are added to
- dicts on packages.
+ dicts on packages.
"""
- cls = spack.repo.path.get_pkg_class('when-directives-false')
+ cls = spack.repo.path.get_pkg_class("when-directives-false")
assert not cls.dependencies
assert not cls.resources
assert not cls.patches
@@ -20,13 +20,13 @@ def test_false_directives_do_not_exist(mock_packages):
def test_true_directives_exist(mock_packages):
"""Ensure directives that evaluate to True at import time are added to
- dicts on packages.
+ dicts on packages.
"""
- cls = spack.repo.path.get_pkg_class('when-directives-true')
+ cls = spack.repo.path.get_pkg_class("when-directives-true")
assert cls.dependencies
- assert spack.spec.Spec() in cls.dependencies['extendee']
- assert spack.spec.Spec() in cls.dependencies['b']
+ assert spack.spec.Spec() in cls.dependencies["extendee"]
+ assert spack.spec.Spec() in cls.dependencies["b"]
assert cls.resources
assert spack.spec.Spec() in cls.resources
@@ -36,27 +36,27 @@ def test_true_directives_exist(mock_packages):
def test_constraints_from_context(mock_packages):
- pkg_cls = spack.repo.path.get_pkg_class('with-constraint-met')
+ pkg_cls = spack.repo.path.get_pkg_class("with-constraint-met")
assert pkg_cls.dependencies
- assert spack.spec.Spec('@1.0') in pkg_cls.dependencies['b']
+ assert spack.spec.Spec("@1.0") in pkg_cls.dependencies["b"]
assert pkg_cls.conflicts
- assert (spack.spec.Spec('+foo@1.0'), None) in pkg_cls.conflicts['%gcc']
+ assert (spack.spec.Spec("+foo@1.0"), None) in pkg_cls.conflicts["%gcc"]
-@pytest.mark.regression('26656')
+@pytest.mark.regression("26656")
def test_constraints_from_context_are_merged(mock_packages):
- pkg_cls = spack.repo.path.get_pkg_class('with-constraint-met')
+ pkg_cls = spack.repo.path.get_pkg_class("with-constraint-met")
assert pkg_cls.dependencies
- assert spack.spec.Spec('@0.14:15 ^b@3.8:4.0') in pkg_cls.dependencies['c']
+ assert spack.spec.Spec("@0.14:15 ^b@3.8:4.0") in pkg_cls.dependencies["c"]
-@pytest.mark.regression('27754')
+@pytest.mark.regression("27754")
def test_extends_spec(config, mock_packages):
- extender = spack.spec.Spec('extends-spec').concretized()
- extendee = spack.spec.Spec('extendee').concretized()
+ extender = spack.spec.Spec("extends-spec").concretized()
+ extendee = spack.spec.Spec("extendee").concretized()
assert extender.dependencies
assert extender.package.extends(extendee)
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index 9b52a6f179..b21260a829 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -26,17 +26,16 @@ max_packages = 10
def test_yaml_directory_layout_parameters(tmpdir, config):
"""This tests the various parameters that can be used to configure
- the install location """
- spec = Spec('python')
+ the install location"""
+ spec = Spec("python")
spec.concretize()
# Ensure default layout matches expected spec format
layout_default = DirectoryLayout(str(tmpdir))
path_default = layout_default.relative_path_for_spec(spec)
- assert(path_default == spec.format(
- "{architecture}/"
- "{compiler.name}-{compiler.version}/"
- "{name}-{version}-{hash}"))
+ assert path_default == spec.format(
+ "{architecture}/" "{compiler.name}-{compiler.version}/" "{name}-{version}-{hash}"
+ )
# Test hash_length parameter works correctly
layout_10 = DirectoryLayout(str(tmpdir), hash_length=10)
@@ -44,41 +43,36 @@ def test_yaml_directory_layout_parameters(tmpdir, config):
layout_7 = DirectoryLayout(str(tmpdir), hash_length=7)
path_7 = layout_7.relative_path_for_spec(spec)
- assert(len(path_default) - len(path_10) == 22)
- assert(len(path_default) - len(path_7) == 25)
+ assert len(path_default) - len(path_10) == 22
+ assert len(path_default) - len(path_7) == 25
# Test path_scheme
- arch, compiler, package7 = path_7.split('/')
- projections_package7 = {'all': "{name}-{version}-{hash:7}"}
- layout_package7 = DirectoryLayout(str(tmpdir),
- projections=projections_package7)
+ arch, compiler, package7 = path_7.split("/")
+ projections_package7 = {"all": "{name}-{version}-{hash:7}"}
+ layout_package7 = DirectoryLayout(str(tmpdir), projections=projections_package7)
path_package7 = layout_package7.relative_path_for_spec(spec)
- assert(package7 == path_package7)
+ assert package7 == path_package7
# Test separation of architecture or namespace
- spec2 = Spec('libelf').concretized()
+ spec2 = Spec("libelf").concretized()
arch_scheme = (
"{architecture.platform}/{architecture.target}/{architecture.os}/{name}/{version}/{hash:7}"
)
ns_scheme = "${ARCHITECTURE}/${NAMESPACE}/${PACKAGE}-${VERSION}-${HASH:7}"
- arch_ns_scheme_projections = {'all': arch_scheme,
- 'python': ns_scheme}
- layout_arch_ns = DirectoryLayout(
- str(tmpdir), projections=arch_ns_scheme_projections)
+ arch_ns_scheme_projections = {"all": arch_scheme, "python": ns_scheme}
+ layout_arch_ns = DirectoryLayout(str(tmpdir), projections=arch_ns_scheme_projections)
arch_path_spec2 = layout_arch_ns.relative_path_for_spec(spec2)
- assert(arch_path_spec2 == spec2.format(arch_scheme))
+ assert arch_path_spec2 == spec2.format(arch_scheme)
ns_path_spec = layout_arch_ns.relative_path_for_spec(spec)
- assert(ns_path_spec == spec.format(ns_scheme))
+ assert ns_path_spec == spec.format(ns_scheme)
# Ensure conflicting parameters caught
with pytest.raises(InvalidDirectoryLayoutParametersError):
- DirectoryLayout(str(tmpdir),
- hash_length=20,
- projections=projections_package7)
+ DirectoryLayout(str(tmpdir), hash_length=20, projections=projections_package7)
def test_read_and_write_spec(temporary_store, config, mock_packages):
@@ -92,7 +86,7 @@ def test_read_and_write_spec(temporary_store, config, mock_packages):
pkg_names = list(spack.repo.path.all_package_names())[:max_packages]
for name in pkg_names:
- if name.startswith('external'):
+ if name.startswith("external"):
# External package tests cannot be installed
continue
@@ -165,8 +159,8 @@ def test_handle_unknown_package(temporary_store, config, mock_packages):
mock_db = spack.repo.RepoPath(spack.paths.mock_packages_path)
not_in_mock = set.difference(
- set(spack.repo.all_package_names()),
- set(mock_db.all_package_names()))
+ set(spack.repo.all_package_names()), set(mock_db.all_package_names())
+ )
packages = list(not_in_mock)[:max_packages]
# Create all the packages that are not in mock.
@@ -188,8 +182,7 @@ def test_handle_unknown_package(temporary_store, config, mock_packages):
# Now check that even without the package files, we know
# enough to read a spec from the spec file.
for spec, path in installed_specs.items():
- spec_from_file = layout.read_spec(
- os.path.join(path, '.spack', 'spec.json'))
+ spec_from_file = layout.read_spec(os.path.join(path, ".spack", "spec.json"))
# To satisfy these conditions, directory layouts need to
# read in concrete specs from their install dirs somehow.
@@ -207,7 +200,7 @@ def test_find(temporary_store, config, mock_packages):
# Create install prefixes for all packages in the list
installed_specs = {}
for name in package_names:
- if name.startswith('external'):
+ if name.startswith("external"):
# External package tests cannot be installed
continue
spec = spack.spec.Spec(name).concretized()
@@ -224,10 +217,9 @@ def test_find(temporary_store, config, mock_packages):
def test_yaml_directory_layout_build_path(tmpdir, config):
"""This tests build path method."""
- spec = Spec('python')
+ spec = Spec("python")
spec.concretize()
layout = DirectoryLayout(str(tmpdir))
rel_path = os.path.join(layout.metadata_dir, layout.packages_dir)
- assert layout.build_packages_path(spec) == os.path.join(spec.prefix,
- rel_path)
+ assert layout.build_packages_path(spec) == os.path.join(spec.prefix, rel_path)
diff --git a/lib/spack/spack/test/env.py b/lib/spack/spack/test/env.py
index 6d16924cbd..c1b36eb5dd 100644
--- a/lib/spack/spack/test/env.py
+++ b/lib/spack/spack/test/env.py
@@ -11,22 +11,23 @@ import spack.environment as ev
import spack.spec
-@pytest.mark.skipif(str(spack.platforms.host()) == 'windows',
- reason='Not supported on Windows (yet)')
+@pytest.mark.skipif(
+ str(spack.platforms.host()) == "windows", reason="Not supported on Windows (yet)"
+)
def test_hash_change_no_rehash_concrete(tmpdir, mock_packages, config):
# create an environment
- env_path = tmpdir.mkdir('env_dir').strpath
+ env_path = tmpdir.mkdir("env_dir").strpath
env = ev.Environment(env_path)
env.write()
# add a spec with a rewritten build hash
- spec = spack.spec.Spec('mpileaks')
+ spec = spack.spec.Spec("mpileaks")
env.add(spec)
env.concretize()
# rewrite the hash
old_hash = env.concretized_order[0]
- new_hash = 'abc'
+ new_hash = "abc"
env.specs_by_hash[old_hash]._hash = new_hash
env.concretized_order[0] = new_hash
env.specs_by_hash[new_hash] = env.specs_by_hash[old_hash]
@@ -44,7 +45,7 @@ def test_hash_change_no_rehash_concrete(tmpdir, mock_packages, config):
def test_activate_should_require_an_env():
with pytest.raises(TypeError):
- ev.activate(env='name')
+ ev.activate(env="name")
with pytest.raises(TypeError):
ev.activate(env=None)
diff --git a/lib/spack/spack/test/environment_modifications.py b/lib/spack/spack/test/environment_modifications.py
index 9be7ec2eeb..dc6bc0d9a3 100644
--- a/lib/spack/spack/test/environment_modifications.py
+++ b/lib/spack/spack/test/environment_modifications.py
@@ -21,46 +21,44 @@ from spack.util.environment import (
is_system_path,
)
-datadir = os.path.join(spack_root, 'lib', 'spack', 'spack', 'test', 'data')
+datadir = os.path.join(spack_root, "lib", "spack", "spack", "test", "data")
def test_inspect_path(tmpdir):
inspections = {
- 'bin': ['PATH'],
- 'man': ['MANPATH'],
- 'share/man': ['MANPATH'],
- 'share/aclocal': ['ACLOCAL_PATH'],
- 'lib': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
- 'lib64': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
- 'include': ['CPATH'],
- 'lib/pkgconfig': ['PKG_CONFIG_PATH'],
- 'lib64/pkgconfig': ['PKG_CONFIG_PATH'],
- 'share/pkgconfig': ['PKG_CONFIG_PATH'],
- '': ['CMAKE_PREFIX_PATH']
+ "bin": ["PATH"],
+ "man": ["MANPATH"],
+ "share/man": ["MANPATH"],
+ "share/aclocal": ["ACLOCAL_PATH"],
+ "lib": ["LIBRARY_PATH", "LD_LIBRARY_PATH"],
+ "lib64": ["LIBRARY_PATH", "LD_LIBRARY_PATH"],
+ "include": ["CPATH"],
+ "lib/pkgconfig": ["PKG_CONFIG_PATH"],
+ "lib64/pkgconfig": ["PKG_CONFIG_PATH"],
+ "share/pkgconfig": ["PKG_CONFIG_PATH"],
+ "": ["CMAKE_PREFIX_PATH"],
}
- tmpdir.mkdir('bin')
- tmpdir.mkdir('lib')
- tmpdir.mkdir('include')
+ tmpdir.mkdir("bin")
+ tmpdir.mkdir("lib")
+ tmpdir.mkdir("include")
env = environment.inspect_path(str(tmpdir), inspections)
names = [item.name for item in env]
- assert 'PATH' in names
- assert 'LIBRARY_PATH' in names
- assert 'LD_LIBRARY_PATH' in names
- assert 'CPATH' in names
+ assert "PATH" in names
+ assert "LIBRARY_PATH" in names
+ assert "LD_LIBRARY_PATH" in names
+ assert "CPATH" in names
def test_exclude_paths_from_inspection():
inspections = {
- 'lib': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
- 'lib64': ['LIBRARY_PATH', 'LD_LIBRARY_PATH'],
- 'include': ['CPATH']
+ "lib": ["LIBRARY_PATH", "LD_LIBRARY_PATH"],
+ "lib64": ["LIBRARY_PATH", "LD_LIBRARY_PATH"],
+ "include": ["CPATH"],
}
- env = environment.inspect_path(
- '/usr', inspections, exclude=is_system_path
- )
+ env = environment.inspect_path("/usr", inspections, exclude=is_system_path)
assert len(env) == 0
@@ -70,14 +68,12 @@ def prepare_environment_for_tests(working_env):
"""Sets a few dummy variables in the current environment, that will be
useful for the tests below.
"""
- os.environ['UNSET_ME'] = 'foo'
- os.environ['EMPTY_PATH_LIST'] = ''
- os.environ['PATH_LIST'] = '/path/second:/path/third'
- os.environ['REMOVE_PATH_LIST'] \
- = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
- os.environ['PATH_LIST_WITH_SYSTEM_PATHS'] \
- = '/usr/include:' + os.environ['REMOVE_PATH_LIST']
- os.environ['PATH_LIST_WITH_DUPLICATES'] = os.environ['REMOVE_PATH_LIST']
+ os.environ["UNSET_ME"] = "foo"
+ os.environ["EMPTY_PATH_LIST"] = ""
+ os.environ["PATH_LIST"] = "/path/second:/path/third"
+ os.environ["REMOVE_PATH_LIST"] = "/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g"
+ os.environ["PATH_LIST_WITH_SYSTEM_PATHS"] = "/usr/include:" + os.environ["REMOVE_PATH_LIST"]
+ os.environ["PATH_LIST_WITH_DUPLICATES"] = os.environ["REMOVE_PATH_LIST"]
@pytest.fixture
@@ -90,25 +86,25 @@ def env(prepare_environment_for_tests):
def miscellaneous_paths():
"""Returns a list of paths, including system ones."""
return [
- '/usr/local/Cellar/gcc/5.3.0/lib',
- '/usr/local/lib',
- '/usr/local',
- '/usr/local/include',
- '/usr/local/lib64',
- '/usr/local/opt/some-package/lib',
- '/usr/opt/lib',
- '/usr/local/../bin',
- '/lib',
- '/',
- '/usr',
- '/usr/',
- '/usr/bin',
- '/bin64',
- '/lib64',
- '/include',
- '/include/',
- '/opt/some-package/include',
- '/opt/some-package/local/..',
+ "/usr/local/Cellar/gcc/5.3.0/lib",
+ "/usr/local/lib",
+ "/usr/local",
+ "/usr/local/include",
+ "/usr/local/lib64",
+ "/usr/local/opt/some-package/lib",
+ "/usr/opt/lib",
+ "/usr/local/../bin",
+ "/lib",
+ "/",
+ "/usr",
+ "/usr/",
+ "/usr/bin",
+ "/bin64",
+ "/lib64",
+ "/include",
+ "/include/",
+ "/opt/some-package/include",
+ "/opt/some-package/local/..",
]
@@ -116,10 +112,10 @@ def miscellaneous_paths():
def files_to_be_sourced():
"""Returns a list of files to be sourced"""
return [
- os.path.join(datadir, 'sourceme_first.sh'),
- os.path.join(datadir, 'sourceme_second.sh'),
- os.path.join(datadir, 'sourceme_parameters.sh'),
- os.path.join(datadir, 'sourceme_unicode.sh')
+ os.path.join(datadir, "sourceme_first.sh"),
+ os.path.join(datadir, "sourceme_second.sh"),
+ os.path.join(datadir, "sourceme_parameters.sh"),
+ os.path.join(datadir, "sourceme_unicode.sh"),
]
@@ -127,133 +123,129 @@ def test_set(env):
"""Tests setting values in the environment."""
# Here we are storing the commands to set a couple of variables
- env.set('A', 'dummy value')
- env.set('B', 3)
+ env.set("A", "dummy value")
+ env.set("B", 3)
# ...and then we are executing them
env.apply_modifications()
- assert 'dummy value' == os.environ['A']
- assert str(3) == os.environ['B']
+ assert "dummy value" == os.environ["A"]
+ assert str(3) == os.environ["B"]
def test_append_flags(env):
"""Tests appending to a value in the environment."""
# Store a couple of commands
- env.append_flags('APPEND_TO_ME', 'flag1')
- env.append_flags('APPEND_TO_ME', 'flag2')
+ env.append_flags("APPEND_TO_ME", "flag1")
+ env.append_flags("APPEND_TO_ME", "flag2")
# ... execute the commands
env.apply_modifications()
- assert 'flag1 flag2' == os.environ['APPEND_TO_ME']
+ assert "flag1 flag2" == os.environ["APPEND_TO_ME"]
def test_unset(env):
"""Tests unsetting values in the environment."""
# Assert that the target variable is there and unset it
- assert 'foo' == os.environ['UNSET_ME']
- env.unset('UNSET_ME')
+ assert "foo" == os.environ["UNSET_ME"]
+ env.unset("UNSET_ME")
env.apply_modifications()
# Trying to retrieve it after deletion should cause a KeyError
with pytest.raises(KeyError):
- os.environ['UNSET_ME']
+ os.environ["UNSET_ME"]
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_filter_system_paths(miscellaneous_paths):
"""Tests that the filtering of system paths works as expected."""
filtered = filter_system_paths(miscellaneous_paths)
expected = [
- '/usr/local/Cellar/gcc/5.3.0/lib',
- '/usr/local/opt/some-package/lib',
- '/usr/opt/lib',
- '/opt/some-package/include',
- '/opt/some-package/local/..',
+ "/usr/local/Cellar/gcc/5.3.0/lib",
+ "/usr/local/opt/some-package/lib",
+ "/usr/opt/lib",
+ "/opt/some-package/include",
+ "/opt/some-package/local/..",
]
assert filtered == expected
# TODO 27021
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_set_path(env):
"""Tests setting paths in an environment variable."""
# Check setting paths with the default separator
- env.set_path('A', ['foo', 'bar', 'baz'])
+ env.set_path("A", ["foo", "bar", "baz"])
env.apply_modifications()
- assert 'foo:bar:baz' == os.environ['A']
+ assert "foo:bar:baz" == os.environ["A"]
- env.set_path('B', ['foo', 'bar', 'baz'], separator=';')
+ env.set_path("B", ["foo", "bar", "baz"], separator=";")
env.apply_modifications()
- assert 'foo;bar;baz' == os.environ['B']
+ assert "foo;bar;baz" == os.environ["B"]
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_path_manipulation(env):
"""Tests manipulating list of paths in the environment."""
- env.append_path('PATH_LIST', '/path/last')
- env.prepend_path('PATH_LIST', '/path/first')
+ env.append_path("PATH_LIST", "/path/last")
+ env.prepend_path("PATH_LIST", "/path/first")
- env.append_path('EMPTY_PATH_LIST', '/path/middle')
- env.append_path('EMPTY_PATH_LIST', '/path/last')
- env.prepend_path('EMPTY_PATH_LIST', '/path/first')
+ env.append_path("EMPTY_PATH_LIST", "/path/middle")
+ env.append_path("EMPTY_PATH_LIST", "/path/last")
+ env.prepend_path("EMPTY_PATH_LIST", "/path/first")
- env.append_path('NEWLY_CREATED_PATH_LIST', '/path/middle')
- env.append_path('NEWLY_CREATED_PATH_LIST', '/path/last')
- env.prepend_path('NEWLY_CREATED_PATH_LIST', '/path/first')
+ env.append_path("NEWLY_CREATED_PATH_LIST", "/path/middle")
+ env.append_path("NEWLY_CREATED_PATH_LIST", "/path/last")
+ env.prepend_path("NEWLY_CREATED_PATH_LIST", "/path/first")
- env.remove_path('REMOVE_PATH_LIST', '/remove/this')
- env.remove_path('REMOVE_PATH_LIST', '/duplicate/')
+ env.remove_path("REMOVE_PATH_LIST", "/remove/this")
+ env.remove_path("REMOVE_PATH_LIST", "/duplicate/")
- env.deprioritize_system_paths('PATH_LIST_WITH_SYSTEM_PATHS')
- env.prune_duplicate_paths('PATH_LIST_WITH_DUPLICATES')
+ env.deprioritize_system_paths("PATH_LIST_WITH_SYSTEM_PATHS")
+ env.prune_duplicate_paths("PATH_LIST_WITH_DUPLICATES")
env.apply_modifications()
- expected = '/path/first:/path/second:/path/third:/path/last'
- assert os.environ['PATH_LIST'] == expected
+ expected = "/path/first:/path/second:/path/third:/path/last"
+ assert os.environ["PATH_LIST"] == expected
- expected = '/path/first:/path/middle:/path/last'
- assert os.environ['EMPTY_PATH_LIST'] == expected
+ expected = "/path/first:/path/middle:/path/last"
+ assert os.environ["EMPTY_PATH_LIST"] == expected
- expected = '/path/first:/path/middle:/path/last'
- assert os.environ['NEWLY_CREATED_PATH_LIST'] == expected
+ expected = "/path/first:/path/middle:/path/last"
+ assert os.environ["NEWLY_CREATED_PATH_LIST"] == expected
- assert os.environ['REMOVE_PATH_LIST'] == '/a/b:/a/c:/a/d:/f/g'
+ assert os.environ["REMOVE_PATH_LIST"] == "/a/b:/a/c:/a/d:/f/g"
- assert not os.environ['PATH_LIST_WITH_SYSTEM_PATHS'].\
- startswith('/usr/include:')
- assert os.environ['PATH_LIST_WITH_SYSTEM_PATHS'].endswith(':/usr/include')
+ assert not os.environ["PATH_LIST_WITH_SYSTEM_PATHS"].startswith("/usr/include:")
+ assert os.environ["PATH_LIST_WITH_SYSTEM_PATHS"].endswith(":/usr/include")
- assert os.environ['PATH_LIST_WITH_DUPLICATES'].count('/duplicate') == 1
+ assert os.environ["PATH_LIST_WITH_DUPLICATES"].count("/duplicate") == 1
def test_extra_arguments(env):
"""Tests that we can attach extra arguments to any command."""
- env.set('A', 'dummy value', who='Pkg1')
+ env.set("A", "dummy value", who="Pkg1")
for x in env:
- assert 'who' in x.args
+ assert "who" in x.args
env.apply_modifications()
- assert 'dummy value' == os.environ['A']
+ assert "dummy value" == os.environ["A"]
def test_extend(env):
"""Tests that we can construct a list of environment modifications
starting from another list.
"""
- env.set('A', 'dummy value')
- env.set('B', 3)
+ env.set("A", "dummy value")
+ env.set("B", 3)
copy_construct = EnvironmentModifications(env)
assert len(copy_construct) == 2
@@ -262,18 +254,16 @@ def test_extend(env):
assert x is y
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.usefixtures('prepare_environment_for_tests')
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.usefixtures("prepare_environment_for_tests")
def test_source_files(files_to_be_sourced):
"""Tests the construction of a list of environment modifications that are
the result of sourcing a file.
"""
env = EnvironmentModifications()
for filename in files_to_be_sourced:
- if filename.endswith('sourceme_parameters.sh'):
- env.extend(EnvironmentModifications.from_sourcing_file(
- filename, 'intel64'))
+ if filename.endswith("sourceme_parameters.sh"):
+ env.extend(EnvironmentModifications.from_sourcing_file(filename, "intel64"))
else:
env.extend(EnvironmentModifications.from_sourcing_file(filename))
@@ -286,77 +276,91 @@ def test_source_files(files_to_be_sourced):
assert len(modifications) >= 5
# Set new variables
- assert len(modifications['NEW_VAR']) == 1
- assert isinstance(modifications['NEW_VAR'][0], SetEnv)
- assert modifications['NEW_VAR'][0].value == 'new'
+ assert len(modifications["NEW_VAR"]) == 1
+ assert isinstance(modifications["NEW_VAR"][0], SetEnv)
+ assert modifications["NEW_VAR"][0].value == "new"
- assert len(modifications['FOO']) == 1
- assert isinstance(modifications['FOO'][0], SetEnv)
- assert modifications['FOO'][0].value == 'intel64'
+ assert len(modifications["FOO"]) == 1
+ assert isinstance(modifications["FOO"][0], SetEnv)
+ assert modifications["FOO"][0].value == "intel64"
# Unset variables
- assert len(modifications['EMPTY_PATH_LIST']) == 1
- assert isinstance(modifications['EMPTY_PATH_LIST'][0], UnsetEnv)
+ assert len(modifications["EMPTY_PATH_LIST"]) == 1
+ assert isinstance(modifications["EMPTY_PATH_LIST"][0], UnsetEnv)
# Modified variables
- assert len(modifications['UNSET_ME']) == 1
- assert isinstance(modifications['UNSET_ME'][0], SetEnv)
- assert modifications['UNSET_ME'][0].value == 'overridden'
+ assert len(modifications["UNSET_ME"]) == 1
+ assert isinstance(modifications["UNSET_ME"][0], SetEnv)
+ assert modifications["UNSET_ME"][0].value == "overridden"
- assert len(modifications['PATH_LIST']) == 3
- assert isinstance(modifications['PATH_LIST'][0], RemovePath)
- assert modifications['PATH_LIST'][0].value == '/path/third'
- assert isinstance(modifications['PATH_LIST'][1], AppendPath)
- assert modifications['PATH_LIST'][1].value == '/path/fourth'
- assert isinstance(modifications['PATH_LIST'][2], PrependPath)
- assert modifications['PATH_LIST'][2].value == '/path/first'
+ assert len(modifications["PATH_LIST"]) == 3
+ assert isinstance(modifications["PATH_LIST"][0], RemovePath)
+ assert modifications["PATH_LIST"][0].value == "/path/third"
+ assert isinstance(modifications["PATH_LIST"][1], AppendPath)
+ assert modifications["PATH_LIST"][1].value == "/path/fourth"
+ assert isinstance(modifications["PATH_LIST"][2], PrependPath)
+ assert modifications["PATH_LIST"][2].value == "/path/first"
-@pytest.mark.regression('8345')
+@pytest.mark.regression("8345")
def test_preserve_environment(prepare_environment_for_tests):
# UNSET_ME is defined, and will be unset in the context manager,
# NOT_SET is not in the environment and will be set within the
# context manager, PATH_LIST is set and will be changed.
- with environment.preserve_environment('UNSET_ME', 'NOT_SET', 'PATH_LIST'):
- os.environ['NOT_SET'] = 'a'
- assert os.environ['NOT_SET'] == 'a'
-
- del os.environ['UNSET_ME']
- assert 'UNSET_ME' not in os.environ
-
- os.environ['PATH_LIST'] = 'changed'
-
- assert 'NOT_SET' not in os.environ
- assert os.environ['UNSET_ME'] == 'foo'
- assert os.environ['PATH_LIST'] == '/path/second:/path/third'
-
-
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize('files,expected,deleted', [
- # Sets two variables
- ((os.path.join(datadir, 'sourceme_first.sh'),),
- {'NEW_VAR': 'new', 'UNSET_ME': 'overridden'}, []),
- # Check if we can set a variable to different values depending
- # on command line parameters
- ((os.path.join(datadir, 'sourceme_parameters.sh'),),
- {'FOO': 'default'}, []),
- (([os.path.join(datadir, 'sourceme_parameters.sh'), 'intel64'],),
- {'FOO': 'intel64'}, []),
- # Check unsetting variables
- ((os.path.join(datadir, 'sourceme_second.sh'),),
- {'PATH_LIST': '/path/first:/path/second:/path/fourth'},
- ['EMPTY_PATH_LIST']),
- # Check that order of sourcing matters
- ((os.path.join(datadir, 'sourceme_unset.sh'),
- os.path.join(datadir, 'sourceme_first.sh')),
- {'NEW_VAR': 'new', 'UNSET_ME': 'overridden'}, []),
- ((os.path.join(datadir, 'sourceme_first.sh'),
- os.path.join(datadir, 'sourceme_unset.sh')),
- {'NEW_VAR': 'new'}, ['UNSET_ME']),
-
-])
-@pytest.mark.usefixtures('prepare_environment_for_tests')
+ with environment.preserve_environment("UNSET_ME", "NOT_SET", "PATH_LIST"):
+ os.environ["NOT_SET"] = "a"
+ assert os.environ["NOT_SET"] == "a"
+
+ del os.environ["UNSET_ME"]
+ assert "UNSET_ME" not in os.environ
+
+ os.environ["PATH_LIST"] = "changed"
+
+ assert "NOT_SET" not in os.environ
+ assert os.environ["UNSET_ME"] == "foo"
+ assert os.environ["PATH_LIST"] == "/path/second:/path/third"
+
+
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.parametrize(
+ "files,expected,deleted",
+ [
+ # Sets two variables
+ (
+ (os.path.join(datadir, "sourceme_first.sh"),),
+ {"NEW_VAR": "new", "UNSET_ME": "overridden"},
+ [],
+ ),
+ # Check if we can set a variable to different values depending
+ # on command line parameters
+ ((os.path.join(datadir, "sourceme_parameters.sh"),), {"FOO": "default"}, []),
+ (([os.path.join(datadir, "sourceme_parameters.sh"), "intel64"],), {"FOO": "intel64"}, []),
+ # Check unsetting variables
+ (
+ (os.path.join(datadir, "sourceme_second.sh"),),
+ {"PATH_LIST": "/path/first:/path/second:/path/fourth"},
+ ["EMPTY_PATH_LIST"],
+ ),
+ # Check that order of sourcing matters
+ (
+ (
+ os.path.join(datadir, "sourceme_unset.sh"),
+ os.path.join(datadir, "sourceme_first.sh"),
+ ),
+ {"NEW_VAR": "new", "UNSET_ME": "overridden"},
+ [],
+ ),
+ (
+ (
+ os.path.join(datadir, "sourceme_first.sh"),
+ os.path.join(datadir, "sourceme_unset.sh"),
+ ),
+ {"NEW_VAR": "new"},
+ ["UNSET_ME"],
+ ),
+ ],
+)
+@pytest.mark.usefixtures("prepare_environment_for_tests")
def test_environment_from_sourcing_files(files, expected, deleted):
env = environment.environment_after_sourcing_files(*files)
@@ -373,18 +377,21 @@ def test_environment_from_sourcing_files(files, expected, deleted):
def test_clear(env):
- env.set('A', 'dummy value')
+ env.set("A", "dummy value")
assert len(env) > 0
env.clear()
assert len(env) == 0
-@pytest.mark.parametrize('env,exclude,include', [
- # Check we can exclude a literal
- ({'SHLVL': '1'}, ['SHLVL'], []),
- # Check include takes precedence
- ({'SHLVL': '1'}, ['SHLVL'], ['SHLVL']),
-])
+@pytest.mark.parametrize(
+ "env,exclude,include",
+ [
+ # Check we can exclude a literal
+ ({"SHLVL": "1"}, ["SHLVL"], []),
+ # Check include takes precedence
+ ({"SHLVL": "1"}, ["SHLVL"], ["SHLVL"]),
+ ],
+)
def test_sanitize_literals(env, exclude, include):
after = environment.sanitize(env, exclude, include)
@@ -398,18 +405,30 @@ def test_sanitize_literals(env, exclude, include):
assert all(x not in after for x in exclude)
-@pytest.mark.parametrize('env,exclude,include,expected,deleted', [
- # Check we can exclude using a regex
- ({'SHLVL': '1'}, ['SH.*'], [], [], ['SHLVL']),
- # Check we can include using a regex
- ({'SHLVL': '1'}, ['SH.*'], ['SH.*'], ['SHLVL'], []),
- # Check regex to exclude Modules v4 related vars
- ({'MODULES_LMALTNAME': '1', 'MODULES_LMCONFLICT': '2'},
- ['MODULES_(.*)'], [], [], ['MODULES_LMALTNAME', 'MODULES_LMCONFLICT']),
- ({'A_modquar': '1', 'b_modquar': '2', 'C_modshare': '3'},
- [r'(\w*)_mod(quar|share)'], [], [],
- ['A_modquar', 'b_modquar', 'C_modshare']),
-])
+@pytest.mark.parametrize(
+ "env,exclude,include,expected,deleted",
+ [
+ # Check we can exclude using a regex
+ ({"SHLVL": "1"}, ["SH.*"], [], [], ["SHLVL"]),
+ # Check we can include using a regex
+ ({"SHLVL": "1"}, ["SH.*"], ["SH.*"], ["SHLVL"], []),
+ # Check regex to exclude Modules v4 related vars
+ (
+ {"MODULES_LMALTNAME": "1", "MODULES_LMCONFLICT": "2"},
+ ["MODULES_(.*)"],
+ [],
+ [],
+ ["MODULES_LMALTNAME", "MODULES_LMCONFLICT"],
+ ),
+ (
+ {"A_modquar": "1", "b_modquar": "2", "C_modshare": "3"},
+ [r"(\w*)_mod(quar|share)"],
+ [],
+ [],
+ ["A_modquar", "b_modquar", "C_modshare"],
+ ),
+ ],
+)
def test_sanitize_regex(env, exclude, include, expected, deleted):
after = environment.sanitize(env, exclude, include)
@@ -418,53 +437,72 @@ def test_sanitize_regex(env, exclude, include, expected, deleted):
assert all(x not in after for x in deleted)
-@pytest.mark.regression('12085')
-@pytest.mark.parametrize('before,after,search_list', [
- # Set environment variables
- ({}, {'FOO': 'foo'}, [environment.SetEnv('FOO', 'foo')]),
- # Unset environment variables
- ({'FOO': 'foo'}, {}, [environment.UnsetEnv('FOO')]),
- # Append paths to an environment variable
- ({'FOO_PATH': '/a/path'}, {'FOO_PATH': '/a/path:/b/path'},
- [environment.AppendPath('FOO_PATH', '/b/path')]),
- ({}, {'FOO_PATH': '/a/path' + os.sep + '/b/path'}, [
- environment.AppendPath('FOO_PATH', '/a/path' + os.sep + '/b/path')
- ]),
- ({'FOO_PATH': '/a/path:/b/path'}, {'FOO_PATH': '/b/path'}, [
- environment.RemovePath('FOO_PATH', '/a/path')
- ]),
- ({'FOO_PATH': '/a/path:/b/path'}, {'FOO_PATH': '/a/path:/c/path'}, [
- environment.RemovePath('FOO_PATH', '/b/path'),
- environment.AppendPath('FOO_PATH', '/c/path')
- ]),
- ({'FOO_PATH': '/a/path:/b/path'}, {'FOO_PATH': '/c/path:/a/path'}, [
- environment.RemovePath('FOO_PATH', '/b/path'),
- environment.PrependPath('FOO_PATH', '/c/path')
- ]),
- # Modify two variables in the same environment
- ({'FOO': 'foo', 'BAR': 'bar'}, {'FOO': 'baz', 'BAR': 'baz'}, [
- environment.SetEnv('FOO', 'baz'),
- environment.SetEnv('BAR', 'baz'),
- ]),
-])
+@pytest.mark.regression("12085")
+@pytest.mark.parametrize(
+ "before,after,search_list",
+ [
+ # Set environment variables
+ ({}, {"FOO": "foo"}, [environment.SetEnv("FOO", "foo")]),
+ # Unset environment variables
+ ({"FOO": "foo"}, {}, [environment.UnsetEnv("FOO")]),
+ # Append paths to an environment variable
+ (
+ {"FOO_PATH": "/a/path"},
+ {"FOO_PATH": "/a/path:/b/path"},
+ [environment.AppendPath("FOO_PATH", "/b/path")],
+ ),
+ (
+ {},
+ {"FOO_PATH": "/a/path" + os.sep + "/b/path"},
+ [environment.AppendPath("FOO_PATH", "/a/path" + os.sep + "/b/path")],
+ ),
+ (
+ {"FOO_PATH": "/a/path:/b/path"},
+ {"FOO_PATH": "/b/path"},
+ [environment.RemovePath("FOO_PATH", "/a/path")],
+ ),
+ (
+ {"FOO_PATH": "/a/path:/b/path"},
+ {"FOO_PATH": "/a/path:/c/path"},
+ [
+ environment.RemovePath("FOO_PATH", "/b/path"),
+ environment.AppendPath("FOO_PATH", "/c/path"),
+ ],
+ ),
+ (
+ {"FOO_PATH": "/a/path:/b/path"},
+ {"FOO_PATH": "/c/path:/a/path"},
+ [
+ environment.RemovePath("FOO_PATH", "/b/path"),
+ environment.PrependPath("FOO_PATH", "/c/path"),
+ ],
+ ),
+ # Modify two variables in the same environment
+ (
+ {"FOO": "foo", "BAR": "bar"},
+ {"FOO": "baz", "BAR": "baz"},
+ [
+ environment.SetEnv("FOO", "baz"),
+ environment.SetEnv("BAR", "baz"),
+ ],
+ ),
+ ],
+)
def test_from_environment_diff(before, after, search_list):
- mod = environment.EnvironmentModifications.from_environment_diff(
- before, after
- )
+ mod = environment.EnvironmentModifications.from_environment_diff(before, after)
for item in search_list:
assert item in mod
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="LMod not supported on Windows")
-@pytest.mark.regression('15775')
+@pytest.mark.skipif(sys.platform == "win32", reason="LMod not supported on Windows")
+@pytest.mark.regression("15775")
def test_exclude_lmod_variables():
# Construct the list of environment modifications
- file = os.path.join(datadir, 'sourceme_lmod.sh')
+ file = os.path.join(datadir, "sourceme_lmod.sh")
env = EnvironmentModifications.from_sourcing_file(file)
# Check that variables related to lmod are not in there
modifications = env.group_by_name()
- assert not any(x.startswith('LMOD_') for x in modifications)
+ assert not any(x.startswith("LMOD_") for x in modifications)
diff --git a/lib/spack/spack/test/fetch_strategy.py b/lib/spack/spack/test/fetch_strategy.py
index 28b71fd750..4609a5b0a8 100644
--- a/lib/spack/spack/test/fetch_strategy.py
+++ b/lib/spack/spack/test/fetch_strategy.py
@@ -13,5 +13,4 @@ def test_fetchstrategy_bad_url_scheme():
unsupported scheme fails as expected."""
with pytest.raises(ValueError):
- fetcher = from_url_scheme( # noqa: F841
- 'bogus-scheme://example.com/a/b/c')
+ fetcher = from_url_scheme("bogus-scheme://example.com/a/b/c") # noqa: F841
diff --git a/lib/spack/spack/test/flag_handlers.py b/lib/spack/spack/test/flag_handlers.py
index ec197515c2..d55a059769 100644
--- a/lib/spack/spack/test/flag_handlers.py
+++ b/lib/spack/spack/test/flag_handlers.py
@@ -23,69 +23,71 @@ def temp_env():
def add_o3_to_build_system_cflags(pkg, name, flags):
build_system_flags = []
- if name == 'cflags':
- build_system_flags.append('-O3')
+ if name == "cflags":
+ build_system_flags.append("-O3")
return flags, None, build_system_flags
-@pytest.mark.usefixtures('config', 'mock_packages')
+@pytest.mark.usefixtures("config", "mock_packages")
class TestFlagHandlers(object):
def test_no_build_system_flags(self, temp_env):
# Test that both autotools and cmake work when getting no build_system flags
- s1 = spack.spec.Spec('cmake-client').concretized()
+ s1 = spack.spec.Spec("cmake-client").concretized()
spack.build_environment.setup_package(s1.package, False)
- s2 = spack.spec.Spec('patchelf').concretized()
+ s2 = spack.spec.Spec("patchelf").concretized()
spack.build_environment.setup_package(s2.package, False)
# Use cppflags as a canary
- assert 'SPACK_CPPFLAGS' not in os.environ
- assert 'CPPFLAGS' not in os.environ
+ assert "SPACK_CPPFLAGS" not in os.environ
+ assert "CPPFLAGS" not in os.environ
def test_unbound_method(self, temp_env):
# Other tests test flag_handlers set as bound methods and functions.
# This tests an unbound method in python2 (no change in python3).
- s = spack.spec.Spec('mpileaks cppflags=-g').concretized()
+ s = spack.spec.Spec("mpileaks cppflags=-g").concretized()
s.package.flag_handler = s.package.__class__.inject_flags
spack.build_environment.setup_package(s.package, False)
- assert os.environ['SPACK_CPPFLAGS'] == '-g'
- assert 'CPPFLAGS' not in os.environ
+ assert os.environ["SPACK_CPPFLAGS"] == "-g"
+ assert "CPPFLAGS" not in os.environ
def test_inject_flags(self, temp_env):
- s = spack.spec.Spec('mpileaks cppflags=-g').concretized()
+ s = spack.spec.Spec("mpileaks cppflags=-g").concretized()
s.package.flag_handler = inject_flags
spack.build_environment.setup_package(s.package, False)
- assert os.environ['SPACK_CPPFLAGS'] == '-g'
- assert 'CPPFLAGS' not in os.environ
+ assert os.environ["SPACK_CPPFLAGS"] == "-g"
+ assert "CPPFLAGS" not in os.environ
def test_env_flags(self, temp_env):
- s = spack.spec.Spec('mpileaks cppflags=-g').concretized()
+ s = spack.spec.Spec("mpileaks cppflags=-g").concretized()
s.package.flag_handler = env_flags
spack.build_environment.setup_package(s.package, False)
- assert os.environ['CPPFLAGS'] == '-g'
- assert 'SPACK_CPPFLAGS' not in os.environ
+ assert os.environ["CPPFLAGS"] == "-g"
+ assert "SPACK_CPPFLAGS" not in os.environ
def test_build_system_flags_cmake(self, temp_env):
- s = spack.spec.Spec('cmake-client cppflags=-g').concretized()
+ s = spack.spec.Spec("cmake-client cppflags=-g").concretized()
s.package.flag_handler = build_system_flags
spack.build_environment.setup_package(s.package, False)
- assert 'SPACK_CPPFLAGS' not in os.environ
- assert 'CPPFLAGS' not in os.environ
+ assert "SPACK_CPPFLAGS" not in os.environ
+ assert "CPPFLAGS" not in os.environ
assert set(s.package.cmake_flag_args) == {
- '-DCMAKE_C_FLAGS=-g', '-DCMAKE_CXX_FLAGS=-g', '-DCMAKE_Fortran_FLAGS=-g'
+ "-DCMAKE_C_FLAGS=-g",
+ "-DCMAKE_CXX_FLAGS=-g",
+ "-DCMAKE_Fortran_FLAGS=-g",
}
def test_build_system_flags_autotools(self, temp_env):
- s = spack.spec.Spec('patchelf cppflags=-g').concretized()
+ s = spack.spec.Spec("patchelf cppflags=-g").concretized()
s.package.flag_handler = build_system_flags
spack.build_environment.setup_package(s.package, False)
- assert 'SPACK_CPPFLAGS' not in os.environ
- assert 'CPPFLAGS' not in os.environ
- assert 'CPPFLAGS=-g' in s.package.configure_flag_args
+ assert "SPACK_CPPFLAGS" not in os.environ
+ assert "CPPFLAGS" not in os.environ
+ assert "CPPFLAGS=-g" in s.package.configure_flag_args
def test_build_system_flags_not_implemented(self, temp_env):
"""Test the command line flags method raises a NotImplementedError"""
- s = spack.spec.Spec('mpileaks cppflags=-g').concretized()
+ s = spack.spec.Spec("mpileaks cppflags=-g").concretized()
s.package.flag_handler = build_system_flags
try:
spack.build_environment.setup_package(s.package, False)
@@ -94,42 +96,42 @@ class TestFlagHandlers(object):
assert True
def test_add_build_system_flags_autotools(self, temp_env):
- s = spack.spec.Spec('patchelf cppflags=-g').concretized()
+ s = spack.spec.Spec("patchelf cppflags=-g").concretized()
s.package.flag_handler = add_o3_to_build_system_cflags
spack.build_environment.setup_package(s.package, False)
- assert '-g' in os.environ['SPACK_CPPFLAGS']
- assert 'CPPFLAGS' not in os.environ
- assert s.package.configure_flag_args == ['CFLAGS=-O3']
+ assert "-g" in os.environ["SPACK_CPPFLAGS"]
+ assert "CPPFLAGS" not in os.environ
+ assert s.package.configure_flag_args == ["CFLAGS=-O3"]
def test_add_build_system_flags_cmake(self, temp_env):
- s = spack.spec.Spec('cmake-client cppflags=-g').concretized()
+ s = spack.spec.Spec("cmake-client cppflags=-g").concretized()
s.package.flag_handler = add_o3_to_build_system_cflags
spack.build_environment.setup_package(s.package, False)
- assert '-g' in os.environ['SPACK_CPPFLAGS']
- assert 'CPPFLAGS' not in os.environ
- assert s.package.cmake_flag_args == ['-DCMAKE_C_FLAGS=-O3']
+ assert "-g" in os.environ["SPACK_CPPFLAGS"]
+ assert "CPPFLAGS" not in os.environ
+ assert s.package.cmake_flag_args == ["-DCMAKE_C_FLAGS=-O3"]
def test_ld_flags_cmake(self, temp_env):
- s = spack.spec.Spec('cmake-client ldflags=-mthreads').concretized()
+ s = spack.spec.Spec("cmake-client ldflags=-mthreads").concretized()
s.package.flag_handler = build_system_flags
spack.build_environment.setup_package(s.package, False)
- assert 'SPACK_LDFLAGS' not in os.environ
- assert 'LDFLAGS' not in os.environ
+ assert "SPACK_LDFLAGS" not in os.environ
+ assert "LDFLAGS" not in os.environ
assert set(s.package.cmake_flag_args) == {
- '-DCMAKE_EXE_LINKER_FLAGS=-mthreads',
- '-DCMAKE_MODULE_LINKER_FLAGS=-mthreads',
- '-DCMAKE_SHARED_LINKER_FLAGS=-mthreads',
- '-DCMAKE_STATIC_LINKER_FLAGS=-mthreads'
+ "-DCMAKE_EXE_LINKER_FLAGS=-mthreads",
+ "-DCMAKE_MODULE_LINKER_FLAGS=-mthreads",
+ "-DCMAKE_SHARED_LINKER_FLAGS=-mthreads",
+ "-DCMAKE_STATIC_LINKER_FLAGS=-mthreads",
}
def test_ld_libs_cmake(self, temp_env):
- s = spack.spec.Spec('cmake-client ldlibs=-lfoo').concretized()
+ s = spack.spec.Spec("cmake-client ldlibs=-lfoo").concretized()
s.package.flag_handler = build_system_flags
spack.build_environment.setup_package(s.package, False)
- assert 'SPACK_LDLIBS' not in os.environ
- assert 'LDLIBS' not in os.environ
+ assert "SPACK_LDLIBS" not in os.environ
+ assert "LDLIBS" not in os.environ
assert set(s.package.cmake_flag_args) == {
- '-DCMAKE_C_STANDARD_LIBRARIES=-lfoo',
- '-DCMAKE_CXX_STANDARD_LIBRARIES=-lfoo',
- '-DCMAKE_Fortran_STANDARD_LIBRARIES=-lfoo'
+ "-DCMAKE_C_STANDARD_LIBRARIES=-lfoo",
+ "-DCMAKE_CXX_STANDARD_LIBRARIES=-lfoo",
+ "-DCMAKE_Fortran_STANDARD_LIBRARIES=-lfoo",
}
diff --git a/lib/spack/spack/test/gcs_fetch.py b/lib/spack/spack/test/gcs_fetch.py
index ab9df6ad1b..90657d4693 100644
--- a/lib/spack/spack/test/gcs_fetch.py
+++ b/lib/spack/spack/test/gcs_fetch.py
@@ -12,21 +12,21 @@ import spack.fetch_strategy
import spack.stage
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_gcsfetchstrategy_without_url(_fetch_method):
"""Ensure constructor with no URL fails."""
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(ValueError):
spack.fetch_strategy.GCSFetchStrategy(None)
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_gcsfetchstrategy_bad_url(tmpdir, _fetch_method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
- with spack.config.override('config:url_fetch_method', _fetch_method):
- fetcher = spack.fetch_strategy.GCSFetchStrategy(url='file:///does-not-exist')
+ with spack.config.override("config:url_fetch_method", _fetch_method):
+ fetcher = spack.fetch_strategy.GCSFetchStrategy(url="file:///does-not-exist")
assert fetcher is not None
with spack.stage.Stage(fetcher, path=testpath) as stage:
@@ -36,19 +36,20 @@ def test_gcsfetchstrategy_bad_url(tmpdir, _fetch_method):
fetcher.fetch()
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_gcsfetchstrategy_downloaded(tmpdir, _fetch_method):
"""Ensure fetch with archive file already downloaded is a noop."""
testpath = str(tmpdir)
- archive = os.path.join(testpath, 'gcs.tar.gz')
+ archive = os.path.join(testpath, "gcs.tar.gz")
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
+
class Archived_GCSFS(spack.fetch_strategy.GCSFetchStrategy):
@property
def archive_file(self):
return archive
- url = 'gcs:///{0}'.format(archive)
+ url = "gcs:///{0}".format(archive)
fetcher = Archived_GCSFS(url=url)
with spack.stage.Stage(fetcher, path=testpath):
fetcher.fetch()
diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py
index cb3d71d5b3..0f9f587421 100644
--- a/lib/spack/spack/test/git_fetch.py
+++ b/lib/spack/spack/test/git_fetch.py
@@ -19,15 +19,13 @@ from spack.stage import Stage
from spack.util.executable import which
from spack.version import ver
-pytestmark = pytest.mark.skipif(
- not which('git'), reason='requires git to be installed')
+pytestmark = pytest.mark.skipif(not which("git"), reason="requires git to be installed")
-_mock_transport_error = 'Mock HTTP transport error'
+_mock_transport_error = "Mock HTTP transport error"
-@pytest.fixture(params=[None, '1.8.5.2', '1.8.5.1',
- '1.7.10', '1.7.1', '1.7.0'])
+@pytest.fixture(params=[None, "1.8.5.2", "1.8.5.1", "1.7.10", "1.7.1", "1.7.0"])
def git_version(request, monkeypatch):
"""Tests GitFetchStrategy behavior for different git versions.
@@ -36,9 +34,8 @@ def git_version(request, monkeypatch):
paths for old versions still work, we fake it out here and make it
use the backward-compatibility code paths with newer git versions.
"""
- git = which('git', required=True)
- real_git_version = (
- spack.fetch_strategy.GitFetchStrategy.version_from_git(git))
+ git = which("git", required=True)
+ real_git_version = spack.fetch_strategy.GitFetchStrategy.version_from_git(git)
if request.param is None:
# Don't patch; run with the real git_version method.
@@ -51,7 +48,7 @@ def git_version(request, monkeypatch):
# Patch the fetch strategy to think it's using a lower git version.
# we use this to test what we'd need to do with older git versions
# using a newer git installation.
- monkeypatch.setattr(GitFetchStrategy, 'git_version', test_git_version)
+ monkeypatch.setattr(GitFetchStrategy, "git_version", test_git_version)
yield test_git_version
@@ -61,14 +58,15 @@ def mock_bad_git(monkeypatch):
Test GitFetchStrategy behavior with a bad git command for git >= 1.7.1
to trigger a SpackError.
"""
+
def bad_git(*args, **kwargs):
"""Raise a SpackError with the transport message."""
raise spack.error.SpackError(_mock_transport_error)
# Patch the fetch strategy to think it's using a git version that
# will error out when git is called.
- monkeypatch.setattr(GitFetchStrategy, 'git', bad_git)
- monkeypatch.setattr(GitFetchStrategy, 'git_version', ver('1.7.1'))
+ monkeypatch.setattr(GitFetchStrategy, "git", bad_git)
+ monkeypatch.setattr(GitFetchStrategy, "git_version", ver("1.7.1"))
yield
@@ -77,20 +75,16 @@ def test_bad_git(tmpdir, mock_bad_git):
testpath = str(tmpdir)
with pytest.raises(spack.error.SpackError):
- fetcher = GitFetchStrategy(git='file:///not-a-real-git-repo')
+ fetcher = GitFetchStrategy(git="file:///not-a-real-git-repo")
with Stage(fetcher, path=testpath):
fetcher.fetch()
-@pytest.mark.parametrize("type_of_test", ['default', 'branch', 'tag', 'commit'])
+@pytest.mark.parametrize("type_of_test", ["default", "branch", "tag", "commit"])
@pytest.mark.parametrize("secure", [True, False])
-def test_fetch(type_of_test,
- secure,
- mock_git_repository,
- config,
- mutable_mock_repo,
- git_version,
- monkeypatch):
+def test_fetch(
+ type_of_test, secure, mock_git_repository, config, mutable_mock_repo, git_version, monkeypatch
+):
"""Tries to:
1. Fetch the repo using a fetch strategy constructed with
@@ -104,22 +98,22 @@ def test_fetch(type_of_test,
t = mock_git_repository.checks[type_of_test]
h = mock_git_repository.hash
- pkg_class = spack.repo.path.get_pkg_class('git-test')
+ pkg_class = spack.repo.path.get_pkg_class("git-test")
# This would fail using the default-no-per-version-git check but that
# isn't included in this test
- monkeypatch.delattr(pkg_class, 'git')
+ monkeypatch.delattr(pkg_class, "git")
# Construct the package under test
- s = Spec('git-test').concretized()
- monkeypatch.setitem(s.package.versions, ver('git'), t.args)
+ s = Spec("git-test").concretized()
+ monkeypatch.setitem(s.package.versions, ver("git"), t.args)
# Enter the stage directory and check some properties
with s.package.stage:
- with spack.config.override('config:verify_ssl', secure):
+ with spack.config.override("config:verify_ssl", secure):
s.package.do_stage()
with working_dir(s.package.stage.source_path):
- assert h('HEAD') == h(t.revision)
+ assert h("HEAD") == h(t.revision)
file_path = os.path.join(s.package.stage.source_path, t.file)
assert os.path.isdir(s.package.stage.source_path)
@@ -128,7 +122,7 @@ def test_fetch(type_of_test,
os.unlink(file_path)
assert not os.path.isfile(file_path)
- untracked_file = 'foobarbaz'
+ untracked_file = "foobarbaz"
touch(untracked_file)
assert os.path.isfile(untracked_file)
s.package.do_restage()
@@ -137,85 +131,80 @@ def test_fetch(type_of_test,
assert os.path.isdir(s.package.stage.source_path)
assert os.path.isfile(file_path)
- assert h('HEAD') == h(t.revision)
+ assert h("HEAD") == h(t.revision)
@pytest.mark.disable_clean_stage_check
def test_fetch_pkg_attr_submodule_init(
- mock_git_repository,
- config,
- mutable_mock_repo,
- monkeypatch,
- mock_stage):
+ mock_git_repository, config, mutable_mock_repo, monkeypatch, mock_stage
+):
"""In this case the version() args do not contain a 'git' URL, so
the fetcher must be assembled using the Package-level 'git' attribute.
This test ensures that the submodules are properly initialized and the
expected branch file is present.
"""
- t = mock_git_repository.checks['default-no-per-version-git']
- pkg_class = spack.repo.path.get_pkg_class('git-test')
+ t = mock_git_repository.checks["default-no-per-version-git"]
+ pkg_class = spack.repo.path.get_pkg_class("git-test")
# For this test, the version args don't specify 'git' (which is
# the majority of version specifications)
- monkeypatch.setattr(pkg_class, 'git', mock_git_repository.url)
+ monkeypatch.setattr(pkg_class, "git", mock_git_repository.url)
# Construct the package under test
- s = Spec('git-test').concretized()
- monkeypatch.setitem(s.package.versions, ver('git'), t.args)
+ s = Spec("git-test").concretized()
+ monkeypatch.setitem(s.package.versions, ver("git"), t.args)
s.package.do_stage()
collected_fnames = set()
for root, dirs, files in os.walk(s.package.stage.source_path):
collected_fnames.update(files)
# The submodules generate files with the prefix "r0_file_"
- assert {'r0_file_0', 'r0_file_1', t.file} < collected_fnames
-
-
-@pytest.mark.skipif(str(spack.platforms.host()) == 'windows',
- reason=('Git fails to clone because the src/dst paths'
- ' are too long: the name of the staging directory'
- ' for ad-hoc Git commit versions is longer than'
- ' other staged sources'))
+ assert {"r0_file_0", "r0_file_1", t.file} < collected_fnames
+
+
+@pytest.mark.skipif(
+ str(spack.platforms.host()) == "windows",
+ reason=(
+ "Git fails to clone because the src/dst paths"
+ " are too long: the name of the staging directory"
+ " for ad-hoc Git commit versions is longer than"
+ " other staged sources"
+ ),
+)
@pytest.mark.disable_clean_stage_check
def test_adhoc_version_submodules(
- mock_git_repository,
- config,
- mutable_mock_repo,
- monkeypatch,
- mock_stage):
+ mock_git_repository, config, mutable_mock_repo, monkeypatch, mock_stage
+):
- t = mock_git_repository.checks['tag']
+ t = mock_git_repository.checks["tag"]
# Construct the package under test
- pkg_class = spack.repo.path.get_pkg_class('git-test')
- monkeypatch.setitem(pkg_class.versions, ver('git'), t.args)
- monkeypatch.setattr(pkg_class, 'git', 'file://%s' % mock_git_repository.path,
- raising=False)
+ pkg_class = spack.repo.path.get_pkg_class("git-test")
+ monkeypatch.setitem(pkg_class.versions, ver("git"), t.args)
+ monkeypatch.setattr(pkg_class, "git", "file://%s" % mock_git_repository.path, raising=False)
- spec = Spec('git-test@{0}'.format(mock_git_repository.unversioned_commit))
+ spec = Spec("git-test@{0}".format(mock_git_repository.unversioned_commit))
spec.concretize()
spec.package.do_stage()
collected_fnames = set()
for root, dirs, files in os.walk(spec.package.stage.source_path):
collected_fnames.update(files)
# The submodules generate files with the prefix "r0_file_"
- assert set(['r0_file_0', 'r0_file_1']) < collected_fnames
+ assert set(["r0_file_0", "r0_file_1"]) < collected_fnames
-@pytest.mark.parametrize("type_of_test", ['branch', 'commit'])
-def test_debug_fetch(
- mock_packages, type_of_test, mock_git_repository, config, monkeypatch
-):
+@pytest.mark.parametrize("type_of_test", ["branch", "commit"])
+def test_debug_fetch(mock_packages, type_of_test, mock_git_repository, config, monkeypatch):
"""Fetch the repo with debug enabled."""
# Retrieve the right test parameters
t = mock_git_repository.checks[type_of_test]
# Construct the package under test
- s = Spec('git-test').concretized()
- monkeypatch.setitem(s.package.versions, ver('git'), t.args)
+ s = Spec("git-test").concretized()
+ monkeypatch.setitem(s.package.versions, ver("git"), t.args)
# Fetch then ensure source path exists
with s.package.stage:
- with spack.config.override('config:debug', True):
+ with spack.config.override("config:debug", True):
s.package.do_fetch()
assert os.path.isdir(s.package.stage.source_path)
@@ -224,68 +213,67 @@ def test_git_extra_fetch(tmpdir):
"""Ensure a fetch after 'expanding' is effectively a no-op."""
testpath = str(tmpdir)
- fetcher = GitFetchStrategy(git='file:///not-a-real-git-repo')
+ fetcher = GitFetchStrategy(git="file:///not-a-real-git-repo")
with Stage(fetcher, path=testpath) as stage:
mkdirp(stage.source_path)
- fetcher.fetch() # Use fetcher to fetch for code coverage
+ fetcher.fetch() # Use fetcher to fetch for code coverage
shutil.rmtree(stage.source_path)
def test_needs_stage():
"""Trigger a NoStageError when attempt a fetch without a stage."""
- with pytest.raises(spack.fetch_strategy.NoStageError,
- match=r"set_stage.*before calling fetch"):
- fetcher = GitFetchStrategy(git='file:///not-a-real-git-repo')
+ with pytest.raises(
+ spack.fetch_strategy.NoStageError, match=r"set_stage.*before calling fetch"
+ ):
+ fetcher = GitFetchStrategy(git="file:///not-a-real-git-repo")
fetcher.fetch()
@pytest.mark.parametrize("get_full_repo", [True, False])
-def test_get_full_repo(get_full_repo, git_version, mock_git_repository,
- config, mutable_mock_repo, monkeypatch):
+def test_get_full_repo(
+ get_full_repo, git_version, mock_git_repository, config, mutable_mock_repo, monkeypatch
+):
"""Ensure that we can clone a full repository."""
- if git_version < ver('1.7.1'):
- pytest.skip('Not testing get_full_repo for older git {0}'.
- format(git_version))
+ if git_version < ver("1.7.1"):
+ pytest.skip("Not testing get_full_repo for older git {0}".format(git_version))
secure = True
- type_of_test = 'tag-branch'
+ type_of_test = "tag-branch"
t = mock_git_repository.checks[type_of_test]
- s = Spec('git-test').concretized()
+ s = Spec("git-test").concretized()
args = copy.copy(t.args)
- args['get_full_repo'] = get_full_repo
- monkeypatch.setitem(s.package.versions, ver('git'), args)
+ args["get_full_repo"] = get_full_repo
+ monkeypatch.setitem(s.package.versions, ver("git"), args)
with s.package.stage:
- with spack.config.override('config:verify_ssl', secure):
+ with spack.config.override("config:verify_ssl", secure):
s.package.do_stage()
with working_dir(s.package.stage.source_path):
- branches\
- = mock_git_repository.git_exe('branch', '-a',
- output=str).splitlines()
+ branches = mock_git_repository.git_exe("branch", "-a", output=str).splitlines()
nbranches = len(branches)
- commits\
- = mock_git_repository.\
- git_exe('log', '--graph',
- '--pretty=format:%h -%d %s (%ci) <%an>',
- '--abbrev-commit',
- output=str).splitlines()
+ commits = mock_git_repository.git_exe(
+ "log",
+ "--graph",
+ "--pretty=format:%h -%d %s (%ci) <%an>",
+ "--abbrev-commit",
+ output=str,
+ ).splitlines()
ncommits = len(commits)
if get_full_repo:
- assert(nbranches >= 5)
- assert(ncommits == 2)
+ assert nbranches >= 5
+ assert ncommits == 2
else:
- assert(nbranches == 2)
- assert(ncommits == 1)
+ assert nbranches == 2
+ assert ncommits == 1
@pytest.mark.disable_clean_stage_check
@pytest.mark.parametrize("submodules", [True, False])
-def test_gitsubmodule(submodules, mock_git_repository, config,
- mutable_mock_repo, monkeypatch):
+def test_gitsubmodule(submodules, mock_git_repository, config, mutable_mock_repo, monkeypatch):
"""
Test GitFetchStrategy behavior with submodules. This package
has a `submodules` property which is always True: when a specific
@@ -294,20 +282,21 @@ def test_gitsubmodule(submodules, mock_git_repository, config,
submodules *not* be initialized, this should override the
Package-level request.
"""
- type_of_test = 'tag-branch'
+ type_of_test = "tag-branch"
t = mock_git_repository.checks[type_of_test]
# Construct the package under test
- s = Spec('git-test').concretized()
+ s = Spec("git-test").concretized()
args = copy.copy(t.args)
- args['submodules'] = submodules
- monkeypatch.setitem(s.package.versions, ver('git'), args)
+ args["submodules"] = submodules
+ monkeypatch.setitem(s.package.versions, ver("git"), args)
s.package.do_stage()
with working_dir(s.package.stage.source_path):
for submodule_count in range(2):
- file_path = os.path.join(s.package.stage.source_path,
- 'third_party/submodule{0}/r0_file_{0}'
- .format(submodule_count))
+ file_path = os.path.join(
+ s.package.stage.source_path,
+ "third_party/submodule{0}/r0_file_{0}".format(submodule_count),
+ )
if submodules:
assert os.path.isfile(file_path)
else:
@@ -315,56 +304,48 @@ def test_gitsubmodule(submodules, mock_git_repository, config,
@pytest.mark.disable_clean_stage_check
-def test_gitsubmodules_callable(
- mock_git_repository, config, mutable_mock_repo, monkeypatch
-):
+def test_gitsubmodules_callable(mock_git_repository, config, mutable_mock_repo, monkeypatch):
"""
Test GitFetchStrategy behavior with submodules selected after concretization
"""
+
def submodules_callback(package):
- name = 'third_party/submodule0'
+ name = "third_party/submodule0"
return [name]
- type_of_test = 'tag-branch'
+ type_of_test = "tag-branch"
t = mock_git_repository.checks[type_of_test]
# Construct the package under test
- s = Spec('git-test').concretized()
+ s = Spec("git-test").concretized()
args = copy.copy(t.args)
- args['submodules'] = submodules_callback
- monkeypatch.setitem(s.package.versions, ver('git'), args)
+ args["submodules"] = submodules_callback
+ monkeypatch.setitem(s.package.versions, ver("git"), args)
s.package.do_stage()
with working_dir(s.package.stage.source_path):
- file_path = os.path.join(s.package.stage.source_path,
- 'third_party/submodule0/r0_file_0')
+ file_path = os.path.join(s.package.stage.source_path, "third_party/submodule0/r0_file_0")
assert os.path.isfile(file_path)
- file_path = os.path.join(s.package.stage.source_path,
- 'third_party/submodule1/r0_file_1')
+ file_path = os.path.join(s.package.stage.source_path, "third_party/submodule1/r0_file_1")
assert not os.path.isfile(file_path)
@pytest.mark.disable_clean_stage_check
-def test_gitsubmodules_delete(
- mock_git_repository, config, mutable_mock_repo, monkeypatch
-):
+def test_gitsubmodules_delete(mock_git_repository, config, mutable_mock_repo, monkeypatch):
"""
Test GitFetchStrategy behavior with submodules_delete
"""
- type_of_test = 'tag-branch'
+ type_of_test = "tag-branch"
t = mock_git_repository.checks[type_of_test]
# Construct the package under test
- s = Spec('git-test').concretized()
+ s = Spec("git-test").concretized()
args = copy.copy(t.args)
- args['submodules'] = True
- args['submodules_delete'] = ['third_party/submodule0',
- 'third_party/submodule1']
- monkeypatch.setitem(s.package.versions, ver('git'), args)
+ args["submodules"] = True
+ args["submodules_delete"] = ["third_party/submodule0", "third_party/submodule1"]
+ monkeypatch.setitem(s.package.versions, ver("git"), args)
s.package.do_stage()
with working_dir(s.package.stage.source_path):
- file_path = os.path.join(s.package.stage.source_path,
- 'third_party/submodule0')
+ file_path = os.path.join(s.package.stage.source_path, "third_party/submodule0")
assert not os.path.isdir(file_path)
- file_path = os.path.join(s.package.stage.source_path,
- 'third_party/submodule1')
+ file_path = os.path.join(s.package.stage.source_path, "third_party/submodule1")
assert not os.path.isdir(file_path)
diff --git a/lib/spack/spack/test/graph.py b/lib/spack/spack/test/graph.py
index f3a7db1ea0..e7aafe4b0d 100644
--- a/lib/spack/spack/test/graph.py
+++ b/lib/spack/spack/test/graph.py
@@ -12,18 +12,18 @@ import spack.repo
import spack.spec
-@pytest.mark.parametrize('spec_str', ['mpileaks', 'callpath'])
+@pytest.mark.parametrize("spec_str", ["mpileaks", "callpath"])
def test_topo_sort(spec_str, config, mock_packages):
"""Ensure nodes are ordered topologically"""
s = spack.spec.Spec(spec_str).concretized()
nodes = spack.graph.topological_sort(s)
for idx, current in enumerate(nodes):
- assert all(following not in current for following in nodes[idx + 1:])
+ assert all(following not in current for following in nodes[idx + 1 :])
def test_static_graph_mpileaks(config, mock_packages):
"""Test a static spack graph for a simple package."""
- s = spack.spec.Spec('mpileaks').normalized()
+ s = spack.spec.Spec("mpileaks").normalized()
stream = six.StringIO()
spack.graph.graph_dot([s], static=True, out=stream)
@@ -31,72 +31,68 @@ def test_static_graph_mpileaks(config, mock_packages):
dot = stream.getvalue()
assert ' "mpileaks" [label="mpileaks"]\n' in dot
- assert ' "dyninst" [label="dyninst"]\n' in dot
+ assert ' "dyninst" [label="dyninst"]\n' in dot
assert ' "callpath" [label="callpath"]\n' in dot
- assert ' "libelf" [label="libelf"]\n' in dot
+ assert ' "libelf" [label="libelf"]\n' in dot
assert ' "libdwarf" [label="libdwarf"]\n' in dot
- mpi_providers = spack.repo.path.providers_for('mpi')
+ mpi_providers = spack.repo.path.providers_for("mpi")
for spec in mpi_providers:
assert ('"mpileaks" -> "%s"' % spec.name) in dot
assert ('"callpath" -> "%s"' % spec.name) in dot
- assert ' "dyninst" -> "libdwarf"\n' in dot
- assert ' "callpath" -> "dyninst"\n' in dot
- assert ' "libdwarf" -> "libelf"\n' in dot
+ assert ' "dyninst" -> "libdwarf"\n' in dot
+ assert ' "callpath" -> "dyninst"\n' in dot
+ assert ' "libdwarf" -> "libelf"\n' in dot
assert ' "mpileaks" -> "callpath"\n' in dot
- assert ' "dyninst" -> "libelf"\n' in dot
+ assert ' "dyninst" -> "libelf"\n' in dot
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_dynamic_dot_graph_mpileaks(mock_packages, config):
"""Test dynamically graphing the mpileaks package."""
- s = spack.spec.Spec('mpileaks').concretized()
+ s = spack.spec.Spec("mpileaks").concretized()
stream = six.StringIO()
spack.graph.graph_dot([s], static=False, out=stream)
dot = stream.getvalue()
- nodes_to_check = ['mpileaks', 'mpi', 'callpath', 'dyninst', 'libdwarf', 'libelf']
+ nodes_to_check = ["mpileaks", "mpi", "callpath", "dyninst", "libdwarf", "libelf"]
hashes = {}
for name in nodes_to_check:
current = s[name]
current_hash = current.dag_hash()
hashes[name] = current_hash
- assert ' "{0}" [label="{1}"]\n'.format(
- current_hash, spack.graph.node_label(current)
- ) in dot
+ assert (
+ ' "{0}" [label="{1}"]\n'.format(current_hash, spack.graph.node_label(current)) in dot
+ )
dependencies_to_check = [
- ('dyninst', 'libdwarf'),
- ('callpath', 'dyninst'),
- ('mpileaks', 'mpi'),
- ('libdwarf', 'libelf'),
- ('callpath', 'mpi'),
- ('mpileaks', 'callpath'),
- ('dyninst', 'libelf')
+ ("dyninst", "libdwarf"),
+ ("callpath", "dyninst"),
+ ("mpileaks", "mpi"),
+ ("libdwarf", "libelf"),
+ ("callpath", "mpi"),
+ ("mpileaks", "callpath"),
+ ("dyninst", "libelf"),
]
for parent, child in dependencies_to_check:
assert ' "{0}" -> "{1}"\n'.format(hashes[parent], hashes[child]) in dot
-@pytest.mark.skipif(
- sys.version_info < (3, 6), reason="Ordering might not be consistent"
-)
+@pytest.mark.skipif(sys.version_info < (3, 6), reason="Ordering might not be consistent")
def test_ascii_graph_mpileaks(config, mock_packages, monkeypatch):
- monkeypatch.setattr(
- spack.graph.AsciiGraph, '_node_label',
- lambda self, node: node.name
- )
- s = spack.spec.Spec('mpileaks').concretized()
+ monkeypatch.setattr(spack.graph.AsciiGraph, "_node_label", lambda self, node: node.name)
+ s = spack.spec.Spec("mpileaks").concretized()
stream = six.StringIO()
graph = spack.graph.AsciiGraph()
graph.write(s, out=stream, color=False)
graph_str = stream.getvalue()
- graph_str = '\n'.join([line.rstrip() for line in graph_str.split('\n')])
+ graph_str = "\n".join([line.rstrip() for line in graph_str.split("\n")])
- assert graph_str == r'''o mpileaks
+ assert (
+ graph_str
+ == r"""o mpileaks
|\
| o callpath
|/|
@@ -107,12 +103,13 @@ o dyninst
| o libdwarf
|/
o libelf
-'''
+"""
+ )
def test_topological_sort_filtering_dependency_types(config, mock_packages):
- s = spack.spec.Spec('both-link-and-build-dep-a').concretized()
+ s = spack.spec.Spec("both-link-and-build-dep-a").concretized()
- nodes = spack.graph.topological_sort(s, deptype=('link',))
+ nodes = spack.graph.topological_sort(s, deptype=("link",))
names = [s.name for s in nodes]
- assert names == ['both-link-and-build-dep-c', 'both-link-and-build-dep-a']
+ assert names == ["both-link-and-build-dep-c", "both-link-and-build-dep-a"]
diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py
index c086e5cdc9..400fc79563 100644
--- a/lib/spack/spack/test/hg_fetch.py
+++ b/lib/spack/spack/test/hg_fetch.py
@@ -20,22 +20,15 @@ from spack.version import ver
# Test functionality covered is supported on Windows, but currently failing
# and expected to be fixed
-pytestmark = [pytest.mark.skipif(
- not which('hg'), reason='requires mercurial to be installed'),
- pytest.mark.skipif(
- sys.platform == 'win32', reason="Failing on Win")]
+pytestmark = [
+ pytest.mark.skipif(not which("hg"), reason="requires mercurial to be installed"),
+ pytest.mark.skipif(sys.platform == "win32", reason="Failing on Win"),
+]
-@pytest.mark.parametrize("type_of_test", ['default', 'rev0'])
+@pytest.mark.parametrize("type_of_test", ["default", "rev0"])
@pytest.mark.parametrize("secure", [True, False])
-def test_fetch(
- type_of_test,
- secure,
- mock_hg_repository,
- config,
- mutable_mock_repo,
- monkeypatch
-):
+def test_fetch(type_of_test, secure, mock_hg_repository, config, mutable_mock_repo, monkeypatch):
"""Tries to:
1. Fetch the repo using a fetch strategy constructed with
@@ -50,12 +43,12 @@ def test_fetch(
h = mock_hg_repository.hash
# Construct the package under test
- s = Spec('hg-test').concretized()
- monkeypatch.setitem(s.package.versions, ver('hg'), t.args)
+ s = Spec("hg-test").concretized()
+ monkeypatch.setitem(s.package.versions, ver("hg"), t.args)
# Enter the stage directory and check some properties
with s.package.stage:
- with spack.config.override('config:verify_ssl', secure):
+ with spack.config.override("config:verify_ssl", secure):
s.package.do_stage()
with working_dir(s.package.stage.source_path):
@@ -68,7 +61,7 @@ def test_fetch(
os.unlink(file_path)
assert not os.path.isfile(file_path)
- untracked_file = 'foobarbaz'
+ untracked_file = "foobarbaz"
touch(untracked_file)
assert os.path.isfile(untracked_file)
s.package.do_restage()
@@ -84,7 +77,7 @@ def test_hg_extra_fetch(tmpdir):
"""Ensure a fetch after expanding is effectively a no-op."""
testpath = str(tmpdir)
- fetcher = HgFetchStrategy(hg='file:///not-a-real-hg-repo')
+ fetcher = HgFetchStrategy(hg="file:///not-a-real-hg-repo")
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path
mkdirp(source_path)
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index f214c340df..079ff8851c 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -27,12 +27,11 @@ from spack.spec import Spec
def find_nothing(*args):
- raise spack.repo.UnknownPackageError(
- 'Repo package access is disabled for test')
+ raise spack.repo.UnknownPackageError("Repo package access is disabled for test")
def test_install_and_uninstall(install_mockery, mock_fetch, monkeypatch):
- spec = Spec('trivial-install-test-package').concretized()
+ spec = Spec("trivial-install-test-package").concretized()
spec.package.do_install()
assert spec.installed
@@ -41,17 +40,17 @@ def test_install_and_uninstall(install_mockery, mock_fetch, monkeypatch):
assert not spec.installed
-@pytest.mark.regression('11870')
+@pytest.mark.regression("11870")
def test_uninstall_non_existing_package(install_mockery, mock_fetch, monkeypatch):
"""Ensure that we can uninstall a package that has been deleted from the repo"""
- spec = Spec('trivial-install-test-package').concretized()
+ spec = Spec("trivial-install-test-package").concretized()
spec.package.do_install()
assert spec.installed
# Mock deletion of the package
spec._package = None
- monkeypatch.setattr(spack.repo.path, 'get', find_nothing)
+ monkeypatch.setattr(spack.repo.path, "get", find_nothing)
with pytest.raises(spack.repo.UnknownPackageError):
spec.package
@@ -62,52 +61,50 @@ def test_uninstall_non_existing_package(install_mockery, mock_fetch, monkeypatch
def test_pkg_attributes(install_mockery, mock_fetch, monkeypatch):
# Get a basic concrete spec for the dummy package.
- spec = Spec('attributes-foo-app ^attributes-foo')
+ spec = Spec("attributes-foo-app ^attributes-foo")
spec.concretize()
assert spec.concrete
pkg = spec.package
pkg.do_install()
- foo = 'attributes-foo'
- assert spec['bar'].prefix == spec[foo].prefix
- assert spec['baz'].prefix == spec[foo].prefix
+ foo = "attributes-foo"
+ assert spec["bar"].prefix == spec[foo].prefix
+ assert spec["baz"].prefix == spec[foo].prefix
assert spec[foo].home == spec[foo].prefix
- assert spec['bar'].home == spec[foo].home
- assert spec['baz'].home == spec[foo].prefix.baz
+ assert spec["bar"].home == spec[foo].home
+ assert spec["baz"].home == spec[foo].prefix.baz
foo_headers = spec[foo].headers
# assert foo_headers.basenames == ['foo.h']
assert foo_headers.directories == [spec[foo].home.include]
- bar_headers = spec['bar'].headers
+ bar_headers = spec["bar"].headers
# assert bar_headers.basenames == ['bar.h']
- assert bar_headers.directories == [spec['bar'].home.include]
- baz_headers = spec['baz'].headers
+ assert bar_headers.directories == [spec["bar"].home.include]
+ baz_headers = spec["baz"].headers
# assert baz_headers.basenames == ['baz.h']
- assert baz_headers.directories == [spec['baz'].home.include]
+ assert baz_headers.directories == [spec["baz"].home.include]
- if 'platform=windows' in spec:
- lib_suffix = '.lib'
- elif 'platform=darwin' in spec:
- lib_suffix = '.dylib'
+ if "platform=windows" in spec:
+ lib_suffix = ".lib"
+ elif "platform=darwin" in spec:
+ lib_suffix = ".dylib"
else:
- lib_suffix = '.so'
+ lib_suffix = ".so"
foo_libs = spec[foo].libs
- assert foo_libs.basenames == ['libFoo' + lib_suffix]
+ assert foo_libs.basenames == ["libFoo" + lib_suffix]
assert foo_libs.directories == [spec[foo].home.lib64]
- bar_libs = spec['bar'].libs
- assert bar_libs.basenames == ['libFooBar' + lib_suffix]
- assert bar_libs.directories == [spec['bar'].home.lib64]
- baz_libs = spec['baz'].libs
- assert baz_libs.basenames == ['libFooBaz' + lib_suffix]
- assert baz_libs.directories == [spec['baz'].home.lib]
+ bar_libs = spec["bar"].libs
+ assert bar_libs.basenames == ["libFooBar" + lib_suffix]
+ assert bar_libs.directories == [spec["bar"].home.lib64]
+ baz_libs = spec["baz"].libs
+ assert baz_libs.basenames == ["libFooBaz" + lib_suffix]
+ assert baz_libs.directories == [spec["baz"].home.lib]
def mock_remove_prefix(*args):
- raise MockInstallError(
- "Intentional error",
- "Mock remove_prefix method intentionally fails")
+ raise MockInstallError("Intentional error", "Mock remove_prefix method intentionally fails")
class RemovePrefixChecker(object):
@@ -141,14 +138,14 @@ class MockStage(object):
self.wrapped_stage.create()
def __getattr__(self, attr):
- if attr == 'wrapped_stage':
+ if attr == "wrapped_stage":
# This attribute may not be defined at some point during unpickling
raise AttributeError()
return getattr(self.wrapped_stage, attr)
def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch):
- s = Spec('canfail').concretized()
+ s = Spec("canfail").concretized()
instance_rm_prefix = s.package.remove_prefix
@@ -177,21 +174,19 @@ def test_partial_install_delete_prefix_and_stage(install_mockery, mock_fetch):
@pytest.mark.disable_clean_stage_check
-def test_failing_overwrite_install_should_keep_previous_installation(
- mock_fetch, install_mockery
-):
+def test_failing_overwrite_install_should_keep_previous_installation(mock_fetch, install_mockery):
"""
Make sure that whenever `spack install --overwrite` fails, spack restores
the original install prefix instead of cleaning it.
"""
# Do a successful install
- s = Spec('canfail').concretized()
+ s = Spec("canfail").concretized()
s.package.succeed = True
# Do a failing overwrite install
s.package.do_install()
s.package.succeed = False
- kwargs = {'overwrite': [s.dag_hash()]}
+ kwargs = {"overwrite": [s.dag_hash()]}
with pytest.raises(Exception):
s.package.do_install(**kwargs)
@@ -200,91 +195,84 @@ def test_failing_overwrite_install_should_keep_previous_installation(
assert os.path.exists(s.prefix)
-def test_dont_add_patches_to_installed_package(
- install_mockery, mock_fetch, monkeypatch
-):
- dependency = Spec('dependency-install')
+def test_dont_add_patches_to_installed_package(install_mockery, mock_fetch, monkeypatch):
+ dependency = Spec("dependency-install")
dependency.concretize()
dependency.package.do_install()
dependency_hash = dependency.dag_hash()
- dependent = Spec('dependent-install ^/' + dependency_hash)
+ dependent = Spec("dependent-install ^/" + dependency_hash)
dependent.concretize()
- monkeypatch.setitem(dependency.package.patches, 'dependency-install', [
- spack.patch.UrlPatch(
- dependent.package, 'file://fake.patch', sha256='unused-hash'
- )
- ])
+ monkeypatch.setitem(
+ dependency.package.patches,
+ "dependency-install",
+ [spack.patch.UrlPatch(dependent.package, "file://fake.patch", sha256="unused-hash")],
+ )
- assert dependent['dependency-install'] == dependency
+ assert dependent["dependency-install"] == dependency
-def test_installed_dependency_request_conflicts(
- install_mockery, mock_fetch, mutable_mock_repo):
- dependency = Spec('dependency-install')
+def test_installed_dependency_request_conflicts(install_mockery, mock_fetch, mutable_mock_repo):
+ dependency = Spec("dependency-install")
dependency.concretize()
dependency.package.do_install()
dependency_hash = dependency.dag_hash()
- dependent = Spec(
- 'conflicting-dependent ^/' + dependency_hash)
+ dependent = Spec("conflicting-dependent ^/" + dependency_hash)
with pytest.raises(spack.error.UnsatisfiableSpecError):
dependent.concretize()
-def test_install_dependency_symlinks_pkg(
- install_mockery, mock_fetch, mutable_mock_repo):
+def test_install_dependency_symlinks_pkg(install_mockery, mock_fetch, mutable_mock_repo):
"""Test dependency flattening/symlinks mock package."""
- spec = Spec('flatten-deps')
+ spec = Spec("flatten-deps")
spec.concretize()
pkg = spec.package
pkg.do_install()
# Ensure dependency directory exists after the installation.
- dependency_dir = os.path.join(pkg.prefix, 'dependency-install')
+ dependency_dir = os.path.join(pkg.prefix, "dependency-install")
assert os.path.isdir(dependency_dir)
-def test_install_times(
- install_mockery, mock_fetch, mutable_mock_repo):
+def test_install_times(install_mockery, mock_fetch, mutable_mock_repo):
"""Test install times added."""
- spec = Spec('dev-build-test-install-phases')
+ spec = Spec("dev-build-test-install-phases")
spec.concretize()
pkg = spec.package
pkg.do_install()
    # Ensure the install times file exists after the installation.
- install_times = os.path.join(pkg.prefix, ".spack", 'install_times.json')
+ install_times = os.path.join(pkg.prefix, ".spack", "install_times.json")
assert os.path.isfile(install_times)
# Ensure the phases are included
- with open(install_times, 'r') as timefile:
+ with open(install_times, "r") as timefile:
times = sjson.load(timefile.read())
# The order should be maintained
- phases = [x['name'] for x in times['phases']]
- total = sum([x['seconds'] for x in times['phases']])
- for name in ['one', 'two', 'three', 'install']:
+ phases = [x["name"] for x in times["phases"]]
+ total = sum([x["seconds"] for x in times["phases"]])
+ for name in ["one", "two", "three", "install"]:
assert name in phases
# Give a generous difference threshold
- assert abs(total - times['total']['seconds']) < 5
+ assert abs(total - times["total"]["seconds"]) < 5
-def test_flatten_deps(
- install_mockery, mock_fetch, mutable_mock_repo):
+def test_flatten_deps(install_mockery, mock_fetch, mutable_mock_repo):
"""Explicitly test the flattening code for coverage purposes."""
# Unfortunately, executing the 'flatten-deps' spec's installation does
# not affect code coverage results, so be explicit here.
- spec = Spec('dependent-install')
+ spec = Spec("dependent-install")
spec.concretize()
pkg = spec.package
pkg.do_install()
# Demonstrate that the directory does not appear under the spec
# prior to the flatten operation.
- dependency_name = 'dependency-install'
+ dependency_name = "dependency-install"
assert dependency_name not in os.listdir(pkg.prefix)
# Flatten the dependencies and ensure the dependency directory is there.
@@ -301,19 +289,17 @@ def install_upstream(tmpdir_factory, gen_mock_layout, install_mockery):
upstream, as well as the upstream layout (for verifying that dependent
installs are using the upstream installs).
"""
- mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))
+ mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
prepared_db = spack.database.Database(mock_db_root)
- upstream_layout = gen_mock_layout('/a/')
+ upstream_layout = gen_mock_layout("/a/")
def _install_upstream(*specs):
for spec_str in specs:
s = spack.spec.Spec(spec_str).concretized()
prepared_db.add(s, upstream_layout)
- downstream_root = str(tmpdir_factory.mktemp('mock_downstream_db_root'))
- db_for_test = spack.database.Database(
- downstream_root, upstream_dbs=[prepared_db]
- )
+ downstream_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
+ db_for_test = spack.database.Database(downstream_root, upstream_dbs=[prepared_db])
store = spack.store.Store(downstream_root)
store.db = db_for_test
return store, upstream_layout
@@ -325,15 +311,14 @@ def test_installed_upstream_external(install_upstream, mock_fetch):
"""Check that when a dependency package is recorded as installed in
    an upstream database, it is not reinstalled.
"""
- s, _ = install_upstream('externaltool')
+ s, _ = install_upstream("externaltool")
with spack.store.use_store(s):
- dependent = spack.spec.Spec('externaltest')
+ dependent = spack.spec.Spec("externaltest")
dependent.concretize()
- new_dependency = dependent['externaltool']
+ new_dependency = dependent["externaltool"]
assert new_dependency.external
- assert new_dependency.prefix == \
- os.path.sep + os.path.join('path', 'to', 'external_tool')
+ assert new_dependency.prefix == os.path.sep + os.path.join("path", "to", "external_tool")
dependent.package.do_install()
@@ -345,15 +330,14 @@ def test_installed_upstream(install_upstream, mock_fetch):
"""Check that when a dependency package is recorded as installed in
    an upstream database, it is not reinstalled.
"""
- s, upstream_layout = install_upstream('dependency-install')
+ s, upstream_layout = install_upstream("dependency-install")
with spack.store.use_store(s):
- dependency = spack.spec.Spec('dependency-install').concretized()
- dependent = spack.spec.Spec('dependent-install').concretized()
+ dependency = spack.spec.Spec("dependency-install").concretized()
+ dependent = spack.spec.Spec("dependent-install").concretized()
- new_dependency = dependent['dependency-install']
+ new_dependency = dependent["dependency-install"]
assert new_dependency.installed_upstream
- assert (new_dependency.prefix ==
- upstream_layout.path_for_spec(dependency))
+ assert new_dependency.prefix == upstream_layout.path_for_spec(dependency)
dependent.package.do_install()
@@ -363,11 +347,11 @@ def test_installed_upstream(install_upstream, mock_fetch):
@pytest.mark.disable_clean_stage_check
def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch):
- s = Spec('canfail').concretized()
+ s = Spec("canfail").concretized()
# If remove_prefix is called at any point in this test, that is an error
s.package.succeed = False # make the build fail
- monkeypatch.setattr(spack.package_base.Package, 'remove_prefix', mock_remove_prefix)
+ monkeypatch.setattr(spack.package_base.Package, "remove_prefix", mock_remove_prefix)
with pytest.raises(spack.build_environment.ChildError):
s.package.do_install(keep_prefix=True)
assert os.path.exists(s.package.prefix)
@@ -375,7 +359,7 @@ def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch):
# must clear failure markings for the package before re-installing it
spack.store.db.clear_failure(s, True)
- s.package.succeed = True # make the build succeed
+ s.package.succeed = True # make the build succeed
s.package.stage = MockStage(s.package.stage)
s.package.do_install(keep_prefix=True)
assert s.package.spec.installed
@@ -383,8 +367,8 @@ def test_partial_install_keep_prefix(install_mockery, mock_fetch, monkeypatch):
def test_second_install_no_overwrite_first(install_mockery, mock_fetch, monkeypatch):
- s = Spec('canfail').concretized()
- monkeypatch.setattr(spack.package_base.Package, 'remove_prefix', mock_remove_prefix)
+ s = Spec("canfail").concretized()
+ monkeypatch.setattr(spack.package_base.Package, "remove_prefix", mock_remove_prefix)
s.package.succeed = True
s.package.do_install()
@@ -400,12 +384,12 @@ def test_install_prefix_collision_fails(config, mock_fetch, mock_packages, tmpdi
Test that different specs with coinciding install prefixes will fail
to install.
"""
- projections = {'all': 'all-specs-project-to-this-prefix'}
+ projections = {"all": "all-specs-project-to-this-prefix"}
store = spack.store.Store(str(tmpdir), projections=projections)
with spack.store.use_store(store):
- with spack.config.override('config:checksum', False):
- pkg_a = Spec('libelf@0.8.13').concretized().package
- pkg_b = Spec('libelf@0.8.12').concretized().package
+ with spack.config.override("config:checksum", False):
+ pkg_a = Spec("libelf@0.8.13").concretized().package
+ pkg_b = Spec("libelf@0.8.12").concretized().package
pkg_a.do_install()
with pytest.raises(InstallError, match="Install prefix collision"):
@@ -413,17 +397,17 @@ def test_install_prefix_collision_fails(config, mock_fetch, mock_packages, tmpdi
def test_store(install_mockery, mock_fetch):
- spec = Spec('cmake-client').concretized()
+ spec = Spec("cmake-client").concretized()
pkg = spec.package
pkg.do_install()
@pytest.mark.disable_clean_stage_check
def test_failing_build(install_mockery, mock_fetch, capfd):
- spec = Spec('failing-build').concretized()
+ spec = Spec("failing-build").concretized()
pkg = spec.package
- with pytest.raises(spack.build_environment.ChildError, match='Expected failure'):
+ with pytest.raises(spack.build_environment.ChildError, match="Expected failure"):
pkg.do_install()
@@ -435,22 +419,21 @@ def test_uninstall_by_spec_errors(mutable_database):
"""Test exceptional cases with the uninstall command."""
# Try to uninstall a spec that has not been installed
- spec = Spec('dependent-install')
+ spec = Spec("dependent-install")
spec.concretize()
with pytest.raises(InstallError, match="is not installed"):
PackageBase.uninstall_by_spec(spec)
# Try an unforced uninstall of a spec with dependencies
- rec = mutable_database.get_record('mpich')
+ rec = mutable_database.get_record("mpich")
with pytest.raises(PackageStillNeededError, match="Cannot uninstall"):
PackageBase.uninstall_by_spec(rec.spec)
@pytest.mark.disable_clean_stage_check
-def test_nosource_pkg_install(
- install_mockery, mock_fetch, mock_packages, capfd):
+def test_nosource_pkg_install(install_mockery, mock_fetch, mock_packages, capfd):
"""Test install phases with the nosource package."""
- spec = Spec('nosource').concretized()
+ spec = Spec("nosource").concretized()
pkg = spec.package
# Make sure install works even though there is no associated code.
@@ -463,10 +446,9 @@ def test_nosource_pkg_install(
@pytest.mark.disable_clean_stage_check
-def test_nosource_bundle_pkg_install(
- install_mockery, mock_fetch, mock_packages, capfd):
+def test_nosource_bundle_pkg_install(install_mockery, mock_fetch, mock_packages, capfd):
"""Test install phases with the nosource-bundle package."""
- spec = Spec('nosource-bundle').concretized()
+ spec = Spec("nosource-bundle").concretized()
pkg = spec.package
# Make sure install works even though there is no associated code.
@@ -478,27 +460,26 @@ def test_nosource_bundle_pkg_install(
assert "Missing a source id for nosource" not in out[1]
-def test_nosource_pkg_install_post_install(
- install_mockery, mock_fetch, mock_packages):
+def test_nosource_pkg_install_post_install(install_mockery, mock_fetch, mock_packages):
"""Test install phases with the nosource package with post-install."""
- spec = Spec('nosource-install').concretized()
+ spec = Spec("nosource-install").concretized()
pkg = spec.package
# Make sure both the install and post-install package methods work.
pkg.do_install()
# Ensure the file created in the package's `install` method exists.
- install_txt = os.path.join(spec.prefix, 'install.txt')
+ install_txt = os.path.join(spec.prefix, "install.txt")
assert os.path.isfile(install_txt)
# Ensure the file created in the package's `post-install` method exists.
- post_install_txt = os.path.join(spec.prefix, 'post-install.txt')
+ post_install_txt = os.path.join(spec.prefix, "post-install.txt")
assert os.path.isfile(post_install_txt)
def test_pkg_build_paths(install_mockery):
# Get a basic concrete spec for the trivial install package.
- spec = Spec('trivial-install-test-package').concretized()
+ spec = Spec("trivial-install-test-package").concretized()
log_path = spec.package.log_path
assert log_path.endswith(_spack_build_logfile)
@@ -511,17 +492,17 @@ def test_pkg_build_paths(install_mockery):
fs.mkdirp(log_dir)
with fs.working_dir(log_dir):
# Start with the older of the previous log filenames
- older_log = 'spack-build.out'
+ older_log = "spack-build.out"
fs.touch(older_log)
assert spec.package.log_path.endswith(older_log)
# Now check the newer log filename
- last_log = 'spack-build.txt'
+ last_log = "spack-build.txt"
fs.rename(older_log, last_log)
assert spec.package.log_path.endswith(last_log)
# Check the old environment file
- last_env = 'spack-build.env'
+ last_env = "spack-build.env"
fs.rename(last_log, last_env)
assert spec.package.env_path.endswith(last_env)
@@ -531,15 +512,15 @@ def test_pkg_build_paths(install_mockery):
def test_pkg_install_paths(install_mockery):
# Get a basic concrete spec for the trivial install package.
- spec = Spec('trivial-install-test-package').concretized()
+ spec = Spec("trivial-install-test-package").concretized()
- log_path = os.path.join(spec.prefix, '.spack', _spack_build_logfile)
+ log_path = os.path.join(spec.prefix, ".spack", _spack_build_logfile)
assert spec.package.install_log_path == log_path
- env_path = os.path.join(spec.prefix, '.spack', _spack_build_envfile)
+ env_path = os.path.join(spec.prefix, ".spack", _spack_build_envfile)
assert spec.package.install_env_path == env_path
- args_path = os.path.join(spec.prefix, '.spack', _spack_configure_argsfile)
+ args_path = os.path.join(spec.prefix, ".spack", _spack_configure_argsfile)
assert spec.package.install_configure_args_path == args_path
# Backward compatibility checks
@@ -547,17 +528,17 @@ def test_pkg_install_paths(install_mockery):
fs.mkdirp(log_dir)
with fs.working_dir(log_dir):
# Start with the older of the previous install log filenames
- older_log = 'build.out'
+ older_log = "build.out"
fs.touch(older_log)
assert spec.package.install_log_path.endswith(older_log)
# Now check the newer install log filename
- last_log = 'build.txt'
+ last_log = "build.txt"
fs.rename(older_log, last_log)
assert spec.package.install_log_path.endswith(last_log)
# Check the old install environment file
- last_env = 'build.env'
+ last_env = "build.env"
fs.rename(last_log, last_env)
assert spec.package.install_env_path.endswith(last_env)
@@ -568,7 +549,7 @@ def test_pkg_install_paths(install_mockery):
def test_log_install_without_build_files(install_mockery):
"""Test the installer log function when no build files are present."""
# Get a basic concrete spec for the trivial install package.
- spec = Spec('trivial-install-test-package').concretized()
+ spec = Spec("trivial-install-test-package").concretized()
# Attempt installing log without the build log file
with pytest.raises(IOError, match="No such file or directory"):
@@ -577,7 +558,7 @@ def test_log_install_without_build_files(install_mockery):
def test_log_install_with_build_files(install_mockery, monkeypatch):
"""Test the installer's log function when have build files."""
- config_log = 'config.log'
+ config_log = "config.log"
# Retain the original function for use in the monkey patch that is used
# to raise an exception under the desired condition for test coverage.
@@ -586,11 +567,11 @@ def test_log_install_with_build_files(install_mockery, monkeypatch):
def _install(src, dest):
orig_install_fn(src, dest)
if src.endswith(config_log):
- raise Exception('Mock log install error')
+ raise Exception("Mock log install error")
- monkeypatch.setattr(fs, 'install', _install)
+ monkeypatch.setattr(fs, "install", _install)
- spec = Spec('trivial-install-test-package').concretized()
+ spec = Spec("trivial-install-test-package").concretized()
# Set up mock build files and try again to include archive failure
log_path = spec.package.log_path
@@ -606,9 +587,9 @@ def test_log_install_with_build_files(install_mockery, monkeypatch):
fs.mkdirp(install_path)
source = spec.package.stage.source_path
- config = os.path.join(source, 'config.log')
+ config = os.path.join(source, "config.log")
fs.touchp(config)
- spec.package.archive_files = ['missing', '..', config]
+ spec.package.archive_files = ["missing", "..", config]
spack.installer.log(spec.package)
@@ -616,18 +597,15 @@ def test_log_install_with_build_files(install_mockery, monkeypatch):
assert os.path.exists(spec.package.install_env_path)
assert os.path.exists(spec.package.install_configure_args_path)
- archive_dir = os.path.join(install_path, 'archived-files')
+ archive_dir = os.path.join(install_path, "archived-files")
source_dir = os.path.dirname(source)
rel_config = os.path.relpath(config, source_dir)
assert os.path.exists(os.path.join(archive_dir, rel_config))
- assert not os.path.exists(os.path.join(archive_dir, 'missing'))
+ assert not os.path.exists(os.path.join(archive_dir, "missing"))
- expected_errs = [
- 'OUTSIDE SOURCE PATH', # for '..'
- 'FAILED TO ARCHIVE' # for rel_config
- ]
- with open(os.path.join(archive_dir, 'errors.txt'), 'r') as fd:
+ expected_errs = ["OUTSIDE SOURCE PATH", "FAILED TO ARCHIVE"] # for '..' # for rel_config
+ with open(os.path.join(archive_dir, "errors.txt"), "r") as fd:
for ln, expected in zip(fd, expected_errs):
assert expected in ln
@@ -637,10 +615,10 @@ def test_log_install_with_build_files(install_mockery, monkeypatch):
def test_unconcretized_install(install_mockery, mock_fetch, mock_packages):
"""Test attempts to perform install phases with unconcretized spec."""
- spec = Spec('trivial-install-test-package')
+ spec = Spec("trivial-install-test-package")
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
- with pytest.raises(ValueError, match='must have a concrete spec'):
+ with pytest.raises(ValueError, match="must have a concrete spec"):
pkg_cls(spec).do_install()
with pytest.raises(ValueError, match="only patch concrete packages"):
@@ -649,23 +627,20 @@ def test_unconcretized_install(install_mockery, mock_fetch, mock_packages):
def test_install_error():
try:
- msg = 'test install error'
- long_msg = 'this is the long version of test install error'
+ msg = "test install error"
+ long_msg = "this is the long version of test install error"
raise InstallError(msg, long_msg=long_msg)
except Exception as exc:
- assert exc.__class__.__name__ == 'InstallError'
+ assert exc.__class__.__name__ == "InstallError"
assert exc.message == msg
assert exc.long_message == long_msg
@pytest.mark.disable_clean_stage_check
def test_empty_install_sanity_check_prefix(
- monkeypatch, install_mockery, mock_fetch, mock_packages
+ monkeypatch, install_mockery, mock_fetch, mock_packages
):
"""Test empty install triggers sanity_check_prefix."""
- spec = Spec('failing-empty-install').concretized()
- with pytest.raises(
- spack.build_environment.ChildError,
- match='Nothing was installed'
- ):
+ spec = Spec("failing-empty-install").concretized()
+ with pytest.raises(spack.build_environment.ChildError, match="Nothing was installed"):
spec.package.do_install()
diff --git a/lib/spack/spack/test/installer.py b/lib/spack/spack/test/installer.py
index 3d8b53e2eb..7f608d6c89 100644
--- a/lib/spack/spack/test/installer.py
+++ b/lib/spack/spack/test/installer.py
@@ -24,7 +24,7 @@ import spack.spec
import spack.store
import spack.util.lock as lk
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
def _mock_repo(root, namespace):
@@ -36,11 +36,15 @@ def _mock_repo(root, namespace):
"""
repodir = py.path.local(root) if isinstance(root, str) else root
repodir.ensure(spack.repo.packages_dir_name, dir=True)
- yaml = repodir.join('repo.yaml')
- yaml.write("""
+ yaml = repodir.join("repo.yaml")
+ yaml.write(
+ """
repo:
namespace: {0}
-""".format(namespace))
+""".format(
+ namespace
+ )
+ )
def _noop(*args, **kwargs):
@@ -55,7 +59,7 @@ def _none(*args, **kwargs):
def _not_locked(installer, lock_type, pkg):
"""Generic monkeypatch function for _ensure_locked to return no lock"""
- tty.msg('{0} locked {1}' .format(lock_type, pkg.spec.name))
+ tty.msg("{0} locked {1}".format(lock_type, pkg.spec.name))
return lock_type, None
@@ -113,19 +117,17 @@ def installer_args(spec_names, kwargs={}):
return arg
-@pytest.mark.parametrize('sec,result', [
- (86400, "24h"),
- (3600, "1h"),
- (60, "1m"),
- (1.802, "1.80s"),
- (3723.456, "1h 2m 3.46s")])
+@pytest.mark.parametrize(
+ "sec,result",
+ [(86400, "24h"), (3600, "1h"), (60, "1m"), (1.802, "1.80s"), (3723.456, "1h 2m 3.46s")],
+)
def test_hms(sec, result):
assert inst._hms(sec) == result
def test_get_dependent_ids(install_mockery, mock_packages):
    # Concretize the parent package, which handles its dependency too
- spec = spack.spec.Spec('a')
+ spec = spack.spec.Spec("a")
spec.concretize()
assert spec.concrete
@@ -140,25 +142,25 @@ def test_get_dependent_ids(install_mockery, mock_packages):
def test_install_msg(monkeypatch):
"""Test results of call to install_msg based on debug level."""
- name = 'some-package'
+ name = "some-package"
pid = 123456
- install_msg = 'Installing {0}'.format(name)
+ install_msg = "Installing {0}".format(name)
- monkeypatch.setattr(tty, '_debug', 0)
+ monkeypatch.setattr(tty, "_debug", 0)
assert inst.install_msg(name, pid) == install_msg
- monkeypatch.setattr(tty, '_debug', 1)
+ monkeypatch.setattr(tty, "_debug", 1)
assert inst.install_msg(name, pid) == install_msg
# Expect the PID to be added at debug level 2
- monkeypatch.setattr(tty, '_debug', 2)
+ monkeypatch.setattr(tty, "_debug", 2)
expected = "{0}: {1}".format(pid, install_msg)
assert inst.install_msg(name, pid) == expected
def test_install_from_cache_errors(install_mockery, capsys):
"""Test to ensure cover _install_from_cache errors."""
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
assert spec.concrete
@@ -167,8 +169,8 @@ def test_install_from_cache_errors(install_mockery, capsys):
inst._install_from_cache(spec.package, True, True, False)
captured = str(capsys.readouterr())
- assert 'No binary' in captured
- assert 'found when cache-only specified' in captured
+ assert "No binary" in captured
+ assert "found when cache-only specified" in captured
assert not spec.package.installed_from_binary_cache
# Check when don't expect to install only from binary cache
@@ -178,73 +180,72 @@ def test_install_from_cache_errors(install_mockery, capsys):
def test_install_from_cache_ok(install_mockery, monkeypatch):
"""Test to ensure cover _install_from_cache to the return."""
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
- monkeypatch.setattr(inst, '_try_install_from_binary_cache', _true)
- monkeypatch.setattr(spack.hooks, 'post_install', _noop)
+ monkeypatch.setattr(inst, "_try_install_from_binary_cache", _true)
+ monkeypatch.setattr(spack.hooks, "post_install", _noop)
assert inst._install_from_cache(spec.package, True, True, False)
def test_process_external_package_module(install_mockery, monkeypatch, capfd):
"""Test to simply cover the external module message path."""
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
assert spec.concrete
    # Ensure we take the external module path WITHOUT any changes to the database
- monkeypatch.setattr(spack.database.Database, 'get_record', _none)
+ monkeypatch.setattr(spack.database.Database, "get_record", _none)
- spec.external_path = '/actual/external/path/not/checked'
- spec.external_modules = ['unchecked_module']
+ spec.external_path = "/actual/external/path/not/checked"
+ spec.external_modules = ["unchecked_module"]
inst._process_external_package(spec.package, False)
out = capfd.readouterr()[0]
- assert 'has external module in {0}'.format(spec.external_modules) in out
+ assert "has external module in {0}".format(spec.external_modules) in out
-def test_process_binary_cache_tarball_none(install_mockery, monkeypatch,
- capfd):
+def test_process_binary_cache_tarball_none(install_mockery, monkeypatch, capfd):
"""Tests of _process_binary_cache_tarball when no tarball."""
- monkeypatch.setattr(spack.binary_distribution, 'download_tarball', _none)
+ monkeypatch.setattr(spack.binary_distribution, "download_tarball", _none)
- s = spack.spec.Spec('trivial-install-test-package').concretized()
+ s = spack.spec.Spec("trivial-install-test-package").concretized()
assert not inst._process_binary_cache_tarball(s.package, None, False, False)
- assert 'exists in binary cache but' in capfd.readouterr()[0]
+ assert "exists in binary cache but" in capfd.readouterr()[0]
def test_process_binary_cache_tarball_tar(install_mockery, monkeypatch, capfd):
"""Tests of _process_binary_cache_tarball with a tar file."""
+
def _spec(spec, unsigned=False, mirrors_for_spec=None):
return spec
    # Skip binary distribution functionality since it is assumed to be tested elsewhere
- monkeypatch.setattr(spack.binary_distribution, 'download_tarball', _spec)
- monkeypatch.setattr(spack.binary_distribution, 'extract_tarball', _noop)
+ monkeypatch.setattr(spack.binary_distribution, "download_tarball", _spec)
+ monkeypatch.setattr(spack.binary_distribution, "extract_tarball", _noop)
# Skip database updates
- monkeypatch.setattr(spack.database.Database, 'add', _noop)
+ monkeypatch.setattr(spack.database.Database, "add", _noop)
- spec = spack.spec.Spec('a').concretized()
+ spec = spack.spec.Spec("a").concretized()
assert inst._process_binary_cache_tarball(spec.package, spec, False, False)
out = capfd.readouterr()[0]
- assert 'Extracting a' in out
- assert 'from binary cache' in out
+ assert "Extracting a" in out
+ assert "from binary cache" in out
-def test_try_install_from_binary_cache(install_mockery, mock_packages,
- monkeypatch):
+def test_try_install_from_binary_cache(install_mockery, mock_packages, monkeypatch):
"""Test return false when no match exists in the mirror"""
- spec = spack.spec.Spec('mpich')
+ spec = spack.spec.Spec("mpich")
spec.concretize()
result = inst._try_install_from_binary_cache(spec.package, False, False)
- assert(not result)
+ assert not result
def test_installer_repr(install_mockery):
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
irep = installer.__repr__()
@@ -254,7 +255,7 @@ def test_installer_repr(install_mockery):
def test_installer_str(install_mockery):
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
istr = str(installer)
@@ -264,46 +265,46 @@ def test_installer_str(install_mockery):
def test_check_before_phase_error(install_mockery):
- s = spack.spec.Spec('trivial-install-test-package').concretized()
- s.package.stop_before_phase = 'beforephase'
+ s = spack.spec.Spec("trivial-install-test-package").concretized()
+ s.package.stop_before_phase = "beforephase"
with pytest.raises(inst.BadInstallPhase) as exc_info:
inst._check_last_phase(s.package)
err = str(exc_info.value)
- assert 'is not a valid phase' in err
+ assert "is not a valid phase" in err
assert s.package.stop_before_phase in err
def test_check_last_phase_error(install_mockery):
- s = spack.spec.Spec('trivial-install-test-package').concretized()
+ s = spack.spec.Spec("trivial-install-test-package").concretized()
s.package.stop_before_phase = None
- s.package.last_phase = 'badphase'
+ s.package.last_phase = "badphase"
with pytest.raises(inst.BadInstallPhase) as exc_info:
inst._check_last_phase(s.package)
err = str(exc_info.value)
- assert 'is not a valid phase' in err
+ assert "is not a valid phase" in err
assert s.package.last_phase in err
def test_installer_ensure_ready_errors(install_mockery, monkeypatch):
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
- fmt = r'cannot be installed locally.*{0}'
+ fmt = r"cannot be installed locally.*{0}"
# Force an external package error
path, modules = spec.external_path, spec.external_modules
- spec.external_path = '/actual/external/path/not/checked'
- spec.external_modules = ['unchecked_module']
- msg = fmt.format('is external')
+ spec.external_path = "/actual/external/path/not/checked"
+ spec.external_modules = ["unchecked_module"]
+ msg = fmt.format("is external")
with pytest.raises(inst.ExternalPackageError, match=msg):
installer._ensure_install_ready(spec.package)
# Force an upstream package error
spec.external_path, spec.external_modules = path, modules
monkeypatch.setattr(spack.spec.Spec, "installed_upstream", True)
- msg = fmt.format('is upstream')
+ msg = fmt.format("is upstream")
with pytest.raises(inst.UpstreamPackageError, match=msg):
installer._ensure_install_ready(spec.package)
@@ -311,55 +312,55 @@ def test_installer_ensure_ready_errors(install_mockery, monkeypatch):
# we are calling an internal method prior to any lock-related setup
monkeypatch.setattr(spack.spec.Spec, "installed_upstream", False)
assert len(installer.locks) == 0
- with pytest.raises(inst.InstallLockError, match=fmt.format('not locked')):
+ with pytest.raises(inst.InstallLockError, match=fmt.format("not locked")):
installer._ensure_install_ready(spec.package)
def test_ensure_locked_err(install_mockery, monkeypatch, tmpdir, capsys):
"""Test _ensure_locked when a non-lock exception is raised."""
- mock_err_msg = 'Mock exception error'
+ mock_err_msg = "Mock exception error"
def _raise(lock, timeout):
raise RuntimeError(mock_err_msg)
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
- monkeypatch.setattr(ulk.Lock, 'acquire_read', _raise)
+ monkeypatch.setattr(ulk.Lock, "acquire_read", _raise)
with tmpdir.as_cwd():
with pytest.raises(RuntimeError):
- installer._ensure_locked('read', spec.package)
+ installer._ensure_locked("read", spec.package)
out = str(capsys.readouterr()[1])
- assert 'Failed to acquire a read lock' in out
+ assert "Failed to acquire a read lock" in out
assert mock_err_msg in out
def test_ensure_locked_have(install_mockery, tmpdir, capsys):
"""Test _ensure_locked when already have lock."""
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
pkg_id = inst.package_id(spec.package)
with tmpdir.as_cwd():
# Test "downgrade" of a read lock (to a read lock)
- lock = lk.Lock('./test', default_timeout=1e-9, desc='test')
- lock_type = 'read'
+ lock = lk.Lock("./test", default_timeout=1e-9, desc="test")
+ lock_type = "read"
tpl = (lock_type, lock)
installer.locks[pkg_id] = tpl
assert installer._ensure_locked(lock_type, spec.package) == tpl
# Test "upgrade" of a read lock without read count to a write
- lock_type = 'write'
- err = 'Cannot upgrade lock'
+ lock_type = "write"
+ err = "Cannot upgrade lock"
with pytest.raises(ulk.LockUpgradeError, match=err):
installer._ensure_locked(lock_type, spec.package)
out = str(capsys.readouterr()[1])
- assert 'Failed to upgrade to a write lock' in out
- assert 'exception when releasing read lock' in out
+ assert "Failed to upgrade to a write lock" in out
+ assert "exception when releasing read lock" in out
# Test "upgrade" of the read lock *with* read count to a write
lock._reads = 1
@@ -367,17 +368,14 @@ def test_ensure_locked_have(install_mockery, tmpdir, capsys):
assert installer._ensure_locked(lock_type, spec.package) == tpl
# Test "downgrade" of the write lock to a read lock
- lock_type = 'read'
+ lock_type = "read"
tpl = (lock_type, lock)
assert installer._ensure_locked(lock_type, spec.package) == tpl
-@pytest.mark.parametrize('lock_type,reads,writes', [
- ('read', 1, 0),
- ('write', 0, 1)])
-def test_ensure_locked_new_lock(
- install_mockery, tmpdir, lock_type, reads, writes):
- pkg_id = 'a'
+@pytest.mark.parametrize("lock_type,reads,writes", [("read", 1, 0), ("write", 0, 1)])
+def test_ensure_locked_new_lock(install_mockery, tmpdir, lock_type, reads, writes):
+ pkg_id = "a"
const_arg = installer_args([pkg_id], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
@@ -397,31 +395,31 @@ def test_ensure_locked_new_warn(install_mockery, monkeypatch, tmpdir, capsys):
lock.default_timeout = 1e-9 if timeout is None else None
return lock
- pkg_id = 'a'
+ pkg_id = "a"
const_arg = installer_args([pkg_id], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
- monkeypatch.setattr(spack.database.Database, 'prefix_lock', _pl)
+ monkeypatch.setattr(spack.database.Database, "prefix_lock", _pl)
- lock_type = 'read'
+ lock_type = "read"
ltype, lock = installer._ensure_locked(lock_type, spec.package)
assert ltype == lock_type
assert lock is not None
out = str(capsys.readouterr()[1])
- assert 'Expected prefix lock timeout' in out
+ assert "Expected prefix lock timeout" in out
def test_package_id_err(install_mockery):
- s = spack.spec.Spec('trivial-install-test-package')
+ s = spack.spec.Spec("trivial-install-test-package")
pkg_cls = spack.repo.path.get_pkg_class(s.name)
- with pytest.raises(ValueError, match='spec is not concretized'):
+ with pytest.raises(ValueError, match="spec is not concretized"):
inst.package_id(pkg_cls(s))
def test_package_id_ok(install_mockery):
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
assert spec.concrete
pkg = spec.package
@@ -429,7 +427,7 @@ def test_package_id_ok(install_mockery):
def test_fake_install(install_mockery):
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
assert spec.concrete
@@ -439,42 +437,40 @@ def test_fake_install(install_mockery):
def test_packages_needed_to_bootstrap_compiler_none(install_mockery):
- spec = spack.spec.Spec('trivial-install-test-package')
+ spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
assert spec.concrete
packages = inst._packages_needed_to_bootstrap_compiler(
- spec.compiler, spec.architecture, [spec.package])
+ spec.compiler, spec.architecture, [spec.package]
+ )
assert not packages
-@pytest.mark.xfail(
- reason="fails when assuming Spec.package can only be called on concrete specs"
-)
-def test_packages_needed_to_bootstrap_compiler_packages(
- install_mockery, monkeypatch
-):
- spec = spack.spec.Spec('trivial-install-test-package')
+@pytest.mark.xfail(reason="fails when assuming Spec.package can only be called on concrete specs")
+def test_packages_needed_to_bootstrap_compiler_packages(install_mockery, monkeypatch):
+ spec = spack.spec.Spec("trivial-install-test-package")
spec.concretize()
def _conc_spec(compiler):
- return spack.spec.Spec('a').concretized()
+ return spack.spec.Spec("a").concretized()
# Ensure we can get past functions that are precluding obtaining
# packages.
- monkeypatch.setattr(spack.compilers, 'compilers_for_spec', _none)
- monkeypatch.setattr(spack.compilers, 'pkg_spec_for_compiler', _conc_spec)
- monkeypatch.setattr(spack.spec.Spec, 'concretize', _noop)
+ monkeypatch.setattr(spack.compilers, "compilers_for_spec", _none)
+ monkeypatch.setattr(spack.compilers, "pkg_spec_for_compiler", _conc_spec)
+ monkeypatch.setattr(spack.spec.Spec, "concretize", _noop)
packages = inst._packages_needed_to_bootstrap_compiler(
- spec.compiler, spec.architecture, [spec.package])
+ spec.compiler, spec.architecture, [spec.package]
+ )
assert packages
def test_dump_packages_deps_ok(install_mockery, tmpdir, mock_packages):
"""Test happy path for dump_packages with dependencies."""
- spec_name = 'simple-inheritance'
+ spec_name = "simple-inheritance"
spec = spack.spec.Spec(spec_name).concretized()
inst.dump_packages(spec, str(tmpdir))
@@ -497,26 +493,26 @@ def test_dump_packages_deps_errs(install_mockery, tmpdir, monkeypatch, capsys):
return source
def _repoerr(repo, name):
- if name == 'cmake':
+ if name == "cmake":
raise spack.repo.RepoError(repo_err_msg)
else:
return orig_dirname(repo, name)
# Now mock the creation of the required directory structure to cover
# the try-except block
- monkeypatch.setattr(spack.store.layout, 'build_packages_path', bpp_path)
+ monkeypatch.setattr(spack.store.layout, "build_packages_path", bpp_path)
- spec = spack.spec.Spec('simple-inheritance').concretized()
+ spec = spack.spec.Spec("simple-inheritance").concretized()
path = str(tmpdir)
# The call to install_tree will raise the exception since not mocking
# creation of dependency package files within *install* directories.
- with pytest.raises(IOError, match=path if not is_windows else ''):
+ with pytest.raises(IOError, match=path if not is_windows else ""):
inst.dump_packages(spec, path)
# Now try the error path, which requires the mock directory structure
# above
- monkeypatch.setattr(spack.repo.Repo, 'dirname_for_package_name', _repoerr)
+ monkeypatch.setattr(spack.repo.Repo, "dirname_for_package_name", _repoerr)
with pytest.raises(spack.repo.RepoError, match=repo_err_msg):
inst.dump_packages(spec, path)
@@ -524,22 +520,22 @@ def test_dump_packages_deps_errs(install_mockery, tmpdir, monkeypatch, capsys):
assert "Couldn't copy in provenance for cmake" in out
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_clear_failures_success(install_mockery):
"""Test the clear_failures happy path."""
# Set up a test prefix failure lock
- lock = lk.Lock(spack.store.db.prefix_fail_path, start=1, length=1,
- default_timeout=1e-9, desc='test')
+ lock = lk.Lock(
+ spack.store.db.prefix_fail_path, start=1, length=1, default_timeout=1e-9, desc="test"
+ )
try:
lock.acquire_write()
except lk.LockTimeoutError:
- tty.warn('Failed to write lock the test install failure')
- spack.store.db._prefix_failures['test'] = lock
+ tty.warn("Failed to write lock the test install failure")
+ spack.store.db._prefix_failures["test"] = lock
# Set up a fake failure mark (or file)
- fs.touch(os.path.join(spack.store.db._failure_dir, 'test'))
+ fs.touch(os.path.join(spack.store.db._failure_dir, "test"))
# Now clear failure tracking
inst.clear_failures()
@@ -556,26 +552,26 @@ def test_clear_failures_success(install_mockery):
def test_clear_failures_errs(install_mockery, monkeypatch, capsys):
"""Test the clear_failures exception paths."""
orig_fn = os.remove
- err_msg = 'Mock os remove'
+ err_msg = "Mock os remove"
def _raise_except(path):
raise OSError(err_msg)
# Set up a fake failure mark (or file)
- fs.touch(os.path.join(spack.store.db._failure_dir, 'test'))
+ fs.touch(os.path.join(spack.store.db._failure_dir, "test"))
- monkeypatch.setattr(os, 'remove', _raise_except)
+ monkeypatch.setattr(os, "remove", _raise_except)
# Clear failure tracking
inst.clear_failures()
# Ensure expected warning generated
out = str(capsys.readouterr()[1])
- assert 'Unable to remove failure' in out
+ assert "Unable to remove failure" in out
assert err_msg in out
# Restore remove for teardown
- monkeypatch.setattr(os, 'remove', orig_fn)
+ monkeypatch.setattr(os, "remove", orig_fn)
def test_combine_phase_logs(tmpdir):
@@ -583,20 +579,20 @@ def test_combine_phase_logs(tmpdir):
to combine them into one file. We aren't currently using this function,
but it's available when the logs are refactored to be written separately.
"""
- log_files = ['configure-out.txt', 'install-out.txt', 'build-out.txt']
+ log_files = ["configure-out.txt", "install-out.txt", "build-out.txt"]
phase_log_files = []
# Create and write to dummy phase log files
for log_file in log_files:
phase_log_file = os.path.join(str(tmpdir), log_file)
- with open(phase_log_file, 'w') as plf:
- plf.write('Output from %s\n' % log_file)
+ with open(phase_log_file, "w") as plf:
+ plf.write("Output from %s\n" % log_file)
phase_log_files.append(phase_log_file)
# This is the output log we will combine them into
combined_log = os.path.join(str(tmpdir), "combined-out.txt")
spack.installer.combine_phase_logs(phase_log_files, combined_log)
- with open(combined_log, 'r') as log_file:
+ with open(combined_log, "r") as log_file:
out = log_file.read()
# Ensure each phase log file is represented
@@ -605,66 +601,65 @@ def test_combine_phase_logs(tmpdir):
def test_check_deps_status_install_failure(install_mockery, monkeypatch):
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
request = installer.build_requests[0]
# Make sure the package is identified as failed
- monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
+ monkeypatch.setattr(spack.database.Database, "prefix_failed", _true)
- with pytest.raises(inst.InstallError, match='install failure'):
+ with pytest.raises(inst.InstallError, match="install failure"):
installer._check_deps_status(request)
def test_check_deps_status_write_locked(install_mockery, monkeypatch):
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
request = installer.build_requests[0]
# Ensure the lock is not acquired
- monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _not_locked)
+ monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)
- with pytest.raises(inst.InstallError, match='write locked by another'):
+ with pytest.raises(inst.InstallError, match="write locked by another"):
installer._check_deps_status(request)
def test_check_deps_status_external(install_mockery, monkeypatch):
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
request = installer.build_requests[0]
# Mock the known dependent, b, as external so assumed to be installed
- monkeypatch.setattr(spack.spec.Spec, 'external', True)
+ monkeypatch.setattr(spack.spec.Spec, "external", True)
installer._check_deps_status(request)
- assert list(installer.installed)[0].startswith('b')
+ assert list(installer.installed)[0].startswith("b")
def test_check_deps_status_upstream(install_mockery, monkeypatch):
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
request = installer.build_requests[0]
# Mock the known dependent, b, as installed upstream
- monkeypatch.setattr(spack.spec.Spec, 'installed_upstream', True)
+ monkeypatch.setattr(spack.spec.Spec, "installed_upstream", True)
installer._check_deps_status(request)
- assert list(installer.installed)[0].startswith('b')
+ assert list(installer.installed)[0].startswith("b")
def test_add_bootstrap_compilers(install_mockery, monkeypatch):
from collections import defaultdict
def _pkgs(compiler, architecture, pkgs):
- spec = spack.spec.Spec('mpi').concretized()
+ spec = spack.spec.Spec("mpi").concretized()
return [(spec.package, True)]
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
request = installer.build_requests[0]
all_deps = defaultdict(set)
- monkeypatch.setattr(inst, '_packages_needed_to_bootstrap_compiler', _pkgs)
- installer._add_bootstrap_compilers(
- 'fake', 'fake', [request.pkg], request, all_deps)
+ monkeypatch.setattr(inst, "_packages_needed_to_bootstrap_compiler", _pkgs)
+ installer._add_bootstrap_compilers("fake", "fake", [request.pkg], request, all_deps)
ids = list(installer.build_tasks)
assert len(ids) == 1
@@ -674,22 +669,22 @@ def test_add_bootstrap_compilers(install_mockery, monkeypatch):
def test_prepare_for_install_on_installed(install_mockery, monkeypatch):
"""Test of _prepare_for_install's early return for installed task path."""
- const_arg = installer_args(['dependent-install'], {})
+ const_arg = installer_args(["dependent-install"], {})
installer = create_installer(const_arg)
request = installer.build_requests[0]
- install_args = {'keep_prefix': True, 'keep_stage': True, 'restage': False}
+ install_args = {"keep_prefix": True, "keep_stage": True, "restage": False}
task = create_build_task(request.pkg, install_args)
installer.installed.add(task.pkg_id)
- monkeypatch.setattr(inst.PackageInstaller, '_ensure_install_ready', _noop)
+ monkeypatch.setattr(inst.PackageInstaller, "_ensure_install_ready", _noop)
installer._prepare_for_install(task)
def test_installer_init_requests(install_mockery):
"""Test of installer initial requests."""
- spec_name = 'dependent-install'
- with spack.config.override('config:install_missing_compilers', True):
+ spec_name = "dependent-install"
+ with spack.config.override("config:install_missing_compilers", True):
const_arg = installer_args([spec_name], {})
installer = create_installer(const_arg)
@@ -700,33 +695,33 @@ def test_installer_init_requests(install_mockery):
def test_install_task_use_cache(install_mockery, monkeypatch):
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
request = installer.build_requests[0]
task = create_build_task(request.pkg)
- monkeypatch.setattr(inst, '_install_from_cache', _true)
+ monkeypatch.setattr(inst, "_install_from_cache", _true)
installer._install_task(task)
assert request.pkg_id in installer.installed
def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
- config_msg = 'mock add_compilers_to_config'
+ config_msg = "mock add_compilers_to_config"
def _add(_compilers):
tty.msg(config_msg)
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
task = create_build_task(installer.build_requests[0].pkg)
task.compiler = True
# Preclude any meaningful side-effects
- monkeypatch.setattr(spack.package_base.PackageBase, 'unit_test_check', _true)
- monkeypatch.setattr(inst.PackageInstaller, '_setup_install_dir', _noop)
- monkeypatch.setattr(spack.build_environment, 'start_build_process', _noop)
- monkeypatch.setattr(spack.database.Database, 'add', _noop)
- monkeypatch.setattr(spack.compilers, 'add_compilers_to_config', _add)
+ monkeypatch.setattr(spack.package_base.PackageBase, "unit_test_check", _true)
+ monkeypatch.setattr(inst.PackageInstaller, "_setup_install_dir", _noop)
+ monkeypatch.setattr(spack.build_environment, "start_build_process", _noop)
+ monkeypatch.setattr(spack.database.Database, "add", _noop)
+ monkeypatch.setattr(spack.compilers, "add_compilers_to_config", _add)
installer._install_task(task)
@@ -736,25 +731,25 @@ def test_install_task_add_compiler(install_mockery, monkeypatch, capfd):
def test_release_lock_write_n_exception(install_mockery, tmpdir, capsys):
"""Test _release_lock for supposed write lock with exception."""
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
- pkg_id = 'test'
+ pkg_id = "test"
with tmpdir.as_cwd():
- lock = lk.Lock('./test', default_timeout=1e-9, desc='test')
- installer.locks[pkg_id] = ('write', lock)
+ lock = lk.Lock("./test", default_timeout=1e-9, desc="test")
+ installer.locks[pkg_id] = ("write", lock)
assert lock._writes == 0
installer._release_lock(pkg_id)
out = str(capsys.readouterr()[1])
- msg = 'exception when releasing write lock for {0}'.format(pkg_id)
+ msg = "exception when releasing write lock for {0}".format(pkg_id)
assert msg in out
-@pytest.mark.parametrize('installed', [True, False])
+@pytest.mark.parametrize("installed", [True, False])
def test_push_task_skip_processed(install_mockery, installed):
"""Test to ensure skip re-queueing a processed package."""
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
assert len(list(installer.build_tasks)) == 0
@@ -772,7 +767,7 @@ def test_push_task_skip_processed(install_mockery, installed):
def test_requeue_task(install_mockery, capfd):
"""Test to ensure cover _requeue_task."""
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
task = create_build_task(installer.build_requests[0].pkg)
@@ -790,38 +785,39 @@ def test_requeue_task(install_mockery, capfd):
assert qtask.attempts == task.attempts + 1
out = capfd.readouterr()[1]
- assert 'Installing a' in out
- assert ' in progress by another process' in out
+ assert "Installing a" in out
+ assert " in progress by another process" in out
def test_cleanup_all_tasks(install_mockery, monkeypatch):
"""Test to ensure cover _cleanup_all_tasks."""
+
def _mktask(pkg):
return create_build_task(pkg)
def _rmtask(installer, pkg_id):
- raise RuntimeError('Raise an exception to test except path')
+ raise RuntimeError("Raise an exception to test except path")
- const_arg = installer_args(['a'], {})
+ const_arg = installer_args(["a"], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
# Cover task removal happy path
- installer.build_tasks['a'] = _mktask(spec.package)
+ installer.build_tasks["a"] = _mktask(spec.package)
installer._cleanup_all_tasks()
assert len(installer.build_tasks) == 0
# Cover task removal exception path
- installer.build_tasks['a'] = _mktask(spec.package)
- monkeypatch.setattr(inst.PackageInstaller, '_remove_task', _rmtask)
+ installer.build_tasks["a"] = _mktask(spec.package)
+ monkeypatch.setattr(inst.PackageInstaller, "_remove_task", _rmtask)
installer._cleanup_all_tasks()
assert len(installer.build_tasks) == 1
def test_setup_install_dir_grp(install_mockery, monkeypatch, capfd):
"""Test _setup_install_dir's group change."""
- mock_group = 'mockgroup'
- mock_chgrp_msg = 'Changing group for {0} to {1}'
+ mock_group = "mockgroup"
+ mock_chgrp_msg = "Changing group for {0} to {1}"
def _get_group(spec):
return mock_group
@@ -829,10 +825,10 @@ def test_setup_install_dir_grp(install_mockery, monkeypatch, capfd):
def _chgrp(path, group, follow_symlinks=True):
tty.msg(mock_chgrp_msg.format(path, group))
- monkeypatch.setattr(prefs, 'get_package_group', _get_group)
- monkeypatch.setattr(fs, 'chgrp', _chgrp)
+ monkeypatch.setattr(prefs, "get_package_group", _get_group)
+ monkeypatch.setattr(fs, "chgrp", _chgrp)
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
@@ -854,29 +850,29 @@ def test_setup_install_dir_grp(install_mockery, monkeypatch, capfd):
def test_cleanup_failed_err(install_mockery, tmpdir, monkeypatch, capsys):
"""Test _cleanup_failed exception path."""
- msg = 'Fake release_write exception'
+ msg = "Fake release_write exception"
def _raise_except(lock):
raise RuntimeError(msg)
- const_arg = installer_args(['trivial-install-test-package'], {})
+ const_arg = installer_args(["trivial-install-test-package"], {})
installer = create_installer(const_arg)
- monkeypatch.setattr(lk.Lock, 'release_write', _raise_except)
- pkg_id = 'test'
+ monkeypatch.setattr(lk.Lock, "release_write", _raise_except)
+ pkg_id = "test"
with tmpdir.as_cwd():
- lock = lk.Lock('./test', default_timeout=1e-9, desc='test')
+ lock = lk.Lock("./test", default_timeout=1e-9, desc="test")
installer.failed[pkg_id] = lock
installer._cleanup_failed(pkg_id)
out = str(capsys.readouterr()[1])
- assert 'exception when removing failure tracking' in out
+ assert "exception when removing failure tracking" in out
assert msg in out
def test_update_failed_no_dependent_task(install_mockery):
"""Test _update_failed with missing dependent build tasks."""
- const_arg = installer_args(['dependent-install'], {})
+ const_arg = installer_args(["dependent-install"], {})
installer = create_installer(const_arg)
spec = installer.build_requests[0].pkg.spec
@@ -888,58 +884,58 @@ def test_update_failed_no_dependent_task(install_mockery):
def test_install_uninstalled_deps(install_mockery, monkeypatch, capsys):
"""Test install with uninstalled dependencies."""
- const_arg = installer_args(['dependent-install'], {})
+ const_arg = installer_args(["dependent-install"], {})
installer = create_installer(const_arg)
# Skip the actual installation and any status updates
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _noop)
- monkeypatch.setattr(inst.PackageInstaller, '_update_installed', _noop)
- monkeypatch.setattr(inst.PackageInstaller, '_update_failed', _noop)
+ monkeypatch.setattr(inst.PackageInstaller, "_install_task", _noop)
+ monkeypatch.setattr(inst.PackageInstaller, "_update_installed", _noop)
+ monkeypatch.setattr(inst.PackageInstaller, "_update_failed", _noop)
- msg = 'Cannot proceed with dependent-install'
+ msg = "Cannot proceed with dependent-install"
with pytest.raises(inst.InstallError, match=msg):
installer.install()
out = str(capsys.readouterr())
- assert 'Detected uninstalled dependencies for' in out
+ assert "Detected uninstalled dependencies for" in out
def test_install_failed(install_mockery, monkeypatch, capsys):
"""Test install with failed install."""
- const_arg = installer_args(['b'], {})
+ const_arg = installer_args(["b"], {})
installer = create_installer(const_arg)
# Make sure the package is identified as failed
- monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
+ monkeypatch.setattr(spack.database.Database, "prefix_failed", _true)
- with pytest.raises(inst.InstallError, match='request failed'):
+ with pytest.raises(inst.InstallError, match="request failed"):
installer.install()
out = str(capsys.readouterr())
assert installer.build_requests[0].pkg_id in out
- assert 'failed to install' in out
+ assert "failed to install" in out
def test_install_failed_not_fast(install_mockery, monkeypatch, capsys):
"""Test install with failed install."""
- const_arg = installer_args(['a'], {'fail_fast': False})
+ const_arg = installer_args(["a"], {"fail_fast": False})
installer = create_installer(const_arg)
# Make sure the package is identified as failed
- monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
+ monkeypatch.setattr(spack.database.Database, "prefix_failed", _true)
- with pytest.raises(inst.InstallError, match='request failed'):
+ with pytest.raises(inst.InstallError, match="request failed"):
installer.install()
out = str(capsys.readouterr())
- assert 'failed to install' in out
- assert 'Skipping build of a' in out
+ assert "failed to install" in out
+ assert "Skipping build of a" in out
def test_install_fail_on_interrupt(install_mockery, monkeypatch):
"""Test ctrl-c interrupted install."""
- spec_name = 'a'
- err_msg = 'mock keyboard interrupt for {0}'.format(spec_name)
+ spec_name = "a"
+ err_msg = "mock keyboard interrupt for {0}".format(spec_name)
def _interrupt(installer, task, **kwargs):
if task.pkg.name == spec_name:
@@ -951,19 +947,19 @@ def test_install_fail_on_interrupt(install_mockery, monkeypatch):
installer = create_installer(const_arg)
# Raise a KeyboardInterrupt error to trigger early termination
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _interrupt)
+ monkeypatch.setattr(inst.PackageInstaller, "_install_task", _interrupt)
with pytest.raises(KeyboardInterrupt, match=err_msg):
installer.install()
- assert 'b' in installer.installed # ensure dependency of a is 'installed'
+ assert "b" in installer.installed # ensure dependency of a is 'installed'
assert spec_name not in installer.installed
def test_install_fail_single(install_mockery, monkeypatch):
"""Test expected results for failure of single package."""
- spec_name = 'a'
- err_msg = 'mock internal package build error for {0}'.format(spec_name)
+ spec_name = "a"
+ err_msg = "mock internal package build error for {0}".format(spec_name)
class MyBuildException(Exception):
pass
@@ -978,19 +974,19 @@ def test_install_fail_single(install_mockery, monkeypatch):
installer = create_installer(const_arg)
# Raise a build exception to trigger early termination
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
+ monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install)
with pytest.raises(MyBuildException, match=err_msg):
installer.install()
- assert 'b' in installer.installed # ensure dependency of a is 'installed'
+ assert "b" in installer.installed # ensure dependency of a is 'installed'
assert spec_name not in installer.installed
def test_install_fail_multi(install_mockery, monkeypatch):
"""Test expected results for failure of multiple packages."""
- spec_name = 'c'
- err_msg = 'mock internal package build error'
+ spec_name = "c"
+ err_msg = "mock internal package build error"
class MyBuildException(Exception):
pass
@@ -1001,23 +997,23 @@ def test_install_fail_multi(install_mockery, monkeypatch):
else:
installer.installed.add(task.pkg.name)
- const_arg = installer_args([spec_name, 'a'], {})
+ const_arg = installer_args([spec_name, "a"], {})
installer = create_installer(const_arg)
# Raise a build exception to trigger early termination
- monkeypatch.setattr(inst.PackageInstaller, '_install_task', _install)
+ monkeypatch.setattr(inst.PackageInstaller, "_install_task", _install)
- with pytest.raises(inst.InstallError, match='Installation request failed'):
+ with pytest.raises(inst.InstallError, match="Installation request failed"):
installer.install()
- assert 'a' in installer.installed # ensure the the second spec installed
+ assert "a" in installer.installed # ensure the the second spec installed
assert spec_name not in installer.installed
def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
"""Test fail_fast install when an install failure is detected."""
- const_arg = installer_args(['b'], {'fail_fast': False})
- const_arg.extend(installer_args(['c'], {'fail_fast': True}))
+ const_arg = installer_args(["b"], {"fail_fast": False})
+ const_arg.extend(installer_args(["c"], {"fail_fast": True}))
installer = create_installer(const_arg)
pkg_ids = [inst.package_id(spec.package) for spec, _ in const_arg]
@@ -1025,29 +1021,28 @@ def test_install_fail_fast_on_detect(install_mockery, monkeypatch, capsys):
#
# This will prevent b from installing, which will cause the build of c
# to be skipped.
- monkeypatch.setattr(spack.database.Database, 'prefix_failed', _true)
+ monkeypatch.setattr(spack.database.Database, "prefix_failed", _true)
- with pytest.raises(inst.InstallError, match='after first install failure'):
+ with pytest.raises(inst.InstallError, match="after first install failure"):
installer.install()
- assert pkg_ids[0] in installer.failed, 'Expected b to be marked as failed'
- assert pkg_ids[1] not in installer.failed, \
- 'Expected no attempt to install c'
+ assert pkg_ids[0] in installer.failed, "Expected b to be marked as failed"
+ assert pkg_ids[1] not in installer.failed, "Expected no attempt to install c"
out = capsys.readouterr()[1]
- assert '{0} failed to install'.format(pkg_ids[0]) in out
+ assert "{0} failed to install".format(pkg_ids[0]) in out
def _test_install_fail_fast_on_except_patch(installer, **kwargs):
"""Helper for test_install_fail_fast_on_except."""
# This is a module-scope function and not a local function because it
# needs to be pickleable.
- raise RuntimeError('mock patch failure')
+ raise RuntimeError("mock patch failure")
def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
"""Test fail_fast install when an install failure results from an error."""
- const_arg = installer_args(['a'], {'fail_fast': True})
+ const_arg = installer_args(["a"], {"fail_fast": True})
installer = create_installer(const_arg)
# Raise a non-KeyboardInterrupt exception to trigger fast failure.
@@ -1055,73 +1050,71 @@ def test_install_fail_fast_on_except(install_mockery, monkeypatch, capsys):
# This will prevent b from installing, which will cause the build of a
# to be skipped.
monkeypatch.setattr(
- spack.package_base.PackageBase,
- 'do_patch',
- _test_install_fail_fast_on_except_patch
+ spack.package_base.PackageBase, "do_patch", _test_install_fail_fast_on_except_patch
)
- with pytest.raises(inst.InstallError, match='mock patch failure'):
+ with pytest.raises(inst.InstallError, match="mock patch failure"):
installer.install()
out = str(capsys.readouterr())
- assert 'Skipping build of a' in out
+ assert "Skipping build of a" in out
def test_install_lock_failures(install_mockery, monkeypatch, capfd):
"""Cover basic install lock failure handling in a single pass."""
+
def _requeued(installer, task):
- tty.msg('requeued {0}' .format(task.pkg.spec.name))
+ tty.msg("requeued {0}".format(task.pkg.spec.name))
- const_arg = installer_args(['b'], {})
+ const_arg = installer_args(["b"], {})
installer = create_installer(const_arg)
# Ensure never acquire a lock
- monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _not_locked)
+ monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)
# Ensure don't continually requeue the task
- monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)
+ monkeypatch.setattr(inst.PackageInstaller, "_requeue_task", _requeued)
- with pytest.raises(inst.InstallError, match='request failed'):
+ with pytest.raises(inst.InstallError, match="request failed"):
installer.install()
out = capfd.readouterr()[0]
- expected = ['write locked', 'read locked', 'requeued']
- for exp, ln in zip(expected, out.split('\n')):
+ expected = ["write locked", "read locked", "requeued"]
+ for exp, ln in zip(expected, out.split("\n")):
assert exp in ln
def test_install_lock_installed_requeue(install_mockery, monkeypatch, capfd):
"""Cover basic install handling for installed package."""
- const_arg = installer_args(['b'], {})
+ const_arg = installer_args(["b"], {})
b, _ = const_arg[0]
installer = create_installer(const_arg)
b_pkg_id = inst.package_id(b.package)
def _prep(installer, task):
installer.installed.add(b_pkg_id)
- tty.msg('{0} is installed' .format(b_pkg_id))
+ tty.msg("{0} is installed".format(b_pkg_id))
# also do not allow the package to be locked again
- monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked',
- _not_locked)
+ monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _not_locked)
def _requeued(installer, task):
- tty.msg('requeued {0}' .format(inst.package_id(task.pkg)))
+ tty.msg("requeued {0}".format(inst.package_id(task.pkg)))
# Flag the package as installed
- monkeypatch.setattr(inst.PackageInstaller, '_prepare_for_install', _prep)
+ monkeypatch.setattr(inst.PackageInstaller, "_prepare_for_install", _prep)
# Ensure don't continually requeue the task
- monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)
+ monkeypatch.setattr(inst.PackageInstaller, "_requeue_task", _requeued)
- with pytest.raises(inst.InstallError, match='request failed'):
+ with pytest.raises(inst.InstallError, match="request failed"):
installer.install()
assert b_pkg_id not in installer.installed
out = capfd.readouterr()[0]
- expected = ['is installed', 'read locked', 'requeued']
- for exp, ln in zip(expected, out.split('\n')):
+ expected = ["is installed", "read locked", "requeued"]
+ for exp, ln in zip(expected, out.split("\n")):
assert exp in ln
@@ -1130,44 +1123,43 @@ def test_install_read_locked_requeue(install_mockery, monkeypatch, capfd):
orig_fn = inst.PackageInstaller._ensure_locked
def _read(installer, lock_type, pkg):
- tty.msg('{0}->read locked {1}' .format(lock_type, pkg.spec.name))
- return orig_fn(installer, 'read', pkg)
+ tty.msg("{0}->read locked {1}".format(lock_type, pkg.spec.name))
+ return orig_fn(installer, "read", pkg)
def _prep(installer, task):
- tty.msg('preparing {0}' .format(task.pkg.spec.name))
+ tty.msg("preparing {0}".format(task.pkg.spec.name))
assert task.pkg.spec.name not in installer.installed
def _requeued(installer, task):
- tty.msg('requeued {0}' .format(task.pkg.spec.name))
+ tty.msg("requeued {0}".format(task.pkg.spec.name))
# Force a read lock
- monkeypatch.setattr(inst.PackageInstaller, '_ensure_locked', _read)
+ monkeypatch.setattr(inst.PackageInstaller, "_ensure_locked", _read)
# Flag the package as installed
- monkeypatch.setattr(inst.PackageInstaller, '_prepare_for_install', _prep)
+ monkeypatch.setattr(inst.PackageInstaller, "_prepare_for_install", _prep)
# Ensure don't continually requeue the task
- monkeypatch.setattr(inst.PackageInstaller, '_requeue_task', _requeued)
+ monkeypatch.setattr(inst.PackageInstaller, "_requeue_task", _requeued)
- const_arg = installer_args(['b'], {})
+ const_arg = installer_args(["b"], {})
installer = create_installer(const_arg)
- with pytest.raises(inst.InstallError, match='request failed'):
+ with pytest.raises(inst.InstallError, match="request failed"):
installer.install()
- assert 'b' not in installer.installed
+ assert "b" not in installer.installed
out = capfd.readouterr()[0]
- expected = ['write->read locked', 'preparing', 'requeued']
- for exp, ln in zip(expected, out.split('\n')):
+ expected = ["write->read locked", "preparing", "requeued"]
+ for exp, ln in zip(expected, out.split("\n")):
assert exp in ln
def test_install_skip_patch(install_mockery, mock_fetch):
"""Test the path skip_patch install path."""
- spec_name = 'b'
- const_arg = installer_args([spec_name],
- {'fake': False, 'skip_patch': True})
+ spec_name = "b"
+ const_arg = installer_args([spec_name], {"fake": False, "skip_patch": True})
installer = create_installer(const_arg)
installer.install()
@@ -1178,18 +1170,16 @@ def test_install_skip_patch(install_mockery, mock_fetch):
def test_install_implicit(install_mockery, mock_fetch):
"""Test the path skip_patch install path."""
- spec_name = 'trivial-install-test-package'
- const_arg = installer_args([spec_name],
- {'fake': False})
+ spec_name = "trivial-install-test-package"
+ const_arg = installer_args([spec_name], {"fake": False})
installer = create_installer(const_arg)
pkg = installer.build_requests[0].pkg
- assert not create_build_task(pkg, {'explicit': False}).explicit
- assert create_build_task(pkg, {'explicit': True}).explicit
+ assert not create_build_task(pkg, {"explicit": False}).explicit
+ assert create_build_task(pkg, {"explicit": True}).explicit
assert create_build_task(pkg).explicit
-def test_overwrite_install_backup_success(temporary_store, config, mock_packages,
- tmpdir):
+def test_overwrite_install_backup_success(temporary_store, config, mock_packages, tmpdir):
"""
When doing an overwrite install that fails, Spack should restore the backup
of the original prefix, and leave the original spec marked installed.
@@ -1201,7 +1191,7 @@ def test_overwrite_install_backup_success(temporary_store, config, mock_packages
task = installer._pop_task()
# Make sure the install prefix exists with some trivial file
- installed_file = os.path.join(task.pkg.prefix, 'some_file')
+ installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
class InstallerThatWipesThePrefixDir:
@@ -1222,7 +1212,7 @@ def test_overwrite_install_backup_success(temporary_store, config, mock_packages
# Installation should throw the installation exception, not the backup
# failure.
- with pytest.raises(Exception, match='Some fatal install error'):
+ with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
# Make sure the package is not marked uninstalled and the original dir
@@ -1231,20 +1221,19 @@ def test_overwrite_install_backup_success(temporary_store, config, mock_packages
assert os.path.exists(installed_file)
-def test_overwrite_install_backup_failure(temporary_store, config, mock_packages,
- tmpdir):
+def test_overwrite_install_backup_failure(temporary_store, config, mock_packages, tmpdir):
"""
When doing an overwrite install that fails, Spack should try to recover the
original prefix. If that fails, the spec is lost, and it should be removed
from the database.
"""
+
class InstallerThatAccidentallyDeletesTheBackupDir:
def _install_task(self, task):
# Remove the backup directory, which is at the same level as the prefix,
# starting with .backup
backup_glob = os.path.join(
- os.path.dirname(os.path.normpath(task.pkg.prefix)),
- '.backup*'
+ os.path.dirname(os.path.normpath(task.pkg.prefix)), ".backup*"
)
for backup in glob.iglob(backup_glob):
shutil.rmtree(backup)
@@ -1263,7 +1252,7 @@ def test_overwrite_install_backup_failure(temporary_store, config, mock_packages
task = installer._pop_task()
# Make sure the install prefix exists
- installed_file = os.path.join(task.pkg.prefix, 'some_file')
+ installed_file = os.path.join(task.pkg.prefix, "some_file")
fs.touchp(installed_file)
fake_installer = InstallerThatAccidentallyDeletesTheBackupDir()
@@ -1272,7 +1261,7 @@ def test_overwrite_install_backup_failure(temporary_store, config, mock_packages
# Installation should throw the installation exception, not the backup
# failure.
- with pytest.raises(Exception, match='Some fatal install error'):
+ with pytest.raises(Exception, match="Some fatal install error"):
overwrite_install.install()
# Make sure that `remove` was called on the database after an unsuccessful
@@ -1291,14 +1280,13 @@ def test_term_status_line():
x.clear()
-@pytest.mark.parametrize('explicit_args,is_explicit', [
- ({'explicit': False}, False),
- ({'explicit': True}, True),
- ({}, True)
-])
+@pytest.mark.parametrize(
+ "explicit_args,is_explicit",
+ [({"explicit": False}, False), ({"explicit": True}, True), ({}, True)],
+)
def test_single_external_implicit_install(install_mockery, explicit_args, is_explicit):
- pkg = 'trivial-install-test-package'
+ pkg = "trivial-install-test-package"
s = spack.spec.Spec(pkg).concretized()
- s.external_path = '/usr'
+ s.external_path = "/usr"
create_installer([(s, explicit_args)]).install()
assert spack.store.db.get_record(pkg).explicit == is_explicit
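A note on the helpers used throughout this test module: installer_args and create_installer are defined near the top of the file, outside the hunks shown here. Roughly, and only as an orientation sketch rather than the module's actual code, they behave like:

    import spack.spec
    import spack.installer as inst

    def installer_args(spec_names, kwargs=None):
        # Pair each concretized spec with the install arguments to use for it.
        kwargs = kwargs or {}
        return [(spack.spec.Spec(name).concretized(), kwargs) for name in spec_names]

    def create_installer(const_arg):
        # Build a PackageInstaller from (spec, install-arguments) pairs.
        return inst.PackageInstaller([(spec.package, kwargs) for spec, kwargs in const_arg])

This is why the tests build const_arg with installer_args([...], {...}) and then read requests back from installer.build_requests.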
diff --git a/lib/spack/spack/test/link_paths.py b/lib/spack/spack/test/link_paths.py
index 4e08a26fdf..d9d21f259b 100644
--- a/lib/spack/spack/test/link_paths.py
+++ b/lib/spack/spack/test/link_paths.py
@@ -20,15 +20,14 @@ if is_windows:
root = drive + os.sep
#: directory with sample compiler data
-datadir = os.path.join(spack.paths.test_path, 'data',
- 'compiler_verbose_output')
+datadir = os.path.join(spack.paths.test_path, "data", "compiler_verbose_output")
@pytest.fixture(autouse=True)
def allow_nonexistent_paths(monkeypatch):
# Allow nonexistent paths to be detected as part of the output
# for testing purposes.
- monkeypatch.setattr(os.path, 'isdir', lambda x: True)
+ monkeypatch.setattr(os.path, "isdir", lambda x: True)
def check_link_paths(filename, paths):
@@ -51,7 +50,8 @@ def check_link_paths(filename, paths):
def test_icc16_link_paths():
prefix = os.path.join(root, "usr", "tce", "packages")
check_link_paths(
- 'icc-16.0.3.txt', [
+ "icc-16.0.3.txt",
+ [
os.path.join(
prefix,
"intel",
@@ -66,35 +66,37 @@ def test_icc16_link_paths():
prefix, "gcc", "gcc-4.9.3", "lib64", "gcc", "x86_64-unknown-linux-gnu", "4.9.3"
),
os.path.join(prefix, "gcc", "gcc-4.9.3", "lib64"),
- ]
+ ],
)
def test_pgi_link_paths():
check_link_paths(
- 'pgcc-16.3.txt', [
+ "pgcc-16.3.txt",
+ [
os.path.join(
root, "usr", "tce", "packages", "pgi", "pgi-16.3", "linux86-64", "16.3", "lib"
)
- ]
+ ],
)
def test_gcc7_link_paths():
- check_link_paths('gcc-7.3.1.txt', [])
+ check_link_paths("gcc-7.3.1.txt", [])
def test_clang4_link_paths():
- check_link_paths('clang-4.0.1.txt', [])
+ check_link_paths("clang-4.0.1.txt", [])
def test_xl_link_paths():
check_link_paths(
- 'xl-13.1.5.txt', [
+ "xl-13.1.5.txt",
+ [
os.path.join(root, "opt", "ibm", "xlsmp", "4.1.5", "lib"),
os.path.join(root, "opt", "ibm", "xlmass", "8.1.5", "lib"),
os.path.join(root, "opt", "ibm", "xlC", "13.1.5", "lib"),
- ]
+ ],
)
@@ -102,7 +104,8 @@ def test_cce_link_paths():
gcc = os.path.join(root, "opt", "gcc")
cray = os.path.join(root, "opt", "cray")
check_link_paths(
- 'cce-8.6.5.txt', [
+ "cce-8.6.5.txt",
+ [
os.path.join(gcc, "6.1.0", "snos", "lib64"),
os.path.join(cray, "dmapp", "default", "lib64"),
os.path.join(cray, "pe", "mpt", "7.7.0", "gni", "mpich-cray", "8.6", "lib"),
@@ -119,22 +122,16 @@ def test_cce_link_paths():
os.path.join(cray, "wlm_detect", "1.3.2-6.0.5.0_3.1__g388ccd5.ari", "lib64"),
os.path.join(gcc, "6.1.0", "snos", "lib", "gcc", "x86_64-suse-linux", "6.1.0"),
os.path.join(
- cray,
- "pe",
- "cce",
- "8.6.5",
- "binutils",
- "x86_64",
- "x86_64-unknown-linux-gnu",
- "lib"
+ cray, "pe", "cce", "8.6.5", "binutils", "x86_64", "x86_64-unknown-linux-gnu", "lib"
),
- ]
+ ],
)
def test_clang_apple_ld_link_paths():
check_link_paths(
- 'clang-9.0.0-apple-ld.txt', [
+ "clang-9.0.0-apple-ld.txt",
+ [
os.path.join(
root,
"Applications",
@@ -149,7 +146,7 @@ def test_clang_apple_ld_link_paths():
"usr",
"lib",
)
- ]
+ ],
)
@@ -170,11 +167,12 @@ def test_nag_mixed_gcc_gnu_ld_link_paths():
)
check_link_paths(
- 'collect2-6.3.0-gnu-ld.txt', [
+ "collect2-6.3.0-gnu-ld.txt",
+ [
os.path.join(prefix, "lib", "gcc", "x86_64-pc-linux-gnu", "6.5.0"),
os.path.join(prefix, "lib64"),
os.path.join(prefix, "lib"),
- ]
+ ],
)
@@ -195,11 +193,12 @@ def test_nag_link_paths():
)
check_link_paths(
- 'nag-6.2-gcc-6.5.0.txt', [
+ "nag-6.2-gcc-6.5.0.txt",
+ [
os.path.join(prefix, "lib", "gcc", "x86_64-pc-linux-gnu", "6.5.0"),
os.path.join(prefix, "lib64"),
os.path.join(prefix, "lib"),
- ]
+ ],
)
@@ -214,4 +213,4 @@ def test_obscure_parsing_rules():
if is_windows:
paths.remove(os.path.join(root, "second", "path"))
- check_link_paths('obscure-parsing-rules.txt', paths)
+ check_link_paths("obscure-parsing-rules.txt", paths)
diff --git a/lib/spack/spack/test/llnl/util/argparsewriter.py b/lib/spack/spack/test/llnl/util/argparsewriter.py
index 00e967a97a..e8d88cd70b 100644
--- a/lib/spack/spack/test/llnl/util/argparsewriter.py
+++ b/lib/spack/spack/test/llnl/util/argparsewriter.py
@@ -20,17 +20,17 @@ spack.main.add_all_commands(parser)
def test_format_not_overridden():
- writer = aw.ArgparseWriter('spack')
+ writer = aw.ArgparseWriter("spack")
with pytest.raises(NotImplementedError):
writer.write(parser)
def test_completion_format_not_overridden():
- writer = aw.ArgparseCompletionWriter('spack')
+ writer = aw.ArgparseCompletionWriter("spack")
- assert writer.positionals([]) == ''
- assert writer.optionals([]) == ''
- assert writer.subcommands([]) == ''
+ assert writer.positionals([]) == ""
+ assert writer.optionals([]) == ""
+ assert writer.subcommands([]) == ""
writer.write(parser)
diff --git a/lib/spack/spack/test/llnl/util/file_list.py b/lib/spack/spack/test/llnl/util/file_list.py
index e3c60aa65e..9ae33a90b5 100644
--- a/lib/spack/spack/test/llnl/util/file_list.py
+++ b/lib/spack/spack/test/llnl/util/file_list.py
@@ -25,11 +25,11 @@ def library_list():
"""Returns an instance of LibraryList."""
# Test all valid extensions: ['.a', '.dylib', '.so']
libs = [
- '/dir1/liblapack.a',
- '/dir2/libpython3.6.dylib', # name may contain periods
- '/dir1/libblas.a',
- '/dir3/libz.so',
- 'libmpi.so.20.10.1', # shared object libraries may be versioned
+ "/dir1/liblapack.a",
+ "/dir2/libpython3.6.dylib", # name may contain periods
+ "/dir1/libblas.a",
+ "/dir3/libz.so",
+ "libmpi.so.20.10.1", # shared object libraries may be versioned
]
return LibraryList(libs)
@@ -40,20 +40,19 @@ def header_list():
"""Returns an instance of header list"""
# Test all valid extensions: ['.h', '.hpp', '.hh', '.cuh']
headers = [
- '/dir1/Python.h',
- '/dir2/date.time.h',
- '/dir1/pyconfig.hpp',
- '/dir3/core.hh',
- 'pymem.cuh',
+ "/dir1/Python.h",
+ "/dir2/date.time.h",
+ "/dir1/pyconfig.hpp",
+ "/dir3/core.hh",
+ "pymem.cuh",
]
h = HeaderList(headers)
- h.add_macro('-DBOOST_LIB_NAME=boost_regex')
- h.add_macro('-DBOOST_DYN_LINK')
+ h.add_macro("-DBOOST_LIB_NAME=boost_regex")
+ h.add_macro("-DBOOST_DYN_LINK")
return h
class TestLibraryList(object):
-
def test_repr(self, library_list):
x = eval(repr(library_list))
assert library_list == x
@@ -61,60 +60,64 @@ class TestLibraryList(object):
def test_joined_and_str(self, library_list):
s1 = library_list.joined()
- expected = " ".join([
- "/dir1/liblapack.a",
- "/dir2/libpython3.6.dylib",
- "/dir1/libblas.a",
- "/dir3/libz.so",
- "libmpi.so.20.10.1",
- ])
+ expected = " ".join(
+ [
+ "/dir1/liblapack.a",
+ "/dir2/libpython3.6.dylib",
+ "/dir1/libblas.a",
+ "/dir3/libz.so",
+ "libmpi.so.20.10.1",
+ ]
+ )
assert s1 == expected
s2 = str(library_list)
assert s1 == s2
- s3 = library_list.joined(';')
- expected = ";".join([
- "/dir1/liblapack.a",
- "/dir2/libpython3.6.dylib",
- "/dir1/libblas.a",
- "/dir3/libz.so",
- "libmpi.so.20.10.1",
- ])
+ s3 = library_list.joined(";")
+ expected = ";".join(
+ [
+ "/dir1/liblapack.a",
+ "/dir2/libpython3.6.dylib",
+ "/dir1/libblas.a",
+ "/dir3/libz.so",
+ "libmpi.so.20.10.1",
+ ]
+ )
assert s3 == expected
def test_flags(self, library_list):
search_flags = library_list.search_flags
- assert '-L/dir1' in search_flags
- assert '-L/dir2' in search_flags
- assert '-L/dir3' in search_flags
+ assert "-L/dir1" in search_flags
+ assert "-L/dir2" in search_flags
+ assert "-L/dir3" in search_flags
assert isinstance(search_flags, str)
- assert search_flags == '-L/dir1 -L/dir2 -L/dir3'
+ assert search_flags == "-L/dir1 -L/dir2 -L/dir3"
link_flags = library_list.link_flags
- assert '-llapack' in link_flags
- assert '-lpython3.6' in link_flags
- assert '-lblas' in link_flags
- assert '-lz' in link_flags
- assert '-lmpi' in link_flags
+ assert "-llapack" in link_flags
+ assert "-lpython3.6" in link_flags
+ assert "-lblas" in link_flags
+ assert "-lz" in link_flags
+ assert "-lmpi" in link_flags
assert isinstance(link_flags, str)
- assert link_flags == '-llapack -lpython3.6 -lblas -lz -lmpi'
+ assert link_flags == "-llapack -lpython3.6 -lblas -lz -lmpi"
ld_flags = library_list.ld_flags
assert isinstance(ld_flags, str)
- assert ld_flags == search_flags + ' ' + link_flags
+ assert ld_flags == search_flags + " " + link_flags
def test_paths_manipulation(self, library_list):
names = library_list.names
- assert names == ['lapack', 'python3.6', 'blas', 'z', 'mpi']
+ assert names == ["lapack", "python3.6", "blas", "z", "mpi"]
directories = library_list.directories
- assert directories == ['/dir1', '/dir2', '/dir3']
+ assert directories == ["/dir1", "/dir2", "/dir3"]
def test_get_item(self, library_list):
a = library_list[0]
- assert a == '/dir1/liblapack.a'
+ assert a == "/dir1/liblapack.a"
b = library_list[:]
assert type(b) == type(library_list)
@@ -123,9 +126,9 @@ class TestLibraryList(object):
def test_add(self, library_list):
pylist = [
- '/dir1/liblapack.a', # removed from the final list
- '/dir2/libmpi.so',
- '/dir4/libnew.a'
+ "/dir1/liblapack.a", # removed from the final list
+ "/dir2/libmpi.so",
+ "/dir4/libnew.a",
]
another = LibraryList(pylist)
both = library_list + another
@@ -140,63 +143,66 @@ class TestLibraryList(object):
class TestHeaderList(object):
-
def test_repr(self, header_list):
x = eval(repr(header_list))
assert header_list == x
def test_joined_and_str(self, header_list):
s1 = header_list.joined()
- expected = " ".join([
- "/dir1/Python.h",
- "/dir2/date.time.h",
- "/dir1/pyconfig.hpp",
- "/dir3/core.hh",
- "pymem.cuh",
- ])
+ expected = " ".join(
+ [
+ "/dir1/Python.h",
+ "/dir2/date.time.h",
+ "/dir1/pyconfig.hpp",
+ "/dir3/core.hh",
+ "pymem.cuh",
+ ]
+ )
assert s1 == expected
s2 = str(header_list)
assert s1 == s2
- s3 = header_list.joined(';')
- expected = ";".join([
- "/dir1/Python.h",
- "/dir2/date.time.h",
- "/dir1/pyconfig.hpp",
- "/dir3/core.hh",
- "pymem.cuh",
- ])
+ s3 = header_list.joined(";")
+ expected = ";".join(
+ [
+ "/dir1/Python.h",
+ "/dir2/date.time.h",
+ "/dir1/pyconfig.hpp",
+ "/dir3/core.hh",
+ "pymem.cuh",
+ ]
+ )
assert s3 == expected
def test_flags(self, header_list):
include_flags = header_list.include_flags
- assert '-I/dir1' in include_flags
- assert '-I/dir2' in include_flags
- assert '-I/dir3' in include_flags
+ assert "-I/dir1" in include_flags
+ assert "-I/dir2" in include_flags
+ assert "-I/dir3" in include_flags
assert isinstance(include_flags, str)
- assert include_flags == '-I/dir1 -I/dir2 -I/dir3'
+ assert include_flags == "-I/dir1 -I/dir2 -I/dir3"
macros = header_list.macro_definitions
- assert '-DBOOST_LIB_NAME=boost_regex' in macros
- assert '-DBOOST_DYN_LINK' in macros
+ assert "-DBOOST_LIB_NAME=boost_regex" in macros
+ assert "-DBOOST_DYN_LINK" in macros
assert isinstance(macros, str)
- assert macros == '-DBOOST_LIB_NAME=boost_regex -DBOOST_DYN_LINK'
+ assert macros == "-DBOOST_LIB_NAME=boost_regex -DBOOST_DYN_LINK"
cpp_flags = header_list.cpp_flags
assert isinstance(cpp_flags, str)
- assert cpp_flags == include_flags + ' ' + macros
+ assert cpp_flags == include_flags + " " + macros
def test_paths_manipulation(self, header_list):
names = header_list.names
- assert names == ['Python', 'date.time', 'pyconfig', 'core', 'pymem']
+ assert names == ["Python", "date.time", "pyconfig", "core", "pymem"]
directories = header_list.directories
- assert directories == ['/dir1', '/dir2', '/dir3']
+ assert directories == ["/dir1", "/dir2", "/dir3"]
def test_get_item(self, header_list):
a = header_list[0]
- assert a == '/dir1/Python.h'
+ assert a == "/dir1/Python.h"
b = header_list[:]
assert type(b) == type(header_list)
@@ -205,9 +211,9 @@ class TestHeaderList(object):
def test_add(self, header_list):
pylist = [
- '/dir1/Python.h', # removed from the final list
- '/dir2/pyconfig.hpp',
- '/dir4/date.time.h'
+ "/dir1/Python.h", # removed from the final list
+ "/dir2/pyconfig.hpp",
+ "/dir4/date.time.h",
]
another = HeaderList(pylist)
h = header_list + another
@@ -222,46 +228,38 @@ class TestHeaderList(object):
#: Directory where the data for the test below is stored
-search_dir = os.path.join(spack.paths.test_path, 'data', 'directory_search')
-
-
-@pytest.mark.parametrize('search_fn,search_list,root,kwargs', [
- (find_libraries, 'liba', search_dir, {'recursive': True}),
- (find_libraries, ['liba'], search_dir, {'recursive': True}),
- (find_libraries, 'libb', search_dir, {'recursive': True}),
- (find_libraries, ['libc'], search_dir, {'recursive': True}),
- (find_libraries, ['libc', 'liba'], search_dir, {'recursive': True}),
- (find_libraries, ['liba', 'libc'], search_dir, {'recursive': True}),
- (find_libraries,
- ['libc', 'libb', 'liba'],
- search_dir,
- {'recursive': True}
- ),
- (find_libraries, ['liba', 'libc'], search_dir, {'recursive': True}),
- (find_libraries,
- ['libc', 'libb', 'liba'],
- search_dir,
- {'recursive': True, 'shared': False}
- ),
- (find_headers, 'a', search_dir, {'recursive': True}),
- (find_headers, ['a'], search_dir, {'recursive': True}),
- (find_headers, 'b', search_dir, {'recursive': True}),
- (find_headers, ['c'], search_dir, {'recursive': True}),
- (find_headers, ['c', 'a'], search_dir, {'recursive': True}),
- (find_headers, ['a', 'c'], search_dir, {'recursive': True}),
- (find_headers, ['c', 'b', 'a'], search_dir, {'recursive': True}),
- (find_headers, ['a', 'c'], search_dir, {'recursive': True}),
- (find_libraries,
- ['liba', 'libd'],
- os.path.join(search_dir, 'b'),
- {'recursive': False}
- ),
- (find_headers,
- ['b', 'd'],
- os.path.join(search_dir, 'b'),
- {'recursive': False}
- ),
-])
+search_dir = os.path.join(spack.paths.test_path, "data", "directory_search")
+
+
+@pytest.mark.parametrize(
+ "search_fn,search_list,root,kwargs",
+ [
+ (find_libraries, "liba", search_dir, {"recursive": True}),
+ (find_libraries, ["liba"], search_dir, {"recursive": True}),
+ (find_libraries, "libb", search_dir, {"recursive": True}),
+ (find_libraries, ["libc"], search_dir, {"recursive": True}),
+ (find_libraries, ["libc", "liba"], search_dir, {"recursive": True}),
+ (find_libraries, ["liba", "libc"], search_dir, {"recursive": True}),
+ (find_libraries, ["libc", "libb", "liba"], search_dir, {"recursive": True}),
+ (find_libraries, ["liba", "libc"], search_dir, {"recursive": True}),
+ (
+ find_libraries,
+ ["libc", "libb", "liba"],
+ search_dir,
+ {"recursive": True, "shared": False},
+ ),
+ (find_headers, "a", search_dir, {"recursive": True}),
+ (find_headers, ["a"], search_dir, {"recursive": True}),
+ (find_headers, "b", search_dir, {"recursive": True}),
+ (find_headers, ["c"], search_dir, {"recursive": True}),
+ (find_headers, ["c", "a"], search_dir, {"recursive": True}),
+ (find_headers, ["a", "c"], search_dir, {"recursive": True}),
+ (find_headers, ["c", "b", "a"], search_dir, {"recursive": True}),
+ (find_headers, ["a", "c"], search_dir, {"recursive": True}),
+ (find_libraries, ["liba", "libd"], os.path.join(search_dir, "b"), {"recursive": False}),
+ (find_headers, ["b", "d"], os.path.join(search_dir, "b"), {"recursive": False}),
+ ],
+)
def test_searching_order(search_fn, search_list, root, kwargs):
# Test search
@@ -292,18 +290,31 @@ def test_searching_order(search_fn, search_list, root, kwargs):
assert len(rlist) == 0
-@pytest.mark.parametrize('root,search_list,kwargs,expected', [
- (search_dir, '*/*bar.tx?', {'recursive': False}, [
- os.path.join(search_dir, os.path.join('a', 'foobar.txt')),
- os.path.join(search_dir, os.path.join('b', 'bar.txp')),
- os.path.join(search_dir, os.path.join('c', 'bar.txt')),
- ]),
- (search_dir, '*/*bar.tx?', {'recursive': True}, [
- os.path.join(search_dir, os.path.join('a', 'foobar.txt')),
- os.path.join(search_dir, os.path.join('b', 'bar.txp')),
- os.path.join(search_dir, os.path.join('c', 'bar.txt')),
- ])
-])
+@pytest.mark.parametrize(
+ "root,search_list,kwargs,expected",
+ [
+ (
+ search_dir,
+ "*/*bar.tx?",
+ {"recursive": False},
+ [
+ os.path.join(search_dir, os.path.join("a", "foobar.txt")),
+ os.path.join(search_dir, os.path.join("b", "bar.txp")),
+ os.path.join(search_dir, os.path.join("c", "bar.txt")),
+ ],
+ ),
+ (
+ search_dir,
+ "*/*bar.tx?",
+ {"recursive": True},
+ [
+ os.path.join(search_dir, os.path.join("a", "foobar.txt")),
+ os.path.join(search_dir, os.path.join("b", "bar.txp")),
+ os.path.join(search_dir, os.path.join("c", "bar.txt")),
+ ],
+ ),
+ ],
+)
def test_find_with_globbing(root, search_list, kwargs, expected):
matches = find(root, search_list, **kwargs)
assert sorted(matches) == sorted(expected)
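As a quick usage sketch for the search helpers exercised above (hypothetical paths and outputs; the import follows what this test module already uses): find_libraries and find_headers return LibraryList/HeaderList objects whose directories and flag properties the assertions compare.

    from llnl.util.filesystem import find_libraries

    libs = find_libraries(["libblas", "liblapack"], root="/opt/prefix", recursive=True)
    print(libs.directories)   # e.g. ["/opt/prefix/lib"]
    print(libs.search_flags)  # e.g. "-L/opt/prefix/lib"
    print(libs.link_flags)    # e.g. "-lblas -llapack"
    print(libs.ld_flags)      # search_flags + " " + link_flags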
diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py
index c2052223f9..559ff5bace 100644
--- a/lib/spack/spack/test/llnl/util/filesystem.py
+++ b/lib/spack/spack/test/llnl/util/filesystem.py
@@ -21,28 +21,28 @@ import spack.paths
def stage(tmpdir_factory):
"""Creates a stage with the directory structure for the tests."""
- s = tmpdir_factory.mktemp('filesystem_test')
+ s = tmpdir_factory.mktemp("filesystem_test")
with s.as_cwd():
# Create source file hierarchy
- fs.touchp('source/1')
- fs.touchp('source/a/b/2')
- fs.touchp('source/a/b/3')
- fs.touchp('source/c/4')
- fs.touchp('source/c/d/5')
- fs.touchp('source/c/d/6')
- fs.touchp('source/c/d/e/7')
- fs.touchp('source/g/h/i/8')
- fs.touchp('source/g/h/i/9')
- fs.touchp('source/g/i/j/10')
+ fs.touchp("source/1")
+ fs.touchp("source/a/b/2")
+ fs.touchp("source/a/b/3")
+ fs.touchp("source/c/4")
+ fs.touchp("source/c/d/5")
+ fs.touchp("source/c/d/6")
+ fs.touchp("source/c/d/e/7")
+ fs.touchp("source/g/h/i/8")
+ fs.touchp("source/g/h/i/9")
+ fs.touchp("source/g/i/j/10")
# Create symlinks
- symlink(os.path.abspath('source/1'), 'source/2')
- symlink('b/2', 'source/a/b2')
- symlink('a/b', 'source/f')
+ symlink(os.path.abspath("source/1"), "source/2")
+ symlink("b/2", "source/a/b2")
+ symlink("a/b", "source/f")
# Create destination directory
- fs.mkdirp('dest')
+ fs.mkdirp("dest")
yield s
@@ -54,42 +54,42 @@ class TestCopy:
"""Test using a filename as the destination."""
with fs.working_dir(str(stage)):
- fs.copy('source/1', 'dest/1')
+ fs.copy("source/1", "dest/1")
- assert os.path.exists('dest/1')
+ assert os.path.exists("dest/1")
def test_dir_dest(self, stage):
"""Test using a directory as the destination."""
with fs.working_dir(str(stage)):
- fs.copy('source/1', 'dest')
+ fs.copy("source/1", "dest")
- assert os.path.exists('dest/1')
+ assert os.path.exists("dest/1")
def test_glob_src(self, stage):
"""Test using a glob as the source."""
with fs.working_dir(str(stage)):
- fs.copy('source/a/*/*', 'dest')
+ fs.copy("source/a/*/*", "dest")
- assert os.path.exists('dest/2')
- assert os.path.exists('dest/3')
+ assert os.path.exists("dest/2")
+ assert os.path.exists("dest/3")
def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
- with pytest.raises(IOError, match='No such file or directory'):
- fs.copy('source/none', 'dest')
+ with pytest.raises(IOError, match="No such file or directory"):
+ fs.copy("source/none", "dest")
def test_multiple_src_file_dest(self, stage):
"""Test a glob that matches multiple source files and a dest
that is not a directory."""
with fs.working_dir(str(stage)):
- match = '.* matches multiple files but .* is not a directory'
+ match = ".* matches multiple files but .* is not a directory"
with pytest.raises(ValueError, match=match):
- fs.copy('source/a/*/*', 'dest/1')
+ fs.copy("source/a/*/*", "dest/1")
def check_added_exe_permissions(src, dst):
@@ -107,49 +107,49 @@ class TestInstall:
"""Test using a filename as the destination."""
with fs.working_dir(str(stage)):
- fs.install('source/1', 'dest/1')
+ fs.install("source/1", "dest/1")
- assert os.path.exists('dest/1')
- check_added_exe_permissions('source/1', 'dest/1')
+ assert os.path.exists("dest/1")
+ check_added_exe_permissions("source/1", "dest/1")
def test_dir_dest(self, stage):
"""Test using a directory as the destination."""
with fs.working_dir(str(stage)):
- fs.install('source/1', 'dest')
+ fs.install("source/1", "dest")
- assert os.path.exists('dest/1')
- check_added_exe_permissions('source/1', 'dest/1')
+ assert os.path.exists("dest/1")
+ check_added_exe_permissions("source/1", "dest/1")
def test_glob_src(self, stage):
"""Test using a glob as the source."""
with fs.working_dir(str(stage)):
- fs.install('source/a/*/*', 'dest')
+ fs.install("source/a/*/*", "dest")
- assert os.path.exists('dest/2')
- assert os.path.exists('dest/3')
- check_added_exe_permissions('source/a/b/2', 'dest/2')
- check_added_exe_permissions('source/a/b/3', 'dest/3')
+ assert os.path.exists("dest/2")
+ assert os.path.exists("dest/3")
+ check_added_exe_permissions("source/a/b/2", "dest/2")
+ check_added_exe_permissions("source/a/b/3", "dest/3")
def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
- with pytest.raises(IOError, match='No such file or directory'):
- fs.install('source/none', 'dest')
+ with pytest.raises(IOError, match="No such file or directory"):
+ fs.install("source/none", "dest")
def test_multiple_src_file_dest(self, stage):
"""Test a glob that matches multiple source files and a dest
that is not a directory."""
with fs.working_dir(str(stage)):
- match = '.* matches multiple files but .* is not a directory'
+ match = ".* matches multiple files but .* is not a directory"
with pytest.raises(ValueError, match=match):
- fs.install('source/a/*/*', 'dest/1')
+ fs.install("source/a/*/*", "dest/1")
-@pytest.mark.skipif(sys.platform == 'win32', reason="Skip test on Windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="Skip test on Windows")
class TestCopyTree:
"""Tests for ``filesystem.copy_tree``"""
@@ -157,82 +157,80 @@ class TestCopyTree:
"""Test copying to an existing directory."""
with fs.working_dir(str(stage)):
- fs.copy_tree('source', 'dest')
+ fs.copy_tree("source", "dest")
- assert os.path.exists('dest/a/b/2')
+ assert os.path.exists("dest/a/b/2")
def test_non_existing_dir(self, stage):
"""Test copying to a non-existing directory."""
with fs.working_dir(str(stage)):
- fs.copy_tree('source', 'dest/sub/directory')
+ fs.copy_tree("source", "dest/sub/directory")
- assert os.path.exists('dest/sub/directory/a/b/2')
+ assert os.path.exists("dest/sub/directory/a/b/2")
def test_symlinks_true(self, stage):
"""Test copying with symlink preservation."""
with fs.working_dir(str(stage)):
- fs.copy_tree('source', 'dest', symlinks=True)
+ fs.copy_tree("source", "dest", symlinks=True)
- assert os.path.exists('dest/2')
- assert islink('dest/2')
+ assert os.path.exists("dest/2")
+ assert islink("dest/2")
- assert os.path.exists('dest/a/b2')
- with fs.working_dir('dest/a'):
- assert os.path.exists(os.readlink('b2'))
+ assert os.path.exists("dest/a/b2")
+ with fs.working_dir("dest/a"):
+ assert os.path.exists(os.readlink("b2"))
- assert (os.path.realpath('dest/f/2') ==
- os.path.abspath('dest/a/b/2'))
- assert os.path.realpath('dest/2') == os.path.abspath('dest/1')
+ assert os.path.realpath("dest/f/2") == os.path.abspath("dest/a/b/2")
+ assert os.path.realpath("dest/2") == os.path.abspath("dest/1")
def test_symlinks_true_ignore(self, stage):
- """Test copying when specifying relative paths that should be ignored
- """
+ """Test copying when specifying relative paths that should be ignored"""
with fs.working_dir(str(stage)):
- ignore = lambda p: p in ['c/d/e', 'a']
- fs.copy_tree('source', 'dest', symlinks=True, ignore=ignore)
- assert not os.path.exists('dest/a')
- assert os.path.exists('dest/c/d')
- assert not os.path.exists('dest/c/d/e')
+ ignore = lambda p: p in ["c/d/e", "a"]
+ fs.copy_tree("source", "dest", symlinks=True, ignore=ignore)
+ assert not os.path.exists("dest/a")
+ assert os.path.exists("dest/c/d")
+ assert not os.path.exists("dest/c/d/e")
def test_symlinks_false(self, stage):
"""Test copying without symlink preservation."""
with fs.working_dir(str(stage)):
- fs.copy_tree('source', 'dest', symlinks=False)
+ fs.copy_tree("source", "dest", symlinks=False)
- assert os.path.exists('dest/2')
+ assert os.path.exists("dest/2")
if sys.platform != "win32":
- assert not os.path.islink('dest/2')
+ assert not os.path.islink("dest/2")
def test_glob_src(self, stage):
"""Test using a glob as the source."""
with fs.working_dir(str(stage)):
- fs.copy_tree('source/g/*', 'dest')
+ fs.copy_tree("source/g/*", "dest")
- assert os.path.exists('dest/i/8')
- assert os.path.exists('dest/i/9')
- assert os.path.exists('dest/j/10')
+ assert os.path.exists("dest/i/8")
+ assert os.path.exists("dest/i/9")
+ assert os.path.exists("dest/j/10")
def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
- with pytest.raises(IOError, match='No such file or directory'):
- fs.copy_tree('source/none', 'dest')
+ with pytest.raises(IOError, match="No such file or directory"):
+ fs.copy_tree("source/none", "dest")
def test_parent_dir(self, stage):
"""Test source as a parent directory of destination."""
with fs.working_dir(str(stage)):
- match = 'Cannot copy ancestor directory'
+ match = "Cannot copy ancestor directory"
with pytest.raises(ValueError, match=match):
- fs.copy_tree('source', 'source/sub/directory')
+ fs.copy_tree("source", "source/sub/directory")
-@pytest.mark.skipif(sys.platform == 'win32', reason="Skip test on Windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="Skip test on Windows")
class TestInstallTree:
"""Tests for ``filesystem.install_tree``"""
@@ -240,121 +238,118 @@ class TestInstallTree:
"""Test installing to an existing directory."""
with fs.working_dir(str(stage)):
- fs.install_tree('source', 'dest')
+ fs.install_tree("source", "dest")
- assert os.path.exists('dest/a/b/2')
- check_added_exe_permissions('source/a/b/2', 'dest/a/b/2')
+ assert os.path.exists("dest/a/b/2")
+ check_added_exe_permissions("source/a/b/2", "dest/a/b/2")
def test_non_existing_dir(self, stage):
"""Test installing to a non-existing directory."""
with fs.working_dir(str(stage)):
- fs.install_tree('source', 'dest/sub/directory')
+ fs.install_tree("source", "dest/sub/directory")
- assert os.path.exists('dest/sub/directory/a/b/2')
- check_added_exe_permissions(
- 'source/a/b/2', 'dest/sub/directory/a/b/2')
+ assert os.path.exists("dest/sub/directory/a/b/2")
+ check_added_exe_permissions("source/a/b/2", "dest/sub/directory/a/b/2")
def test_symlinks_true(self, stage):
"""Test installing with symlink preservation."""
with fs.working_dir(str(stage)):
- fs.install_tree('source', 'dest', symlinks=True)
+ fs.install_tree("source", "dest", symlinks=True)
- assert os.path.exists('dest/2')
+ assert os.path.exists("dest/2")
if sys.platform != "win32":
- assert os.path.islink('dest/2')
- check_added_exe_permissions('source/2', 'dest/2')
+ assert os.path.islink("dest/2")
+ check_added_exe_permissions("source/2", "dest/2")
def test_symlinks_false(self, stage):
"""Test installing without symlink preservation."""
with fs.working_dir(str(stage)):
- fs.install_tree('source', 'dest', symlinks=False)
+ fs.install_tree("source", "dest", symlinks=False)
- assert os.path.exists('dest/2')
+ assert os.path.exists("dest/2")
if sys.platform != "win32":
- assert not os.path.islink('dest/2')
- check_added_exe_permissions('source/2', 'dest/2')
+ assert not os.path.islink("dest/2")
+ check_added_exe_permissions("source/2", "dest/2")
def test_glob_src(self, stage):
"""Test using a glob as the source."""
with fs.working_dir(str(stage)):
- fs.install_tree('source/g/*', 'dest')
+ fs.install_tree("source/g/*", "dest")
- assert os.path.exists('dest/i/8')
- assert os.path.exists('dest/i/9')
- assert os.path.exists('dest/j/10')
- check_added_exe_permissions('source/g/h/i/8', 'dest/i/8')
- check_added_exe_permissions('source/g/h/i/9', 'dest/i/9')
- check_added_exe_permissions('source/g/i/j/10', 'dest/j/10')
+ assert os.path.exists("dest/i/8")
+ assert os.path.exists("dest/i/9")
+ assert os.path.exists("dest/j/10")
+ check_added_exe_permissions("source/g/h/i/8", "dest/i/8")
+ check_added_exe_permissions("source/g/h/i/9", "dest/i/9")
+ check_added_exe_permissions("source/g/i/j/10", "dest/j/10")
def test_non_existing_src(self, stage):
"""Test using a non-existing source."""
with fs.working_dir(str(stage)):
- with pytest.raises(IOError, match='No such file or directory'):
- fs.install_tree('source/none', 'dest')
+ with pytest.raises(IOError, match="No such file or directory"):
+ fs.install_tree("source/none", "dest")
def test_parent_dir(self, stage):
"""Test source as a parent directory of destination."""
with fs.working_dir(str(stage)):
- match = 'Cannot copy ancestor directory'
+ match = "Cannot copy ancestor directory"
with pytest.raises(ValueError, match=match):
- fs.install_tree('source', 'source/sub/directory')
+ fs.install_tree("source", "source/sub/directory")
def test_paths_containing_libs(dirs_with_libfiles):
lib_to_dirs, all_dirs = dirs_with_libfiles
- assert (set(fs.paths_containing_libs(all_dirs, ['libgfortran'])) ==
- set(lib_to_dirs['libgfortran']))
+ assert set(fs.paths_containing_libs(all_dirs, ["libgfortran"])) == set(
+ lib_to_dirs["libgfortran"]
+ )
- assert (set(fs.paths_containing_libs(all_dirs, ['libirc'])) ==
- set(lib_to_dirs['libirc']))
+ assert set(fs.paths_containing_libs(all_dirs, ["libirc"])) == set(lib_to_dirs["libirc"])
def test_move_transaction_commit(tmpdir):
- fake_library = tmpdir.mkdir('lib').join('libfoo.so')
- fake_library.write('Just some fake content.')
+ fake_library = tmpdir.mkdir("lib").join("libfoo.so")
+ fake_library.write("Just some fake content.")
- with fs.replace_directory_transaction(str(tmpdir.join('lib'))) as backup:
+ with fs.replace_directory_transaction(str(tmpdir.join("lib"))) as backup:
assert os.path.isdir(backup)
- fake_library = tmpdir.mkdir('lib').join('libfoo.so')
- fake_library.write('Other content.')
+ fake_library = tmpdir.mkdir("lib").join("libfoo.so")
+ fake_library.write("Other content.")
assert not os.path.lexists(backup)
- with open(str(tmpdir.join('lib', 'libfoo.so')), 'r') as f:
- assert 'Other content.' == f.read()
+ with open(str(tmpdir.join("lib", "libfoo.so")), "r") as f:
+ assert "Other content." == f.read()
def test_move_transaction_rollback(tmpdir):
- fake_library = tmpdir.mkdir('lib').join('libfoo.so')
- fake_library.write('Initial content.')
+ fake_library = tmpdir.mkdir("lib").join("libfoo.so")
+ fake_library.write("Initial content.")
try:
- with fs.replace_directory_transaction(str(tmpdir.join('lib'))) as backup:
+ with fs.replace_directory_transaction(str(tmpdir.join("lib"))) as backup:
assert os.path.isdir(backup)
- fake_library = tmpdir.mkdir('lib').join('libfoo.so')
- fake_library.write('New content.')
- raise RuntimeError('')
+ fake_library = tmpdir.mkdir("lib").join("libfoo.so")
+ fake_library.write("New content.")
+ raise RuntimeError("")
except RuntimeError:
pass
assert not os.path.lexists(backup)
- with open(str(tmpdir.join('lib', 'libfoo.so')), 'r') as f:
- assert 'Initial content.' == f.read()
+ with open(str(tmpdir.join("lib", "libfoo.so")), "r") as f:
+ assert "Initial content." == f.read()
-@pytest.mark.regression('10601')
-@pytest.mark.regression('10603')
-def test_recursive_search_of_headers_from_prefix(
- installation_dir_with_headers
-):
+@pytest.mark.regression("10601")
+@pytest.mark.regression("10603")
+def test_recursive_search_of_headers_from_prefix(installation_dir_with_headers):
# Try to inspect recursively from <prefix> and ensure we don't get
# subdirectories of the '<prefix>/include' path
prefix = str(installation_dir_with_headers)
@@ -367,52 +362,47 @@ def test_recursive_search_of_headers_from_prefix(
include_dirs = [dir.replace("/", "\\") for dir in include_dirs]
# Check that the header files we expect are all listed
- assert os.path.join(prefix, 'include', 'ex3.h') in header_list
- assert os.path.join(prefix, 'include', 'boost', 'ex3.h') in header_list
- assert os.path.join(prefix, 'path', 'to', 'ex1.h') in header_list
- assert os.path.join(prefix, 'path', 'to', 'subdir', 'ex2.h') in header_list
+ assert os.path.join(prefix, "include", "ex3.h") in header_list
+ assert os.path.join(prefix, "include", "boost", "ex3.h") in header_list
+ assert os.path.join(prefix, "path", "to", "ex1.h") in header_list
+ assert os.path.join(prefix, "path", "to", "subdir", "ex2.h") in header_list
# Check that when computing directories we exclude <prefix>/include/boost
- assert os.path.join(prefix, 'include') in include_dirs
- assert os.path.join(prefix, 'include', 'boost') not in include_dirs
- assert os.path.join(prefix, 'path', 'to') in include_dirs
- assert os.path.join(prefix, 'path', 'to', 'subdir') in include_dirs
+ assert os.path.join(prefix, "include") in include_dirs
+ assert os.path.join(prefix, "include", "boost") not in include_dirs
+ assert os.path.join(prefix, "path", "to") in include_dirs
+ assert os.path.join(prefix, "path", "to", "subdir") in include_dirs
if sys.platform == "win32":
dir_list = [
- (['C:/pfx/include/foo.h', 'C:/pfx/include/subdir/foo.h'], ['C:/pfx/include']),
- (['C:/pfx/include/foo.h', 'C:/pfx/subdir/foo.h'],
- ['C:/pfx/include', 'C:/pfx/subdir']),
- (['C:/pfx/include/subdir/foo.h', 'C:/pfx/subdir/foo.h'],
- ['C:/pfx/include', 'C:/pfx/subdir'])
+ (["C:/pfx/include/foo.h", "C:/pfx/include/subdir/foo.h"], ["C:/pfx/include"]),
+ (["C:/pfx/include/foo.h", "C:/pfx/subdir/foo.h"], ["C:/pfx/include", "C:/pfx/subdir"]),
+ (
+ ["C:/pfx/include/subdir/foo.h", "C:/pfx/subdir/foo.h"],
+ ["C:/pfx/include", "C:/pfx/subdir"],
+ ),
]
else:
dir_list = [
- (['/pfx/include/foo.h', '/pfx/include/subdir/foo.h'], ['/pfx/include']),
- (['/pfx/include/foo.h', '/pfx/subdir/foo.h'],
- ['/pfx/include', '/pfx/subdir']),
- (['/pfx/include/subdir/foo.h', '/pfx/subdir/foo.h'],
- ['/pfx/include', '/pfx/subdir'])
+ (["/pfx/include/foo.h", "/pfx/include/subdir/foo.h"], ["/pfx/include"]),
+ (["/pfx/include/foo.h", "/pfx/subdir/foo.h"], ["/pfx/include", "/pfx/subdir"]),
+ (["/pfx/include/subdir/foo.h", "/pfx/subdir/foo.h"], ["/pfx/include", "/pfx/subdir"]),
]
-@pytest.mark.parametrize('list_of_headers,expected_directories', dir_list)
-def test_computation_of_header_directories(
- list_of_headers, expected_directories
-):
+@pytest.mark.parametrize("list_of_headers,expected_directories", dir_list)
+def test_computation_of_header_directories(list_of_headers, expected_directories):
hl = fs.HeaderList(list_of_headers)
assert hl.directories == expected_directories
def test_headers_directory_setter():
if sys.platform == "win32":
- root = r'C:\pfx\include\subdir'
+ root = r"C:\pfx\include\subdir"
else:
root = "/pfx/include/subdir"
- hl = fs.HeaderList(
- [root + '/foo.h', root + '/bar.h']
- )
+ hl = fs.HeaderList([root + "/foo.h", root + "/bar.h"])
# Set directories using a list
hl.directories = [root]
@@ -429,7 +419,7 @@ def test_headers_directory_setter():
# TODO: Test with \\'s
hl.directories = "C:/pfx/include//subdir"
else:
- hl.directories = '/pfx/include//subdir/'
+ hl.directories = "/pfx/include//subdir/"
assert hl.directories == [root]
# Setting an empty list is allowed and returns an empty list
@@ -444,68 +434,64 @@ def test_headers_directory_setter():
if sys.platform == "win32":
# TODO: Test \\s
paths = [
- (r'C:\user\root', None,
- (['C:\\', r'C:\user', r'C:\user\root'], '', [])),
- (r'C:\user\root', 'C:\\', ([], 'C:\\', [r'C:\user', r'C:\user\root'])),
- (r'C:\user\root', r'user', (['C:\\'], r'C:\user', [r'C:\user\root'])),
- (r'C:\user\root', r'root', (['C:\\', r'C:\user'], r'C:\user\root', [])),
- (r'relative\path', None, ([r'relative', r'relative\path'], '', [])),
- (r'relative\path', r'relative', ([], r'relative', [r'relative\path'])),
- (r'relative\path', r'path', ([r'relative'], r'relative\path', []))
+ (r"C:\user\root", None, (["C:\\", r"C:\user", r"C:\user\root"], "", [])),
+ (r"C:\user\root", "C:\\", ([], "C:\\", [r"C:\user", r"C:\user\root"])),
+ (r"C:\user\root", r"user", (["C:\\"], r"C:\user", [r"C:\user\root"])),
+ (r"C:\user\root", r"root", (["C:\\", r"C:\user"], r"C:\user\root", [])),
+ (r"relative\path", None, ([r"relative", r"relative\path"], "", [])),
+ (r"relative\path", r"relative", ([], r"relative", [r"relative\path"])),
+ (r"relative\path", r"path", ([r"relative"], r"relative\path", [])),
]
else:
paths = [
- ('/tmp/user/root', None,
- (['/tmp', '/tmp/user', '/tmp/user/root'], '', [])),
- ('/tmp/user/root', 'tmp', ([], '/tmp', ['/tmp/user', '/tmp/user/root'])),
- ('/tmp/user/root', 'user', (['/tmp'], '/tmp/user', ['/tmp/user/root'])),
- ('/tmp/user/root', 'root', (['/tmp', '/tmp/user'], '/tmp/user/root', [])),
- ('relative/path', None, (['relative', 'relative/path'], '', [])),
- ('relative/path', 'relative', ([], 'relative', ['relative/path'])),
- ('relative/path', 'path', (['relative'], 'relative/path', []))
+ ("/tmp/user/root", None, (["/tmp", "/tmp/user", "/tmp/user/root"], "", [])),
+ ("/tmp/user/root", "tmp", ([], "/tmp", ["/tmp/user", "/tmp/user/root"])),
+ ("/tmp/user/root", "user", (["/tmp"], "/tmp/user", ["/tmp/user/root"])),
+ ("/tmp/user/root", "root", (["/tmp", "/tmp/user"], "/tmp/user/root", [])),
+ ("relative/path", None, (["relative", "relative/path"], "", [])),
+ ("relative/path", "relative", ([], "relative", ["relative/path"])),
+ ("relative/path", "path", (["relative"], "relative/path", [])),
]
-@pytest.mark.parametrize('path,entry,expected', paths)
+@pytest.mark.parametrize("path,entry,expected", paths)
def test_partition_path(path, entry, expected):
assert fs.partition_path(path, entry) == expected
if sys.platform == "win32":
path_list = [
- ('', []),
- (r'.\some\sub\dir', [r'.\some', r'.\some\sub', r'.\some\sub\dir']),
- (r'another\sub\dir', [r'another', r'another\sub', r'another\sub\dir'])
+ ("", []),
+ (r".\some\sub\dir", [r".\some", r".\some\sub", r".\some\sub\dir"]),
+ (r"another\sub\dir", [r"another", r"another\sub", r"another\sub\dir"]),
]
else:
path_list = [
- ('', []),
- ('/tmp/user/dir', ['/tmp', '/tmp/user', '/tmp/user/dir']),
- ('./some/sub/dir', ['./some', './some/sub', './some/sub/dir']),
- ('another/sub/dir', ['another', 'another/sub', 'another/sub/dir'])
+ ("", []),
+ ("/tmp/user/dir", ["/tmp", "/tmp/user", "/tmp/user/dir"]),
+ ("./some/sub/dir", ["./some", "./some/sub", "./some/sub/dir"]),
+ ("another/sub/dir", ["another", "another/sub", "another/sub/dir"]),
]
-@pytest.mark.parametrize('path,expected', path_list)
+@pytest.mark.parametrize("path,expected", path_list)
def test_prefixes(path, expected):
assert fs.prefixes(path) == expected
-@pytest.mark.regression('7358')
-@pytest.mark.parametrize('regex,replacement,filename,keyword_args', [
- (r"\<malloc\.h\>", "<stdlib.h>", 'x86_cpuid_info.c', {}),
- (r"CDIR", "CURRENT_DIRECTORY", 'selfextract.bsx',
- {'stop_at': '__ARCHIVE_BELOW__'})
-])
-def test_filter_files_with_different_encodings(
- regex, replacement, filename, tmpdir, keyword_args
-):
+@pytest.mark.regression("7358")
+@pytest.mark.parametrize(
+ "regex,replacement,filename,keyword_args",
+ [
+ (r"\<malloc\.h\>", "<stdlib.h>", "x86_cpuid_info.c", {}),
+ (r"CDIR", "CURRENT_DIRECTORY", "selfextract.bsx", {"stop_at": "__ARCHIVE_BELOW__"}),
+ ],
+)
+def test_filter_files_with_different_encodings(regex, replacement, filename, tmpdir, keyword_args):
# All files given as input to this test must satisfy the pre-requisite
# that the 'replacement' string is not present in the file initially and
# that there's at least one match for the regex
- original_file = os.path.join(
- spack.paths.test_path, 'data', 'filter_file', filename
- )
+ original_file = os.path.join(spack.paths.test_path, "data", "filter_file", filename)
target_file = os.path.join(str(tmpdir), filename)
shutil.copy(original_file, target_file)
# This should not raise exceptions
@@ -513,9 +499,9 @@ def test_filter_files_with_different_encodings(
# Check the strings have been replaced
extra_kwargs = {}
if sys.version_info > (3, 0):
- extra_kwargs = {'errors': 'surrogateescape'}
+ extra_kwargs = {"errors": "surrogateescape"}
- with open(target_file, mode='r', **extra_kwargs) as f:
+ with open(target_file, mode="r", **extra_kwargs) as f:
assert replacement in f.read()
@@ -523,96 +509,107 @@ def test_filter_files_multiple(tmpdir):
# All files given as input to this test must satisfy the pre-requisite
# that the 'replacement' string is not present in the file initially and
# that there's at least one match for the regex
- original_file = os.path.join(
- spack.paths.test_path, 'data', 'filter_file', 'x86_cpuid_info.c'
- )
- target_file = os.path.join(str(tmpdir), 'x86_cpuid_info.c')
+ original_file = os.path.join(spack.paths.test_path, "data", "filter_file", "x86_cpuid_info.c")
+ target_file = os.path.join(str(tmpdir), "x86_cpuid_info.c")
shutil.copy(original_file, target_file)
# This should not raise exceptions
- fs.filter_file(r'\<malloc.h\>', '<unistd.h>', target_file)
- fs.filter_file(r'\<string.h\>', '<unistd.h>', target_file)
- fs.filter_file(r'\<stdio.h\>', '<unistd.h>', target_file)
+ fs.filter_file(r"\<malloc.h\>", "<unistd.h>", target_file)
+ fs.filter_file(r"\<string.h\>", "<unistd.h>", target_file)
+ fs.filter_file(r"\<stdio.h\>", "<unistd.h>", target_file)
# Check the strings have been replaced
extra_kwargs = {}
if sys.version_info > (3, 0):
- extra_kwargs = {'errors': 'surrogateescape'}
+ extra_kwargs = {"errors": "surrogateescape"}
- with open(target_file, mode='r', **extra_kwargs) as f:
- assert '<malloc.h>' not in f.read()
- assert '<string.h>' not in f.read()
- assert '<stdio.h>' not in f.read()
+ with open(target_file, mode="r", **extra_kwargs) as f:
+ assert "<malloc.h>" not in f.read()
+ assert "<string.h>" not in f.read()
+ assert "<stdio.h>" not in f.read()
# Each test input is a tuple of entries which prescribe
# - the 'subdirs' to be created from tmpdir
# - the 'files' in that directory
# - what is to be removed
-@pytest.mark.parametrize('files_or_dirs', [
- # Remove a file over the two that are present
- [{'subdirs': None,
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['spack.lock']}],
- # Remove the entire directory where two files are stored
- [{'subdirs': 'myenv',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['myenv']}],
- # Combine a mix of directories and files
- [{'subdirs': None,
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['spack.lock']},
- {'subdirs': 'myenv',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['myenv']}],
- # Multiple subdirectories, remove root
- [{'subdirs': 'work/myenv1',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': []},
- {'subdirs': 'work/myenv2',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['work']}],
- # Multiple subdirectories, remove each one
- [{'subdirs': 'work/myenv1',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['work/myenv1']},
- {'subdirs': 'work/myenv2',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['work/myenv2']}],
- # Remove files with the same name in different directories
- [{'subdirs': 'work/myenv1',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['work/myenv1/spack.lock']},
- {'subdirs': 'work/myenv2',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['work/myenv2/spack.lock']}],
- # Remove first the directory, then a file within the directory
- [{'subdirs': 'myenv',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['myenv', 'myenv/spack.lock']}],
- # Remove first a file within a directory, then the directory
- [{'subdirs': 'myenv',
- 'files': ['spack.lock', 'spack.yaml'],
- 'remove': ['myenv/spack.lock', 'myenv']}],
-])
-@pytest.mark.regression('18441')
+@pytest.mark.parametrize(
+ "files_or_dirs",
+ [
+ # Remove a file over the two that are present
+ [{"subdirs": None, "files": ["spack.lock", "spack.yaml"], "remove": ["spack.lock"]}],
+ # Remove the entire directory where two files are stored
+ [{"subdirs": "myenv", "files": ["spack.lock", "spack.yaml"], "remove": ["myenv"]}],
+ # Combine a mix of directories and files
+ [
+ {"subdirs": None, "files": ["spack.lock", "spack.yaml"], "remove": ["spack.lock"]},
+ {"subdirs": "myenv", "files": ["spack.lock", "spack.yaml"], "remove": ["myenv"]},
+ ],
+ # Multiple subdirectories, remove root
+ [
+ {"subdirs": "work/myenv1", "files": ["spack.lock", "spack.yaml"], "remove": []},
+ {"subdirs": "work/myenv2", "files": ["spack.lock", "spack.yaml"], "remove": ["work"]},
+ ],
+ # Multiple subdirectories, remove each one
+ [
+ {
+ "subdirs": "work/myenv1",
+ "files": ["spack.lock", "spack.yaml"],
+ "remove": ["work/myenv1"],
+ },
+ {
+ "subdirs": "work/myenv2",
+ "files": ["spack.lock", "spack.yaml"],
+ "remove": ["work/myenv2"],
+ },
+ ],
+ # Remove files with the same name in different directories
+ [
+ {
+ "subdirs": "work/myenv1",
+ "files": ["spack.lock", "spack.yaml"],
+ "remove": ["work/myenv1/spack.lock"],
+ },
+ {
+ "subdirs": "work/myenv2",
+ "files": ["spack.lock", "spack.yaml"],
+ "remove": ["work/myenv2/spack.lock"],
+ },
+ ],
+ # Remove first the directory, then a file within the directory
+ [
+ {
+ "subdirs": "myenv",
+ "files": ["spack.lock", "spack.yaml"],
+ "remove": ["myenv", "myenv/spack.lock"],
+ }
+ ],
+ # Remove first a file within a directory, then the directory
+ [
+ {
+ "subdirs": "myenv",
+ "files": ["spack.lock", "spack.yaml"],
+ "remove": ["myenv/spack.lock", "myenv"],
+ }
+ ],
+ ],
+)
+@pytest.mark.regression("18441")
def test_safe_remove(files_or_dirs, tmpdir):
# Create a fake directory structure as prescribed by test input
to_be_removed, to_be_checked = [], []
for entry in files_or_dirs:
# Create relative dir
- subdirs = entry['subdirs']
- dir = tmpdir if not subdirs else tmpdir.ensure(
- *subdirs.split('/'), dir=True
- )
+ subdirs = entry["subdirs"]
+ dir = tmpdir if not subdirs else tmpdir.ensure(*subdirs.split("/"), dir=True)
# Create files in the directory
- files = entry['files']
+ files = entry["files"]
for f in files:
abspath = str(dir.join(f))
to_be_checked.append(abspath)
fs.touch(abspath)
# List of things to be removed
- for r in entry['remove']:
+ for r in entry["remove"]:
to_be_removed.append(str(tmpdir.join(r)))
# Assert that files are deleted in the context block,
@@ -621,49 +618,47 @@ def test_safe_remove(files_or_dirs, tmpdir):
with fs.safe_remove(*to_be_removed):
for entry in to_be_removed:
assert not os.path.exists(entry)
- raise RuntimeError('Mock a failure')
+ raise RuntimeError("Mock a failure")
# Assert that files are restored
for entry in to_be_checked:
assert os.path.exists(entry)
-@pytest.mark.regression('18441')
+@pytest.mark.regression("18441")
def test_content_of_files_with_same_name(tmpdir):
# Create two subdirectories containing a file with the same name,
# differentiate the files by their content
- file1 = tmpdir.ensure('myenv1/spack.lock')
- file2 = tmpdir.ensure('myenv2/spack.lock')
- file1.write('file1'), file2.write('file2')
+ file1 = tmpdir.ensure("myenv1/spack.lock")
+ file2 = tmpdir.ensure("myenv2/spack.lock")
+ file1.write("file1"), file2.write("file2")
# Use 'safe_remove' to remove the two files
with pytest.raises(RuntimeError):
with fs.safe_remove(str(file1), str(file2)):
- raise RuntimeError('Mock a failure')
+ raise RuntimeError("Mock a failure")
# Check both files have been restored correctly
# and have not been mixed
- assert file1.read().strip() == 'file1'
- assert file2.read().strip() == 'file2'
+ assert file1.read().strip() == "file1"
+ assert file2.read().strip() == "file2"
def test_keep_modification_time(tmpdir):
- file1 = tmpdir.ensure('file1')
- file2 = tmpdir.ensure('file2')
+ file1 = tmpdir.ensure("file1")
+ file2 = tmpdir.ensure("file2")
# Shift the modification time of the file 10 seconds back:
mtime1 = file1.mtime() - 10
file1.setmtime(mtime1)
- with fs.keep_modification_time(file1.strpath,
- file2.strpath,
- 'non-existing-file'):
- file1.write('file1')
+ with fs.keep_modification_time(file1.strpath, file2.strpath, "non-existing-file"):
+ file1.write("file1")
file2.remove()
# Assert that the modifications took place and that the modification time
# has not changed
- assert file1.read().strip() == 'file1'
+ assert file1.read().strip() == "file1"
assert not file2.exists()
assert int(mtime1) == int(file1.mtime())
@@ -675,28 +670,28 @@ def test_temporary_dir_context_manager():
assert os.path.realpath(str(tmp_dir)) == os.path.realpath(os.getcwd())
-@pytest.mark.skipif(sys.platform == 'win32', reason="No shebang on Windows")
+@pytest.mark.skipif(sys.platform == "win32", reason="No shebang on Windows")
def test_is_nonsymlink_exe_with_shebang(tmpdir):
with tmpdir.as_cwd():
# Create an executable with shebang.
- with open('executable_script', 'wb') as f:
- f.write(b'#!/interpreter')
- os.chmod('executable_script', 0o100775)
+ with open("executable_script", "wb") as f:
+ f.write(b"#!/interpreter")
+ os.chmod("executable_script", 0o100775)
- with open('executable_but_not_script', 'wb') as f:
- f.write(b'#/not-a-shebang')
- os.chmod('executable_but_not_script', 0o100775)
+ with open("executable_but_not_script", "wb") as f:
+ f.write(b"#/not-a-shebang")
+ os.chmod("executable_but_not_script", 0o100775)
- with open('not_executable_with_shebang', 'wb') as f:
- f.write(b'#!/interpreter')
- os.chmod('not_executable_with_shebang', 0o100664)
+ with open("not_executable_with_shebang", "wb") as f:
+ f.write(b"#!/interpreter")
+ os.chmod("not_executable_with_shebang", 0o100664)
- os.symlink('executable_script', 'symlink_to_executable_script')
+ os.symlink("executable_script", "symlink_to_executable_script")
- assert fs.is_nonsymlink_exe_with_shebang('executable_script')
- assert not fs.is_nonsymlink_exe_with_shebang('executable_but_not_script')
- assert not fs.is_nonsymlink_exe_with_shebang('not_executable_with_shebang')
- assert not fs.is_nonsymlink_exe_with_shebang('symlink_to_executable_script')
+ assert fs.is_nonsymlink_exe_with_shebang("executable_script")
+ assert not fs.is_nonsymlink_exe_with_shebang("executable_but_not_script")
+ assert not fs.is_nonsymlink_exe_with_shebang("not_executable_with_shebang")
+ assert not fs.is_nonsymlink_exe_with_shebang("symlink_to_executable_script")
def test_lexists_islink_isdir(tmpdir):
@@ -736,6 +731,7 @@ def test_lexists_islink_isdir(tmpdir):
class RegisterVisitor(object):
"""A directory visitor that keeps track of all visited paths"""
+
def __init__(self, root, follow_dirs=True, follow_symlink_dirs=True):
self.files = []
self.dirs_before = []
@@ -774,110 +770,107 @@ class RegisterVisitor(object):
self.symlinked_dirs_after.append(rel_path)
-@pytest.mark.skipif(sys.platform == 'win32', reason="Requires symlinks")
+@pytest.mark.skipif(sys.platform == "win32", reason="Requires symlinks")
def test_visit_directory_tree_follow_all(noncyclical_dir_structure):
root = str(noncyclical_dir_structure)
visitor = RegisterVisitor(root, follow_dirs=True, follow_symlink_dirs=True)
fs.visit_directory_tree(root, visitor)
j = os.path.join
assert visitor.files == [
- j('a', 'file_1'),
- j('a', 'to_c', 'dangling_link'),
- j('a', 'to_c', 'file_2'),
- j('a', 'to_file_1'),
- j('b', 'file_1'),
- j('b', 'to_c', 'dangling_link'),
- j('b', 'to_c', 'file_2'),
- j('b', 'to_file_1'),
- j('c', 'dangling_link'),
- j('c', 'file_2'),
- j('file_3'),
+ j("a", "file_1"),
+ j("a", "to_c", "dangling_link"),
+ j("a", "to_c", "file_2"),
+ j("a", "to_file_1"),
+ j("b", "file_1"),
+ j("b", "to_c", "dangling_link"),
+ j("b", "to_c", "file_2"),
+ j("b", "to_file_1"),
+ j("c", "dangling_link"),
+ j("c", "file_2"),
+ j("file_3"),
]
assert visitor.dirs_before == [
- j('a'),
- j('a', 'd'),
- j('b', 'd'),
- j('c'),
+ j("a"),
+ j("a", "d"),
+ j("b", "d"),
+ j("c"),
]
assert visitor.dirs_after == [
- j('a', 'd'),
- j('a'),
- j('b', 'd'),
- j('c'),
+ j("a", "d"),
+ j("a"),
+ j("b", "d"),
+ j("c"),
]
assert visitor.symlinked_dirs_before == [
- j('a', 'to_c'),
- j('b'),
- j('b', 'to_c'),
+ j("a", "to_c"),
+ j("b"),
+ j("b", "to_c"),
]
assert visitor.symlinked_dirs_after == [
- j('a', 'to_c'),
- j('b', 'to_c'),
- j('b'),
+ j("a", "to_c"),
+ j("b", "to_c"),
+ j("b"),
]
-@pytest.mark.skipif(sys.platform == 'win32', reason="Requires symlinks")
+@pytest.mark.skipif(sys.platform == "win32", reason="Requires symlinks")
def test_visit_directory_tree_follow_dirs(noncyclical_dir_structure):
root = str(noncyclical_dir_structure)
visitor = RegisterVisitor(root, follow_dirs=True, follow_symlink_dirs=False)
fs.visit_directory_tree(root, visitor)
j = os.path.join
assert visitor.files == [
- j('a', 'file_1'),
- j('a', 'to_file_1'),
- j('c', 'dangling_link'),
- j('c', 'file_2'),
- j('file_3'),
+ j("a", "file_1"),
+ j("a", "to_file_1"),
+ j("c", "dangling_link"),
+ j("c", "file_2"),
+ j("file_3"),
]
assert visitor.dirs_before == [
- j('a'),
- j('a', 'd'),
- j('c'),
+ j("a"),
+ j("a", "d"),
+ j("c"),
]
assert visitor.dirs_after == [
- j('a', 'd'),
- j('a'),
- j('c'),
+ j("a", "d"),
+ j("a"),
+ j("c"),
]
assert visitor.symlinked_dirs_before == [
- j('a', 'to_c'),
- j('b'),
+ j("a", "to_c"),
+ j("b"),
]
assert not visitor.symlinked_dirs_after
-@pytest.mark.skipif(sys.platform == 'win32', reason="Requires symlinks")
+@pytest.mark.skipif(sys.platform == "win32", reason="Requires symlinks")
def test_visit_directory_tree_follow_none(noncyclical_dir_structure):
root = str(noncyclical_dir_structure)
visitor = RegisterVisitor(root, follow_dirs=False, follow_symlink_dirs=False)
fs.visit_directory_tree(root, visitor)
j = os.path.join
assert visitor.files == [
- j('file_3'),
+ j("file_3"),
]
assert visitor.dirs_before == [
- j('a'),
- j('c'),
+ j("a"),
+ j("c"),
]
assert not visitor.dirs_after
assert visitor.symlinked_dirs_before == [
- j('b'),
+ j("b"),
]
assert not visitor.symlinked_dirs_after
-@pytest.mark.regression('29687')
-@pytest.mark.parametrize('initial_mode', [
- stat.S_IRUSR | stat.S_IXUSR,
- stat.S_IWGRP
-])
-@pytest.mark.skipif(sys.platform == 'win32', reason='Windows might change permissions')
+@pytest.mark.regression("29687")
+@pytest.mark.parametrize("initial_mode", [stat.S_IRUSR | stat.S_IXUSR, stat.S_IWGRP])
+@pytest.mark.skipif(sys.platform == "win32", reason="Windows might change permissions")
def test_remove_linked_tree_doesnt_change_file_permission(tmpdir, initial_mode):
# Here we test that a failed call to remove_linked_tree, due to passing a file
# as an argument instead of a directory, doesn't leave the file with different
# permissions as a side effect of trying to handle the error.
- file_instead_of_dir = tmpdir.ensure('foo')
+ file_instead_of_dir = tmpdir.ensure("foo")
file_instead_of_dir.chmod(initial_mode)
initial_stat = os.stat(str(file_instead_of_dir))
fs.remove_linked_tree(str(file_instead_of_dir))
diff --git a/lib/spack/spack/test/llnl/util/lang.py b/lib/spack/spack/test/llnl/util/lang.py
index 0de43fa527..c299e690a0 100644
--- a/lib/spack/spack/test/llnl/util/lang.py
+++ b/lib/spack/spack/test/llnl/util/lang.py
@@ -21,7 +21,7 @@ def now():
@pytest.fixture()
def module_path(tmpdir):
- m = tmpdir.join('foo.py')
+ m = tmpdir.join("foo.py")
content = """
import os.path
@@ -33,8 +33,8 @@ path = os.path.join('/usr', 'bin')
yield str(m)
# Don't leave garbage in the module system
- if 'foo' in sys.modules:
- del sys.modules['foo']
+ if "foo" in sys.modules:
+ del sys.modules["foo"]
def test_pretty_date():
@@ -84,28 +84,34 @@ def test_pretty_date():
assert pretty_date(years, now) == "2 years ago"
-@pytest.mark.parametrize('delta,pretty_string', [
- (timedelta(days=1), 'a day ago'),
- (timedelta(days=1), 'yesterday'),
- (timedelta(days=1), '1 day ago'),
- (timedelta(weeks=1), '1 week ago'),
- (timedelta(weeks=3), '3 weeks ago'),
- (timedelta(days=30), '1 month ago'),
- (timedelta(days=730), '2 years ago'),
-])
+@pytest.mark.parametrize(
+ "delta,pretty_string",
+ [
+ (timedelta(days=1), "a day ago"),
+ (timedelta(days=1), "yesterday"),
+ (timedelta(days=1), "1 day ago"),
+ (timedelta(weeks=1), "1 week ago"),
+ (timedelta(weeks=3), "3 weeks ago"),
+ (timedelta(days=30), "1 month ago"),
+ (timedelta(days=730), "2 years ago"),
+ ],
+)
def test_pretty_string_to_date_delta(now, delta, pretty_string):
t1 = now - delta
t2 = llnl.util.lang.pretty_string_to_date(pretty_string, now)
assert t1 == t2
-@pytest.mark.parametrize('format,pretty_string', [
- ('%Y', '2018'),
- ('%Y-%m', '2015-03'),
- ('%Y-%m-%d', '2015-03-28'),
- ('%Y-%m-%d %H:%M', '2015-03-28 11:12'),
- ('%Y-%m-%d %H:%M:%S', '2015-03-28 23:34:45'),
-])
+@pytest.mark.parametrize(
+ "format,pretty_string",
+ [
+ ("%Y", "2018"),
+ ("%Y-%m", "2015-03"),
+ ("%Y-%m-%d", "2015-03-28"),
+ ("%Y-%m-%d %H:%M", "2015-03-28 11:12"),
+ ("%Y-%m-%d %H:%M:%S", "2015-03-28 23:34:45"),
+ ],
+)
def test_pretty_string_to_date(format, pretty_string):
t1 = datetime.strptime(pretty_string, format)
t2 = llnl.util.lang.pretty_string_to_date(pretty_string, now)
@@ -114,41 +120,41 @@ def test_pretty_string_to_date(format, pretty_string):
def test_match_predicate():
matcher = match_predicate(lambda x: True)
- assert matcher('foo')
- assert matcher('bar')
- assert matcher('baz')
+ assert matcher("foo")
+ assert matcher("bar")
+ assert matcher("baz")
- matcher = match_predicate(['foo', 'bar'])
- assert matcher('foo')
- assert matcher('bar')
- assert not matcher('baz')
+ matcher = match_predicate(["foo", "bar"])
+ assert matcher("foo")
+ assert matcher("bar")
+ assert not matcher("baz")
- matcher = match_predicate(r'^(foo|bar)$')
- assert matcher('foo')
- assert matcher('bar')
- assert not matcher('baz')
+ matcher = match_predicate(r"^(foo|bar)$")
+ assert matcher("foo")
+ assert matcher("bar")
+ assert not matcher("baz")
with pytest.raises(ValueError):
matcher = match_predicate(object())
- matcher('foo')
+ matcher("foo")
def test_load_modules_from_file(module_path):
# Check prerequisites
- assert 'foo' not in sys.modules
+ assert "foo" not in sys.modules
# Check that the module is loaded correctly from file
- foo = llnl.util.lang.load_module_from_file('foo', module_path)
- assert 'foo' in sys.modules
+ foo = llnl.util.lang.load_module_from_file("foo", module_path)
+ assert "foo" in sys.modules
assert foo.value == 1
- assert foo.path == os.path.join('/usr', 'bin')
+ assert foo.path == os.path.join("/usr", "bin")
# Check that the module is not reloaded a second time on subsequent calls
foo.value = 2
- foo = llnl.util.lang.load_module_from_file('foo', module_path)
- assert 'foo' in sys.modules
+ foo = llnl.util.lang.load_module_from_file("foo", module_path)
+ assert "foo" in sys.modules
assert foo.value == 2
- assert foo.path == os.path.join('/usr', 'bin')
+ assert foo.path == os.path.join("/usr", "bin")
def test_uniq():
@@ -162,6 +168,7 @@ def test_key_ordering():
"""Ensure that key ordering works correctly."""
with pytest.raises(TypeError):
+
@llnl.util.lang.key_ordering
class ClassThatHasNoCmpKeyMethod(object):
# this will raise b/c it does not define _cmp_key
@@ -212,7 +219,7 @@ def test_key_ordering():
"args1,kwargs1,args2,kwargs2",
[
# Ensure tuples passed in args are disambiguated from equivalent kwarg items.
- (('a', 3), {}, (), {'a': 3})
+ (("a", 3), {}, (), {"a": 3})
],
)
def test_unequal_args(args1, kwargs1, args2, kwargs2):
@@ -223,7 +230,7 @@ def test_unequal_args(args1, kwargs1, args2, kwargs2):
"args1,kwargs1,args2,kwargs2",
[
# Ensure that kwargs are stably sorted.
- ((), {'a': 3, 'b': 4}, (), {'b': 4, 'a': 3}),
+ ((), {"a": 3, "b": 4}, (), {"b": 4, "a": 3}),
],
)
def test_equal_args(args1, kwargs1, args2, kwargs2):
@@ -234,32 +241,32 @@ def test_equal_args(args1, kwargs1, args2, kwargs2):
"args, kwargs",
[
((1,), {}),
- ((), {'a': 3}),
- ((1,), {'a': 3}),
+ ((), {"a": 3}),
+ ((1,), {"a": 3}),
],
)
def test_memoized(args, kwargs):
@memoized
def f(*args, **kwargs):
- return 'return-value'
- assert f(*args, **kwargs) == 'return-value'
+ return "return-value"
+
+ assert f(*args, **kwargs) == "return-value"
key = stable_args(*args, **kwargs)
assert list(f.cache.keys()) == [key]
- assert f.cache[key] == 'return-value'
+ assert f.cache[key] == "return-value"
@pytest.mark.parametrize(
"args, kwargs",
- [
- (([1],), {}),
- ((), {'a': [1]})
- ],
+ [(([1],), {}), ((), {"a": [1]})],
)
def test_memoized_unhashable(args, kwargs):
"""Check that an exception is raised clearly"""
+
@memoized
def f(*args, **kwargs):
return None
+
with pytest.raises(llnl.util.lang.UnhashableArguments) as exc_info:
f(*args, **kwargs)
exc_msg = str(exc_info.value)
@@ -277,20 +284,25 @@ def test_grouped_exception():
h = llnl.util.lang.GroupedExceptionHandler()
def inner():
- raise ValueError('wow!')
+ raise ValueError("wow!")
- with h.forward('inner method'):
+ with h.forward("inner method"):
inner()
- with h.forward('top-level'):
- raise TypeError('ok')
+ with h.forward("top-level"):
+ raise TypeError("ok")
- assert h.grouped_message(with_tracebacks=False) == dedent("""\
+ assert h.grouped_message(with_tracebacks=False) == dedent(
+ """\
due to the following failures:
inner method raised ValueError: wow!
- top-level raised TypeError: ok""")
+ top-level raised TypeError: ok"""
+ )
- assert h.grouped_message(with_tracebacks=True) == dedent("""\
+ assert (
+ h.grouped_message(with_tracebacks=True)
+ == dedent(
+ """\
due to the following failures:
inner method raised ValueError: wow!
File "{0}", \
@@ -304,4 +316,6 @@ line 280, in inner
File "{0}", \
line 286, in test_grouped_exception
raise TypeError('ok')
- """).format(__file__)
+ """
+ ).format(__file__)
+ )
diff --git a/lib/spack/spack/test/llnl/util/link_tree.py b/lib/spack/spack/test/llnl/util/link_tree.py
index 3b66c376b2..2df5542e35 100644
--- a/lib/spack/spack/test/llnl/util/link_tree.py
+++ b/lib/spack/spack/test/llnl/util/link_tree.py
@@ -17,17 +17,17 @@ from spack.stage import Stage
@pytest.fixture()
def stage():
"""Creates a stage with the directory structure for the tests."""
- s = Stage('link-tree-test')
+ s = Stage("link-tree-test")
s.create()
with working_dir(s.path):
- touchp('source/1')
- touchp('source/a/b/2')
- touchp('source/a/b/3')
- touchp('source/c/4')
- touchp('source/c/d/5')
- touchp('source/c/d/6')
- touchp('source/c/d/e/7')
+ touchp("source/1")
+ touchp("source/a/b/2")
+ touchp("source/a/b/3")
+ touchp("source/c/4")
+ touchp("source/c/d/5")
+ touchp("source/c/d/6")
+ touchp("source/c/d/e/7")
yield s
@@ -37,15 +37,14 @@ def stage():
@pytest.fixture()
def link_tree(stage):
"""Return a properly initialized LinkTree instance."""
- source_path = os.path.join(stage.path, 'source')
+ source_path = os.path.join(stage.path, "source")
return LinkTree(source_path)
def check_file_link(filename, expected_target):
assert os.path.isfile(filename)
assert islink(filename)
- assert (os.path.abspath(os.path.realpath(filename)) ==
- os.path.abspath(expected_target))
+ assert os.path.abspath(os.path.realpath(filename)) == os.path.abspath(expected_target)
def check_dir(filename):
@@ -54,121 +53,121 @@ def check_dir(filename):
def test_merge_to_new_directory(stage, link_tree):
with working_dir(stage.path):
- link_tree.merge('dest')
+ link_tree.merge("dest")
- check_file_link('dest/1', 'source/1')
- check_file_link('dest/a/b/2', 'source/a/b/2')
- check_file_link('dest/a/b/3', 'source/a/b/3')
- check_file_link('dest/c/4', 'source/c/4')
- check_file_link('dest/c/d/5', 'source/c/d/5')
- check_file_link('dest/c/d/6', 'source/c/d/6')
- check_file_link('dest/c/d/e/7', 'source/c/d/e/7')
+ check_file_link("dest/1", "source/1")
+ check_file_link("dest/a/b/2", "source/a/b/2")
+ check_file_link("dest/a/b/3", "source/a/b/3")
+ check_file_link("dest/c/4", "source/c/4")
+ check_file_link("dest/c/d/5", "source/c/d/5")
+ check_file_link("dest/c/d/6", "source/c/d/6")
+ check_file_link("dest/c/d/e/7", "source/c/d/e/7")
- assert os.path.isabs(os.readlink('dest/1'))
- assert os.path.isabs(os.readlink('dest/a/b/2'))
- assert os.path.isabs(os.readlink('dest/a/b/3'))
- assert os.path.isabs(os.readlink('dest/c/4'))
- assert os.path.isabs(os.readlink('dest/c/d/5'))
- assert os.path.isabs(os.readlink('dest/c/d/6'))
- assert os.path.isabs(os.readlink('dest/c/d/e/7'))
+ assert os.path.isabs(os.readlink("dest/1"))
+ assert os.path.isabs(os.readlink("dest/a/b/2"))
+ assert os.path.isabs(os.readlink("dest/a/b/3"))
+ assert os.path.isabs(os.readlink("dest/c/4"))
+ assert os.path.isabs(os.readlink("dest/c/d/5"))
+ assert os.path.isabs(os.readlink("dest/c/d/6"))
+ assert os.path.isabs(os.readlink("dest/c/d/e/7"))
- link_tree.unmerge('dest')
+ link_tree.unmerge("dest")
- assert not os.path.exists('dest')
+ assert not os.path.exists("dest")
def test_merge_to_new_directory_relative(stage, link_tree):
with working_dir(stage.path):
- link_tree.merge('dest', relative=True)
+ link_tree.merge("dest", relative=True)
- check_file_link('dest/1', 'source/1')
- check_file_link('dest/a/b/2', 'source/a/b/2')
- check_file_link('dest/a/b/3', 'source/a/b/3')
- check_file_link('dest/c/4', 'source/c/4')
- check_file_link('dest/c/d/5', 'source/c/d/5')
- check_file_link('dest/c/d/6', 'source/c/d/6')
- check_file_link('dest/c/d/e/7', 'source/c/d/e/7')
+ check_file_link("dest/1", "source/1")
+ check_file_link("dest/a/b/2", "source/a/b/2")
+ check_file_link("dest/a/b/3", "source/a/b/3")
+ check_file_link("dest/c/4", "source/c/4")
+ check_file_link("dest/c/d/5", "source/c/d/5")
+ check_file_link("dest/c/d/6", "source/c/d/6")
+ check_file_link("dest/c/d/e/7", "source/c/d/e/7")
- assert not os.path.isabs(os.readlink('dest/1'))
- assert not os.path.isabs(os.readlink('dest/a/b/2'))
- assert not os.path.isabs(os.readlink('dest/a/b/3'))
- assert not os.path.isabs(os.readlink('dest/c/4'))
- assert not os.path.isabs(os.readlink('dest/c/d/5'))
- assert not os.path.isabs(os.readlink('dest/c/d/6'))
- assert not os.path.isabs(os.readlink('dest/c/d/e/7'))
+ assert not os.path.isabs(os.readlink("dest/1"))
+ assert not os.path.isabs(os.readlink("dest/a/b/2"))
+ assert not os.path.isabs(os.readlink("dest/a/b/3"))
+ assert not os.path.isabs(os.readlink("dest/c/4"))
+ assert not os.path.isabs(os.readlink("dest/c/d/5"))
+ assert not os.path.isabs(os.readlink("dest/c/d/6"))
+ assert not os.path.isabs(os.readlink("dest/c/d/e/7"))
- link_tree.unmerge('dest')
+ link_tree.unmerge("dest")
- assert not os.path.exists('dest')
+ assert not os.path.exists("dest")
def test_merge_to_existing_directory(stage, link_tree):
with working_dir(stage.path):
- touchp('dest/x')
- touchp('dest/a/b/y')
+ touchp("dest/x")
+ touchp("dest/a/b/y")
- link_tree.merge('dest')
+ link_tree.merge("dest")
- check_file_link('dest/1', 'source/1')
- check_file_link('dest/a/b/2', 'source/a/b/2')
- check_file_link('dest/a/b/3', 'source/a/b/3')
- check_file_link('dest/c/4', 'source/c/4')
- check_file_link('dest/c/d/5', 'source/c/d/5')
- check_file_link('dest/c/d/6', 'source/c/d/6')
- check_file_link('dest/c/d/e/7', 'source/c/d/e/7')
+ check_file_link("dest/1", "source/1")
+ check_file_link("dest/a/b/2", "source/a/b/2")
+ check_file_link("dest/a/b/3", "source/a/b/3")
+ check_file_link("dest/c/4", "source/c/4")
+ check_file_link("dest/c/d/5", "source/c/d/5")
+ check_file_link("dest/c/d/6", "source/c/d/6")
+ check_file_link("dest/c/d/e/7", "source/c/d/e/7")
- assert os.path.isfile('dest/x')
- assert os.path.isfile('dest/a/b/y')
+ assert os.path.isfile("dest/x")
+ assert os.path.isfile("dest/a/b/y")
- link_tree.unmerge('dest')
+ link_tree.unmerge("dest")
- assert os.path.isfile('dest/x')
- assert os.path.isfile('dest/a/b/y')
+ assert os.path.isfile("dest/x")
+ assert os.path.isfile("dest/a/b/y")
- assert not os.path.isfile('dest/1')
- assert not os.path.isfile('dest/a/b/2')
- assert not os.path.isfile('dest/a/b/3')
- assert not os.path.isfile('dest/c/4')
- assert not os.path.isfile('dest/c/d/5')
- assert not os.path.isfile('dest/c/d/6')
- assert not os.path.isfile('dest/c/d/e/7')
+ assert not os.path.isfile("dest/1")
+ assert not os.path.isfile("dest/a/b/2")
+ assert not os.path.isfile("dest/a/b/3")
+ assert not os.path.isfile("dest/c/4")
+ assert not os.path.isfile("dest/c/d/5")
+ assert not os.path.isfile("dest/c/d/6")
+ assert not os.path.isfile("dest/c/d/e/7")
def test_merge_with_empty_directories(stage, link_tree):
with working_dir(stage.path):
- mkdirp('dest/f/g')
- mkdirp('dest/a/b/h')
+ mkdirp("dest/f/g")
+ mkdirp("dest/a/b/h")
- link_tree.merge('dest')
- link_tree.unmerge('dest')
+ link_tree.merge("dest")
+ link_tree.unmerge("dest")
- assert not os.path.exists('dest/1')
- assert not os.path.exists('dest/a/b/2')
- assert not os.path.exists('dest/a/b/3')
- assert not os.path.exists('dest/c/4')
- assert not os.path.exists('dest/c/d/5')
- assert not os.path.exists('dest/c/d/6')
- assert not os.path.exists('dest/c/d/e/7')
+ assert not os.path.exists("dest/1")
+ assert not os.path.exists("dest/a/b/2")
+ assert not os.path.exists("dest/a/b/3")
+ assert not os.path.exists("dest/c/4")
+ assert not os.path.exists("dest/c/d/5")
+ assert not os.path.exists("dest/c/d/6")
+ assert not os.path.exists("dest/c/d/e/7")
- assert os.path.isdir('dest/a/b/h')
- assert os.path.isdir('dest/f/g')
+ assert os.path.isdir("dest/a/b/h")
+ assert os.path.isdir("dest/f/g")
def test_ignore(stage, link_tree):
with working_dir(stage.path):
- touchp('source/.spec')
- touchp('dest/.spec')
+ touchp("source/.spec")
+ touchp("dest/.spec")
- link_tree.merge('dest', ignore=lambda x: x == '.spec')
- link_tree.unmerge('dest', ignore=lambda x: x == '.spec')
+ link_tree.merge("dest", ignore=lambda x: x == ".spec")
+ link_tree.unmerge("dest", ignore=lambda x: x == ".spec")
- assert not os.path.exists('dest/1')
- assert not os.path.exists('dest/a')
- assert not os.path.exists('dest/c')
+ assert not os.path.exists("dest/1")
+ assert not os.path.exists("dest/a")
+ assert not os.path.exists("dest/c")
- assert os.path.isfile('source/.spec')
- assert os.path.isfile('dest/.spec')
+ assert os.path.isfile("source/.spec")
+ assert os.path.isfile("dest/.spec")
def test_source_merge_visitor_does_not_follow_symlinked_dirs_at_depth(tmpdir):
@@ -190,34 +189,34 @@ def test_source_merge_visitor_does_not_follow_symlinked_dirs_at_depth(tmpdir):
"""
j = os.path.join
with tmpdir.as_cwd():
- os.mkdir(j('a'))
- os.mkdir(j('a', 'b'))
- os.mkdir(j('a', 'b', 'c'))
- os.mkdir(j('a', 'b', 'c', 'd'))
- os.symlink(j('b'), j('a', 'symlink_b'))
- os.symlink(j('c'), j('a', 'b', 'symlink_c'))
- os.symlink(j('d'), j('a', 'b', 'c', 'symlink_d'))
- with open(j('a', 'b', 'c', 'd', 'file'), 'wb'):
+ os.mkdir(j("a"))
+ os.mkdir(j("a", "b"))
+ os.mkdir(j("a", "b", "c"))
+ os.mkdir(j("a", "b", "c", "d"))
+ os.symlink(j("b"), j("a", "symlink_b"))
+ os.symlink(j("c"), j("a", "b", "symlink_c"))
+ os.symlink(j("d"), j("a", "b", "c", "symlink_d"))
+ with open(j("a", "b", "c", "d", "file"), "wb"):
pass
visitor = SourceMergeVisitor()
visit_directory_tree(str(tmpdir), visitor)
assert [p for p in visitor.files.keys()] == [
- j('a', 'b', 'c', 'd', 'file'),
- j('a', 'b', 'c', 'symlink_d'), # treated as a file, not expanded
- j('a', 'b', 'symlink_c'), # treated as a file, not expanded
- j('a', 'symlink_b', 'c', 'd', 'file'), # symlink_b was expanded
- j('a', 'symlink_b', 'c', 'symlink_d'), # symlink_b was expanded
- j('a', 'symlink_b', 'symlink_c') # symlink_b was expanded
+ j("a", "b", "c", "d", "file"),
+ j("a", "b", "c", "symlink_d"), # treated as a file, not expanded
+ j("a", "b", "symlink_c"), # treated as a file, not expanded
+ j("a", "symlink_b", "c", "d", "file"), # symlink_b was expanded
+ j("a", "symlink_b", "c", "symlink_d"), # symlink_b was expanded
+ j("a", "symlink_b", "symlink_c"), # symlink_b was expanded
]
assert [p for p in visitor.directories.keys()] == [
- j('a'),
- j('a', 'b'),
- j('a', 'b', 'c'),
- j('a', 'b', 'c', 'd'),
- j('a', 'symlink_b'),
- j('a', 'symlink_b', 'c'),
- j('a', 'symlink_b', 'c', 'd'),
+ j("a"),
+ j("a", "b"),
+ j("a", "b", "c"),
+ j("a", "b", "c", "d"),
+ j("a", "symlink_b"),
+ j("a", "symlink_b", "c"),
+ j("a", "symlink_b", "c", "d"),
]
@@ -237,23 +236,20 @@ def test_source_merge_visitor_cant_be_cyclical(tmpdir):
"""
j = os.path.join
with tmpdir.as_cwd():
- os.mkdir(j('a'))
- os.symlink(j('..', 'b'), j('a', 'symlink_b'))
- os.symlink(j('symlink_b'), j('a', 'symlink_b_b'))
- os.mkdir(j('b'))
- os.symlink(j('..', 'a'), j('b', 'symlink_a'))
+ os.mkdir(j("a"))
+ os.symlink(j("..", "b"), j("a", "symlink_b"))
+ os.symlink(j("symlink_b"), j("a", "symlink_b_b"))
+ os.mkdir(j("b"))
+ os.symlink(j("..", "a"), j("b", "symlink_a"))
visitor = SourceMergeVisitor()
visit_directory_tree(str(tmpdir), visitor)
assert [p for p in visitor.files.keys()] == [
- j('a', 'symlink_b'),
- j('a', 'symlink_b_b'),
- j('b', 'symlink_a')
- ]
- assert [p for p in visitor.directories.keys()] == [
- j('a'),
- j('b')
+ j("a", "symlink_b"),
+ j("a", "symlink_b_b"),
+ j("b", "symlink_a"),
]
+ assert [p for p in visitor.directories.keys()] == [j("a"), j("b")]
def test_destination_merge_visitor_always_errors_on_symlinked_dirs(tmpdir):
@@ -263,47 +259,47 @@ def test_destination_merge_visitor_always_errors_on_symlinked_dirs(tmpdir):
j = os.path.join
# Here example_a and example_b are symlinks.
- with tmpdir.mkdir('dst').as_cwd():
- os.mkdir('a')
- os.symlink('a', 'example_a')
- os.symlink('a', 'example_b')
+ with tmpdir.mkdir("dst").as_cwd():
+ os.mkdir("a")
+ os.symlink("a", "example_a")
+ os.symlink("a", "example_b")
# Here example_a is a directory, and example_b is a (non-expanded) symlinked
# directory.
- with tmpdir.mkdir('src').as_cwd():
- os.mkdir('example_a')
- with open(j('example_a', 'file'), 'wb'):
+ with tmpdir.mkdir("src").as_cwd():
+ os.mkdir("example_a")
+ with open(j("example_a", "file"), "wb"):
pass
- os.symlink('..', 'example_b')
+ os.symlink("..", "example_b")
visitor = SourceMergeVisitor()
- visit_directory_tree(str(tmpdir.join('src')), visitor)
- visit_directory_tree(str(tmpdir.join('dst')), DestinationMergeVisitor(visitor))
+ visit_directory_tree(str(tmpdir.join("src")), visitor)
+ visit_directory_tree(str(tmpdir.join("dst")), DestinationMergeVisitor(visitor))
assert visitor.fatal_conflicts
conflicts = [c.dst for c in visitor.fatal_conflicts]
- assert 'example_a' in conflicts
- assert 'example_b' in conflicts
+ assert "example_a" in conflicts
+ assert "example_b" in conflicts
def test_destination_merge_visitor_file_dir_clashes(tmpdir):
"""Tests whether non-symlink file-dir and dir-file clashes as registered as fatal
errors"""
- with tmpdir.mkdir('a').as_cwd():
- os.mkdir('example')
+ with tmpdir.mkdir("a").as_cwd():
+ os.mkdir("example")
- with tmpdir.mkdir('b').as_cwd():
- with open('example', 'wb'):
+ with tmpdir.mkdir("b").as_cwd():
+ with open("example", "wb"):
pass
a_to_b = SourceMergeVisitor()
- visit_directory_tree(str(tmpdir.join('a')), a_to_b)
- visit_directory_tree(str(tmpdir.join('b')), DestinationMergeVisitor(a_to_b))
+ visit_directory_tree(str(tmpdir.join("a")), a_to_b)
+ visit_directory_tree(str(tmpdir.join("b")), DestinationMergeVisitor(a_to_b))
assert a_to_b.fatal_conflicts
- assert a_to_b.fatal_conflicts[0].dst == 'example'
+ assert a_to_b.fatal_conflicts[0].dst == "example"
b_to_a = SourceMergeVisitor()
- visit_directory_tree(str(tmpdir.join('b')), b_to_a)
- visit_directory_tree(str(tmpdir.join('a')), DestinationMergeVisitor(b_to_a))
+ visit_directory_tree(str(tmpdir.join("b")), b_to_a)
+ visit_directory_tree(str(tmpdir.join("a")), DestinationMergeVisitor(b_to_a))
assert b_to_a.fatal_conflicts
- assert b_to_a.fatal_conflicts[0].dst == 'example'
+ assert b_to_a.fatal_conflicts[0].dst == "example"
diff --git a/lib/spack/spack/test/llnl/util/lock.py b/lib/spack/spack/test/llnl/util/lock.py
index 7f25ddb057..a9c6b64db8 100644
--- a/lib/spack/spack/test/llnl/util/lock.py
+++ b/lib/spack/spack/test/llnl/util/lock.py
@@ -66,8 +66,7 @@ is_windows = sys.platform == "win32"
if not is_windows:
import fcntl
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
#
@@ -88,6 +87,7 @@ mpi = False
comm = None
try:
from mpi4py import MPI
+
comm = MPI.COMM_WORLD
if comm.size > 1:
mpi = True
@@ -105,8 +105,8 @@ system.
"""
locations = [
tempfile.gettempdir(),
- os.path.join('/nfs/tmp2/', getpass.getuser()),
- os.path.join('/p/lscratch*/', getpass.getuser()),
+ os.path.join("/nfs/tmp2/", getpass.getuser()),
+ os.path.join("/p/lscratch*/", getpass.getuser()),
]
"""This is the longest a failed multiproc test will take.
@@ -154,7 +154,7 @@ def read_only(*paths):
os.chmod(path, mode)
-@pytest.fixture(scope='session', params=locations)
+@pytest.fixture(scope="session", params=locations)
def lock_test_directory(request):
"""This fixture causes tests to be executed for many different mounts.
@@ -163,10 +163,12 @@ def lock_test_directory(request):
return request.param
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def lock_dir(lock_test_directory):
- parent = next((p for p in glob.glob(lock_test_directory)
- if os.path.exists(p) and os.access(p, os.W_OK)), None)
+ parent = next(
+ (p for p in glob.glob(lock_test_directory) if os.path.exists(p) and os.access(p, os.W_OK)),
+ None,
+ )
if not parent:
# Skip filesystems that don't exist or aren't writable
pytest.skip("requires filesystem: '%s'" % lock_test_directory)
@@ -200,9 +202,9 @@ def private_lock_path(lock_dir):
For other modes, it is the same as a shared lock.
"""
- lock_file = os.path.join(lock_dir, 'lockfile')
+ lock_file = os.path.join(lock_dir, "lockfile")
if mpi:
- lock_file += '.%s' % comm.rank
+ lock_file += ".%s" % comm.rank
yield lock_file
@@ -214,7 +216,7 @@ def private_lock_path(lock_dir):
@pytest.fixture
def lock_path(lock_dir):
"""This lock is shared among all processes in a multiproc test."""
- lock_file = os.path.join(lock_dir, 'lockfile')
+ lock_file = os.path.join(lock_dir, "lockfile")
yield lock_file
@@ -224,8 +226,7 @@ def lock_path(lock_dir):
def test_poll_interval_generator():
- interval_iter = iter(
- lk.Lock._poll_interval_generator(_wait_times=[1, 2, 3]))
+ interval_iter = iter(lk.Lock._poll_interval_generator(_wait_times=[1, 2, 3]))
intervals = list(next(interval_iter) for i in range(100))
assert intervals == [1] * 20 + [2] * 40 + [3] * 40
@@ -234,9 +235,8 @@ def local_multiproc_test(*functions, **kwargs):
"""Order some processes using simple barrier synchronization."""
b = mp.Barrier(len(functions), timeout=barrier_timeout)
- args = (b,) + tuple(kwargs.get('extra_args', ()))
- procs = [Process(target=f, args=args, name=f.__name__)
- for f in functions]
+ args = (b,) + tuple(kwargs.get("extra_args", ()))
+ procs = [Process(target=f, args=args, name=f.__name__) for f in functions]
for p in procs:
p.start()
@@ -269,6 +269,7 @@ def mpi_multiproc_test(*functions):
class subcomm_barrier(object):
"""Stand-in for multiproc barrier for MPI-parallel jobs."""
+
def wait(self):
subcomm.Barrier()
@@ -371,16 +372,11 @@ class TimeoutRead(object):
# exclusive lock is held.
#
def test_write_lock_timeout_on_write(lock_path):
- multiproc_test(
- AcquireWrite(lock_path),
- TimeoutWrite(lock_path))
+ multiproc_test(AcquireWrite(lock_path), TimeoutWrite(lock_path))
def test_write_lock_timeout_on_write_2(lock_path):
- multiproc_test(
- AcquireWrite(lock_path),
- TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ multiproc_test(AcquireWrite(lock_path), TimeoutWrite(lock_path), TimeoutWrite(lock_path))
def test_write_lock_timeout_on_write_3(lock_path):
@@ -388,13 +384,12 @@ def test_write_lock_timeout_on_write_3(lock_path):
AcquireWrite(lock_path),
TimeoutWrite(lock_path),
TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ TimeoutWrite(lock_path),
+ )
def test_write_lock_timeout_on_write_ranges(lock_path):
- multiproc_test(
- AcquireWrite(lock_path, 0, 1),
- TimeoutWrite(lock_path, 0, 1))
+ multiproc_test(AcquireWrite(lock_path, 0, 1), TimeoutWrite(lock_path, 0, 1))
def test_write_lock_timeout_on_write_ranges_2(lock_path):
@@ -402,7 +397,8 @@ def test_write_lock_timeout_on_write_ranges_2(lock_path):
AcquireWrite(lock_path, 0, 64),
AcquireWrite(lock_path, 65, 1),
TimeoutWrite(lock_path, 0, 1),
- TimeoutWrite(lock_path, 63, 1))
+ TimeoutWrite(lock_path, 63, 1),
+ )
def test_write_lock_timeout_on_write_ranges_3(lock_path):
@@ -411,7 +407,8 @@ def test_write_lock_timeout_on_write_ranges_3(lock_path):
AcquireWrite(lock_path, 1, 1),
TimeoutWrite(lock_path),
TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ TimeoutWrite(lock_path),
+ )
def test_write_lock_timeout_on_write_ranges_4(lock_path):
@@ -422,7 +419,8 @@ def test_write_lock_timeout_on_write_ranges_4(lock_path):
AcquireWrite(lock_path, 500, 64),
TimeoutWrite(lock_path),
TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ TimeoutWrite(lock_path),
+ )
#
@@ -430,16 +428,11 @@ def test_write_lock_timeout_on_write_ranges_4(lock_path):
# exclusive lock is held.
#
def test_read_lock_timeout_on_write(lock_path):
- multiproc_test(
- AcquireWrite(lock_path),
- TimeoutRead(lock_path))
+ multiproc_test(AcquireWrite(lock_path), TimeoutRead(lock_path))
def test_read_lock_timeout_on_write_2(lock_path):
- multiproc_test(
- AcquireWrite(lock_path),
- TimeoutRead(lock_path),
- TimeoutRead(lock_path))
+ multiproc_test(AcquireWrite(lock_path), TimeoutRead(lock_path), TimeoutRead(lock_path))
def test_read_lock_timeout_on_write_3(lock_path):
@@ -447,21 +440,18 @@ def test_read_lock_timeout_on_write_3(lock_path):
AcquireWrite(lock_path),
TimeoutRead(lock_path),
TimeoutRead(lock_path),
- TimeoutRead(lock_path))
+ TimeoutRead(lock_path),
+ )
def test_read_lock_timeout_on_write_ranges(lock_path):
"""small write lock, read whole file."""
- multiproc_test(
- AcquireWrite(lock_path, 0, 1),
- TimeoutRead(lock_path))
+ multiproc_test(AcquireWrite(lock_path, 0, 1), TimeoutRead(lock_path))
def test_read_lock_timeout_on_write_ranges_2(lock_path):
"""small write lock, small read lock"""
- multiproc_test(
- AcquireWrite(lock_path, 0, 1),
- TimeoutRead(lock_path, 0, 1))
+ multiproc_test(AcquireWrite(lock_path, 0, 1), TimeoutRead(lock_path, 0, 1))
def test_read_lock_timeout_on_write_ranges_3(lock_path):
@@ -470,23 +460,19 @@ def test_read_lock_timeout_on_write_ranges_3(lock_path):
AcquireWrite(lock_path, 0, 1),
AcquireWrite(lock_path, 64, 128),
TimeoutRead(lock_path, 0, 1),
- TimeoutRead(lock_path, 128, 256))
+ TimeoutRead(lock_path, 128, 256),
+ )
#
# Test that exclusive locks time out when shared locks are held.
#
def test_write_lock_timeout_on_read(lock_path):
- multiproc_test(
- AcquireRead(lock_path),
- TimeoutWrite(lock_path))
+ multiproc_test(AcquireRead(lock_path), TimeoutWrite(lock_path))
def test_write_lock_timeout_on_read_2(lock_path):
- multiproc_test(
- AcquireRead(lock_path),
- TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ multiproc_test(AcquireRead(lock_path), TimeoutWrite(lock_path), TimeoutWrite(lock_path))
def test_write_lock_timeout_on_read_3(lock_path):
@@ -494,19 +480,16 @@ def test_write_lock_timeout_on_read_3(lock_path):
AcquireRead(lock_path),
TimeoutWrite(lock_path),
TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ TimeoutWrite(lock_path),
+ )
def test_write_lock_timeout_on_read_ranges(lock_path):
- multiproc_test(
- AcquireRead(lock_path, 0, 1),
- TimeoutWrite(lock_path))
+ multiproc_test(AcquireRead(lock_path, 0, 1), TimeoutWrite(lock_path))
def test_write_lock_timeout_on_read_ranges_2(lock_path):
- multiproc_test(
- AcquireRead(lock_path, 0, 1),
- TimeoutWrite(lock_path, 0, 1))
+ multiproc_test(AcquireRead(lock_path, 0, 1), TimeoutWrite(lock_path, 0, 1))
def test_write_lock_timeout_on_read_ranges_3(lock_path):
@@ -514,14 +497,16 @@ def test_write_lock_timeout_on_read_ranges_3(lock_path):
AcquireRead(lock_path, 0, 1),
AcquireRead(lock_path, 10, 1),
TimeoutWrite(lock_path, 0, 1),
- TimeoutWrite(lock_path, 10, 1))
+ TimeoutWrite(lock_path, 10, 1),
+ )
def test_write_lock_timeout_on_read_ranges_4(lock_path):
multiproc_test(
AcquireRead(lock_path, 0, 64),
TimeoutWrite(lock_path, 10, 1),
- TimeoutWrite(lock_path, 32, 1))
+ TimeoutWrite(lock_path, 32, 1),
+ )
def test_write_lock_timeout_on_read_ranges_5(lock_path):
@@ -529,17 +514,15 @@ def test_write_lock_timeout_on_read_ranges_5(lock_path):
AcquireRead(lock_path, 64, 128),
TimeoutWrite(lock_path, 65, 1),
TimeoutWrite(lock_path, 127, 1),
- TimeoutWrite(lock_path, 90, 10))
+ TimeoutWrite(lock_path, 90, 10),
+ )
#
# Test that exclusive locks time out while lots of shared locks are held.
#
def test_write_lock_timeout_with_multiple_readers_2_1(lock_path):
- multiproc_test(
- AcquireRead(lock_path),
- AcquireRead(lock_path),
- TimeoutWrite(lock_path))
+ multiproc_test(AcquireRead(lock_path), AcquireRead(lock_path), TimeoutWrite(lock_path))
def test_write_lock_timeout_with_multiple_readers_2_2(lock_path):
@@ -547,7 +530,8 @@ def test_write_lock_timeout_with_multiple_readers_2_2(lock_path):
AcquireRead(lock_path),
AcquireRead(lock_path),
TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ TimeoutWrite(lock_path),
+ )
def test_write_lock_timeout_with_multiple_readers_3_1(lock_path):
@@ -555,7 +539,8 @@ def test_write_lock_timeout_with_multiple_readers_3_1(lock_path):
AcquireRead(lock_path),
AcquireRead(lock_path),
AcquireRead(lock_path),
- TimeoutWrite(lock_path))
+ TimeoutWrite(lock_path),
+ )
def test_write_lock_timeout_with_multiple_readers_3_2(lock_path):
@@ -564,14 +549,14 @@ def test_write_lock_timeout_with_multiple_readers_3_2(lock_path):
AcquireRead(lock_path),
AcquireRead(lock_path),
TimeoutWrite(lock_path),
- TimeoutWrite(lock_path))
+ TimeoutWrite(lock_path),
+ )
def test_write_lock_timeout_with_multiple_readers_2_1_ranges(lock_path):
multiproc_test(
- AcquireRead(lock_path, 0, 10),
- AcquireRead(lock_path, 2, 10),
- TimeoutWrite(lock_path, 5, 5))
+ AcquireRead(lock_path, 0, 10), AcquireRead(lock_path, 2, 10), TimeoutWrite(lock_path, 5, 5)
+ )
def test_write_lock_timeout_with_multiple_readers_2_3_ranges(lock_path):
@@ -580,7 +565,8 @@ def test_write_lock_timeout_with_multiple_readers_2_3_ranges(lock_path):
AcquireRead(lock_path, 5, 15),
TimeoutWrite(lock_path, 0, 1),
TimeoutWrite(lock_path, 11, 3),
- TimeoutWrite(lock_path, 7, 1))
+ TimeoutWrite(lock_path, 7, 1),
+ )
def test_write_lock_timeout_with_multiple_readers_3_1_ranges(lock_path):
@@ -588,7 +574,8 @@ def test_write_lock_timeout_with_multiple_readers_3_1_ranges(lock_path):
AcquireRead(lock_path, 0, 5),
AcquireRead(lock_path, 5, 5),
AcquireRead(lock_path, 10, 5),
- TimeoutWrite(lock_path, 0, 15))
+ TimeoutWrite(lock_path, 0, 15),
+ )
def test_write_lock_timeout_with_multiple_readers_3_2_ranges(lock_path):
@@ -597,10 +584,11 @@ def test_write_lock_timeout_with_multiple_readers_3_2_ranges(lock_path):
AcquireRead(lock_path, 5, 5),
AcquireRead(lock_path, 10, 5),
TimeoutWrite(lock_path, 3, 10),
- TimeoutWrite(lock_path, 5, 1))
+ TimeoutWrite(lock_path, 5, 1),
+ )
-@pytest.mark.skipif(getuid() == 0, reason='user is root')
+@pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_read_lock_on_read_only_lockfile(lock_dir, lock_path):
"""read-only directory, read-only lockfile."""
touch(lock_path)
@@ -628,7 +616,7 @@ def test_read_lock_read_only_dir_writable_lockfile(lock_dir, lock_path):
pass
-@pytest.mark.skipif(False if is_windows else getuid() == 0, reason='user is root')
+@pytest.mark.skipif(False if is_windows else getuid() == 0, reason="user is root")
def test_read_lock_no_lockfile(lock_dir, lock_path):
"""read-only directory, no lockfile (so can't create)."""
with read_only(lock_dir):
@@ -662,17 +650,17 @@ def test_upgrade_read_to_write(private_lock_path):
lock.acquire_read()
assert lock._reads == 1
assert lock._writes == 0
- assert lock._file.mode == 'r+'
+ assert lock._file.mode == "r+"
lock.acquire_write()
assert lock._reads == 1
assert lock._writes == 1
- assert lock._file.mode == 'r+'
+ assert lock._file.mode == "r+"
lock.release_write()
assert lock._reads == 1
assert lock._writes == 0
- assert lock._file.mode == 'r+'
+ assert lock._file.mode == "r+"
lock.release_read()
assert lock._reads == 0
@@ -694,7 +682,7 @@ def test_upgrade_read_to_write_fails_with_readonly_file(private_lock_path):
lock.acquire_read()
assert lock._reads == 1
assert lock._writes == 0
- assert lock._file.mode == 'r'
+ assert lock._file.mode == "r"
# upgrade to write here
with pytest.raises(lk.LockROFileError):
@@ -713,7 +701,7 @@ class ComplexAcquireAndRelease(object):
barrier.wait() # ---------------------------------------- 1
# others test timeout
barrier.wait() # ---------------------------------------- 2
- lock.release_write() # release and others acquire read
+ lock.release_write() # release and others acquire read
barrier.wait() # ---------------------------------------- 3
with pytest.raises(lk.LockTimeoutError):
lock.acquire_write(lock_fail_timeout)
@@ -820,7 +808,7 @@ class ComplexAcquireAndRelease(object):
barrier.wait() # ---------------------------------------- 10
# others test timeout
barrier.wait() # ---------------------------------------- 11
- lock.release_read() # release read AND write in opposite
+ lock.release_read() # release read AND write in opposite
lock.release_write() # order from before on p2
barrier.wait() # ---------------------------------------- 12
lock.acquire_read()
@@ -834,13 +822,12 @@ class ComplexAcquireAndRelease(object):
#
def test_complex_acquire_and_release_chain(lock_path):
test_chain = ComplexAcquireAndRelease(lock_path)
- multiproc_test(test_chain.p1,
- test_chain.p2,
- test_chain.p3)
+ multiproc_test(test_chain.p1, test_chain.p2, test_chain.p3)
class AssertLock(lk.Lock):
"""Test lock class that marks acquire/release events."""
+
def __init__(self, lock_path, vals):
super(AssertLock, self).__init__(lock_path)
self.vals = vals
@@ -854,105 +841,103 @@ class AssertLock(lk.Lock):
def acquire_read(self, timeout=None):
self.assert_acquire_read()
result = super(AssertLock, self).acquire_read(timeout)
- self.vals['acquired_read'] = True
+ self.vals["acquired_read"] = True
return result
def acquire_write(self, timeout=None):
self.assert_acquire_write()
result = super(AssertLock, self).acquire_write(timeout)
- self.vals['acquired_write'] = True
+ self.vals["acquired_write"] = True
return result
def release_read(self, release_fn=None):
self.assert_release_read()
result = super(AssertLock, self).release_read(release_fn)
- self.vals['released_read'] = True
+ self.vals["released_read"] = True
return result
def release_write(self, release_fn=None):
self.assert_release_write()
result = super(AssertLock, self).release_write(release_fn)
- self.vals['released_write'] = True
+ self.vals["released_write"] = True
return result
@pytest.mark.parametrize(
- "transaction,type",
- [(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
+ "transaction,type", [(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
)
def test_transaction(lock_path, transaction, type):
class MockLock(AssertLock):
def assert_acquire_read(self):
- assert not vals['entered_fn']
- assert not vals['exited_fn']
+ assert not vals["entered_fn"]
+ assert not vals["exited_fn"]
def assert_release_read(self):
- assert vals['entered_fn']
- assert not vals['exited_fn']
+ assert vals["entered_fn"]
+ assert not vals["exited_fn"]
def assert_acquire_write(self):
- assert not vals['entered_fn']
- assert not vals['exited_fn']
+ assert not vals["entered_fn"]
+ assert not vals["exited_fn"]
def assert_release_write(self):
- assert vals['entered_fn']
- assert not vals['exited_fn']
+ assert vals["entered_fn"]
+ assert not vals["exited_fn"]
def enter_fn():
# assert enter_fn is called while lock is held
- assert vals['acquired_%s' % type]
- vals['entered_fn'] = True
+ assert vals["acquired_%s" % type]
+ vals["entered_fn"] = True
def exit_fn(t, v, tb):
# assert exit_fn is called while lock is held
- assert not vals['released_%s' % type]
- vals['exited_fn'] = True
- vals['exception'] = (t or v or tb)
+ assert not vals["released_%s" % type]
+ vals["exited_fn"] = True
+ vals["exception"] = t or v or tb
vals = collections.defaultdict(lambda: False)
lock = MockLock(lock_path, vals)
with transaction(lock, acquire=enter_fn, release=exit_fn):
- assert vals['acquired_%s' % type]
- assert not vals['released_%s' % type]
+ assert vals["acquired_%s" % type]
+ assert not vals["released_%s" % type]
- assert vals['entered_fn']
- assert vals['exited_fn']
- assert vals['acquired_%s' % type]
- assert vals['released_%s' % type]
- assert not vals['exception']
+ assert vals["entered_fn"]
+ assert vals["exited_fn"]
+ assert vals["acquired_%s" % type]
+ assert vals["released_%s" % type]
+ assert not vals["exception"]
@pytest.mark.parametrize(
- "transaction,type",
- [(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
+ "transaction,type", [(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
)
def test_transaction_with_exception(lock_path, transaction, type):
class MockLock(AssertLock):
def assert_acquire_read(self):
- assert not vals['entered_fn']
- assert not vals['exited_fn']
+ assert not vals["entered_fn"]
+ assert not vals["exited_fn"]
def assert_release_read(self):
- assert vals['entered_fn']
- assert not vals['exited_fn']
+ assert vals["entered_fn"]
+ assert not vals["exited_fn"]
def assert_acquire_write(self):
- assert not vals['entered_fn']
- assert not vals['exited_fn']
+ assert not vals["entered_fn"]
+ assert not vals["exited_fn"]
def assert_release_write(self):
- assert vals['entered_fn']
- assert not vals['exited_fn']
+ assert vals["entered_fn"]
+ assert not vals["exited_fn"]
def enter_fn():
- assert vals['acquired_%s' % type]
- vals['entered_fn'] = True
+ assert vals["acquired_%s" % type]
+ vals["entered_fn"] = True
def exit_fn(t, v, tb):
- assert not vals['released_%s' % type]
- vals['exited_fn'] = True
- vals['exception'] = (t or v or tb)
+ assert not vals["released_%s" % type]
+ vals["exited_fn"] = True
+ vals["exception"] = t or v or tb
return exit_result
exit_result = False
@@ -963,9 +948,9 @@ def test_transaction_with_exception(lock_path, transaction, type):
with transaction(lock, acquire=enter_fn, release=exit_fn):
raise Exception()
- assert vals['entered_fn']
- assert vals['exited_fn']
- assert vals['exception']
+ assert vals["entered_fn"]
+ assert vals["exited_fn"]
+ assert vals["exception"]
# test suppression of exceptions from exit_fn
exit_result = True
@@ -975,47 +960,46 @@ def test_transaction_with_exception(lock_path, transaction, type):
with transaction(lock, acquire=enter_fn, release=exit_fn):
raise Exception()
- assert vals['entered_fn']
- assert vals['exited_fn']
- assert vals['exception']
+ assert vals["entered_fn"]
+ assert vals["exited_fn"]
+ assert vals["exception"]
@pytest.mark.parametrize(
- "transaction,type",
- [(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
+ "transaction,type", [(lk.ReadTransaction, "read"), (lk.WriteTransaction, "write")]
)
def test_transaction_with_context_manager(lock_path, transaction, type):
class MockLock(AssertLock):
def assert_acquire_read(self):
- assert not vals['entered_ctx']
- assert not vals['exited_ctx']
+ assert not vals["entered_ctx"]
+ assert not vals["exited_ctx"]
def assert_release_read(self):
- assert vals['entered_ctx']
- assert vals['exited_ctx']
+ assert vals["entered_ctx"]
+ assert vals["exited_ctx"]
def assert_acquire_write(self):
- assert not vals['entered_ctx']
- assert not vals['exited_ctx']
+ assert not vals["entered_ctx"]
+ assert not vals["exited_ctx"]
def assert_release_write(self):
- assert vals['entered_ctx']
- assert vals['exited_ctx']
+ assert vals["entered_ctx"]
+ assert vals["exited_ctx"]
class TestContextManager(object):
def __enter__(self):
- vals['entered_ctx'] = True
+ vals["entered_ctx"] = True
def __exit__(self, t, v, tb):
- assert not vals['released_%s' % type]
- vals['exited_ctx'] = True
- vals['exception_ctx'] = (t or v or tb)
+ assert not vals["released_%s" % type]
+ vals["exited_ctx"] = True
+ vals["exception_ctx"] = t or v or tb
return exit_ctx_result
def exit_fn(t, v, tb):
- assert not vals['released_%s' % type]
- vals['exited_fn'] = True
- vals['exception_fn'] = (t or v or tb)
+ assert not vals["released_%s" % type]
+ vals["exited_fn"] = True
+ vals["exception_fn"] = t or v or tb
return exit_fn_result
exit_fn_result, exit_ctx_result = False, False
@@ -1025,21 +1009,21 @@ def test_transaction_with_context_manager(lock_path, transaction, type):
with transaction(lock, acquire=TestContextManager, release=exit_fn):
pass
- assert vals['entered_ctx']
- assert vals['exited_ctx']
- assert vals['exited_fn']
- assert not vals['exception_ctx']
- assert not vals['exception_fn']
+ assert vals["entered_ctx"]
+ assert vals["exited_ctx"]
+ assert vals["exited_fn"]
+ assert not vals["exception_ctx"]
+ assert not vals["exception_fn"]
vals.clear()
with transaction(lock, acquire=TestContextManager):
pass
- assert vals['entered_ctx']
- assert vals['exited_ctx']
- assert not vals['exited_fn']
- assert not vals['exception_ctx']
- assert not vals['exception_fn']
+ assert vals["entered_ctx"]
+ assert vals["exited_ctx"]
+ assert not vals["exited_fn"]
+ assert not vals["exception_ctx"]
+ assert not vals["exception_fn"]
# below are tests for exceptions with and without suppression
def assert_ctx_and_fn_exception(raises=True):
@@ -1047,19 +1031,17 @@ def test_transaction_with_context_manager(lock_path, transaction, type):
if raises:
with pytest.raises(Exception):
- with transaction(
- lock, acquire=TestContextManager, release=exit_fn):
+ with transaction(lock, acquire=TestContextManager, release=exit_fn):
raise Exception()
else:
- with transaction(
- lock, acquire=TestContextManager, release=exit_fn):
+ with transaction(lock, acquire=TestContextManager, release=exit_fn):
raise Exception()
- assert vals['entered_ctx']
- assert vals['exited_ctx']
- assert vals['exited_fn']
- assert vals['exception_ctx']
- assert vals['exception_fn']
+ assert vals["entered_ctx"]
+ assert vals["exited_ctx"]
+ assert vals["exited_fn"]
+ assert vals["exception_ctx"]
+ assert vals["exception_fn"]
def assert_only_ctx_exception(raises=True):
vals.clear()
@@ -1072,11 +1054,11 @@ def test_transaction_with_context_manager(lock_path, transaction, type):
with transaction(lock, acquire=TestContextManager):
raise Exception()
- assert vals['entered_ctx']
- assert vals['exited_ctx']
- assert not vals['exited_fn']
- assert vals['exception_ctx']
- assert not vals['exception_fn']
+ assert vals["entered_ctx"]
+ assert vals["exited_ctx"]
+ assert not vals["exited_fn"]
+ assert vals["exception_ctx"]
+ assert not vals["exception_fn"]
# no suppression
assert_ctx_and_fn_exception(raises=True)
@@ -1102,107 +1084,107 @@ def test_nested_write_transaction(lock_path):
"""Ensure that the outermost write transaction writes."""
def write(t, v, tb):
- vals['wrote'] = True
+ vals["wrote"] = True
vals = collections.defaultdict(lambda: False)
lock = AssertLock(lock_path, vals)
# write/write
with lk.WriteTransaction(lock, release=write):
- assert not vals['wrote']
+ assert not vals["wrote"]
with lk.WriteTransaction(lock, release=write):
- assert not vals['wrote']
- assert not vals['wrote']
- assert vals['wrote']
+ assert not vals["wrote"]
+ assert not vals["wrote"]
+ assert vals["wrote"]
# read/write
vals.clear()
with lk.ReadTransaction(lock):
- assert not vals['wrote']
+ assert not vals["wrote"]
with lk.WriteTransaction(lock, release=write):
- assert not vals['wrote']
- assert vals['wrote']
+ assert not vals["wrote"]
+ assert vals["wrote"]
# write/read/write
vals.clear()
with lk.WriteTransaction(lock, release=write):
- assert not vals['wrote']
+ assert not vals["wrote"]
with lk.ReadTransaction(lock):
- assert not vals['wrote']
+ assert not vals["wrote"]
with lk.WriteTransaction(lock, release=write):
- assert not vals['wrote']
- assert not vals['wrote']
- assert not vals['wrote']
- assert vals['wrote']
+ assert not vals["wrote"]
+ assert not vals["wrote"]
+ assert not vals["wrote"]
+ assert vals["wrote"]
# read/write/read/write
vals.clear()
with lk.ReadTransaction(lock):
with lk.WriteTransaction(lock, release=write):
- assert not vals['wrote']
+ assert not vals["wrote"]
with lk.ReadTransaction(lock):
- assert not vals['wrote']
+ assert not vals["wrote"]
with lk.WriteTransaction(lock, release=write):
- assert not vals['wrote']
- assert not vals['wrote']
- assert not vals['wrote']
- assert vals['wrote']
+ assert not vals["wrote"]
+ assert not vals["wrote"]
+ assert not vals["wrote"]
+ assert vals["wrote"]
def test_nested_reads(lock_path):
"""Ensure that write transactions won't re-read data."""
def read():
- vals['read'] += 1
+ vals["read"] += 1
vals = collections.defaultdict(lambda: 0)
lock = AssertLock(lock_path, vals)
# read/read
vals.clear()
- assert vals['read'] == 0
+ assert vals["read"] == 0
with lk.ReadTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.ReadTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
# write/write
vals.clear()
- assert vals['read'] == 0
+ assert vals["read"] == 0
with lk.WriteTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.WriteTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
# read/write
vals.clear()
- assert vals['read'] == 0
+ assert vals["read"] == 0
with lk.ReadTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.WriteTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
# write/read/write
vals.clear()
- assert vals['read'] == 0
+ assert vals["read"] == 0
with lk.WriteTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.ReadTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.WriteTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
# read/write/read/write
vals.clear()
- assert vals['read'] == 0
+ assert vals["read"] == 0
with lk.ReadTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.WriteTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.ReadTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
with lk.WriteTransaction(lock, acquire=read):
- assert vals['read'] == 1
+ assert vals["read"] == 1
class LockDebugOutput(object):
@@ -1284,7 +1266,7 @@ def test_lock_debug_output(lock_path):
def test_lock_with_no_parent_directory(tmpdir):
"""Make sure locks work even when their parent directory does not exist."""
with tmpdir.as_cwd():
- lock = lk.Lock('foo/bar/baz/lockfile')
+ lock = lk.Lock("foo/bar/baz/lockfile")
with lk.WriteTransaction(lock):
pass
@@ -1293,7 +1275,7 @@ def test_lock_in_current_directory(tmpdir):
"""Make sure locks can be created and reused in the current directory."""
with tmpdir.as_cwd():
# test we can create a lock in the current directory
- lock = lk.Lock('lockfile')
+ lock = lk.Lock("lockfile")
for i in range(10):
with lk.ReadTransaction(lock):
pass
@@ -1301,7 +1283,7 @@ def test_lock_in_current_directory(tmpdir):
pass
# and that we can do the same thing after it's already there
- lock = lk.Lock('lockfile')
+ lock = lk.Lock("lockfile")
for i in range(10):
with lk.ReadTransaction(lock):
pass
@@ -1310,23 +1292,23 @@ def test_lock_in_current_directory(tmpdir):
def test_attempts_str():
- assert lk._attempts_str(0, 0) == ''
- assert lk._attempts_str(0.12, 1) == ''
- assert lk._attempts_str(12.345, 2) == ' after 12.35s and 2 attempts'
+ assert lk._attempts_str(0, 0) == ""
+ assert lk._attempts_str(0.12, 1) == ""
+ assert lk._attempts_str(12.345, 2) == " after 12.35s and 2 attempts"
def test_lock_str():
- lock = lk.Lock('lockfile')
+ lock = lk.Lock("lockfile")
lockstr = str(lock)
- assert 'lockfile[0:0]' in lockstr
- assert 'timeout=None' in lockstr
- assert '#reads=0, #writes=0' in lockstr
+ assert "lockfile[0:0]" in lockstr
+ assert "timeout=None" in lockstr
+ assert "#reads=0, #writes=0" in lockstr
def test_downgrade_write_okay(tmpdir):
"""Test the lock write-to-read downgrade operation."""
with tmpdir.as_cwd():
- lock = lk.Lock('lockfile')
+ lock = lk.Lock("lockfile")
lock.acquire_write()
lock.downgrade_write_to_read()
assert lock._reads == 1
@@ -1336,29 +1318,34 @@ def test_downgrade_write_okay(tmpdir):
def test_downgrade_write_fails(tmpdir):
"""Test failing the lock write-to-read downgrade operation."""
with tmpdir.as_cwd():
- lock = lk.Lock('lockfile')
+ lock = lk.Lock("lockfile")
lock.acquire_read()
- msg = 'Cannot downgrade lock from write to read on file: lockfile'
+ msg = "Cannot downgrade lock from write to read on file: lockfile"
with pytest.raises(lk.LockDowngradeError, match=msg):
lock.downgrade_write_to_read()
-@pytest.mark.parametrize("err_num,err_msg",
- [(errno.EACCES, "Fake EACCES error"),
- (errno.EAGAIN, "Fake EAGAIN error"),
- (errno.ENOENT, "Fake ENOENT error")])
+@pytest.mark.parametrize(
+ "err_num,err_msg",
+ [
+ (errno.EACCES, "Fake EACCES error"),
+ (errno.EAGAIN, "Fake EAGAIN error"),
+ (errno.ENOENT, "Fake ENOENT error"),
+ ],
+)
def test_poll_lock_exception(tmpdir, monkeypatch, err_num, err_msg):
"""Test poll lock exception handling."""
+
def _lockf(fd, cmd, len, start, whence):
raise IOError(err_num, err_msg)
with tmpdir.as_cwd():
- lockfile = 'lockfile'
+ lockfile = "lockfile"
lock = lk.Lock(lockfile)
touch(lockfile)
- monkeypatch.setattr(fcntl, 'lockf', _lockf)
+ monkeypatch.setattr(fcntl, "lockf", _lockf)
if err_num in [errno.EAGAIN, errno.EACCES]:
assert not lock._poll_lock(fcntl.LOCK_EX)
@@ -1370,7 +1357,7 @@ def test_poll_lock_exception(tmpdir, monkeypatch, err_num, err_msg):
def test_upgrade_read_okay(tmpdir):
"""Test the lock read-to-write upgrade operation."""
with tmpdir.as_cwd():
- lock = lk.Lock('lockfile')
+ lock = lk.Lock("lockfile")
lock.acquire_read()
lock.upgrade_read_to_write()
assert lock._reads == 0
@@ -1380,8 +1367,8 @@ def test_upgrade_read_okay(tmpdir):
def test_upgrade_read_fails(tmpdir):
"""Test failing the lock read-to-write upgrade operation."""
with tmpdir.as_cwd():
- lock = lk.Lock('lockfile')
+ lock = lk.Lock("lockfile")
lock.acquire_write()
- msg = 'Cannot upgrade lock from read to write on file: lockfile'
+ msg = "Cannot upgrade lock from read to write on file: lockfile"
with pytest.raises(lk.LockUpgradeError, match=msg):
lock.upgrade_read_to_write()
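The lock tests above pin down how llnl.util.lock transactions behave: acquire/release callbacks fire only for the outermost transaction of a given kind, and a read lock can be upgraded to a write lock in place (or a write lock downgraded). A rough usage sketch of that API, not part of the patch, assuming a Spack checkout on sys.path, a POSIX platform (the module uses fcntl), and a scratch lock-file name of our own choosing:

    import llnl.util.lock as lk

    def read_db():
        # acquire callback: runs once, when the outermost read transaction
        # actually takes the lock
        print("reading data")

    def write_db(exc_type, exc_value, traceback):
        # release callback: runs once, when the outermost write transaction
        # releases the lock (same three-argument signature the tests use)
        print("writing data")

    lock = lk.Lock("scratch.lock")  # hypothetical lock file in the cwd

    with lk.ReadTransaction(lock, acquire=read_db):
        with lk.WriteTransaction(lock, release=write_db):
            pass
        # write_db fires when the write transaction exits, since it is the
        # outermost *write* transaction

    # explicit upgrade path, mirroring test_upgrade_read_okay
    lock.acquire_read()
    lock.upgrade_read_to_write()
    lock.release_write()

Nesting transactions on the same lock is cheap because only the outermost one of each kind re-runs the acquire/release callbacks, which is exactly what test_nested_write_transaction and test_nested_reads assert.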
diff --git a/lib/spack/spack/test/llnl/util/tty/log.py b/lib/spack/spack/test/llnl/util/tty/log.py
index 2491a5dbe3..b100f209e8 100644
--- a/lib/spack/spack/test/llnl/util/tty/log.py
+++ b/lib/spack/spack/test/llnl/util/tty/log.py
@@ -27,13 +27,13 @@ from spack.util.executable import which
termios = None # type: Optional[ModuleType]
try:
import termios as term_mod
+
termios = term_mod
except ImportError:
pass
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
@contextlib.contextmanager
@@ -43,62 +43,62 @@ def nullcontext():
def test_log_python_output_with_echo(capfd, tmpdir):
with tmpdir.as_cwd():
- with log.log_output('foo.txt', echo=True):
- print('logged')
+ with log.log_output("foo.txt", echo=True):
+ print("logged")
# foo.txt has output
- with open('foo.txt') as f:
- assert f.read() == 'logged\n'
+ with open("foo.txt") as f:
+ assert f.read() == "logged\n"
# output is also echoed.
- assert capfd.readouterr()[0] == 'logged\n'
+ assert capfd.readouterr()[0] == "logged\n"
def test_log_python_output_without_echo(capfd, tmpdir):
with tmpdir.as_cwd():
- with log.log_output('foo.txt'):
- print('logged')
+ with log.log_output("foo.txt"):
+ print("logged")
# foo.txt has output
- with open('foo.txt') as f:
- assert f.read() == 'logged\n'
+ with open("foo.txt") as f:
+ assert f.read() == "logged\n"
# nothing on stdout or stderr
- assert capfd.readouterr()[0] == ''
+ assert capfd.readouterr()[0] == ""
def test_log_python_output_with_invalid_utf8(capfd, tmpdir):
with tmpdir.as_cwd():
- with log.log_output('foo.txt'):
- sys.stdout.buffer.write(b'\xc3\x28\n')
+ with log.log_output("foo.txt"):
+ sys.stdout.buffer.write(b"\xc3\x28\n")
# python2 and 3 treat invalid UTF-8 differently
if sys.version_info.major == 2:
- expected = b'\xc3(\n'
+ expected = b"\xc3(\n"
else:
- expected = b'<line lost: output was not encoded as UTF-8>\n'
- with open('foo.txt', 'rb') as f:
+ expected = b"<line lost: output was not encoded as UTF-8>\n"
+ with open("foo.txt", "rb") as f:
written = f.read()
assert written == expected
# nothing on stdout or stderr
- assert capfd.readouterr()[0] == ''
+ assert capfd.readouterr()[0] == ""
def test_log_python_output_and_echo_output(capfd, tmpdir):
with tmpdir.as_cwd():
# echo two lines
- with log.log_output('foo.txt') as logger:
+ with log.log_output("foo.txt") as logger:
with logger.force_echo():
- print('force echo')
- print('logged')
+ print("force echo")
+ print("logged")
# log file contains everything
- with open('foo.txt') as f:
- assert f.read() == 'force echo\nlogged\n'
+ with open("foo.txt") as f:
+ assert f.read() == "force echo\nlogged\n"
# only force-echo'd stuff is in output
- assert capfd.readouterr()[0] == 'force echo\n'
+ assert capfd.readouterr()[0] == "force echo\n"
def _log_filter_fn(string):
@@ -107,63 +107,63 @@ def _log_filter_fn(string):
def test_log_output_with_filter(capfd, tmpdir):
with tmpdir.as_cwd():
- with log.log_output('foo.txt', filter_fn=_log_filter_fn):
- print('foo blah')
- print('blah foo')
- print('foo foo')
+ with log.log_output("foo.txt", filter_fn=_log_filter_fn):
+ print("foo blah")
+ print("blah foo")
+ print("foo foo")
# foo.txt output is not filtered
- with open('foo.txt') as f:
- assert f.read() == 'foo blah\nblah foo\nfoo foo\n'
+ with open("foo.txt") as f:
+ assert f.read() == "foo blah\nblah foo\nfoo foo\n"
# output is not echoed
- assert capfd.readouterr()[0] == ''
+ assert capfd.readouterr()[0] == ""
# now try with echo
with tmpdir.as_cwd():
- with log.log_output('foo.txt', echo=True, filter_fn=_log_filter_fn):
- print('foo blah')
- print('blah foo')
- print('foo foo')
+ with log.log_output("foo.txt", echo=True, filter_fn=_log_filter_fn):
+ print("foo blah")
+ print("blah foo")
+ print("foo foo")
# foo.txt output is still not filtered
- with open('foo.txt') as f:
- assert f.read() == 'foo blah\nblah foo\nfoo foo\n'
+ with open("foo.txt") as f:
+ assert f.read() == "foo blah\nblah foo\nfoo foo\n"
# echoed output is filtered.
- assert capfd.readouterr()[0] == 'bar blah\nblah bar\nbar bar\n'
+ assert capfd.readouterr()[0] == "bar blah\nblah bar\nbar bar\n"
-@pytest.mark.skipif(not which('echo'), reason="needs echo command")
+@pytest.mark.skipif(not which("echo"), reason="needs echo command")
def test_log_subproc_and_echo_output_no_capfd(capfd, tmpdir):
- echo = which('echo')
+ echo = which("echo")
# this is split into two tests because capfd interferes with the
# output logged to file when using a subprocess. We test the file
# here, and echoing in test_log_subproc_and_echo_output_capfd below.
with capfd.disabled():
with tmpdir.as_cwd():
- with log.log_output('foo.txt') as logger:
+ with log.log_output("foo.txt") as logger:
with logger.force_echo():
- echo('echo')
- print('logged')
+ echo("echo")
+ print("logged")
- with open('foo.txt') as f:
- assert f.read() == 'echo\nlogged\n'
+ with open("foo.txt") as f:
+ assert f.read() == "echo\nlogged\n"
-@pytest.mark.skipif(not which('echo'), reason="needs echo command")
+@pytest.mark.skipif(not which("echo"), reason="needs echo command")
def test_log_subproc_and_echo_output_capfd(capfd, tmpdir):
- echo = which('echo')
+ echo = which("echo")
# This tests *only* what is echoed when using a subprocess, as capfd
# interferes with the logged data. See
# test_log_subproc_and_echo_output_no_capfd for tests on the logfile.
with tmpdir.as_cwd():
- with log.log_output('foo.txt') as logger:
+ with log.log_output("foo.txt") as logger:
with logger.force_echo():
- echo('echo')
- print('logged')
+ echo("echo")
+ print("logged")
assert capfd.readouterr()[0] == "echo\n"
@@ -177,6 +177,7 @@ def simple_logger(**kwargs):
def handler(signum, frame):
running[0] = False
+
signal.signal(signal.SIGUSR1, handler)
log_path = kwargs["log_path"]
@@ -319,24 +320,27 @@ def no_termios():
@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
@pytest.mark.skipif(not termios, reason="requires termios support")
-@pytest.mark.parametrize('test_fn,termios_on_or_off', [
- # tests with termios
- (mock_shell_fg, lang.nullcontext),
- (mock_shell_bg, lang.nullcontext),
- (mock_shell_bg_fg, lang.nullcontext),
- (mock_shell_fg_bg, lang.nullcontext),
- (mock_shell_tstp_cont, lang.nullcontext),
- (mock_shell_tstp_tstp_cont, lang.nullcontext),
- (mock_shell_tstp_tstp_cont_cont, lang.nullcontext),
- # tests without termios
- (mock_shell_fg_no_termios, no_termios),
- (mock_shell_bg, no_termios),
- (mock_shell_bg_fg_no_termios, no_termios),
- (mock_shell_fg_bg_no_termios, no_termios),
- (mock_shell_tstp_cont, no_termios),
- (mock_shell_tstp_tstp_cont, no_termios),
- (mock_shell_tstp_tstp_cont_cont, no_termios),
-])
+@pytest.mark.parametrize(
+ "test_fn,termios_on_or_off",
+ [
+ # tests with termios
+ (mock_shell_fg, lang.nullcontext),
+ (mock_shell_bg, lang.nullcontext),
+ (mock_shell_bg_fg, lang.nullcontext),
+ (mock_shell_fg_bg, lang.nullcontext),
+ (mock_shell_tstp_cont, lang.nullcontext),
+ (mock_shell_tstp_tstp_cont, lang.nullcontext),
+ (mock_shell_tstp_tstp_cont_cont, lang.nullcontext),
+ # tests without termios
+ (mock_shell_fg_no_termios, no_termios),
+ (mock_shell_bg, no_termios),
+ (mock_shell_bg_fg_no_termios, no_termios),
+ (mock_shell_fg_bg_no_termios, no_termios),
+ (mock_shell_tstp_cont, no_termios),
+ (mock_shell_tstp_tstp_cont, no_termios),
+ (mock_shell_tstp_tstp_cont_cont, no_termios),
+ ],
+)
def test_foreground_background(test_fn, termios_on_or_off, tmpdir):
"""Functional tests for foregrounding and backgrounding a logged process.
@@ -371,6 +375,7 @@ def synchronized_logger(**kwargs):
def handler(signum, frame):
running[0] = False
+
signal.signal(signal.SIGUSR1, handler)
log_path = kwargs["log_path"]
@@ -388,7 +393,7 @@ def synchronized_logger(**kwargs):
print("off")
v_lock.release()
else:
- print("on") # lock held; v is toggled on
+ print("on") # lock held; v is toggled on
time.sleep(1e-2)
@@ -399,22 +404,22 @@ def mock_shell_v_v(proc, ctl, **kwargs):
ctl.fg()
ctl.wait_enabled()
- time.sleep(.1)
+ time.sleep(0.1)
write_lock.acquire() # suspend writing
- v_lock.acquire() # enable v lock
- ctl.write(b'v') # toggle v on stdin
- time.sleep(.1)
+ v_lock.acquire() # enable v lock
+ ctl.write(b"v") # toggle v on stdin
+ time.sleep(0.1)
write_lock.release() # resume writing
- time.sleep(.1)
+ time.sleep(0.1)
write_lock.acquire() # suspend writing
- ctl.write(b'v') # toggle v on stdin
- time.sleep(.1)
- v_lock.release() # disable v lock
+ ctl.write(b"v") # toggle v on stdin
+ time.sleep(0.1)
+ v_lock.release() # disable v lock
write_lock.release() # resume writing
- time.sleep(.1)
+ time.sleep(0.1)
os.kill(proc.pid, signal.SIGUSR1)
@@ -426,37 +431,38 @@ def mock_shell_v_v_no_termios(proc, ctl, **kwargs):
ctl.fg()
ctl.wait_disabled_fg()
- time.sleep(.1)
+ time.sleep(0.1)
write_lock.acquire() # suspend writing
- v_lock.acquire() # enable v lock
- ctl.write(b'v\n') # toggle v on stdin
- time.sleep(.1)
+ v_lock.acquire() # enable v lock
+ ctl.write(b"v\n") # toggle v on stdin
+ time.sleep(0.1)
write_lock.release() # resume writing
- time.sleep(.1)
+ time.sleep(0.1)
write_lock.acquire() # suspend writing
- ctl.write(b'v\n') # toggle v on stdin
- time.sleep(.1)
- v_lock.release() # disable v lock
+ ctl.write(b"v\n") # toggle v on stdin
+ time.sleep(0.1)
+ v_lock.release() # disable v lock
write_lock.release() # resume writing
- time.sleep(.1)
+ time.sleep(0.1)
os.kill(proc.pid, signal.SIGUSR1)
@pytest.mark.skipif(not which("ps"), reason="requires ps utility")
@pytest.mark.skipif(not termios, reason="requires termios support")
-@pytest.mark.parametrize('test_fn,termios_on_or_off', [
- (mock_shell_v_v, lang.nullcontext),
- (mock_shell_v_v_no_termios, no_termios),
-])
-def test_foreground_background_output(
- test_fn, capfd, termios_on_or_off, tmpdir):
+@pytest.mark.parametrize(
+ "test_fn,termios_on_or_off",
+ [
+ (mock_shell_v_v, lang.nullcontext),
+ (mock_shell_v_v_no_termios, no_termios),
+ ],
+)
+def test_foreground_background_output(test_fn, capfd, termios_on_or_off, tmpdir):
"""Tests hitting 'v' toggles output, and that force_echo works."""
- if (sys.version_info >= (3, 8) and sys.platform == 'darwin'
- and termios_on_or_off == no_termios):
+ if sys.version_info >= (3, 8) and sys.platform == "darwin" and termios_on_or_off == no_termios:
return
@@ -468,12 +474,7 @@ def test_foreground_background_output(
v_lock = multiprocessing.Lock() # held while controller is in v mode
with termios_on_or_off():
- shell.start(
- write_lock=write_lock,
- v_lock=v_lock,
- debug=True,
- log_path=log_path
- )
+ shell.start(write_lock=write_lock, v_lock=v_lock, debug=True, log_path=log_path)
exitcode = shell.join()
out, err = capfd.readouterr()
@@ -496,13 +497,13 @@ def test_foreground_background_output(
# output should contain mostly "on" lines, but may contain "off"
# lines if the controller is slow. The important thing to observe
# here is that we started seeing 'on' in the end.
- assert (
- ['forced output', 'on'] == lang.uniq(output) or
- ['forced output', 'off', 'on'] == lang.uniq(output)
- )
+ assert ["forced output", "on"] == lang.uniq(output) or [
+ "forced output",
+ "off",
+ "on",
+ ] == lang.uniq(output)
# log should be off for a while, then on, then off
- assert (
- ['forced output', 'off', 'on', 'off'] == lang.uniq(log_data) and
- log_data.count("off") > 2 # ensure some "off" lines were omitted
- )
+ assert ["forced output", "off", "on", "off"] == lang.uniq(log_data) and log_data.count(
+ "off"
+ ) > 2 # ensure some "off" lines were omitted
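The log.py hunks reformat tests for llnl.util.tty.log.log_output, which tees everything printed inside its context into a file, optionally echoing it to the terminal; force_echo() punches through when echo is off, and filter_fn is applied only to the echoed copy, never to the log file. A hedged sketch of that usage, with a placeholder file name, assuming a POSIX platform (these tests are skipped on Windows) and a Spack checkout on sys.path:

    from llnl.util.tty.log import log_output

    def redact(line):
        # applied to echoed output only; the log file keeps the original text
        return line.replace("foo", "bar")

    with log_output("build.log", echo=False, filter_fn=redact) as logger:
        print("quiet line")        # written to build.log, not echoed
        with logger.force_echo():
            print("loud line")     # written to build.log and echoed anyway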
diff --git a/lib/spack/spack/test/llnl/util/tty/tty.py b/lib/spack/spack/test/llnl/util/tty/tty.py
index 806ecd6a35..cb56c30d57 100644
--- a/lib/spack/spack/test/llnl/util/tty/tty.py
+++ b/lib/spack/spack/test/llnl/util/tty/tty.py
@@ -14,44 +14,47 @@ def test_get_timestamp(monkeypatch):
"""Ensure the results of get_timestamp are reasonable."""
# Debug disabled should return an empty string
- monkeypatch.setattr(tty, '_debug', 0)
- assert not tty.get_timestamp(False), 'Expected an empty string'
+ monkeypatch.setattr(tty, "_debug", 0)
+ assert not tty.get_timestamp(False), "Expected an empty string"
# Debug disabled but force the timestamp should return a string
- assert tty.get_timestamp(True), 'Expected a timestamp/non-empty string'
+ assert tty.get_timestamp(True), "Expected a timestamp/non-empty string"
- pid_str = ' {0}'.format(os.getpid())
+ pid_str = " {0}".format(os.getpid())
# Level 1 debugging should return a timestamp WITHOUT the pid
- monkeypatch.setattr(tty, '_debug', 1)
+ monkeypatch.setattr(tty, "_debug", 1)
out_str = tty.get_timestamp(False)
- assert out_str and pid_str not in out_str, 'Expected no PID in results'
+ assert out_str and pid_str not in out_str, "Expected no PID in results"
# Level 2 debugging should also return a timestamp WITH the pid
- monkeypatch.setattr(tty, '_debug', 2)
+ monkeypatch.setattr(tty, "_debug", 2)
out_str = tty.get_timestamp(False)
- assert out_str and pid_str in out_str, 'Expected PID in results'
-
-
-@pytest.mark.parametrize('msg,enabled,trace,newline', [
- ('', False, False, False), # Nothing is output
- (Exception(''), True, False, True), # Exception output
- ('trace', True, True, False), # stacktrace output
- ('newline', True, False, True), # newline in output
- ('no newline', True, False, False) # no newline output
-])
+ assert out_str and pid_str in out_str, "Expected PID in results"
+
+
+@pytest.mark.parametrize(
+ "msg,enabled,trace,newline",
+ [
+ ("", False, False, False), # Nothing is output
+ (Exception(""), True, False, True), # Exception output
+ ("trace", True, True, False), # stacktrace output
+ ("newline", True, False, True), # newline in output
+ ("no newline", True, False, False), # no newline output
+ ],
+)
def test_msg(capfd, monkeypatch, enabled, msg, trace, newline):
"""Ensure the output from msg with options is appropriate."""
# temporarily use the parameterized settings
- monkeypatch.setattr(tty, '_msg_enabled', enabled)
- monkeypatch.setattr(tty, '_stacktrace', trace)
+ monkeypatch.setattr(tty, "_msg_enabled", enabled)
+ monkeypatch.setattr(tty, "_stacktrace", trace)
- expected = [msg if isinstance(msg, str) else 'Exception: ']
+ expected = [msg if isinstance(msg, str) else "Exception: "]
if newline:
- expected[0] = '{0}\n'.format(expected[0])
+ expected[0] = "{0}\n".format(expected[0])
if trace:
- expected.insert(0, '.py')
+ expected.insert(0, ".py")
tty.msg(msg, newline=newline)
out = capfd.readouterr()[0]
@@ -59,23 +62,28 @@ def test_msg(capfd, monkeypatch, enabled, msg, trace, newline):
assert msg in out
-@pytest.mark.parametrize('msg,trace,wrap', [
- (Exception(''), False, False), # Exception output
- ('trace', True, False), # stacktrace output
- ('wrap', False, True), # wrap in output
-])
+@pytest.mark.parametrize(
+ "msg,trace,wrap",
+ [
+ (Exception(""), False, False), # Exception output
+ ("trace", True, False), # stacktrace output
+ ("wrap", False, True), # wrap in output
+ ],
+)
def test_info(capfd, monkeypatch, msg, trace, wrap):
"""Ensure the output from info with options is appropriate."""
# temporarily use the parameterized settings
- monkeypatch.setattr(tty, '_stacktrace', trace)
+ monkeypatch.setattr(tty, "_stacktrace", trace)
- expected = [msg if isinstance(msg, str) else 'Exception: ']
+ expected = [msg if isinstance(msg, str) else "Exception: "]
if trace:
- expected.insert(0, '.py')
+ expected.insert(0, ".py")
- extra = 'This extra argument *should* make for a sufficiently long line' \
- ' that needs to be wrapped if the option is enabled.'
+ extra = (
+ "This extra argument *should* make for a sufficiently long line"
+ " that needs to be wrapped if the option is enabled."
+ )
args = [msg, extra]
num_newlines = 3 if wrap else 2
@@ -85,4 +93,4 @@ def test_info(capfd, monkeypatch, msg, trace, wrap):
for msg in expected:
assert msg in out
- assert out.count('\n') == num_newlines
+ assert out.count("\n") == num_newlines
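The tty.py tests poke module-level switches (_debug, _msg_enabled, _stacktrace) through monkeypatch and then check what msg() and get_timestamp() emit. A rough sketch of the public calls exercised here, assuming a Spack checkout on sys.path:

    import llnl.util.tty as tty

    tty.msg("fetching sources")          # status line on stdout
    tty.msg("done", newline=False)       # same message, no trailing newline

    # per the assertions above: empty unless debugging is enabled or the
    # timestamp is forced; debug level 2 also appends the PID
    stamp = tty.get_timestamp(True)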
diff --git a/lib/spack/spack/test/main.py b/lib/spack/spack/test/main.py
index dc6fa8299e..8af8bc590c 100644
--- a/lib/spack/spack/test/main.py
+++ b/lib/spack/spack/test/main.py
@@ -14,16 +14,18 @@ import spack.paths
from spack.main import get_version, main
pytestmark = pytest.mark.skipif(
- sys.platform == 'win32',
- reason="Test functionality supported but tests are failing on Win")
+ sys.platform == "win32", reason="Test functionality supported but tests are failing on Win"
+)
def test_version_git_nonsense_output(tmpdir, working_env):
git = str(tmpdir.join("git"))
with open(git, "w") as f:
- f.write("""#!/bin/sh
+ f.write(
+ """#!/bin/sh
echo --|not a hash|----
-""")
+"""
+ )
fs.set_executable(git)
os.environ["PATH"] = str(tmpdir)
@@ -33,10 +35,12 @@ echo --|not a hash|----
def test_version_git_fails(tmpdir, working_env):
git = str(tmpdir.join("git"))
with open(git, "w") as f:
- f.write("""#!/bin/sh
+ f.write(
+ """#!/bin/sh
echo 26552533be04e83e66be2c28e0eb5011cb54e8fa
exit 1
-""")
+"""
+ )
fs.set_executable(git)
os.environ["PATH"] = str(tmpdir)
@@ -45,11 +49,15 @@ exit 1
def test_git_sha_output(tmpdir, working_env):
git = str(tmpdir.join("git"))
- sha = '26552533be04e83e66be2c28e0eb5011cb54e8fa'
+ sha = "26552533be04e83e66be2c28e0eb5011cb54e8fa"
with open(git, "w") as f:
- f.write("""#!/bin/sh
+ f.write(
+ """#!/bin/sh
echo {0}
-""".format(sha))
+""".format(
+ sha
+ )
+ )
fs.set_executable(git)
os.environ["PATH"] = str(tmpdir)
@@ -76,9 +84,11 @@ def test_main_calls_get_version(tmpdir, capsys, working_env):
def test_get_version_bad_git(tmpdir, working_env):
bad_git = str(tmpdir.join("git"))
with open(bad_git, "w") as f:
- f.write("""#!/bin/sh
+ f.write(
+ """#!/bin/sh
exit 1
-""")
+"""
+ )
fs.set_executable(bad_git)
os.environ["PATH"] = str(tmpdir)
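All of the main.py tests rely on the same trick: write a stub shell script named git, mark it executable, and point PATH at its directory so get_version() sees controlled output. A stand-alone, stdlib-only version of that pattern for POSIX (the tests themselves use llnl.util.filesystem's set_executable and replace PATH outright under the working_env fixture):

    import os
    import stat
    import tempfile

    def fake_git(script_body):
        # drop an executable stub named "git" into a fresh temp dir and put
        # that dir first on PATH so it shadows the real git
        tmpdir = tempfile.mkdtemp()
        git = os.path.join(tmpdir, "git")
        with open(git, "w") as f:
            f.write("#!/bin/sh\n" + script_body)
        os.chmod(git, os.stat(git).st_mode | stat.S_IEXEC)
        os.environ["PATH"] = tmpdir + os.pathsep + os.environ.get("PATH", "")

    fake_git("echo 26552533be04e83e66be2c28e0eb5011cb54e8fa\n")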
diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py
index 9fcf4cc8c4..b7063e5e10 100644
--- a/lib/spack/spack/test/make_executable.py
+++ b/lib/spack/spack/test/make_executable.py
@@ -19,101 +19,97 @@ import pytest
from spack.build_environment import MakeExecutable
from spack.util.environment import path_put_first
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="MakeExecutable \
- not supported on Windows")
+pytestmark = pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="MakeExecutable \
+ not supported on Windows",
+)
class MakeExecutableTest(unittest.TestCase):
-
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
- make_exe = os.path.join(self.tmpdir, 'make')
- with open(make_exe, 'w') as f:
- f.write('#!/bin/sh\n')
+ make_exe = os.path.join(self.tmpdir, "make")
+ with open(make_exe, "w") as f:
+ f.write("#!/bin/sh\n")
f.write('echo "$@"')
os.chmod(make_exe, 0o700)
- path_put_first('PATH', [self.tmpdir])
+ path_put_first("PATH", [self.tmpdir])
def tearDown(self):
shutil.rmtree(self.tmpdir)
def test_make_normal(self):
- make = MakeExecutable('make', 8)
- self.assertEqual(make(output=str).strip(), '-j8')
- self.assertEqual(make('install', output=str).strip(), '-j8 install')
+ make = MakeExecutable("make", 8)
+ self.assertEqual(make(output=str).strip(), "-j8")
+ self.assertEqual(make("install", output=str).strip(), "-j8 install")
def test_make_explicit(self):
- make = MakeExecutable('make', 8)
- self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True,
- output=str).strip(), '-j8 install')
+ make = MakeExecutable("make", 8)
+ self.assertEqual(make(parallel=True, output=str).strip(), "-j8")
+ self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install")
def test_make_one_job(self):
- make = MakeExecutable('make', 1)
- self.assertEqual(make(output=str).strip(), '')
- self.assertEqual(make('install', output=str).strip(), 'install')
+ make = MakeExecutable("make", 1)
+ self.assertEqual(make(output=str).strip(), "")
+ self.assertEqual(make("install", output=str).strip(), "install")
def test_make_parallel_false(self):
- make = MakeExecutable('make', 8)
- self.assertEqual(make(parallel=False, output=str).strip(), '')
- self.assertEqual(make('install', parallel=False,
- output=str).strip(), 'install')
+ make = MakeExecutable("make", 8)
+ self.assertEqual(make(parallel=False, output=str).strip(), "")
+ self.assertEqual(make("install", parallel=False, output=str).strip(), "install")
def test_make_parallel_disabled(self):
- make = MakeExecutable('make', 8)
+ make = MakeExecutable("make", 8)
- os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
- self.assertEqual(make(output=str).strip(), '')
- self.assertEqual(make('install', output=str).strip(), 'install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "true"
+ self.assertEqual(make(output=str).strip(), "")
+ self.assertEqual(make("install", output=str).strip(), "install")
- os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
- self.assertEqual(make(output=str).strip(), '')
- self.assertEqual(make('install', output=str).strip(), 'install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "1"
+ self.assertEqual(make(output=str).strip(), "")
+ self.assertEqual(make("install", output=str).strip(), "install")
# These don't disable (false and random string)
- os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
- self.assertEqual(make(output=str).strip(), '-j8')
- self.assertEqual(make('install', output=str).strip(), '-j8 install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "false"
+ self.assertEqual(make(output=str).strip(), "-j8")
+ self.assertEqual(make("install", output=str).strip(), "-j8 install")
- os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
- self.assertEqual(make(output=str).strip(), '-j8')
- self.assertEqual(make('install', output=str).strip(), '-j8 install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "foobar"
+ self.assertEqual(make(output=str).strip(), "-j8")
+ self.assertEqual(make("install", output=str).strip(), "-j8 install")
- del os.environ['SPACK_NO_PARALLEL_MAKE']
+ del os.environ["SPACK_NO_PARALLEL_MAKE"]
def test_make_parallel_precedence(self):
- make = MakeExecutable('make', 8)
+ make = MakeExecutable("make", 8)
# These should work
- os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
- self.assertEqual(make(parallel=True, output=str).strip(), '')
- self.assertEqual(make('install', parallel=True,
- output=str).strip(), 'install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "true"
+ self.assertEqual(make(parallel=True, output=str).strip(), "")
+ self.assertEqual(make("install", parallel=True, output=str).strip(), "install")
- os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
- self.assertEqual(make(parallel=True, output=str).strip(), '')
- self.assertEqual(make('install', parallel=True,
- output=str).strip(), 'install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "1"
+ self.assertEqual(make(parallel=True, output=str).strip(), "")
+ self.assertEqual(make("install", parallel=True, output=str).strip(), "install")
# These don't disable (false and random string)
- os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
- self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True,
- output=str).strip(), '-j8 install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "false"
+ self.assertEqual(make(parallel=True, output=str).strip(), "-j8")
+ self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install")
- os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
- self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True,
- output=str).strip(), '-j8 install')
+ os.environ["SPACK_NO_PARALLEL_MAKE"] = "foobar"
+ self.assertEqual(make(parallel=True, output=str).strip(), "-j8")
+ self.assertEqual(make("install", parallel=True, output=str).strip(), "-j8 install")
- del os.environ['SPACK_NO_PARALLEL_MAKE']
+ del os.environ["SPACK_NO_PARALLEL_MAKE"]
def test_make_jobs_env(self):
- make = MakeExecutable('make', 8)
+ make = MakeExecutable("make", 8)
dump_env = {}
- self.assertEqual(make(output=str, jobs_env='MAKE_PARALLELISM',
- _dump_env=dump_env).strip(), '-j8')
- self.assertEqual(dump_env['MAKE_PARALLELISM'], '8')
+ self.assertEqual(
+ make(output=str, jobs_env="MAKE_PARALLELISM", _dump_env=dump_env).strip(), "-j8"
+ )
+ self.assertEqual(dump_env["MAKE_PARALLELISM"], "8")
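The make_executable.py tests spell out MakeExecutable's contract: a job count above one appends -jN, parallel=False or SPACK_NO_PARALLEL_MAKE set to "true"/"1" suppresses it (other values do not), and jobs_env additionally exports the job count into the child's environment. A rough sketch of those calls, assuming a Spack checkout on sys.path and some make on PATH (the tests substitute a stub script in setUp):

    from spack.build_environment import MakeExecutable

    make = MakeExecutable("make", 8)     # wrap `make` with default -j8

    make("install")                      # runs: make -j8 install
    make("install", parallel=False)      # runs: make install

    # jobs_env also exports the parallelism to the subprocess environment
    env = {}
    make("install", jobs_env="MAKE_PARALLELISM", _dump_env=env)
    # env["MAKE_PARALLELISM"] == "8"  (and -j8 is still passed)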
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index f8e1795c49..c156db867c 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -20,12 +20,13 @@ from spack.stage import Stage
from spack.util.executable import which
from spack.util.spack_yaml import SpackYAMLError
-pytestmark = [pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows"),
- pytest.mark.usefixtures('mutable_config', 'mutable_mock_repo')]
+pytestmark = [
+ pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
+ pytest.mark.usefixtures("mutable_config", "mutable_mock_repo"),
+]
# paths in repos that shouldn't be in the mirror tarballs.
-exclude = ['.hg', '.git', '.svn']
+exclude = [".hg", ".git", ".svn"]
repos = {}
@@ -50,12 +51,12 @@ def set_up_package(name, repository, url_attr):
def check_mirror():
- with Stage('spack-mirror-test') as stage:
- mirror_root = os.path.join(stage.path, 'test-mirror')
+ with Stage("spack-mirror-test") as stage:
+ mirror_root = os.path.join(stage.path, "test-mirror")
# register mirror with spack config
- mirrors = {'spack-mirror-test': 'file://' + mirror_root}
- with spack.config.override('mirrors', mirrors):
- with spack.config.override('config:checksum', False):
+ mirrors = {"spack-mirror-test": "file://" + mirror_root}
+ with spack.config.override("mirrors", mirrors):
+ with spack.config.override("config:checksum", False):
specs = [Spec(x).concretized() for x in repos]
spack.mirror.create(mirror_root, specs)
@@ -64,13 +65,9 @@ def check_mirror():
for spec in specs:
fetcher = spec.package.fetcher[0]
- per_package_ref = os.path.join(
- spec.name, '-'.join([spec.name, str(spec.version)]))
- mirror_paths = spack.mirror.mirror_archive_paths(
- fetcher,
- per_package_ref)
- expected_path = os.path.join(
- mirror_root, mirror_paths.storage_path)
+ per_package_ref = os.path.join(spec.name, "-".join([spec.name, str(spec.version)]))
+ mirror_paths = spack.mirror.mirror_archive_paths(fetcher, per_package_ref)
+ expected_path = os.path.join(mirror_root, mirror_paths.storage_path)
assert os.path.exists(expected_path)
# Now try to fetch each package.
@@ -78,22 +75,20 @@ def check_mirror():
spec = Spec(name).concretized()
pkg = spec.package
- with spack.config.override('config:checksum', False):
+ with spack.config.override("config:checksum", False):
with pkg.stage:
pkg.do_stage(mirror_only=True)
# Compare the original repo with the expanded archive
original_path = mock_repo.path
- if 'svn' in name:
+ if "svn" in name:
# have to check out the svn repo to compare.
- original_path = os.path.join(
- mock_repo.path, 'checked_out')
+ original_path = os.path.join(mock_repo.path, "checked_out")
- svn = which('svn', required=True)
- svn('checkout', mock_repo.url, original_path)
+ svn = which("svn", required=True)
+ svn("checkout", mock_repo.url, original_path)
- dcmp = filecmp.dircmp(
- original_path, pkg.stage.source_path)
+ dcmp = filecmp.dircmp(original_path, pkg.stage.source_path)
# make sure there are no new files in the expanded
# tarball
@@ -103,49 +98,44 @@ def check_mirror():
def test_url_mirror(mock_archive):
- set_up_package('trivial-install-test-package', mock_archive, 'url')
+ set_up_package("trivial-install-test-package", mock_archive, "url")
check_mirror()
repos.clear()
-@pytest.mark.skipif(
- not which('git'), reason='requires git to be installed')
+@pytest.mark.skipif(not which("git"), reason="requires git to be installed")
def test_git_mirror(mock_git_repository):
- set_up_package('git-test', mock_git_repository, 'git')
+ set_up_package("git-test", mock_git_repository, "git")
check_mirror()
repos.clear()
@pytest.mark.skipif(
- not which('svn') or not which('svnadmin'),
- reason='requires subversion to be installed')
+ not which("svn") or not which("svnadmin"), reason="requires subversion to be installed"
+)
def test_svn_mirror(mock_svn_repository):
- set_up_package('svn-test', mock_svn_repository, 'svn')
+ set_up_package("svn-test", mock_svn_repository, "svn")
check_mirror()
repos.clear()
-@pytest.mark.skipif(
- not which('hg'), reason='requires mercurial to be installed')
+@pytest.mark.skipif(not which("hg"), reason="requires mercurial to be installed")
def test_hg_mirror(mock_hg_repository):
- set_up_package('hg-test', mock_hg_repository, 'hg')
+ set_up_package("hg-test", mock_hg_repository, "hg")
check_mirror()
repos.clear()
@pytest.mark.skipif(
- not all([which('svn'), which('hg'), which('git')]),
- reason='requires subversion, git, and mercurial to be installed')
-def test_all_mirror(
- mock_git_repository,
- mock_svn_repository,
- mock_hg_repository,
- mock_archive):
-
- set_up_package('git-test', mock_git_repository, 'git')
- set_up_package('svn-test', mock_svn_repository, 'svn')
- set_up_package('hg-test', mock_hg_repository, 'hg')
- set_up_package('trivial-install-test-package', mock_archive, 'url')
+ not all([which("svn"), which("hg"), which("git")]),
+ reason="requires subversion, git, and mercurial to be installed",
+)
+def test_all_mirror(mock_git_repository, mock_svn_repository, mock_hg_repository, mock_archive):
+
+ set_up_package("git-test", mock_git_repository, "git")
+ set_up_package("svn-test", mock_svn_repository, "svn")
+ set_up_package("hg-test", mock_hg_repository, "hg")
+ set_up_package("trivial-install-test-package", mock_archive, "url")
check_mirror()
repos.clear()
@@ -154,8 +144,8 @@ def test_all_mirror(
"mirror",
[
spack.mirror.Mirror(
- 'https://example.com/fetch',
- 'https://example.com/push',
+ "https://example.com/fetch",
+ "https://example.com/push",
),
],
)
@@ -167,10 +157,7 @@ def test_roundtrip_mirror(mirror):
@pytest.mark.parametrize(
- "invalid_yaml",
- [
- "playing_playlist: {{ action }} playlist {{ playlist_name }}"
- ]
+ "invalid_yaml", ["playing_playlist: {{ action }} playlist {{ playlist_name }}"]
)
def test_invalid_yaml_mirror(invalid_yaml):
with pytest.raises(SpackYAMLError) as e:
@@ -180,12 +167,7 @@ def test_invalid_yaml_mirror(invalid_yaml):
assert invalid_yaml in exc_msg
-@pytest.mark.parametrize(
- "invalid_json, error_message",
- [
- ("{13:", "Expecting property name")
- ]
-)
+@pytest.mark.parametrize("invalid_json, error_message", [("{13:", "Expecting property name")])
def test_invalid_json_mirror(invalid_json, error_message):
with pytest.raises(sjson.SpackJSONError) as e:
spack.mirror.Mirror.from_json(invalid_json)
@@ -199,9 +181,9 @@ def test_invalid_json_mirror(invalid_json, error_message):
[
spack.mirror.MirrorCollection(
mirrors={
- 'example-mirror': spack.mirror.Mirror(
- 'https://example.com/fetch',
- 'https://example.com/push',
+ "example-mirror": spack.mirror.Mirror(
+ "https://example.com/fetch",
+ "https://example.com/push",
).to_dict(),
},
),
@@ -209,18 +191,13 @@ def test_invalid_json_mirror(invalid_json, error_message):
)
def test_roundtrip_mirror_collection(mirror_collection):
mirror_collection_yaml = mirror_collection.to_yaml()
- assert (spack.mirror.MirrorCollection.from_yaml(mirror_collection_yaml) ==
- mirror_collection)
+ assert spack.mirror.MirrorCollection.from_yaml(mirror_collection_yaml) == mirror_collection
mirror_collection_json = mirror_collection.to_json()
- assert (spack.mirror.MirrorCollection.from_json(mirror_collection_json) ==
- mirror_collection)
+ assert spack.mirror.MirrorCollection.from_json(mirror_collection_json) == mirror_collection
@pytest.mark.parametrize(
- "invalid_yaml",
- [
- "playing_playlist: {{ action }} playlist {{ playlist_name }}"
- ]
+ "invalid_yaml", ["playing_playlist: {{ action }} playlist {{ playlist_name }}"]
)
def test_invalid_yaml_mirror_collection(invalid_yaml):
with pytest.raises(SpackYAMLError) as e:
@@ -230,12 +207,7 @@ def test_invalid_yaml_mirror_collection(invalid_yaml):
assert invalid_yaml in exc_msg
-@pytest.mark.parametrize(
- "invalid_json, error_message",
- [
- ("{13:", "Expecting property name")
- ]
-)
+@pytest.mark.parametrize("invalid_json, error_message", [("{13:", "Expecting property name")])
def test_invalid_json_mirror_collection(invalid_json, error_message):
with pytest.raises(sjson.SpackJSONError) as e:
spack.mirror.MirrorCollection.from_json(invalid_json)
@@ -245,13 +217,13 @@ def test_invalid_json_mirror_collection(invalid_json, error_message):
def test_mirror_archive_paths_no_version(mock_packages, config, mock_archive):
- spec = Spec('trivial-install-test-package@nonexistingversion').concretized()
+ spec = Spec("trivial-install-test-package@nonexistingversion").concretized()
fetcher = spack.fetch_strategy.URLFetchStrategy(mock_archive.url)
- spack.mirror.mirror_archive_paths(fetcher, 'per-package-ref', spec)
+ spack.mirror.mirror_archive_paths(fetcher, "per-package-ref", spec)
def test_mirror_with_url_patches(mock_packages, config, monkeypatch):
- spec = Spec('patch-several-dependencies')
+ spec = Spec("patch-several-dependencies")
spec.concretize()
files_cached_in_mirror = set()
@@ -260,55 +232,58 @@ def test_mirror_with_url_patches(mock_packages, config, monkeypatch):
files_cached_in_mirror.add(os.path.basename(relative_dst))
def successful_fetch(_class):
- with open(_class.stage.save_filename, 'w'):
+ with open(_class.stage.save_filename, "w"):
pass
def successful_expand(_class):
- expanded_path = os.path.join(_class.stage.path,
- spack.stage._source_path_subdir)
+ expanded_path = os.path.join(_class.stage.path, spack.stage._source_path_subdir)
os.mkdir(expanded_path)
- with open(os.path.join(expanded_path, 'test.patch'), 'w'):
+ with open(os.path.join(expanded_path, "test.patch"), "w"):
pass
def successful_apply(*args, **kwargs):
pass
- with Stage('spack-mirror-test') as stage:
- mirror_root = os.path.join(stage.path, 'test-mirror')
+ with Stage("spack-mirror-test") as stage:
+ mirror_root = os.path.join(stage.path, "test-mirror")
- monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, 'fetch',
- successful_fetch)
- monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy,
- 'expand', successful_expand)
- monkeypatch.setattr(spack.patch, 'apply_patch', successful_apply)
- monkeypatch.setattr(spack.caches.MirrorCache, 'store', record_store)
+ monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "fetch", successful_fetch)
+ monkeypatch.setattr(spack.fetch_strategy.URLFetchStrategy, "expand", successful_expand)
+ monkeypatch.setattr(spack.patch, "apply_patch", successful_apply)
+ monkeypatch.setattr(spack.caches.MirrorCache, "store", record_store)
- with spack.config.override('config:checksum', False):
+ with spack.config.override("config:checksum", False):
spack.mirror.create(mirror_root, list(spec.traverse()))
- assert not (set([
- 'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
- 'abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz',
- ]) - files_cached_in_mirror)
+ assert not (
+ set(
+ [
+ "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
+ "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.gz",
+ ]
+ )
+ - files_cached_in_mirror
+ )
class MockFetcher(object):
"""Mock fetcher object which implements the necessary functionality for
- testing MirrorCache
+ testing MirrorCache
"""
+
@staticmethod
def archive(dst):
- with open(dst, 'w'):
+ with open(dst, "w"):
pass
-@pytest.mark.regression('14067')
+@pytest.mark.regression("14067")
def test_mirror_cache_symlinks(tmpdir):
"""Confirm that the cosmetic symlink created in the mirror cache (which may
- be relative) targets the storage path correctly.
+ be relative) targets the storage path correctly.
"""
- cosmetic_path = 'zlib/zlib-1.2.11.tar.gz'
- global_path = '_source-cache/archive/c3/c3e5.tar.gz'
+ cosmetic_path = "zlib/zlib-1.2.11.tar.gz"
+ global_path = "_source-cache/archive/c3/c3e5.tar.gz"
cache = spack.caches.MirrorCache(str(tmpdir), False)
reference = spack.mirror.MirrorReference(cosmetic_path, global_path)
@@ -316,17 +291,20 @@ def test_mirror_cache_symlinks(tmpdir):
cache.symlink(reference)
link_target = resolve_link_target_relative_to_the_link(
- os.path.join(cache.root, reference.cosmetic_path))
+ os.path.join(cache.root, reference.cosmetic_path)
+ )
assert os.path.exists(link_target)
- assert (os.path.normpath(link_target) ==
- os.path.join(cache.root, reference.storage_path))
+ assert os.path.normpath(link_target) == os.path.join(cache.root, reference.storage_path)
-@pytest.mark.regression('31627')
-@pytest.mark.parametrize('specs,expected_specs', [
- (['a'], ['a@1.0', 'a@2.0']),
- (['a', 'brillig'], ['a@1.0', 'a@2.0', 'brillig@1.0.0', 'brillig@2.0.0']),
-])
+@pytest.mark.regression("31627")
+@pytest.mark.parametrize(
+ "specs,expected_specs",
+ [
+ (["a"], ["a@1.0", "a@2.0"]),
+ (["a", "brillig"], ["a@1.0", "a@2.0", "brillig@1.0.0", "brillig@2.0.0"]),
+ ],
+)
def test_get_all_versions(specs, expected_specs):
specs = [Spec(s) for s in specs]
output_list = spack.mirror.get_all_versions(specs)
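check_mirror above is the template the individual VCS tests reuse: create a staging area, register it as a file:// mirror via a config override, disable checksums, and populate it from concretized specs. Stripped to its core and with placeholder names, it looks roughly like this (assumes a fully initialized Spack with a package repository available):

    import os

    import spack.config
    import spack.mirror
    from spack.spec import Spec
    from spack.stage import Stage

    with Stage("spack-mirror-example") as stage:
        mirror_root = os.path.join(stage.path, "test-mirror")

        # register the mirror and skip checksum verification, then populate
        # it with everything the concretized specs need
        mirrors = {"example-mirror": "file://" + mirror_root}
        with spack.config.override("mirrors", mirrors):
            with spack.config.override("config:checksum", False):
                specs = [Spec("zlib").concretized()]  # placeholder spec
                spack.mirror.create(mirror_root, specs)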
diff --git a/lib/spack/spack/test/module_parsing.py b/lib/spack/spack/test/module_parsing.py
index 2a19d39033..1fd617376d 100644
--- a/lib/spack/spack/test/module_parsing.py
+++ b/lib/spack/spack/test/module_parsing.py
@@ -17,48 +17,51 @@ from spack.util.module_cmd import (
path_from_modules,
)
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="Tests fail on Windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows")
-test_module_lines = ['prepend-path LD_LIBRARY_PATH /path/to/lib',
- 'setenv MOD_DIR /path/to',
- 'setenv LDFLAGS -Wl,-rpath/path/to/lib',
- 'setenv LDFLAGS -L/path/to/lib',
- 'prepend-path PATH /path/to/bin']
+test_module_lines = [
+ "prepend-path LD_LIBRARY_PATH /path/to/lib",
+ "setenv MOD_DIR /path/to",
+ "setenv LDFLAGS -Wl,-rpath/path/to/lib",
+ "setenv LDFLAGS -L/path/to/lib",
+ "prepend-path PATH /path/to/bin",
+]
def test_module_function_change_env(tmpdir, working_env):
- src_file = str(tmpdir.join('src_me'))
- with open(src_file, 'w') as f:
- f.write('export TEST_MODULE_ENV_VAR=TEST_SUCCESS\n')
+ src_file = str(tmpdir.join("src_me"))
+ with open(src_file, "w") as f:
+ f.write("export TEST_MODULE_ENV_VAR=TEST_SUCCESS\n")
- os.environ['NOT_AFFECTED'] = "NOT_AFFECTED"
- module('load', src_file, module_template='. {0} 2>&1'.format(src_file))
+ os.environ["NOT_AFFECTED"] = "NOT_AFFECTED"
+ module("load", src_file, module_template=". {0} 2>&1".format(src_file))
- assert os.environ['TEST_MODULE_ENV_VAR'] == 'TEST_SUCCESS'
- assert os.environ['NOT_AFFECTED'] == "NOT_AFFECTED"
+ assert os.environ["TEST_MODULE_ENV_VAR"] == "TEST_SUCCESS"
+ assert os.environ["NOT_AFFECTED"] == "NOT_AFFECTED"
def test_module_function_no_change(tmpdir):
- src_file = str(tmpdir.join('src_me'))
- with open(src_file, 'w') as f:
- f.write('echo TEST_MODULE_FUNCTION_PRINT')
+ src_file = str(tmpdir.join("src_me"))
+ with open(src_file, "w") as f:
+ f.write("echo TEST_MODULE_FUNCTION_PRINT")
old_env = os.environ.copy()
- text = module('show', src_file, module_template='. {0} 2>&1'.format(src_file))
+ text = module("show", src_file, module_template=". {0} 2>&1".format(src_file))
- assert text == 'TEST_MODULE_FUNCTION_PRINT\n'
+ assert text == "TEST_MODULE_FUNCTION_PRINT\n"
assert os.environ == old_env
def test_get_path_from_module_faked(monkeypatch):
for line in test_module_lines:
+
def fake_module(*args):
return line
- monkeypatch.setattr(spack.util.module_cmd, 'module', fake_module)
- path = path_from_modules(['mod'])
- assert path == '/path/to'
+ monkeypatch.setattr(spack.util.module_cmd, "module", fake_module)
+
+ path = path_from_modules(["mod"])
+ assert path == "/path/to"
def test_get_path_from_module_contents():
@@ -80,48 +83,50 @@ prepend_path("PATH","/path/to/cmake-3.9.2/bin:/other/bad/path")
prepend_path("MANPATH","/path/to/cmake/cmake-3.9.2/share/man")
prepend_path("LD_LIBRARY_PATH","/path/to/cmake-3.9.2/lib64")
"""
- module_show_lines = module_show_output.split('\n')
+ module_show_lines = module_show_output.split("\n")
# PATH and LD_LIBRARY_PATH outvote MANPATH and the other PATH and
# LD_LIBRARY_PATH entries
- assert (get_path_from_module_contents(module_show_lines, 'cmake-3.9.2') ==
- '/path/to/cmake-3.9.2')
+ assert (
+ get_path_from_module_contents(module_show_lines, "cmake-3.9.2") == "/path/to/cmake-3.9.2"
+ )
def test_get_path_from_empty_module():
- assert get_path_from_module_contents('', 'test') is None
+ assert get_path_from_module_contents("", "test") is None
def test_pkg_dir_from_module_name():
- module_show_lines = ['setenv FOO_BAR_DIR /path/to/foo-bar']
+ module_show_lines = ["setenv FOO_BAR_DIR /path/to/foo-bar"]
- assert (get_path_from_module_contents(module_show_lines, 'foo-bar') ==
- '/path/to/foo-bar')
+ assert get_path_from_module_contents(module_show_lines, "foo-bar") == "/path/to/foo-bar"
- assert (get_path_from_module_contents(module_show_lines, 'foo-bar/1.0') ==
- '/path/to/foo-bar')
+ assert get_path_from_module_contents(module_show_lines, "foo-bar/1.0") == "/path/to/foo-bar"
def test_get_argument_from_module_line():
- simple_lines = ['prepend-path LD_LIBRARY_PATH /lib/path',
- 'prepend-path LD_LIBRARY_PATH /lib/path',
- "prepend_path('PATH' , '/lib/path')",
- 'prepend_path( "PATH" , "/lib/path" )',
- 'prepend_path("PATH",' + "'/lib/path')"]
-
- complex_lines = ['prepend-path LD_LIBRARY_PATH /lib/path:/pkg/path',
- 'prepend-path LD_LIBRARY_PATH /lib/path:/pkg/path',
- "prepend_path('PATH' , '/lib/path:/pkg/path')",
- 'prepend_path( "PATH" , "/lib/path:/pkg/path" )',
- 'prepend_path("PATH",' + "'/lib/path:/pkg/path')"]
-
- bad_lines = ['prepend_path(PATH,/lib/path)',
- 'prepend-path (LD_LIBRARY_PATH) /lib/path']
-
- assert all(get_path_args_from_module_line(x) == ['/lib/path']
- for x in simple_lines)
- assert all(get_path_args_from_module_line(x) == ['/lib/path', '/pkg/path']
- for x in complex_lines)
+ simple_lines = [
+ "prepend-path LD_LIBRARY_PATH /lib/path",
+ "prepend-path LD_LIBRARY_PATH /lib/path",
+ "prepend_path('PATH' , '/lib/path')",
+ 'prepend_path( "PATH" , "/lib/path" )',
+ 'prepend_path("PATH",' + "'/lib/path')",
+ ]
+
+ complex_lines = [
+ "prepend-path LD_LIBRARY_PATH /lib/path:/pkg/path",
+ "prepend-path LD_LIBRARY_PATH /lib/path:/pkg/path",
+ "prepend_path('PATH' , '/lib/path:/pkg/path')",
+ 'prepend_path( "PATH" , "/lib/path:/pkg/path" )',
+ 'prepend_path("PATH",' + "'/lib/path:/pkg/path')",
+ ]
+
+ bad_lines = ["prepend_path(PATH,/lib/path)", "prepend-path (LD_LIBRARY_PATH) /lib/path"]
+
+ assert all(get_path_args_from_module_line(x) == ["/lib/path"] for x in simple_lines)
+ assert all(
+ get_path_args_from_module_line(x) == ["/lib/path", "/pkg/path"] for x in complex_lines
+ )
for bl in bad_lines:
with pytest.raises(ValueError):
get_path_args_from_module_line(bl)
@@ -130,5 +135,5 @@ def test_get_argument_from_module_line():
# lmod is entirely unsupported on Windows
def test_lmod_quote_parsing():
lines = ['setenv("SOME_PARTICULAR_DIR","-L/opt/cray/pe/mpich/8.1.4/gtl/lib")']
- result = get_path_from_module_contents(lines, 'some-module')
- assert '/opt/cray/pe/mpich/8.1.4/gtl' == result
+ result = get_path_from_module_contents(lines, "some-module")
+ assert "/opt/cray/pe/mpich/8.1.4/gtl" == result
diff --git a/lib/spack/spack/test/modules/common.py b/lib/spack/spack/test/modules/common.py
index 9ecde33aa5..47c06e8233 100644
--- a/lib/spack/spack/test/modules/common.py
+++ b/lib/spack/spack/test/modules/common.py
@@ -17,44 +17,33 @@ import spack.util.spack_yaml as syaml
from spack.modules.common import UpstreamModuleIndex
from spack.spec import Spec
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def test_update_dictionary_extending_list():
- target = {
- 'foo': {
- 'a': 1,
- 'b': 2,
- 'd': 4
- },
- 'bar': [1, 2, 4],
- 'baz': 'foobar'
- }
+ target = {"foo": {"a": 1, "b": 2, "d": 4}, "bar": [1, 2, 4], "baz": "foobar"}
update = {
- 'foo': {
- 'c': 3,
+ "foo": {
+ "c": 3,
},
- 'bar': [3],
- 'baz': 'foobaz',
- 'newkey': {
- 'd': 4
- }
+ "bar": [3],
+ "baz": "foobaz",
+ "newkey": {"d": 4},
}
spack.modules.common.update_dictionary_extending_lists(target, update)
assert len(target) == 4
- assert len(target['foo']) == 4
- assert len(target['bar']) == 4
- assert target['baz'] == 'foobaz'
+ assert len(target["foo"]) == 4
+ assert len(target["bar"]) == 4
+ assert target["baz"] == "foobaz"
@pytest.fixture()
def mock_module_defaults(monkeypatch):
def impl(*args):
        # No need to patch both types because neither overrides base

- monkeypatch.setattr(spack.modules.common.BaseConfiguration,
- 'defaults',
- [arg for arg in args])
+ monkeypatch.setattr(
+ spack.modules.common.BaseConfiguration, "defaults", [arg for arg in args]
+ )
return impl
@@ -62,39 +51,36 @@ def mock_module_defaults(monkeypatch):
@pytest.fixture()
def mock_package_perms(monkeypatch):
perms = stat.S_IRGRP | stat.S_IWGRP
- monkeypatch.setattr(spack.package_prefs,
- 'get_package_permissions',
- lambda spec: perms)
+ monkeypatch.setattr(spack.package_prefs, "get_package_permissions", lambda spec: perms)
yield perms
-def test_modules_written_with_proper_permissions(mock_module_filename,
- mock_package_perms,
- mock_packages, config):
- spec = spack.spec.Spec('mpileaks').concretized()
+def test_modules_written_with_proper_permissions(
+ mock_module_filename, mock_package_perms, mock_packages, config
+):
+ spec = spack.spec.Spec("mpileaks").concretized()
# The code tested is common to all module types, but has to be tested from
# one. TCL picked at random
- generator = spack.modules.tcl.TclModulefileWriter(spec, 'default')
+ generator = spack.modules.tcl.TclModulefileWriter(spec, "default")
generator.write()
- assert mock_package_perms & os.stat(
- mock_module_filename).st_mode == mock_package_perms
+ assert mock_package_perms & os.stat(mock_module_filename).st_mode == mock_package_perms
-@pytest.mark.parametrize('module_type', ['tcl', 'lmod'])
+@pytest.mark.parametrize("module_type", ["tcl", "lmod"])
def test_modules_default_symlink(
- module_type, mock_packages, mock_module_filename, mock_module_defaults, config
+ module_type, mock_packages, mock_module_filename, mock_module_defaults, config
):
- spec = spack.spec.Spec('mpileaks@2.3').concretized()
- mock_module_defaults(spec.format('{name}{@version}'))
+ spec = spack.spec.Spec("mpileaks@2.3").concretized()
+ mock_module_defaults(spec.format("{name}{@version}"))
generator_cls = spack.modules.module_types[module_type]
- generator = generator_cls(spec, 'default')
+ generator = generator_cls(spec, "default")
generator.write()
- link_path = os.path.join(os.path.dirname(mock_module_filename), 'default')
+ link_path = os.path.join(os.path.dirname(mock_module_filename), "default")
assert os.path.islink(link_path)
assert os.readlink(link_path) == mock_module_filename
@@ -117,82 +103,63 @@ class MockSpec(object):
def test_upstream_module_index():
- s1 = MockSpec('spec-1')
- s2 = MockSpec('spec-2')
- s3 = MockSpec('spec-3')
- s4 = MockSpec('spec-4')
+ s1 = MockSpec("spec-1")
+ s2 = MockSpec("spec-2")
+ s3 = MockSpec("spec-3")
+ s4 = MockSpec("spec-4")
tcl_module_index = """\
module_index:
{0}:
path: /path/to/a
use_name: a
-""".format(s1.dag_hash())
-
- module_indices = [
- {
- 'tcl': spack.modules.common._read_module_index(tcl_module_index)
- },
- {}
- ]
-
- dbs = [
- 'd0',
- 'd1'
- ]
-
- mock_db = MockDb(
- dbs,
- {
- s1.dag_hash(): 'd0',
- s2.dag_hash(): 'd1',
- s3.dag_hash(): 'd0'
- }
+""".format(
+ s1.dag_hash()
)
+
+ module_indices = [{"tcl": spack.modules.common._read_module_index(tcl_module_index)}, {}]
+
+ dbs = ["d0", "d1"]
+
+ mock_db = MockDb(dbs, {s1.dag_hash(): "d0", s2.dag_hash(): "d1", s3.dag_hash(): "d0"})
upstream_index = UpstreamModuleIndex(mock_db, module_indices)
- m1 = upstream_index.upstream_module(s1, 'tcl')
- assert m1.path == '/path/to/a'
+ m1 = upstream_index.upstream_module(s1, "tcl")
+ assert m1.path == "/path/to/a"
# No modules are defined for the DB associated with s2
- assert not upstream_index.upstream_module(s2, 'tcl')
+ assert not upstream_index.upstream_module(s2, "tcl")
# Modules are defined for the index associated with s1, but none are
# defined for the requested type
- assert not upstream_index.upstream_module(s1, 'lmod')
+ assert not upstream_index.upstream_module(s1, "lmod")
# A module is registered with a DB and the associated module index has
# modules of the specified type defined, but not for the requested spec
- assert not upstream_index.upstream_module(s3, 'tcl')
+ assert not upstream_index.upstream_module(s3, "tcl")
# The spec isn't recorded as installed in any of the DBs
with pytest.raises(spack.error.SpackError):
- upstream_index.upstream_module(s4, 'tcl')
+ upstream_index.upstream_module(s4, "tcl")
def test_get_module_upstream():
- s1 = MockSpec('spec-1')
+ s1 = MockSpec("spec-1")
tcl_module_index = """\
module_index:
{0}:
path: /path/to/a
use_name: a
-""".format(s1.dag_hash())
+""".format(
+ s1.dag_hash()
+ )
- module_indices = [
- {},
- {
- 'tcl': spack.modules.common._read_module_index(tcl_module_index)
- }
- ]
+ module_indices = [{}, {"tcl": spack.modules.common._read_module_index(tcl_module_index)}]
- dbs = ['d0', 'd1']
+ dbs = ["d0", "d1"]
- mock_db = MockDb(
- dbs,
- {s1.dag_hash(): 'd1'}
- )
+ mock_db = MockDb(dbs, {s1.dag_hash(): "d1"})
upstream_index = UpstreamModuleIndex(mock_db, module_indices)
setattr(s1, "installed_upstream", True)
@@ -200,48 +167,46 @@ module_index:
old_index = spack.modules.common.upstream_module_index
spack.modules.common.upstream_module_index = upstream_index
- m1_path = spack.modules.common.get_module('tcl', s1, True)
- assert m1_path == '/path/to/a'
+ m1_path = spack.modules.common.get_module("tcl", s1, True)
+ assert m1_path == "/path/to/a"
finally:
spack.modules.common.upstream_module_index = old_index
-@pytest.mark.regression('14347')
-def test_load_installed_package_not_in_repo(
- install_mockery, mock_fetch, monkeypatch
-):
+@pytest.mark.regression("14347")
+def test_load_installed_package_not_in_repo(install_mockery, mock_fetch, monkeypatch):
"""Test that installed packages that have been removed are still loadable"""
- spec = Spec('trivial-install-test-package').concretized()
+ spec = Spec("trivial-install-test-package").concretized()
spec.package.do_install()
def find_nothing(*args):
- raise spack.repo.UnknownPackageError(
- 'Repo package access is disabled for test')
+ raise spack.repo.UnknownPackageError("Repo package access is disabled for test")
# Mock deletion of the package
spec._package = None
- monkeypatch.setattr(spack.repo.path, 'get', find_nothing)
+ monkeypatch.setattr(spack.repo.path, "get", find_nothing)
with pytest.raises(spack.repo.UnknownPackageError):
spec.package
- module_path = spack.modules.common.get_module('tcl', spec, True)
+ module_path = spack.modules.common.get_module("tcl", spec, True)
assert module_path
spack.package_base.PackageBase.uninstall_by_spec(spec)
# DEPRECATED: remove blacklist in v0.20
-@pytest.mark.parametrize("module_type, old_config,new_config", [
- ("tcl", "blacklist.yaml", "exclude.yaml"),
- ("tcl", "blacklist_implicits.yaml", "exclude_implicits.yaml"),
- ("tcl", "blacklist_environment.yaml", "alter_environment.yaml"),
- ("lmod", "blacklist.yaml", "exclude.yaml"),
- ("lmod", "blacklist_environment.yaml", "alter_environment.yaml"),
-])
+@pytest.mark.parametrize(
+ "module_type, old_config,new_config",
+ [
+ ("tcl", "blacklist.yaml", "exclude.yaml"),
+ ("tcl", "blacklist_implicits.yaml", "exclude_implicits.yaml"),
+ ("tcl", "blacklist_environment.yaml", "alter_environment.yaml"),
+ ("lmod", "blacklist.yaml", "exclude.yaml"),
+ ("lmod", "blacklist_environment.yaml", "alter_environment.yaml"),
+ ],
+)
def test_exclude_include_update(module_type, old_config, new_config):
- module_test_data_root = os.path.join(
- spack.paths.test_path, 'data', 'modules', module_type
- )
+ module_test_data_root = os.path.join(spack.paths.test_path, "data", "modules", module_type)
with open(os.path.join(module_test_data_root, old_config)) as f:
old_yaml = syaml.load(f)
with open(os.path.join(module_test_data_root, new_config)) as f:
diff --git a/lib/spack/spack/test/modules/conftest.py b/lib/spack/spack/test/modules/conftest.py
index 61fe4add0a..388c85d247 100644
--- a/lib/spack/spack/test/modules/conftest.py
+++ b/lib/spack/spack/test/modules/conftest.py
@@ -17,9 +17,9 @@ def modulefile_content(request):
as a list of lines.
"""
- writer_cls = getattr(request.module, 'writer_cls')
+ writer_cls = getattr(request.module, "writer_cls")
- def _impl(spec_str, module_set_name='default'):
+ def _impl(spec_str, module_set_name="default"):
# Write the module file
spec = spack.spec.Spec(spec_str)
spec.concretize()
@@ -32,7 +32,7 @@ def modulefile_content(request):
# Retrieve the content
with open(filename) as f:
content = f.readlines()
- content = ''.join(content).split('\n')
+ content = "".join(content).split("\n")
generator.remove()
return content
@@ -42,9 +42,9 @@ def modulefile_content(request):
@pytest.fixture()
def update_template_dirs(config, monkeypatch):
"""Mocks the template directories for tests"""
- dirs = spack.config.get_config('config')['template_dirs']
+ dirs = spack.config.get_config("config")["template_dirs"]
dirs = [spack.util.path.canonicalize_path(x) for x in dirs]
- monkeypatch.setattr(spack, 'template_dirs', dirs)
+ monkeypatch.setattr(spack, "template_dirs", dirs)
@pytest.fixture()
@@ -54,9 +54,9 @@ def factory(request):
"""
# Class of the module file writer
- writer_cls = getattr(request.module, 'writer_cls')
+ writer_cls = getattr(request.module, "writer_cls")
- def _mock(spec_string, module_set_name='default'):
+ def _mock(spec_string, module_set_name="default"):
spec = spack.spec.Spec(spec_string)
spec.concretize()
return writer_cls(spec, module_set_name), spec
@@ -66,13 +66,9 @@ def factory(request):
@pytest.fixture()
def mock_module_filename(monkeypatch, tmpdir):
- filename = str(tmpdir.join('module'))
+ filename = str(tmpdir.join("module"))
# Set for both module types so we can test both
- monkeypatch.setattr(spack.modules.lmod.LmodFileLayout,
- 'filename',
- filename)
- monkeypatch.setattr(spack.modules.tcl.TclFileLayout,
- 'filename',
- filename)
+ monkeypatch.setattr(spack.modules.lmod.LmodFileLayout, "filename", filename)
+ monkeypatch.setattr(spack.modules.tcl.TclFileLayout, "filename", filename)
yield filename
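
Note: the fixtures above (modulefile_content, factory, mock_module_filename) follow the fixture-as-factory pattern: the fixture returns an inner callable so each test decides what to build. A generic sketch with hypothetical names:

import pytest

@pytest.fixture()
def line_counter():
    # Return a callable from the fixture; tests invoke it with their own arguments.
    def _count(lines, needle):
        return len([line for line in lines if needle in line])

    return _count

def test_line_counter(line_counter):
    assert line_counter(["module load mpich", "setenv FOO foo"], "module load") == 1
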
diff --git a/lib/spack/spack/test/modules/lmod.py b/lib/spack/spack/test/modules/lmod.py
index 66dc8f90de..58c013ef86 100644
--- a/lib/spack/spack/test/modules/lmod.py
+++ b/lib/spack/spack/test/modules/lmod.py
@@ -12,47 +12,45 @@ import spack.main
import spack.modules.lmod
import spack.spec
-mpich_spec_string = 'mpich@3.0.4'
-mpileaks_spec_string = 'mpileaks'
-libdwarf_spec_string = 'libdwarf arch=x64-linux'
+mpich_spec_string = "mpich@3.0.4"
+mpileaks_spec_string = "mpileaks"
+libdwarf_spec_string = "libdwarf arch=x64-linux"
-install = spack.main.SpackCommand('install')
+install = spack.main.SpackCommand("install")
#: Class of the writer tested in this module
writer_cls = spack.modules.lmod.LmodModulefileWriter
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-@pytest.fixture(params=[
- 'clang@3.3',
- 'gcc@4.5.0'
-])
+@pytest.fixture(params=["clang@3.3", "gcc@4.5.0"])
def compiler(request):
return request.param
-@pytest.fixture(params=[
- ('mpich@3.0.4', ('mpi',)),
- ('mpich@3.0.1', []),
- ('openblas@0.2.15', ('blas',)),
- ('openblas-with-lapack@0.2.15', ('blas', 'lapack'))
-])
+@pytest.fixture(
+ params=[
+ ("mpich@3.0.4", ("mpi",)),
+ ("mpich@3.0.1", []),
+ ("openblas@0.2.15", ("blas",)),
+ ("openblas-with-lapack@0.2.15", ("blas", "lapack")),
+ ]
+)
def provider(request):
return request.param
-@pytest.mark.usefixtures('config', 'mock_packages',)
+@pytest.mark.usefixtures(
+ "config",
+ "mock_packages",
+)
class TestLmod(object):
-
- def test_file_layout(
- self, compiler, provider, factory, module_configuration
- ):
+ def test_file_layout(self, compiler, provider, factory, module_configuration):
"""Tests the layout of files in the hierarchy is the one expected."""
- module_configuration('complex_hierarchy')
+ module_configuration("complex_hierarchy")
spec_string, services = provider
- module, spec = factory(spec_string + '%' + compiler)
+ module, spec = factory(spec_string + "%" + compiler)
layout = module.layout
@@ -64,21 +62,21 @@ class TestLmod(object):
# is transformed to r"Core" if the compiler is listed among core
# compilers
# Check that specs listed as core_specs are transformed to "Core"
- if compiler == 'clang@3.3' or spec_string == 'mpich@3.0.1':
- assert 'Core' in layout.available_path_parts
+ if compiler == "clang@3.3" or spec_string == "mpich@3.0.1":
+ assert "Core" in layout.available_path_parts
else:
- assert compiler.replace('@', '/') in layout.available_path_parts
+ assert compiler.replace("@", "/") in layout.available_path_parts
        # Check that the provider part instead always has a hash, even if
        # hashes have been disallowed in the configuration file
path_parts = layout.available_path_parts
- service_part = spec_string.replace('@', '/')
- service_part = '-'.join([service_part, layout.spec.dag_hash(length=7)])
+ service_part = spec_string.replace("@", "/")
+ service_part = "-".join([service_part, layout.spec.dag_hash(length=7)])
assert service_part in path_parts
# Check that multi-providers have repetitions in path parts
repetitions = len([x for x in path_parts if service_part == x])
- if spec_string == 'openblas-with-lapack@0.2.15':
+ if spec_string == "openblas-with-lapack@0.2.15":
assert repetitions == 2
else:
assert repetitions == 1
@@ -86,64 +84,53 @@ class TestLmod(object):
def test_simple_case(self, modulefile_content, module_configuration):
"""Tests the generation of a simple TCL module file."""
- module_configuration('autoload_direct')
+ module_configuration("autoload_direct")
content = modulefile_content(mpich_spec_string)
- assert '-- -*- lua -*-' in content
- assert 'whatis([[Name : mpich]])' in content
- assert 'whatis([[Version : 3.0.4]])' in content
+ assert "-- -*- lua -*-" in content
+ assert "whatis([[Name : mpich]])" in content
+ assert "whatis([[Version : 3.0.4]])" in content
assert 'family("mpi")' in content
def test_autoload_direct(self, modulefile_content, module_configuration):
"""Tests the automatic loading of direct dependencies."""
- module_configuration('autoload_direct')
+ module_configuration("autoload_direct")
content = modulefile_content(mpileaks_spec_string)
- assert len([x for x in content if 'depends_on(' in x]) == 2
+ assert len([x for x in content if "depends_on(" in x]) == 2
def test_autoload_all(self, modulefile_content, module_configuration):
"""Tests the automatic loading of all dependencies."""
- module_configuration('autoload_all')
+ module_configuration("autoload_all")
content = modulefile_content(mpileaks_spec_string)
- assert len([x for x in content if 'depends_on(' in x]) == 5
+ assert len([x for x in content if "depends_on(" in x]) == 5
# DEPRECATED: remove blacklist in v0.20
- @pytest.mark.parametrize(
- "config_name", ["alter_environment", "blacklist_environment"]
- )
- def test_alter_environment(
- self, modulefile_content, module_configuration, config_name
- ):
+ @pytest.mark.parametrize("config_name", ["alter_environment", "blacklist_environment"])
+ def test_alter_environment(self, modulefile_content, module_configuration, config_name):
"""Tests modifications to run-time environment."""
module_configuration(config_name)
- content = modulefile_content('mpileaks platform=test target=x86_64')
+ content = modulefile_content("mpileaks platform=test target=x86_64")
- assert len(
- [x for x in content if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]
- ) == 0
+ assert len([x for x in content if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]) == 0
assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 1
assert len([x for x in content if 'unsetenv("BAR")' in x]) == 1
- content = modulefile_content(
- 'libdwarf platform=test target=core2'
- )
+ content = modulefile_content("libdwarf platform=test target=core2")
- assert len(
- [x for x in content if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')]
- ) == 0
+ assert len([x for x in content if x.startswith('prepend-path("CMAKE_PREFIX_PATH"')]) == 0
assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 0
assert len([x for x in content if 'unsetenv("BAR")' in x]) == 0
- def test_prepend_path_separator(self, modulefile_content,
- module_configuration):
+ def test_prepend_path_separator(self, modulefile_content, module_configuration):
"""Tests modifications to run-time environment."""
- module_configuration('module_path_separator')
- content = modulefile_content('module-path-separator')
+ module_configuration("module_path_separator")
+ content = modulefile_content("module-path-separator")
for line in content:
if re.match(r'[a-z]+_path\("COLON"', line):
@@ -158,7 +145,7 @@ class TestLmod(object):
module_configuration(config_name)
content = modulefile_content(mpileaks_spec_string)
- assert len([x for x in content if 'depends_on(' in x]) == 1
+ assert len([x for x in content if "depends_on(" in x]) == 1
def test_no_hash(self, factory, module_configuration):
"""Makes sure that virtual providers (in the hierarchy) always
@@ -166,10 +153,10 @@ class TestLmod(object):
does not include a hash if hash_length is 0.
"""
- module_configuration('no_hash')
+ module_configuration("no_hash")
module, spec = factory(mpileaks_spec_string)
path = module.layout.filename
- mpi_spec = spec['mpi']
+ mpi_spec = spec["mpi"]
mpi_element = "{0}/{1}-{2}/".format(
mpi_spec.name, mpi_spec.version, mpi_spec.dag_hash(length=7)
@@ -178,9 +165,7 @@ class TestLmod(object):
assert mpi_element in path
mpileaks_spec = spec
- mpileaks_element = "{0}/{1}.lua".format(
- mpileaks_spec.name, mpileaks_spec.version
- )
+ mpileaks_element = "{0}/{1}.lua".format(mpileaks_spec.name, mpileaks_spec.version)
assert path.endswith(mpileaks_element)
@@ -190,14 +175,14 @@ class TestLmod(object):
"""
# In this case we miss the entry completely
- module_configuration('missing_core_compilers')
+ module_configuration("missing_core_compilers")
module, spec = factory(mpileaks_spec_string)
with pytest.raises(spack.modules.lmod.CoreCompilersNotFoundError):
module.write()
# Here we have an empty list
- module_configuration('core_compilers_empty')
+ module_configuration("core_compilers_empty")
module, spec = factory(mpileaks_spec_string)
with pytest.raises(spack.modules.lmod.CoreCompilersNotFoundError):
@@ -207,77 +192,71 @@ class TestLmod(object):
"""Ensures that if a non-virtual is in hierarchy, an exception will
be raised.
"""
- module_configuration('non_virtual_in_hierarchy')
+ module_configuration("non_virtual_in_hierarchy")
module, spec = factory(mpileaks_spec_string)
with pytest.raises(spack.modules.lmod.NonVirtualInHierarchyError):
module.write()
- def test_override_template_in_package(
- self, modulefile_content, module_configuration
- ):
+ def test_override_template_in_package(self, modulefile_content, module_configuration):
"""Tests overriding a template from and attribute in the package."""
- module_configuration('autoload_direct')
- content = modulefile_content('override-module-templates')
+ module_configuration("autoload_direct")
+ content = modulefile_content("override-module-templates")
- assert 'Override successful!' in content
+ assert "Override successful!" in content
- def test_override_template_in_modules_yaml(
- self, modulefile_content, module_configuration
- ):
+ def test_override_template_in_modules_yaml(self, modulefile_content, module_configuration):
"""Tests overriding a template from `modules.yaml`"""
- module_configuration('override_template')
+ module_configuration("override_template")
- content = modulefile_content('override-module-templates')
- assert 'Override even better!' in content
+ content = modulefile_content("override-module-templates")
+ assert "Override even better!" in content
- content = modulefile_content('mpileaks target=x86_64')
- assert 'Override even better!' in content
+ content = modulefile_content("mpileaks target=x86_64")
+ assert "Override even better!" in content
- @pytest.mark.usefixtures('config')
- def test_external_configure_args(
- self, factory
- ):
+ @pytest.mark.usefixtures("config")
+ def test_external_configure_args(self, factory):
# If this package is detected as an external, its configure option line
# in the module file starts with 'unknown'
- writer, spec = factory('externaltool')
+ writer, spec = factory("externaltool")
- assert 'unknown' in writer.context.configure_options
+ assert "unknown" in writer.context.configure_options
- def test_guess_core_compilers(
- self, factory, module_configuration, monkeypatch
- ):
+ def test_guess_core_compilers(self, factory, module_configuration, monkeypatch):
"""Check that we can guess core compilers."""
# In this case we miss the entry completely
- module_configuration('missing_core_compilers')
+ module_configuration("missing_core_compilers")
# Our mock paths must be detected as system paths
- monkeypatch.setattr(
- spack.util.environment, 'system_dirs', ['/path/to']
- )
+ monkeypatch.setattr(spack.util.environment, "system_dirs", ["/path/to"])
# We don't want to really write into user configuration
# when running tests
def no_op_set(*args, **kwargs):
pass
- monkeypatch.setattr(spack.config, 'set', no_op_set)
+
+ monkeypatch.setattr(spack.config, "set", no_op_set)
# Assert we have core compilers now
writer, _ = factory(mpileaks_spec_string)
assert writer.conf.core_compilers
- @pytest.mark.parametrize('spec_str', [
- 'mpileaks target=nocona',
- 'mpileaks target=core2',
- 'mpileaks target=x86_64',
- ])
- @pytest.mark.regression('13005')
+ @pytest.mark.parametrize(
+ "spec_str",
+ [
+ "mpileaks target=nocona",
+ "mpileaks target=core2",
+ "mpileaks target=x86_64",
+ ],
+ )
+ @pytest.mark.regression("13005")
def test_only_generic_microarchitectures_in_root(
- self, spec_str, factory, module_configuration
+ self, spec_str, factory, module_configuration
):
- module_configuration('complex_hierarchy')
+ module_configuration("complex_hierarchy")
writer, spec = factory(spec_str)
assert str(spec.target.family) in writer.layout.arch_dirname
@@ -289,17 +268,14 @@ class TestLmod(object):
# This configuration has no error, so check the conflicts directives
# are there
- module_configuration('projections')
+ module_configuration("projections")
# Test we read the expected configuration for the naming scheme
- writer, _ = factory('mpileaks')
- expected = {
- 'all': '{name}/v{version}',
- 'mpileaks': '{name}-mpiprojection'
- }
+ writer, _ = factory("mpileaks")
+ expected = {"all": "{name}/v{version}", "mpileaks": "{name}-mpiprojection"}
assert writer.conf.projections == expected
- projection = writer.spec.format(writer.conf.projections['mpileaks'])
+ projection = writer.spec.format(writer.conf.projections["mpileaks"])
assert projection in writer.layout.use_name
def test_projections_all(self, factory, module_configuration):
@@ -307,30 +283,26 @@ class TestLmod(object):
# This configuration has no error, so check the conflicts directives
# are there
- module_configuration('projections')
+ module_configuration("projections")
# Test we read the expected configuration for the naming scheme
- writer, _ = factory('libelf')
- expected = {
- 'all': '{name}/v{version}',
- 'mpileaks': '{name}-mpiprojection'
- }
+ writer, _ = factory("libelf")
+ expected = {"all": "{name}/v{version}", "mpileaks": "{name}-mpiprojection"}
assert writer.conf.projections == expected
- projection = writer.spec.format(writer.conf.projections['all'])
+ projection = writer.spec.format(writer.conf.projections["all"])
assert projection in writer.layout.use_name
def test_modules_relative_to_view(
- self, tmpdir, modulefile_content, module_configuration, install_mockery,
- mock_fetch
+ self, tmpdir, modulefile_content, module_configuration, install_mockery, mock_fetch
):
with ev.Environment(str(tmpdir), with_view=True) as e:
- module_configuration('with_view')
- install('cmake')
+ module_configuration("with_view")
+ install("cmake")
- spec = spack.spec.Spec('cmake').concretized()
+ spec = spack.spec.Spec("cmake").concretized()
- content = modulefile_content('cmake')
+ content = modulefile_content("cmake")
expected = e.default_view.get_projection_for_spec(spec)
# Rather than parse all lines, ensure all prefixes in the content
# point to the right one
@@ -338,7 +310,7 @@ class TestLmod(object):
assert not any(spec.prefix in line for line in content)
def test_modules_no_arch(self, factory, module_configuration):
- module_configuration('no_arch')
+ module_configuration("no_arch")
module, spec = factory(mpileaks_spec_string)
path = module.layout.filename
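
Note: the projection tests above expand templates such as "{name}/v{version}" into module use-names via Spec.format. The fragment below illustrates the underlying idea only, using plain str.format rather than Spack's Spec.format:

def project(template, **fields):
    # Expand a projection template such as "{name}/v{version}".
    return template.format(**fields)

assert project("{name}/v{version}", name="mpileaks", version="2.3") == "mpileaks/v2.3"
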
diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py
index 138c12691c..769bcf50a7 100644
--- a/lib/spack/spack/test/modules/tcl.py
+++ b/lib/spack/spack/test/modules/tcl.py
@@ -11,24 +11,22 @@ import spack.modules.common
import spack.modules.tcl
import spack.spec
-mpich_spec_string = 'mpich@3.0.4'
-mpileaks_spec_string = 'mpileaks'
-libdwarf_spec_string = 'libdwarf target=x86_64'
+mpich_spec_string = "mpich@3.0.4"
+mpileaks_spec_string = "mpileaks"
+libdwarf_spec_string = "libdwarf target=x86_64"
#: Class of the writer tested in this module
writer_cls = spack.modules.tcl.TclModulefileWriter
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
-@pytest.mark.usefixtures('config', 'mock_packages', 'mock_module_filename')
+@pytest.mark.usefixtures("config", "mock_packages", "mock_module_filename")
class TestTcl(object):
-
def test_simple_case(self, modulefile_content, module_configuration):
"""Tests the generation of a simple TCL module file."""
- module_configuration('autoload_direct')
+ module_configuration("autoload_direct")
content = modulefile_content(mpich_spec_string)
assert 'module-whatis "mpich @3.0.4"' in content
@@ -36,21 +34,21 @@ class TestTcl(object):
def test_autoload_direct(self, modulefile_content, module_configuration):
"""Tests the automatic loading of direct dependencies."""
- module_configuration('autoload_direct')
+ module_configuration("autoload_direct")
content = modulefile_content(mpileaks_spec_string)
- assert len([x for x in content if 'is-loaded' in x]) == 2
- assert len([x for x in content if 'module load ' in x]) == 2
+ assert len([x for x in content if "is-loaded" in x]) == 2
+ assert len([x for x in content if "module load " in x]) == 2
# dtbuild1 has
# - 1 ('run',) dependency
# - 1 ('build','link') dependency
# - 1 ('build',) dependency
# Just make sure the 'build' dependency is not there
- content = modulefile_content('dtbuild1')
+ content = modulefile_content("dtbuild1")
- assert len([x for x in content if 'is-loaded' in x]) == 2
- assert len([x for x in content if 'module load ' in x]) == 2
+ assert len([x for x in content if "is-loaded" in x]) == 2
+ assert len([x for x in content if "module load " in x]) == 2
# The configuration file sets the verbose keyword to False
messages = [x for x in content if 'puts stderr "Autoloading' in x]
@@ -59,115 +57,97 @@ class TestTcl(object):
def test_autoload_all(self, modulefile_content, module_configuration):
"""Tests the automatic loading of all dependencies."""
- module_configuration('autoload_all')
+ module_configuration("autoload_all")
content = modulefile_content(mpileaks_spec_string)
- assert len([x for x in content if 'is-loaded' in x]) == 5
- assert len([x for x in content if 'module load ' in x]) == 5
+ assert len([x for x in content if "is-loaded" in x]) == 5
+ assert len([x for x in content if "module load " in x]) == 5
# dtbuild1 has
# - 1 ('run',) dependency
# - 1 ('build','link') dependency
# - 1 ('build',) dependency
# Just make sure the 'build' dependency is not there
- content = modulefile_content('dtbuild1')
+ content = modulefile_content("dtbuild1")
- assert len([x for x in content if 'is-loaded' in x]) == 2
- assert len([x for x in content if 'module load ' in x]) == 2
+ assert len([x for x in content if "is-loaded" in x]) == 2
+ assert len([x for x in content if "module load " in x]) == 2
# The configuration file sets the verbose keyword to True
messages = [x for x in content if 'puts stderr "Autoloading' in x]
assert len(messages) == 2
- def test_prerequisites_direct(
- self, modulefile_content, module_configuration
- ):
+ def test_prerequisites_direct(self, modulefile_content, module_configuration):
"""Tests asking direct dependencies as prerequisites."""
- module_configuration('prerequisites_direct')
- content = modulefile_content('mpileaks target=x86_64')
+ module_configuration("prerequisites_direct")
+ content = modulefile_content("mpileaks target=x86_64")
- assert len([x for x in content if 'prereq' in x]) == 2
+ assert len([x for x in content if "prereq" in x]) == 2
def test_prerequisites_all(self, modulefile_content, module_configuration):
"""Tests asking all dependencies as prerequisites."""
- module_configuration('prerequisites_all')
- content = modulefile_content('mpileaks target=x86_64')
+ module_configuration("prerequisites_all")
+ content = modulefile_content("mpileaks target=x86_64")
- assert len([x for x in content if 'prereq' in x]) == 5
+ assert len([x for x in content if "prereq" in x]) == 5
# DEPRECATED: remove blacklist in v0.20
- @pytest.mark.parametrize(
- "config_name", ["alter_environment", "blacklist_environment"]
- )
- def test_alter_environment(
- self, modulefile_content, module_configuration, config_name
- ):
+ @pytest.mark.parametrize("config_name", ["alter_environment", "blacklist_environment"])
+ def test_alter_environment(self, modulefile_content, module_configuration, config_name):
"""Tests modifications to run-time environment."""
module_configuration(config_name)
- content = modulefile_content('mpileaks platform=test target=x86_64')
+ content = modulefile_content("mpileaks platform=test target=x86_64")
- assert len([x for x in content
- if x.startswith('prepend-path CMAKE_PREFIX_PATH')
- ]) == 0
+ assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
assert len([x for x in content if 'setenv FOO "foo"' in x]) == 1
- assert len([
- x for x in content if 'setenv OMPI_MCA_mpi_leave_pinned "1"' in x
- ]) == 1
- assert len([
- x for x in content if 'setenv OMPI_MCA_MPI_LEAVE_PINNED "1"' in x
- ]) == 0
- assert len([x for x in content if 'unsetenv BAR' in x]) == 1
- assert len([x for x in content if 'setenv MPILEAKS_ROOT' in x]) == 1
-
- content = modulefile_content(
- 'libdwarf platform=test target=core2'
- )
-
- assert len([x for x in content
- if x.startswith('prepend-path CMAKE_PREFIX_PATH')
- ]) == 0
+ assert len([x for x in content if 'setenv OMPI_MCA_mpi_leave_pinned "1"' in x]) == 1
+ assert len([x for x in content if 'setenv OMPI_MCA_MPI_LEAVE_PINNED "1"' in x]) == 0
+ assert len([x for x in content if "unsetenv BAR" in x]) == 1
+ assert len([x for x in content if "setenv MPILEAKS_ROOT" in x]) == 1
+
+ content = modulefile_content("libdwarf platform=test target=core2")
+
+ assert len([x for x in content if x.startswith("prepend-path CMAKE_PREFIX_PATH")]) == 0
assert len([x for x in content if 'setenv FOO "foo"' in x]) == 0
- assert len([x for x in content if 'unsetenv BAR' in x]) == 0
- assert len([x for x in content if 'is-loaded foo/bar' in x]) == 1
- assert len([x for x in content if 'module load foo/bar' in x]) == 1
- assert len([x for x in content if 'setenv LIBDWARF_ROOT' in x]) == 1
+ assert len([x for x in content if "unsetenv BAR" in x]) == 0
+ assert len([x for x in content if "is-loaded foo/bar" in x]) == 1
+ assert len([x for x in content if "module load foo/bar" in x]) == 1
+ assert len([x for x in content if "setenv LIBDWARF_ROOT" in x]) == 1
@pytest.mark.parametrize("config_name", ["exclude", "blacklist"])
def test_exclude(self, modulefile_content, module_configuration, config_name):
"""Tests excluding the generation of selected modules."""
module_configuration(config_name)
- content = modulefile_content('mpileaks ^zmpi')
+ content = modulefile_content("mpileaks ^zmpi")
- assert len([x for x in content if 'is-loaded' in x]) == 1
- assert len([x for x in content if 'module load ' in x]) == 1
+ assert len([x for x in content if "is-loaded" in x]) == 1
+ assert len([x for x in content if "module load " in x]) == 1
# Catch "Exception" to avoid using FileNotFoundError on Python 3
# and IOError on Python 2 or common bases like EnvironmentError
# which are not officially documented
with pytest.raises(Exception):
- modulefile_content('callpath target=x86_64')
+ modulefile_content("callpath target=x86_64")
- content = modulefile_content('zmpi target=x86_64')
+ content = modulefile_content("zmpi target=x86_64")
- assert len([x for x in content if 'is-loaded' in x]) == 1
- assert len([x for x in content if 'module load ' in x]) == 1
+ assert len([x for x in content if "is-loaded" in x]) == 1
+ assert len([x for x in content if "module load " in x]) == 1
def test_naming_scheme_compat(self, factory, module_configuration):
"""Tests backwards compatibility for naming_scheme key"""
- module_configuration('naming_scheme')
+ module_configuration("naming_scheme")
# Test we read the expected configuration for the naming scheme
- writer, _ = factory('mpileaks')
- expected = {
- 'all': '{name}/{version}-{compiler.name}'
- }
+ writer, _ = factory("mpileaks")
+ expected = {"all": "{name}/{version}-{compiler.name}"}
assert writer.conf.projections == expected
- projection = writer.spec.format(writer.conf.projections['all'])
+ projection = writer.spec.format(writer.conf.projections["all"])
assert projection in writer.layout.use_name
def test_projections_specific(self, factory, module_configuration):
@@ -175,17 +155,14 @@ class TestTcl(object):
# This configuration has no error, so check the conflicts directives
# are there
- module_configuration('projections')
+ module_configuration("projections")
# Test we read the expected configuration for the naming scheme
- writer, _ = factory('mpileaks')
- expected = {
- 'all': '{name}/{version}-{compiler.name}',
- 'mpileaks': '{name}-mpiprojection'
- }
+ writer, _ = factory("mpileaks")
+ expected = {"all": "{name}/{version}-{compiler.name}", "mpileaks": "{name}-mpiprojection"}
assert writer.conf.projections == expected
- projection = writer.spec.format(writer.conf.projections['mpileaks'])
+ projection = writer.spec.format(writer.conf.projections["mpileaks"])
assert projection in writer.layout.use_name
def test_projections_all(self, factory, module_configuration):
@@ -193,40 +170,33 @@ class TestTcl(object):
# This configuration has no error, so check the conflicts directives
# are there
- module_configuration('projections')
+ module_configuration("projections")
# Test we read the expected configuration for the naming scheme
- writer, _ = factory('libelf')
- expected = {
- 'all': '{name}/{version}-{compiler.name}',
- 'mpileaks': '{name}-mpiprojection'
- }
+ writer, _ = factory("libelf")
+ expected = {"all": "{name}/{version}-{compiler.name}", "mpileaks": "{name}-mpiprojection"}
assert writer.conf.projections == expected
- projection = writer.spec.format(writer.conf.projections['all'])
+ projection = writer.spec.format(writer.conf.projections["all"])
assert projection in writer.layout.use_name
- def test_invalid_naming_scheme(
- self, factory, module_configuration, mock_module_filename
- ):
+ def test_invalid_naming_scheme(self, factory, module_configuration, mock_module_filename):
"""Tests the evaluation of an invalid naming scheme."""
- module_configuration('invalid_naming_scheme')
+ module_configuration("invalid_naming_scheme")
# Test that having invalid tokens in the naming scheme raises
# a RuntimeError
- writer, _ = factory('mpileaks')
+ writer, _ = factory("mpileaks")
with pytest.raises(RuntimeError):
writer.layout.use_name
- def test_invalid_token_in_env_name(
- self, factory, module_configuration, mock_module_filename
- ):
+ def test_invalid_token_in_env_name(self, factory, module_configuration, mock_module_filename):
"""Tests setting environment variables with an invalid name."""
- module_configuration('invalid_token_in_env_var_name')
+ module_configuration("invalid_token_in_env_var_name")
- writer, _ = factory('mpileaks')
+ writer, _ = factory("mpileaks")
with pytest.raises(RuntimeError):
writer.write()
@@ -235,28 +205,27 @@ class TestTcl(object):
# This configuration has no error, so check the conflicts directives
# are there
- module_configuration('conflicts')
- content = modulefile_content('mpileaks')
+ module_configuration("conflicts")
+ content = modulefile_content("mpileaks")
- assert len([x for x in content if x.startswith('conflict')]) == 2
- assert len([x for x in content if x == 'conflict mpileaks']) == 1
- assert len([x for x in content if x == 'conflict intel/14.0.1']) == 1
+ assert len([x for x in content if x.startswith("conflict")]) == 2
+ assert len([x for x in content if x == "conflict mpileaks"]) == 1
+ assert len([x for x in content if x == "conflict intel/14.0.1"]) == 1
# This configuration is inconsistent, check an error is raised
- module_configuration('wrong_conflicts')
+ module_configuration("wrong_conflicts")
with pytest.raises(SystemExit):
- modulefile_content('mpileaks')
+ modulefile_content("mpileaks")
- def test_module_index(
- self, module_configuration, factory, tmpdir_factory):
+ def test_module_index(self, module_configuration, factory, tmpdir_factory):
- module_configuration('suffix')
+ module_configuration("suffix")
- w1, s1 = factory('mpileaks')
- w2, s2 = factory('callpath')
- w3, s3 = factory('openblas')
+ w1, s1 = factory("mpileaks")
+ w2, s2 = factory("callpath")
+ w3, s3 = factory("openblas")
- test_root = str(tmpdir_factory.mktemp('module-root'))
+ test_root = str(tmpdir_factory.mktemp("module-root"))
spack.modules.common.generate_module_index(test_root, [w1, w2])
@@ -273,8 +242,7 @@ class TestTcl(object):
assert index[s1.dag_hash()].use_name == w1.layout.use_name
assert index[s2.dag_hash()].path == w2.layout.filename
- spack.modules.common.generate_module_index(
- test_root, [w3], overwrite=True)
+ spack.modules.common.generate_module_index(test_root, [w3], overwrite=True)
index = spack.modules.common.read_module_index(test_root)
@@ -283,130 +251,116 @@ class TestTcl(object):
def test_suffixes(self, module_configuration, factory):
"""Tests adding suffixes to module file name."""
- module_configuration('suffix')
+ module_configuration("suffix")
- writer, spec = factory('mpileaks+debug target=x86_64')
- assert 'foo' in writer.layout.use_name
- assert 'foo-foo' not in writer.layout.use_name
+ writer, spec = factory("mpileaks+debug target=x86_64")
+ assert "foo" in writer.layout.use_name
+ assert "foo-foo" not in writer.layout.use_name
- writer, spec = factory('mpileaks~debug target=x86_64')
- assert 'foo-bar' in writer.layout.use_name
- assert 'baz' not in writer.layout.use_name
+ writer, spec = factory("mpileaks~debug target=x86_64")
+ assert "foo-bar" in writer.layout.use_name
+ assert "baz" not in writer.layout.use_name
- writer, spec = factory('mpileaks~debug+opt target=x86_64')
- assert 'baz-foo-bar' in writer.layout.use_name
+ writer, spec = factory("mpileaks~debug+opt target=x86_64")
+ assert "baz-foo-bar" in writer.layout.use_name
def test_setup_environment(self, modulefile_content, module_configuration):
"""Tests the internal set-up of run-time environment."""
- module_configuration('suffix')
- content = modulefile_content('mpileaks')
+ module_configuration("suffix")
+ content = modulefile_content("mpileaks")
- assert len([x for x in content if 'setenv FOOBAR' in x]) == 1
- assert len(
- [x for x in content if 'setenv FOOBAR "mpileaks"' in x]
- ) == 1
+ assert len([x for x in content if "setenv FOOBAR" in x]) == 1
+ assert len([x for x in content if 'setenv FOOBAR "mpileaks"' in x]) == 1
- spec = spack.spec.Spec('mpileaks')
+ spec = spack.spec.Spec("mpileaks")
spec.concretize()
- content = modulefile_content(str(spec['callpath']))
+ content = modulefile_content(str(spec["callpath"]))
- assert len([x for x in content if 'setenv FOOBAR' in x]) == 1
- assert len(
- [x for x in content if 'setenv FOOBAR "callpath"' in x]
- ) == 1
+ assert len([x for x in content if "setenv FOOBAR" in x]) == 1
+ assert len([x for x in content if 'setenv FOOBAR "callpath"' in x]) == 1
def test_override_config(self, module_configuration, factory):
"""Tests overriding some sections of the configuration file."""
- module_configuration('override_config')
+ module_configuration("override_config")
- writer, spec = factory('mpileaks~opt target=x86_64')
- assert 'mpich-static' in writer.layout.use_name
- assert 'over' not in writer.layout.use_name
- assert 'ridden' not in writer.layout.use_name
+ writer, spec = factory("mpileaks~opt target=x86_64")
+ assert "mpich-static" in writer.layout.use_name
+ assert "over" not in writer.layout.use_name
+ assert "ridden" not in writer.layout.use_name
- writer, spec = factory('mpileaks+opt target=x86_64')
- assert 'over-ridden' in writer.layout.use_name
- assert 'mpich' not in writer.layout.use_name
- assert 'static' not in writer.layout.use_name
+ writer, spec = factory("mpileaks+opt target=x86_64")
+ assert "over-ridden" in writer.layout.use_name
+ assert "mpich" not in writer.layout.use_name
+ assert "static" not in writer.layout.use_name
- def test_override_template_in_package(
- self, modulefile_content, module_configuration
- ):
+ def test_override_template_in_package(self, modulefile_content, module_configuration):
"""Tests overriding a template from and attribute in the package."""
- module_configuration('autoload_direct')
- content = modulefile_content('override-module-templates')
+ module_configuration("autoload_direct")
+ content = modulefile_content("override-module-templates")
- assert 'Override successful!' in content
+ assert "Override successful!" in content
- def test_override_template_in_modules_yaml(
- self, modulefile_content, module_configuration
- ):
+ def test_override_template_in_modules_yaml(self, modulefile_content, module_configuration):
"""Tests overriding a template from `modules.yaml`"""
- module_configuration('override_template')
+ module_configuration("override_template")
- content = modulefile_content('override-module-templates')
- assert 'Override even better!' in content
+ content = modulefile_content("override-module-templates")
+ assert "Override even better!" in content
- content = modulefile_content('mpileaks target=x86_64')
- assert 'Override even better!' in content
+ content = modulefile_content("mpileaks target=x86_64")
+ assert "Override even better!" in content
- def test_extend_context(
- self, modulefile_content, module_configuration
- ):
+ def test_extend_context(self, modulefile_content, module_configuration):
"""Tests using a package defined context"""
- module_configuration('autoload_direct')
- content = modulefile_content('override-context-templates')
+ module_configuration("autoload_direct")
+ content = modulefile_content("override-context-templates")
assert 'puts stderr "sentence from package"' in content
short_description = 'module-whatis "This package updates the context for TCL modulefiles."'
assert short_description in content
- @pytest.mark.regression('4400')
+ @pytest.mark.regression("4400")
@pytest.mark.db
- @pytest.mark.parametrize(
- "config_name", ["exclude_implicits", "blacklist_implicits"]
- )
+ @pytest.mark.parametrize("config_name", ["exclude_implicits", "blacklist_implicits"])
def test_exclude_implicits(
- self, modulefile_content, module_configuration, database, config_name
+ self, modulefile_content, module_configuration, database, config_name
):
module_configuration(config_name)
# mpileaks has been installed explicitly when setting up
# the tests database
- mpileaks_specs = database.query('mpileaks')
+ mpileaks_specs = database.query("mpileaks")
for item in mpileaks_specs:
- writer = writer_cls(item, 'default')
+ writer = writer_cls(item, "default")
assert not writer.conf.excluded
# callpath is a dependency of mpileaks, and has been pulled
# in implicitly
- callpath_specs = database.query('callpath')
+ callpath_specs = database.query("callpath")
for item in callpath_specs:
- writer = writer_cls(item, 'default')
+ writer = writer_cls(item, "default")
assert writer.conf.excluded
- @pytest.mark.regression('9624')
+ @pytest.mark.regression("9624")
@pytest.mark.db
- def test_autoload_with_constraints(
- self, modulefile_content, module_configuration, database
- ):
+ def test_autoload_with_constraints(self, modulefile_content, module_configuration, database):
"""Tests the automatic loading of direct dependencies."""
- module_configuration('autoload_with_constraints')
+ module_configuration("autoload_with_constraints")
# Test the mpileaks that should have the autoloaded dependencies
- content = modulefile_content('mpileaks ^mpich2')
- assert len([x for x in content if 'is-loaded' in x]) == 2
+ content = modulefile_content("mpileaks ^mpich2")
+ assert len([x for x in content if "is-loaded" in x]) == 2
# Test the mpileaks that should NOT have the autoloaded dependencies
- content = modulefile_content('mpileaks ^mpich')
- assert len([x for x in content if 'is-loaded' in x]) == 0
+ content = modulefile_content("mpileaks ^mpich")
+ assert len([x for x in content if "is-loaded" in x]) == 0
def test_modules_no_arch(self, factory, module_configuration):
- module_configuration('no_arch')
+ module_configuration("no_arch")
module, spec = factory(mpileaks_spec_string)
path = module.layout.filename
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index 1e0cee71c7..8f3ddaf8cc 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -15,16 +15,15 @@ import spack.spec
from spack.multimethod import NoSuchMethodError
pytestmark = [
- pytest.mark.usefixtures('mock_packages', 'config'),
+ pytest.mark.usefixtures("mock_packages", "config"),
pytest.mark.skipif(
- os.environ.get('SPACK_TEST_SOLVER') == 'original' or sys.platform == 'win32',
- reason='The original concretizer cannot concretize most of the specs'
- )
+ os.environ.get("SPACK_TEST_SOLVER") == "original" or sys.platform == "win32",
+ reason="The original concretizer cannot concretize most of the specs",
+ ),
]
-@pytest.fixture(scope='module',
- params=['multimethod', 'multimethod-inheritor'])
+@pytest.fixture(scope="module", params=["multimethod", "multimethod-inheritor"])
def pkg_name(request):
"""Make tests run on both multimethod and multimethod-inheritor.
@@ -35,42 +34,45 @@ def pkg_name(request):
def test_no_version_match(pkg_name):
- spec = spack.spec.Spec(pkg_name + '@2.0').concretized()
+ spec = spack.spec.Spec(pkg_name + "@2.0").concretized()
with pytest.raises(NoSuchMethodError):
spec.package.no_version_2()
-@pytest.mark.parametrize('constraint_str,method_name,expected_result', [
-    # Only one version matches these constraints
- ('@1.0', 'no_version_2', 1),
- ('@3.0', 'no_version_2', 3),
- ('@4.0', 'no_version_2', 4),
- # These constraints overlap, in which case the first match wins
- ('@2.0', 'version_overlap', 1),
- ('@5.0', 'version_overlap', 2),
- # These constraints are on the version of a virtual dependency
- ('^mpich@3.0.4', 'mpi_version', 3),
- ('^mpich2@1.2', 'mpi_version', 2),
- ('^mpich@1.0', 'mpi_version', 1),
- # Undefined mpi versions
- ('^mpich@0.4', 'mpi_version', 1),
- ('^mpich@1.4', 'mpi_version', 1),
- # Constraints on compilers with a default
- ('%gcc', 'has_a_default', 'gcc'),
- ('%clang', 'has_a_default', 'clang'),
- ('%apple-clang os=elcapitan', 'has_a_default', 'default'),
- # Constraints on dependencies
- ('^zmpi', 'different_by_dep', 'zmpi'),
- ('^mpich', 'different_by_dep', 'mpich'),
- # Constraints on virtual dependencies
- ('^mpich2', 'different_by_virtual_dep', 2),
- ('^mpich@1.0', 'different_by_virtual_dep', 1),
- # Multimethod with base classes
- ('@1', 'base_method', 'base_method'),
- # Boolean
- ('', 'boolean_true_first', 'True'),
- ('', 'boolean_false_first', 'True')
-])
+@pytest.mark.parametrize(
+ "constraint_str,method_name,expected_result",
+ [
+        # Only one version matches these constraints
+ ("@1.0", "no_version_2", 1),
+ ("@3.0", "no_version_2", 3),
+ ("@4.0", "no_version_2", 4),
+ # These constraints overlap, in which case the first match wins
+ ("@2.0", "version_overlap", 1),
+ ("@5.0", "version_overlap", 2),
+ # These constraints are on the version of a virtual dependency
+ ("^mpich@3.0.4", "mpi_version", 3),
+ ("^mpich2@1.2", "mpi_version", 2),
+ ("^mpich@1.0", "mpi_version", 1),
+ # Undefined mpi versions
+ ("^mpich@0.4", "mpi_version", 1),
+ ("^mpich@1.4", "mpi_version", 1),
+ # Constraints on compilers with a default
+ ("%gcc", "has_a_default", "gcc"),
+ ("%clang", "has_a_default", "clang"),
+ ("%apple-clang os=elcapitan", "has_a_default", "default"),
+ # Constraints on dependencies
+ ("^zmpi", "different_by_dep", "zmpi"),
+ ("^mpich", "different_by_dep", "mpich"),
+ # Constraints on virtual dependencies
+ ("^mpich2", "different_by_virtual_dep", 2),
+ ("^mpich@1.0", "different_by_virtual_dep", 1),
+ # Multimethod with base classes
+ ("@1", "base_method", "base_method"),
+ # Boolean
+ ("", "boolean_true_first", "True"),
+ ("", "boolean_false_first", "True"),
+ ],
+)
def test_multimethod_calls(pkg_name, constraint_str, method_name, expected_result):
s = spack.spec.Spec(pkg_name + constraint_str).concretized()
msg = "Method {0} from {1} is giving a wrong result".format(method_name, s)
@@ -81,10 +83,10 @@ def test_target_match(pkg_name):
platform = spack.platforms.host()
targets = list(platform.targets.values())
for target in targets[:-1]:
- s = spack.spec.Spec(pkg_name + ' target=' + target.name).concretized()
+ s = spack.spec.Spec(pkg_name + " target=" + target.name).concretized()
assert s.package.different_by_target() == target.name
- s = spack.spec.Spec(pkg_name + ' target=' + targets[-1].name).concretized()
+ s = spack.spec.Spec(pkg_name + " target=" + targets[-1].name).concretized()
if len(targets) == 1:
assert s.package.different_by_target() == targets[-1].name
else:
@@ -92,24 +94,27 @@ def test_target_match(pkg_name):
s.package.different_by_target()
-@pytest.mark.parametrize('spec_str,method_name,expected_result', [
- # This is overridden in the second case
- ('multimethod@3', 'base_method', 'multimethod'),
- ('multimethod-inheritor@3', 'base_method', 'multimethod-inheritor'),
- # Here we have a mix of inherited and overridden methods
- ('multimethod-inheritor@1.0', 'inherited_and_overridden', 'inheritor@1.0'),
- ('multimethod-inheritor@2.0', 'inherited_and_overridden', 'base@2.0'),
- ('multimethod@1.0', 'inherited_and_overridden', 'base@1.0'),
- ('multimethod@2.0', 'inherited_and_overridden', 'base@2.0'),
- # Diamond-like inheritance (even though the MRO linearize everything)
- ('multimethod-diamond@1.0', 'diamond_inheritance', 'base_package'),
- ('multimethod-base@1.0', 'diamond_inheritance', 'base_package'),
- ('multimethod-diamond@2.0', 'diamond_inheritance', 'first_parent'),
- ('multimethod-inheritor@2.0', 'diamond_inheritance', 'first_parent'),
- ('multimethod-diamond@3.0', 'diamond_inheritance', 'second_parent'),
- ('multimethod-diamond-parent@3.0', 'diamond_inheritance', 'second_parent'),
- ('multimethod-diamond@4.0', 'diamond_inheritance', 'subclass'),
-])
+@pytest.mark.parametrize(
+ "spec_str,method_name,expected_result",
+ [
+ # This is overridden in the second case
+ ("multimethod@3", "base_method", "multimethod"),
+ ("multimethod-inheritor@3", "base_method", "multimethod-inheritor"),
+ # Here we have a mix of inherited and overridden methods
+ ("multimethod-inheritor@1.0", "inherited_and_overridden", "inheritor@1.0"),
+ ("multimethod-inheritor@2.0", "inherited_and_overridden", "base@2.0"),
+ ("multimethod@1.0", "inherited_and_overridden", "base@1.0"),
+ ("multimethod@2.0", "inherited_and_overridden", "base@2.0"),
+        # Diamond-like inheritance (even though the MRO linearizes everything)
+ ("multimethod-diamond@1.0", "diamond_inheritance", "base_package"),
+ ("multimethod-base@1.0", "diamond_inheritance", "base_package"),
+ ("multimethod-diamond@2.0", "diamond_inheritance", "first_parent"),
+ ("multimethod-inheritor@2.0", "diamond_inheritance", "first_parent"),
+ ("multimethod-diamond@3.0", "diamond_inheritance", "second_parent"),
+ ("multimethod-diamond-parent@3.0", "diamond_inheritance", "second_parent"),
+ ("multimethod-diamond@4.0", "diamond_inheritance", "subclass"),
+ ],
+)
def test_multimethod_calls_and_inheritance(spec_str, method_name, expected_result):
s = spack.spec.Spec(spec_str).concretized()
assert getattr(s.package, method_name)() == expected_result
diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py
index 27cc67e7d1..7a1ac99c2f 100644
--- a/lib/spack/spack/test/namespace_trie.py
+++ b/lib/spack/spack/test/namespace_trie.py
@@ -14,81 +14,81 @@ def trie():
def test_add_single(trie):
- trie['foo'] = 'bar'
+ trie["foo"] = "bar"
- assert trie.is_prefix('foo')
- assert trie.has_value('foo')
- assert trie['foo'] == 'bar'
+ assert trie.is_prefix("foo")
+ assert trie.has_value("foo")
+ assert trie["foo"] == "bar"
def test_add_multiple(trie):
- trie['foo.bar'] = 'baz'
+ trie["foo.bar"] = "baz"
- assert not trie.has_value('foo')
- assert trie.is_prefix('foo')
+ assert not trie.has_value("foo")
+ assert trie.is_prefix("foo")
- assert trie.is_prefix('foo.bar')
- assert trie.has_value('foo.bar')
- assert trie['foo.bar'] == 'baz'
+ assert trie.is_prefix("foo.bar")
+ assert trie.has_value("foo.bar")
+ assert trie["foo.bar"] == "baz"
- assert not trie.is_prefix('foo.bar.baz')
- assert not trie.has_value('foo.bar.baz')
+ assert not trie.is_prefix("foo.bar.baz")
+ assert not trie.has_value("foo.bar.baz")
def test_add_three(trie):
# add a three-level namespace
- trie['foo.bar.baz'] = 'quux'
+ trie["foo.bar.baz"] = "quux"
- assert trie.is_prefix('foo')
- assert not trie.has_value('foo')
+ assert trie.is_prefix("foo")
+ assert not trie.has_value("foo")
- assert trie.is_prefix('foo.bar')
- assert not trie.has_value('foo.bar')
+ assert trie.is_prefix("foo.bar")
+ assert not trie.has_value("foo.bar")
- assert trie.is_prefix('foo.bar.baz')
- assert trie.has_value('foo.bar.baz')
- assert trie['foo.bar.baz'] == 'quux'
+ assert trie.is_prefix("foo.bar.baz")
+ assert trie.has_value("foo.bar.baz")
+ assert trie["foo.bar.baz"] == "quux"
- assert not trie.is_prefix('foo.bar.baz.quux')
- assert not trie.has_value('foo.bar.baz.quux')
+ assert not trie.is_prefix("foo.bar.baz.quux")
+ assert not trie.has_value("foo.bar.baz.quux")
# Try to add a second element in a prefix namespace
- trie['foo.bar'] = 'blah'
+ trie["foo.bar"] = "blah"
- assert trie.is_prefix('foo')
- assert not trie.has_value('foo')
+ assert trie.is_prefix("foo")
+ assert not trie.has_value("foo")
- assert trie.is_prefix('foo.bar')
- assert trie.has_value('foo.bar')
- assert trie['foo.bar'] == 'blah'
+ assert trie.is_prefix("foo.bar")
+ assert trie.has_value("foo.bar")
+ assert trie["foo.bar"] == "blah"
- assert trie.is_prefix('foo.bar.baz')
- assert trie.has_value('foo.bar.baz')
- assert trie['foo.bar.baz'] == 'quux'
+ assert trie.is_prefix("foo.bar.baz")
+ assert trie.has_value("foo.bar.baz")
+ assert trie["foo.bar.baz"] == "quux"
- assert not trie.is_prefix('foo.bar.baz.quux')
- assert not trie.has_value('foo.bar.baz.quux')
+ assert not trie.is_prefix("foo.bar.baz.quux")
+ assert not trie.has_value("foo.bar.baz.quux")
def test_add_none_single(trie):
- trie['foo'] = None
- assert trie.is_prefix('foo')
- assert trie.has_value('foo')
- assert trie['foo'] is None
+ trie["foo"] = None
+ assert trie.is_prefix("foo")
+ assert trie.has_value("foo")
+ assert trie["foo"] is None
- assert not trie.is_prefix('foo.bar')
- assert not trie.has_value('foo.bar')
+ assert not trie.is_prefix("foo.bar")
+ assert not trie.has_value("foo.bar")
def test_add_none_multiple(trie):
- trie['foo.bar'] = None
+ trie["foo.bar"] = None
- assert trie.is_prefix('foo')
- assert not trie.has_value('foo')
+ assert trie.is_prefix("foo")
+ assert not trie.has_value("foo")
- assert trie.is_prefix('foo.bar')
- assert trie.has_value('foo.bar')
- assert trie['foo.bar'] is None
+ assert trie.is_prefix("foo.bar")
+ assert trie.has_value("foo.bar")
+ assert trie["foo.bar"] is None
- assert not trie.is_prefix('foo.bar.baz')
- assert not trie.has_value('foo.bar.baz')
+ assert not trie.is_prefix("foo.bar.baz")
+ assert not trie.has_value("foo.bar.baz")
diff --git a/lib/spack/spack/test/operating_system.py b/lib/spack/spack/test/operating_system.py
index d71b5e0b6f..0634ddc3fe 100644
--- a/lib/spack/spack/test/operating_system.py
+++ b/lib/spack/spack/test/operating_system.py
@@ -8,9 +8,10 @@ import spack.operating_systems.cray_backend as cray_backend
def test_read_cle_release_file(tmpdir, monkeypatch):
"""test reading the Cray cle-release file"""
- cle_release_path = tmpdir.join('cle-release')
- with cle_release_path.open('w') as f:
- f.write("""\
+ cle_release_path = tmpdir.join("cle-release")
+ with cle_release_path.open("w") as f:
+ f.write(
+ """\
RELEASE=6.0.UP07
BUILD=6.0.7424
DATE=20190611
@@ -18,44 +19,45 @@ ARCH=noarch
NETWORK=ari
PATCHSET=35-201906112304
DUMMY=foo=bar
-""")
+"""
+ )
- monkeypatch.setattr(cray_backend, '_cle_release_file',
- str(cle_release_path))
+ monkeypatch.setattr(cray_backend, "_cle_release_file", str(cle_release_path))
attrs = cray_backend.read_cle_release_file()
- assert attrs['RELEASE'] == '6.0.UP07'
- assert attrs['BUILD'] == '6.0.7424'
- assert attrs['DATE'] == '20190611'
- assert attrs['ARCH'] == 'noarch'
- assert attrs['NETWORK'] == 'ari'
- assert attrs['PATCHSET'] == '35-201906112304'
- assert attrs['DUMMY'] == 'foo=bar'
+ assert attrs["RELEASE"] == "6.0.UP07"
+ assert attrs["BUILD"] == "6.0.7424"
+ assert attrs["DATE"] == "20190611"
+ assert attrs["ARCH"] == "noarch"
+ assert attrs["NETWORK"] == "ari"
+ assert attrs["PATCHSET"] == "35-201906112304"
+ assert attrs["DUMMY"] == "foo=bar"
assert cray_backend.CrayBackend._detect_crayos_version() == 6
def test_read_clerelease_file(tmpdir, monkeypatch):
"""test reading the Cray clerelease file"""
- clerelease_path = tmpdir.join('clerelease')
- with clerelease_path.open('w') as f:
- f.write('5.2.UP04\n')
+ clerelease_path = tmpdir.join("clerelease")
+ with clerelease_path.open("w") as f:
+ f.write("5.2.UP04\n")
- monkeypatch.setattr(cray_backend, '_clerelease_file', str(clerelease_path))
+ monkeypatch.setattr(cray_backend, "_clerelease_file", str(clerelease_path))
v = cray_backend.read_clerelease_file()
- assert v == '5.2.UP04'
+ assert v == "5.2.UP04"
assert cray_backend.CrayBackend._detect_crayos_version() == 5
def test_cle_release_precedence(tmpdir, monkeypatch):
"""test that cle-release file takes precedence over clerelease file."""
- cle_release_path = tmpdir.join('cle-release')
- clerelease_path = tmpdir.join('clerelease')
+ cle_release_path = tmpdir.join("cle-release")
+ clerelease_path = tmpdir.join("clerelease")
- with cle_release_path.open('w') as f:
- f.write("""\
+ with cle_release_path.open("w") as f:
+ f.write(
+ """\
RELEASE=6.0.UP07
BUILD=6.0.7424
DATE=20190611
@@ -63,13 +65,13 @@ ARCH=noarch
NETWORK=ari
PATCHSET=35-201906112304
DUMMY=foo=bar
-""")
+"""
+ )
- with clerelease_path.open('w') as f:
- f.write('5.2.UP04\n')
+ with clerelease_path.open("w") as f:
+ f.write("5.2.UP04\n")
- monkeypatch.setattr(cray_backend, '_clerelease_file', str(clerelease_path))
- monkeypatch.setattr(cray_backend, '_cle_release_file',
- str(cle_release_path))
+ monkeypatch.setattr(cray_backend, "_clerelease_file", str(clerelease_path))
+ monkeypatch.setattr(cray_backend, "_cle_release_file", str(cle_release_path))
assert cray_backend.CrayBackend._detect_crayos_version() == 6
diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py
index 84c3f2df43..ce6f1700d4 100644
--- a/lib/spack/spack/test/optional_deps.py
+++ b/lib/spack/spack/test/optional_deps.py
@@ -11,76 +11,40 @@ from spack.spec import Spec
@pytest.fixture(
params=[
# Normalize simple conditionals
- ('optional-dep-test', {'optional-dep-test': None}),
- ('optional-dep-test~a', {'optional-dep-test~a': None}),
- ('optional-dep-test+a', {'optional-dep-test+a': {'a': None}}),
- ('optional-dep-test a=true', {
- 'optional-dep-test a=true': {
- 'a': None
- }}),
- ('optional-dep-test a=true', {
- 'optional-dep-test+a': {
- 'a': None
- }}),
- ('optional-dep-test@1.1', {'optional-dep-test@1.1': {'b': None}}),
- ('optional-dep-test%intel', {'optional-dep-test%intel': {'c': None}}),
- ('optional-dep-test%intel@64.1', {
- 'optional-dep-test%intel@64.1': {
- 'c': None,
- 'd': None
- }}),
- ('optional-dep-test%intel@64.1.2', {
- 'optional-dep-test%intel@64.1.2': {
- 'c': None,
- 'd': None
- }}),
- ('optional-dep-test%clang@35', {
- 'optional-dep-test%clang@35': {
- 'e': None
- }}),
+ ("optional-dep-test", {"optional-dep-test": None}),
+ ("optional-dep-test~a", {"optional-dep-test~a": None}),
+ ("optional-dep-test+a", {"optional-dep-test+a": {"a": None}}),
+ ("optional-dep-test a=true", {"optional-dep-test a=true": {"a": None}}),
+ ("optional-dep-test a=true", {"optional-dep-test+a": {"a": None}}),
+ ("optional-dep-test@1.1", {"optional-dep-test@1.1": {"b": None}}),
+ ("optional-dep-test%intel", {"optional-dep-test%intel": {"c": None}}),
+ ("optional-dep-test%intel@64.1", {"optional-dep-test%intel@64.1": {"c": None, "d": None}}),
+ (
+ "optional-dep-test%intel@64.1.2",
+ {"optional-dep-test%intel@64.1.2": {"c": None, "d": None}},
+ ),
+ ("optional-dep-test%clang@35", {"optional-dep-test%clang@35": {"e": None}}),
# Normalize multiple conditionals
- ('optional-dep-test+a@1.1', {
- 'optional-dep-test+a@1.1': {
- 'a': None,
- 'b': None
- }}),
- ('optional-dep-test+a%intel', {
- 'optional-dep-test+a%intel': {
- 'a': None,
- 'c': None
- }}),
- ('optional-dep-test@1.1%intel', {
- 'optional-dep-test@1.1%intel': {
- 'b': None,
- 'c': None
- }}),
- ('optional-dep-test@1.1%intel@64.1.2+a', {
- 'optional-dep-test@1.1%intel@64.1.2+a': {
- 'a': None,
- 'b': None,
- 'c': None,
- 'd': None
- }}),
- ('optional-dep-test@1.1%clang@36.5+a', {
- 'optional-dep-test@1.1%clang@36.5+a': {
- 'b': None,
- 'a': None,
- 'e': None
- }}),
+ ("optional-dep-test+a@1.1", {"optional-dep-test+a@1.1": {"a": None, "b": None}}),
+ ("optional-dep-test+a%intel", {"optional-dep-test+a%intel": {"a": None, "c": None}}),
+ ("optional-dep-test@1.1%intel", {"optional-dep-test@1.1%intel": {"b": None, "c": None}}),
+ (
+ "optional-dep-test@1.1%intel@64.1.2+a",
+ {"optional-dep-test@1.1%intel@64.1.2+a": {"a": None, "b": None, "c": None, "d": None}},
+ ),
+ (
+ "optional-dep-test@1.1%clang@36.5+a",
+ {"optional-dep-test@1.1%clang@36.5+a": {"b": None, "a": None, "e": None}},
+ ),
# Chained MPI
- ('optional-dep-test-2+mpi', {
- 'optional-dep-test-2+mpi': {
- 'optional-dep-test+mpi': {'mpi': None}
- }}),
+ (
+ "optional-dep-test-2+mpi",
+ {"optional-dep-test-2+mpi": {"optional-dep-test+mpi": {"mpi": None}}},
+ ),
# Each of these dependencies comes from a conditional
# dependency on another. This requires iterating to evaluate
# the whole chain.
- ('optional-dep-test+f', {
- 'optional-dep-test+f': {
- 'f': None,
- 'g': None,
- 'mpi': None
- }})
+ ("optional-dep-test+f", {"optional-dep-test+f": {"f": None, "g": None, "mpi": None}}),
]
)
def spec_and_expected(request):
@@ -97,14 +61,14 @@ def test_normalize(spec_and_expected, config, mock_packages):
def test_default_variant(config, mock_packages):
- spec = Spec('optional-dep-test-3')
+ spec = Spec("optional-dep-test-3")
spec.concretize()
- assert 'a' in spec
+ assert "a" in spec
- spec = Spec('optional-dep-test-3~var')
+ spec = Spec("optional-dep-test-3~var")
spec.concretize()
- assert 'a' in spec
+ assert "a" in spec
- spec = Spec('optional-dep-test-3+var')
+ spec = Spec("optional-dep-test-3+var")
spec.concretize()
- assert 'b' in spec
+ assert "b" in spec
diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py
index 15fd1237fb..87cb9a009a 100644
--- a/lib/spack/spack/test/package_class.py
+++ b/lib/spack/spack/test/package_class.py
@@ -23,62 +23,60 @@ import spack.repo
@pytest.fixture(scope="module")
def mpi_names(mock_repo_path):
- return [spec.name for spec in mock_repo_path.providers_for('mpi')]
+ return [spec.name for spec in mock_repo_path.providers_for("mpi")]
@pytest.fixture()
def mpileaks_possible_deps(mock_packages, mpi_names):
possible = {
- 'callpath': set(['dyninst'] + mpi_names),
- 'low-priority-provider': set(),
- 'dyninst': set(['libdwarf', 'libelf']),
- 'fake': set(),
- 'libdwarf': set(['libelf']),
- 'libelf': set(),
- 'mpich': set(),
- 'mpich2': set(),
- 'mpileaks': set(['callpath'] + mpi_names),
- 'multi-provider-mpi': set(),
- 'zmpi': set(['fake']),
+ "callpath": set(["dyninst"] + mpi_names),
+ "low-priority-provider": set(),
+ "dyninst": set(["libdwarf", "libelf"]),
+ "fake": set(),
+ "libdwarf": set(["libelf"]),
+ "libelf": set(),
+ "mpich": set(),
+ "mpich2": set(),
+ "mpileaks": set(["callpath"] + mpi_names),
+ "multi-provider-mpi": set(),
+ "zmpi": set(["fake"]),
}
return possible
def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
- pkg_cls = spack.repo.path.get_pkg_class('mpileaks')
+ pkg_cls = spack.repo.path.get_pkg_class("mpileaks")
expanded_possible_deps = pkg_cls.possible_dependencies(expand_virtuals=True)
assert mpileaks_possible_deps == expanded_possible_deps
assert {
- 'callpath': {'dyninst', 'mpi'},
- 'dyninst': {'libdwarf', 'libelf'},
- 'libdwarf': {'libelf'},
- 'libelf': set(),
- 'mpi': set(),
- 'mpileaks': {'callpath', 'mpi'},
+ "callpath": {"dyninst", "mpi"},
+ "dyninst": {"libdwarf", "libelf"},
+ "libdwarf": {"libelf"},
+ "libelf": set(),
+ "mpi": set(),
+ "mpileaks": {"callpath", "mpi"},
} == pkg_cls.possible_dependencies(expand_virtuals=False)
def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps):
- pkg_cls = spack.repo.path.get_pkg_class('mpileaks')
+ pkg_cls = spack.repo.path.get_pkg_class("mpileaks")
deps = pkg_cls.possible_dependencies(transitive=False, expand_virtuals=False)
assert {
- 'callpath': set(),
- 'mpi': set(),
- 'mpileaks': {'callpath', 'mpi'},
+ "callpath": set(),
+ "mpi": set(),
+ "mpileaks": {"callpath", "mpi"},
} == deps
def test_possible_dependencies_virtual(mock_packages, mpi_names):
expected = dict(
- (name, set(spack.repo.path.get_pkg_class(name).dependencies))
- for name in mpi_names
+ (name, set(spack.repo.path.get_pkg_class(name).dependencies)) for name in mpi_names
)
# only one mock MPI has a dependency
- expected['fake'] = set()
+ expected["fake"] = set()
- assert expected == spack.package_base.possible_dependencies(
- "mpi", transitive=False)
+ assert expected == spack.package_base.possible_dependencies("mpi", transitive=False)
def test_possible_dependencies_missing(mock_packages):
@@ -89,36 +87,37 @@ def test_possible_dependencies_missing(mock_packages):
def test_possible_dependencies_with_deptypes(mock_packages):
- dtbuild1 = spack.repo.path.get_pkg_class('dtbuild1')
+ dtbuild1 = spack.repo.path.get_pkg_class("dtbuild1")
assert {
- 'dtbuild1': {'dtrun2', 'dtlink2'},
- 'dtlink2': set(),
- 'dtrun2': set(),
- } == dtbuild1.possible_dependencies(deptype=('link', 'run'))
+ "dtbuild1": {"dtrun2", "dtlink2"},
+ "dtlink2": set(),
+ "dtrun2": set(),
+ } == dtbuild1.possible_dependencies(deptype=("link", "run"))
assert {
- 'dtbuild1': {'dtbuild2', 'dtlink2'},
- 'dtbuild2': set(),
- 'dtlink2': set(),
- } == dtbuild1.possible_dependencies(deptype=('build'))
+ "dtbuild1": {"dtbuild2", "dtlink2"},
+ "dtbuild2": set(),
+ "dtlink2": set(),
+ } == dtbuild1.possible_dependencies(deptype=("build"))
assert {
- 'dtbuild1': {'dtlink2'},
- 'dtlink2': set(),
- } == dtbuild1.possible_dependencies(deptype=('link'))
+ "dtbuild1": {"dtlink2"},
+ "dtlink2": set(),
+ } == dtbuild1.possible_dependencies(deptype=("link"))
-def test_possible_dependencies_with_multiple_classes(
- mock_packages, mpileaks_possible_deps):
- pkgs = ['dt-diamond', 'mpileaks']
+def test_possible_dependencies_with_multiple_classes(mock_packages, mpileaks_possible_deps):
+ pkgs = ["dt-diamond", "mpileaks"]
expected = mpileaks_possible_deps.copy()
- expected.update({
- 'dt-diamond': set(['dt-diamond-left', 'dt-diamond-right']),
- 'dt-diamond-left': set(['dt-diamond-bottom']),
- 'dt-diamond-right': set(['dt-diamond-bottom']),
- 'dt-diamond-bottom': set(),
- })
+ expected.update(
+ {
+ "dt-diamond": set(["dt-diamond-left", "dt-diamond-right"]),
+ "dt-diamond-left": set(["dt-diamond-bottom"]),
+ "dt-diamond-right": set(["dt-diamond-bottom"]),
+ "dt-diamond-bottom": set(),
+ }
+ )
assert expected == spack.package_base.possible_dependencies(*pkgs)
@@ -138,20 +137,29 @@ def setup_install_test(source_paths, install_test_root):
fs.mkdirp(path)
-@pytest.mark.parametrize('spec,sources,extras,expect', [
- ('a',
- ['example/a.c'], # Source(s)
- ['example/a.c'], # Extra test source
- ['example/a.c']), # Test install dir source(s)
- ('b',
- ['test/b.cpp', 'test/b.hpp', 'example/b.txt'], # Source(s)
- ['test'], # Extra test source
- ['test/b.cpp', 'test/b.hpp']), # Test install dir source
- ('c',
- ['examples/a.py', 'examples/b.py', 'examples/c.py', 'tests/d.py'],
- ['examples/b.py', 'tests'],
- ['examples/b.py', 'tests/d.py']),
-])
+@pytest.mark.parametrize(
+ "spec,sources,extras,expect",
+ [
+ (
+ "a",
+ ["example/a.c"], # Source(s)
+ ["example/a.c"], # Extra test source
+ ["example/a.c"],
+ ), # Test install dir source(s)
+ (
+ "b",
+ ["test/b.cpp", "test/b.hpp", "example/b.txt"], # Source(s)
+ ["test"], # Extra test source
+ ["test/b.cpp", "test/b.hpp"],
+ ), # Test install dir source
+ (
+ "c",
+ ["examples/a.py", "examples/b.py", "examples/c.py", "tests/d.py"],
+ ["examples/b.py", "tests"],
+ ["examples/b.py", "tests/d.py"],
+ ),
+ ],
+)
def test_cache_extra_sources(install_mockery, spec, sources, extras, expect):
"""Test the package's cache extra test sources helper function."""
s = spack.spec.Spec(spec).concretized()
@@ -161,10 +169,10 @@ def test_cache_extra_sources(install_mockery, spec, sources, extras, expect):
srcs = [fs.join_path(source_path, src) for src in sources]
setup_install_test(srcs, s.package.install_test_root)
- emsg_dir = 'Expected {0} to be a directory'
- emsg_file = 'Expected {0} to be a file'
+ emsg_dir = "Expected {0} to be a directory"
+ emsg_file = "Expected {0} to be a file"
for src in srcs:
- assert os.path.exists(src), 'Expected {0} to exist'.format(src)
+ assert os.path.exists(src), "Expected {0} to exist".format(src)
if os.path.splitext(src)[1]:
assert os.path.isfile(src), emsg_file.format(src)
else:
@@ -176,16 +184,16 @@ def test_cache_extra_sources(install_mockery, spec, sources, extras, expect):
exp_dests = [fs.join_path(s.package.install_test_root, e) for e in expect]
poss_dests = set(src_dests) | set(exp_dests)
- msg = 'Expected {0} to{1} exist'
+ msg = "Expected {0} to{1} exist"
for pd in poss_dests:
if pd in exp_dests:
- assert os.path.exists(pd), msg.format(pd, '')
+ assert os.path.exists(pd), msg.format(pd, "")
if os.path.splitext(pd)[1]:
assert os.path.isfile(pd), emsg_file.format(pd)
else:
assert os.path.isdir(pd), emsg_dir.format(pd)
else:
- assert not os.path.exists(pd), msg.format(pd, ' not')
+ assert not os.path.exists(pd), msg.format(pd, " not")
# Perform a little cleanup
shutil.rmtree(os.path.dirname(source_path))
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index 145ece1f7e..ba98ccf2a9 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -52,8 +52,7 @@ def test_packages_are_pickleable():
failed_to_pickle.append(name)
if failed_to_pickle:
- tty.msg('The following packages failed to pickle: ' +
- ', '.join(failed_to_pickle))
+ tty.msg("The following packages failed to pickle: " + ", ".join(failed_to_pickle))
for name in failed_to_pickle:
pkg_cls = spack.repo.path.get_pkg_class(name)
@@ -78,13 +77,13 @@ def test_packages_are_unparseable():
failed_to_compile.append(name)
if failed_to_unparse:
- tty.msg('The following packages failed to unparse: ' +
- ', '.join(failed_to_unparse))
+ tty.msg("The following packages failed to unparse: " + ", ".join(failed_to_unparse))
assert False
if failed_to_compile:
- tty.msg('The following unparsed packages failed to compile: ' +
- ', '.join(failed_to_compile))
+ tty.msg(
+ "The following unparsed packages failed to compile: " + ", ".join(failed_to_compile)
+ )
assert False
@@ -110,7 +109,7 @@ def test_all_versions_are_lowercase():
"""Spack package names must be lowercase, and use `-` instead of `_`."""
errors = []
for name in spack.repo.all_package_names():
- if re.search(r'[_A-Z]', name):
+ if re.search(r"[_A-Z]", name):
errors.append(name)
assert len(errors) == 0
@@ -118,15 +117,16 @@ def test_all_versions_are_lowercase():
def test_all_virtual_packages_have_default_providers():
"""All virtual packages must have a default provider explicitly set."""
- defaults = spack.config.get('packages', scope='defaults')
- default_providers = defaults['all']['providers']
+ defaults = spack.config.get("packages", scope="defaults")
+ default_providers = defaults["all"]["providers"]
providers = spack.repo.path.provider_index.providers
- default_providers_filename = \
- spack.config.config.scopes['defaults'].get_section_filename('packages')
+ default_providers_filename = spack.config.config.scopes["defaults"].get_section_filename(
+ "packages"
+ )
for provider in providers:
- assert provider in default_providers, \
- "all providers must have a default in %s" \
- % default_providers_filename
+ assert provider in default_providers, (
+ "all providers must have a default in %s" % default_providers_filename
+ )
def test_package_version_consistency():
@@ -141,24 +141,22 @@ def test_package_version_consistency():
def test_no_fixme():
"""Packages should not contain any boilerplate such as
- FIXME or example.com."""
+ FIXME or example.com."""
errors = []
fixme_regexes = [
- r'remove this boilerplate',
- r'FIXME: Put',
- r'FIXME: Add',
- r'example.com',
+ r"remove this boilerplate",
+ r"FIXME: Put",
+ r"FIXME: Add",
+ r"example.com",
]
for name in spack.repo.all_package_names():
filename = spack.repo.path.filename_for_package_name(name)
- with open(filename, 'r') as package_file:
+ with open(filename, "r") as package_file:
for i, line in enumerate(package_file):
- pattern = next((r for r in fixme_regexes
- if re.search(r, line)), None)
+ pattern = next((r for r in fixme_regexes if re.search(r, line)), None)
if pattern:
errors.append(
- "%s:%d: boilerplate needs to be removed: %s" %
- (filename, i, line.strip())
+ "%s:%d: boilerplate needs to be removed: %s" % (filename, i, line.strip())
)
assert [] == errors
@@ -194,8 +192,8 @@ def test_all_packages_use_sha256_checksums():
bad_digest = invalid_sha256_digest(fetcher)
if bad_digest:
errors.append(
- "All packages must use sha256 checksums. %s@%s uses %s." %
- (name, v, bad_digest)
+ "All packages must use sha256 checksums. %s@%s uses %s."
+ % (name, v, bad_digest)
)
for _, resources in pkg.resources.items():
@@ -204,7 +202,7 @@ def test_all_packages_use_sha256_checksums():
if bad_digest:
errors.append(
"All packages must use sha256 checksums."
- "Resource in %s uses %s." % (name, bad_digest)
+ "Resource in %s uses %s." % (name, bad_digest)
)
assert [] == errors
@@ -216,22 +214,21 @@ def test_api_for_build_and_run_environment():
"""
failing = []
for pkg_cls in spack.repo.path.all_package_classes():
- add_to_list = (hasattr(pkg_cls, 'setup_environment') or
- hasattr(pkg_cls, 'setup_dependent_environment'))
+ add_to_list = hasattr(pkg_cls, "setup_environment") or hasattr(
+ pkg_cls, "setup_dependent_environment"
+ )
if add_to_list:
failing.append(pkg_cls)
- msg = ('there are {0} packages using the old API to set build '
- 'and run environment [{1}], for further information see '
- 'https://github.com/spack/spack/pull/11115')
- assert not failing, msg.format(
- len(failing), ','.join(x.name for x in failing)
+ msg = (
+ "there are {0} packages using the old API to set build "
+ "and run environment [{1}], for further information see "
+ "https://github.com/spack/spack/pull/11115"
)
+ assert not failing, msg.format(len(failing), ",".join(x.name for x in failing))
-@pytest.mark.skipif(
- not executable.which('git'), reason='requires git to be installed'
-)
+@pytest.mark.skipif(not executable.which("git"), reason="requires git to be installed")
def test_prs_update_old_api():
"""Ensures that every package modified in a PR doesn't contain
deprecated calls to any method.
@@ -240,42 +237,36 @@ def test_prs_update_old_api():
if not ref:
pytest.skip("No base ref found")
- changed_package_files = [
- x for x in style.changed_files(base=ref) if style.is_package(x)
- ]
+ changed_package_files = [x for x in style.changed_files(base=ref) if style.is_package(x)]
failing = []
for file in changed_package_files:
- if 'builtin.mock' not in file: # don't restrict packages for tests
+ if "builtin.mock" not in file: # don't restrict packages for tests
name = os.path.basename(os.path.dirname(file))
pkg_cls = spack.repo.path.get_pkg_class(name)
pkg = pkg_cls(spack.spec.Spec(name))
- failed = (hasattr(pkg, 'setup_environment') or
- hasattr(pkg, 'setup_dependent_environment'))
+ failed = hasattr(pkg, "setup_environment") or hasattr(
+ pkg, "setup_dependent_environment"
+ )
if failed:
failing.append(name)
- msg = ('there are {0} packages using the old API to set build '
- 'and run environment [{1}], for further information see '
- 'https://github.com/spack/spack/pull/11115')
- assert not failing, msg.format(
- len(failing), ','.join(failing)
+ msg = (
+ "there are {0} packages using the old API to set build "
+ "and run environment [{1}], for further information see "
+ "https://github.com/spack/spack/pull/11115"
)
+ assert not failing, msg.format(len(failing), ",".join(failing))
def test_all_dependencies_exist():
"""Make sure no packages have nonexisting dependencies."""
missing = {}
pkgs = [pkg for pkg in spack.repo.path.all_package_names()]
- spack.package_base.possible_dependencies(
- *pkgs, transitive=True, missing=missing)
+ spack.package_base.possible_dependencies(*pkgs, transitive=True, missing=missing)
- lines = [
- "%s: [%s]" % (name, ", ".join(deps)) for name, deps in missing.items()
- ]
- assert not missing, "These packages have missing dependencies:\n" + (
- "\n".join(lines)
- )
+ lines = ["%s: [%s]" % (name, ", ".join(deps)) for name, deps in missing.items()]
+ assert not missing, "These packages have missing dependencies:\n" + ("\n".join(lines))
def test_variant_defaults_are_parsable_from_cli():
@@ -288,7 +279,8 @@ def test_variant_defaults_are_parsable_from_cli():
# Permitting a default that is an instance on 'int' permits
# to have foo=false or foo=0. Other falsish values are
# not allowed, since they can't be parsed from cli ('foo=')
- isinstance(variant.default, int) or variant.default
+ isinstance(variant.default, int)
+ or variant.default
)
if not default_is_parsable:
failing.append((pkg_cls.name, variant_name))
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index 03c2cd5ca8..c91d749b0a 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -16,41 +16,33 @@ from spack.util.naming import mod_to_class
from spack.version import VersionChecksumError
-@pytest.mark.usefixtures('config', 'mock_packages')
+@pytest.mark.usefixtures("config", "mock_packages")
class TestPackage(object):
def test_load_package(self):
- spack.repo.path.get_pkg_class('mpich')
+ spack.repo.path.get_pkg_class("mpich")
def test_package_name(self):
- pkg_cls = spack.repo.path.get_pkg_class('mpich')
- assert pkg_cls.name == 'mpich'
+ pkg_cls = spack.repo.path.get_pkg_class("mpich")
+ assert pkg_cls.name == "mpich"
def test_package_filename(self):
repo = spack.repo.Repo(mock_packages_path)
- filename = repo.filename_for_package_name('mpich')
- assert filename == os.path.join(
- mock_packages_path,
- 'packages',
- 'mpich',
- 'package.py'
- )
+ filename = repo.filename_for_package_name("mpich")
+ assert filename == os.path.join(mock_packages_path, "packages", "mpich", "package.py")
def test_nonexisting_package_filename(self):
repo = spack.repo.Repo(mock_packages_path)
- filename = repo.filename_for_package_name('some-nonexisting-package')
+ filename = repo.filename_for_package_name("some-nonexisting-package")
assert filename == os.path.join(
- mock_packages_path,
- 'packages',
- 'some-nonexisting-package',
- 'package.py'
+ mock_packages_path, "packages", "some-nonexisting-package", "package.py"
)
def test_package_class_names(self):
- assert 'Mpich' == mod_to_class('mpich')
- assert 'PmgrCollective' == mod_to_class('pmgr_collective')
- assert 'PmgrCollective' == mod_to_class('pmgr-collective')
- assert 'Pmgrcollective' == mod_to_class('PmgrCollective')
- assert '_3db' == mod_to_class('3db')
+ assert "Mpich" == mod_to_class("mpich")
+ assert "PmgrCollective" == mod_to_class("pmgr_collective")
+ assert "PmgrCollective" == mod_to_class("pmgr-collective")
+ assert "Pmgrcollective" == mod_to_class("PmgrCollective")
+ assert "_3db" == mod_to_class("3db")
# Below tests target direct imports of spack packages from the
# spack.pkg namespace
@@ -64,39 +56,39 @@ class TestPackage(object):
from spack.pkg.builtin import mock # noqa: F401
def test_inheritance_of_diretives(self):
- pkg_cls = spack.repo.path.get_pkg_class('simple-inheritance')
+ pkg_cls = spack.repo.path.get_pkg_class("simple-inheritance")
# Check dictionaries that should have been filled by directives
assert len(pkg_cls.dependencies) == 3
- assert 'cmake' in pkg_cls.dependencies
- assert 'openblas' in pkg_cls.dependencies
- assert 'mpi' in pkg_cls.dependencies
+ assert "cmake" in pkg_cls.dependencies
+ assert "openblas" in pkg_cls.dependencies
+ assert "mpi" in pkg_cls.dependencies
assert len(pkg_cls.provided) == 2
# Check that Spec instantiation behaves as we expect
- s = Spec('simple-inheritance').concretized()
- assert '^cmake' in s
- assert '^openblas' in s
- assert '+openblas' in s
- assert 'mpi' in s
-
- s = Spec('simple-inheritance~openblas').concretized()
- assert '^cmake' in s
- assert '^openblas' not in s
- assert '~openblas' in s
- assert 'mpi' in s
-
- @pytest.mark.regression('11844')
+ s = Spec("simple-inheritance").concretized()
+ assert "^cmake" in s
+ assert "^openblas" in s
+ assert "+openblas" in s
+ assert "mpi" in s
+
+ s = Spec("simple-inheritance~openblas").concretized()
+ assert "^cmake" in s
+ assert "^openblas" not in s
+ assert "~openblas" in s
+ assert "mpi" in s
+
+ @pytest.mark.regression("11844")
def test_inheritance_of_patches(self):
- s = Spec('patch-inheritance')
+ s = Spec("patch-inheritance")
# Will error if inheritor package cannot find inherited patch files
s.concretize()
def test_dependency_extensions(self):
- s = Spec('extension2')
+ s = Spec("extension2")
s.concretize()
deps = set(x.name for x in s.package.dependency_activations())
- assert deps == set(['extension1'])
+ assert deps == set(["extension1"])
def test_import_class_from_package(self):
from spack.pkg.builtin.mock.mpich import Mpich # noqa: F401
@@ -116,83 +108,97 @@ class TestPackage(object):
from spack.pkg.builtin import mock # noqa: F401
-@pytest.mark.regression('2737')
+@pytest.mark.regression("2737")
def test_urls_for_versions(mock_packages, config):
"""Version directive without a 'url' argument should use default url."""
- for spec_str in ('url_override@0.9.0', 'url_override@1.0.0'):
+ for spec_str in ("url_override@0.9.0", "url_override@1.0.0"):
s = Spec(spec_str).concretized()
- url = s.package.url_for_version('0.9.0')
- assert url == 'http://www.anothersite.org/uo-0.9.0.tgz'
+ url = s.package.url_for_version("0.9.0")
+ assert url == "http://www.anothersite.org/uo-0.9.0.tgz"
- url = s.package.url_for_version('1.0.0')
- assert url == 'http://www.doesnotexist.org/url_override-1.0.0.tar.gz'
+ url = s.package.url_for_version("1.0.0")
+ assert url == "http://www.doesnotexist.org/url_override-1.0.0.tar.gz"
- url = s.package.url_for_version('0.8.1')
- assert url == 'http://www.doesnotexist.org/url_override-0.8.1.tar.gz'
+ url = s.package.url_for_version("0.8.1")
+ assert url == "http://www.doesnotexist.org/url_override-0.8.1.tar.gz"
def test_url_for_version_with_no_urls(mock_packages, config):
- spec = Spec('git-test')
+ spec = Spec("git-test")
pkg_cls = spack.repo.path.get_pkg_class(spec.name)
with pytest.raises(spack.package_base.NoURLError):
- pkg_cls(spec).url_for_version('1.0')
+ pkg_cls(spec).url_for_version("1.0")
with pytest.raises(spack.package_base.NoURLError):
- pkg_cls(spec).url_for_version('1.1')
+ pkg_cls(spec).url_for_version("1.1")
def test_url_for_version_with_only_overrides(mock_packages, config):
- s = Spec('url-only-override').concretized()
+ s = Spec("url-only-override").concretized()
# these exist and should just take the URL provided in the package
- assert s.package.url_for_version('1.0.0') == 'http://a.example.com/url_override-1.0.0.tar.gz'
- assert s.package.url_for_version('0.9.0') == 'http://b.example.com/url_override-0.9.0.tar.gz'
- assert s.package.url_for_version('0.8.1') == 'http://c.example.com/url_override-0.8.1.tar.gz'
+ assert s.package.url_for_version("1.0.0") == "http://a.example.com/url_override-1.0.0.tar.gz"
+ assert s.package.url_for_version("0.9.0") == "http://b.example.com/url_override-0.9.0.tar.gz"
+ assert s.package.url_for_version("0.8.1") == "http://c.example.com/url_override-0.8.1.tar.gz"
# these don't exist but should still work, even if there are only overrides
- assert s.package.url_for_version('1.0.5') == 'http://a.example.com/url_override-1.0.5.tar.gz'
- assert s.package.url_for_version('0.9.5') == 'http://b.example.com/url_override-0.9.5.tar.gz'
- assert s.package.url_for_version('0.8.5') == 'http://c.example.com/url_override-0.8.5.tar.gz'
- assert s.package.url_for_version('0.7.0') == 'http://c.example.com/url_override-0.7.0.tar.gz'
+ assert s.package.url_for_version("1.0.5") == "http://a.example.com/url_override-1.0.5.tar.gz"
+ assert s.package.url_for_version("0.9.5") == "http://b.example.com/url_override-0.9.5.tar.gz"
+ assert s.package.url_for_version("0.8.5") == "http://c.example.com/url_override-0.8.5.tar.gz"
+ assert s.package.url_for_version("0.7.0") == "http://c.example.com/url_override-0.7.0.tar.gz"
def test_url_for_version_with_only_overrides_with_gaps(mock_packages, config):
- s = Spec('url-only-override-with-gaps').concretized()
+ s = Spec("url-only-override-with-gaps").concretized()
# same as for url-only-override -- these are specific
- assert s.package.url_for_version('1.0.0') == 'http://a.example.com/url_override-1.0.0.tar.gz'
- assert s.package.url_for_version('0.9.0') == 'http://b.example.com/url_override-0.9.0.tar.gz'
- assert s.package.url_for_version('0.8.1') == 'http://c.example.com/url_override-0.8.1.tar.gz'
+ assert s.package.url_for_version("1.0.0") == "http://a.example.com/url_override-1.0.0.tar.gz"
+ assert s.package.url_for_version("0.9.0") == "http://b.example.com/url_override-0.9.0.tar.gz"
+ assert s.package.url_for_version("0.8.1") == "http://c.example.com/url_override-0.8.1.tar.gz"
# these don't have specific URLs, but should still work by extrapolation
- assert s.package.url_for_version('1.0.5') == 'http://a.example.com/url_override-1.0.5.tar.gz'
- assert s.package.url_for_version('0.9.5') == 'http://b.example.com/url_override-0.9.5.tar.gz'
- assert s.package.url_for_version('0.8.5') == 'http://c.example.com/url_override-0.8.5.tar.gz'
- assert s.package.url_for_version('0.7.0') == 'http://c.example.com/url_override-0.7.0.tar.gz'
-
-
-@pytest.mark.usefixtures('mock_packages', 'config')
-@pytest.mark.parametrize('spec_str,expected_type,expected_url', [
- ('git-top-level', spack.fetch_strategy.GitFetchStrategy, 'https://example.com/some/git/repo'),
- ('svn-top-level', spack.fetch_strategy.SvnFetchStrategy, 'https://example.com/some/svn/repo'),
- ('hg-top-level', spack.fetch_strategy.HgFetchStrategy, 'https://example.com/some/hg/repo'),
-])
+ assert s.package.url_for_version("1.0.5") == "http://a.example.com/url_override-1.0.5.tar.gz"
+ assert s.package.url_for_version("0.9.5") == "http://b.example.com/url_override-0.9.5.tar.gz"
+ assert s.package.url_for_version("0.8.5") == "http://c.example.com/url_override-0.8.5.tar.gz"
+ assert s.package.url_for_version("0.7.0") == "http://c.example.com/url_override-0.7.0.tar.gz"
+
+
+@pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.parametrize(
+ "spec_str,expected_type,expected_url",
+ [
+ (
+ "git-top-level",
+ spack.fetch_strategy.GitFetchStrategy,
+ "https://example.com/some/git/repo",
+ ),
+ (
+ "svn-top-level",
+ spack.fetch_strategy.SvnFetchStrategy,
+ "https://example.com/some/svn/repo",
+ ),
+ ("hg-top-level", spack.fetch_strategy.HgFetchStrategy, "https://example.com/some/hg/repo"),
+ ],
+)
def test_fetcher_url(spec_str, expected_type, expected_url):
"""Ensure that top-level git attribute can be used as a default."""
s = Spec(spec_str).concretized()
- fetcher = spack.fetch_strategy.for_package_version(s.package, '1.0')
+ fetcher = spack.fetch_strategy.for_package_version(s.package, "1.0")
assert isinstance(fetcher, expected_type)
assert fetcher.url == expected_url
-@pytest.mark.usefixtures('mock_packages', 'config')
-@pytest.mark.parametrize('spec_str,version_str,exception_type', [
- # Non-url-package
- ('git-top-level', '1.1', spack.fetch_strategy.ExtrapolationError),
- # Two VCS specified together
- ('git-url-svn-top-level', '1.0', spack.fetch_strategy.FetcherConflict),
- ('git-svn-top-level', '1.0', spack.fetch_strategy.FetcherConflict),
-])
+@pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.parametrize(
+ "spec_str,version_str,exception_type",
+ [
+ # Non-url-package
+ ("git-top-level", "1.1", spack.fetch_strategy.ExtrapolationError),
+ # Two VCS specified together
+ ("git-url-svn-top-level", "1.0", spack.fetch_strategy.FetcherConflict),
+ ("git-svn-top-level", "1.0", spack.fetch_strategy.FetcherConflict),
+ ],
+)
def test_fetcher_errors(spec_str, version_str, exception_type):
"""Verify that we can't extrapolate versions for non-URL packages."""
with pytest.raises(exception_type):
@@ -200,18 +206,21 @@ def test_fetcher_errors(spec_str, version_str, exception_type):
spack.fetch_strategy.for_package_version(s.package, version_str)
-@pytest.mark.usefixtures('mock_packages', 'config')
-@pytest.mark.parametrize('version_str,expected_url,digest', [
- ('2.0', 'https://example.com/some/tarball-2.0.tar.gz', '20'),
- ('2.1', 'https://example.com/some/tarball-2.1.tar.gz', '21'),
- ('2.2', 'https://www.example.com/foo2.2.tar.gz', '22'),
- ('2.3', 'https://www.example.com/foo2.3.tar.gz', '23'),
-])
+@pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.parametrize(
+ "version_str,expected_url,digest",
+ [
+ ("2.0", "https://example.com/some/tarball-2.0.tar.gz", "20"),
+ ("2.1", "https://example.com/some/tarball-2.1.tar.gz", "21"),
+ ("2.2", "https://www.example.com/foo2.2.tar.gz", "22"),
+ ("2.3", "https://www.example.com/foo2.3.tar.gz", "23"),
+ ],
+)
def test_git_url_top_level_url_versions(version_str, expected_url, digest):
"""Test URL fetch strategy inference when url is specified with git."""
- s = Spec('git-url-top-level').concretized()
+ s = Spec("git-url-top-level").concretized()
# leading 62 zeros of sha256 hash
- leading_zeros = '0' * 62
+ leading_zeros = "0" * 62
fetcher = spack.fetch_strategy.for_package_version(s.package, version_str)
assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
@@ -219,33 +228,36 @@ def test_git_url_top_level_url_versions(version_str, expected_url, digest):
assert fetcher.digest == leading_zeros + digest
-@pytest.mark.usefixtures('mock_packages', 'config')
-@pytest.mark.parametrize('version_str,tag,commit,branch', [
- ('3.0', 'v3.0', None, None),
- ('3.1', 'v3.1', 'abc31', None),
- ('3.2', None, None, 'releases/v3.2'),
- ('3.3', None, 'abc33', 'releases/v3.3'),
- ('3.4', None, 'abc34', None),
- ('submodules', None, None, None),
- ('develop', None, None, 'develop'),
-])
+@pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.parametrize(
+ "version_str,tag,commit,branch",
+ [
+ ("3.0", "v3.0", None, None),
+ ("3.1", "v3.1", "abc31", None),
+ ("3.2", None, None, "releases/v3.2"),
+ ("3.3", None, "abc33", "releases/v3.3"),
+ ("3.4", None, "abc34", None),
+ ("submodules", None, None, None),
+ ("develop", None, None, "develop"),
+ ],
+)
def test_git_url_top_level_git_versions(version_str, tag, commit, branch):
"""Test git fetch strategy inference when url is specified with git."""
- s = Spec('git-url-top-level').concretized()
+ s = Spec("git-url-top-level").concretized()
fetcher = spack.fetch_strategy.for_package_version(s.package, version_str)
assert isinstance(fetcher, spack.fetch_strategy.GitFetchStrategy)
- assert fetcher.url == 'https://example.com/some/git/repo'
+ assert fetcher.url == "https://example.com/some/git/repo"
assert fetcher.tag == tag
assert fetcher.commit == commit
assert fetcher.branch == branch
-@pytest.mark.usefixtures('mock_packages', 'config')
-@pytest.mark.parametrize('version_str', ['1.0', '1.1', '1.2', '1.3'])
+@pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.parametrize("version_str", ["1.0", "1.1", "1.2", "1.3"])
def test_git_url_top_level_conflicts(version_str):
"""Test git fetch strategy inference when url is specified with git."""
- s = Spec('git-url-top-level').concretized()
+ s = Spec("git-url-top-level").concretized()
with pytest.raises(spack.fetch_strategy.FetcherConflict):
spack.fetch_strategy.for_package_version(s.package, version_str)
@@ -253,40 +265,42 @@ def test_git_url_top_level_conflicts(version_str):
def test_rpath_args(mutable_database):
"""Test a package's rpath_args property."""
- rec = mutable_database.get_record('mpich')
+ rec = mutable_database.get_record("mpich")
rpath_args = rec.spec.package.rpath_args
- assert '-rpath' in rpath_args
- assert 'mpich' in rpath_args
+ assert "-rpath" in rpath_args
+ assert "mpich" in rpath_args
-def test_bundle_version_checksum(mock_directive_bundle,
- clear_directive_functions):
+def test_bundle_version_checksum(mock_directive_bundle, clear_directive_functions):
"""Test raising exception on a version checksum with a bundle package."""
with pytest.raises(VersionChecksumError, match="Checksums not allowed"):
- version = spack.directives.version('1.0', checksum='1badpkg')
+ version = spack.directives.version("1.0", checksum="1badpkg")
version(mock_directive_bundle)
-def test_bundle_patch_directive(mock_directive_bundle,
- clear_directive_functions):
+def test_bundle_patch_directive(mock_directive_bundle, clear_directive_functions):
"""Test raising exception on a patch directive with a bundle package."""
- with pytest.raises(spack.directives.UnsupportedPackageDirective,
- match="Patches are not allowed"):
- patch = spack.directives.patch('mock/patch.txt')
+ with pytest.raises(
+ spack.directives.UnsupportedPackageDirective, match="Patches are not allowed"
+ ):
+ patch = spack.directives.patch("mock/patch.txt")
patch(mock_directive_bundle)
-@pytest.mark.usefixtures('mock_packages', 'config')
-@pytest.mark.parametrize('version_str,digest_end,extra_options', [
- ('1.0', '10', {'timeout': 42, 'cookie': 'foobar'}),
- ('1.1', '11', {'timeout': 65}),
- ('1.2', '12', {'cookie': 'baz'}),
-])
+@pytest.mark.usefixtures("mock_packages", "config")
+@pytest.mark.parametrize(
+ "version_str,digest_end,extra_options",
+ [
+ ("1.0", "10", {"timeout": 42, "cookie": "foobar"}),
+ ("1.1", "11", {"timeout": 65}),
+ ("1.2", "12", {"cookie": "baz"}),
+ ],
+)
def test_fetch_options(version_str, digest_end, extra_options):
"""Test fetch options inference."""
- s = Spec('fetch-options').concretized()
- leading_zeros = '000000000000000000000000000000'
+ s = Spec("fetch-options").concretized()
+ leading_zeros = "000000000000000000000000000000"
fetcher = spack.fetch_strategy.for_package_version(s.package, version_str)
assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
assert fetcher.digest == leading_zeros + digest_end
@@ -295,7 +309,7 @@ def test_fetch_options(version_str, digest_end, extra_options):
def test_has_test_method_fails(capsys):
with pytest.raises(SystemExit):
- spack.package_base.has_test_method('printing-package')
+ spack.package_base.has_test_method("printing-package")
captured = capsys.readouterr()[1]
- assert 'is not a class' in captured
+ assert "is not a class" in captured
diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py
index 6e977a6b36..c84a0e1c68 100644
--- a/lib/spack/spack/test/packaging.py
+++ b/lib/spack/spack/test/packaging.py
@@ -40,8 +40,7 @@ from spack.relocate import (
)
from spack.spec import Spec
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
def fake_fetchify(url, pkg):
@@ -51,7 +50,7 @@ def fake_fetchify(url, pkg):
pkg.fetcher = fetcher
-@pytest.mark.usefixtures('install_mockery', 'mock_gnupghome')
+@pytest.mark.usefixtures("install_mockery", "mock_gnupghome")
def test_buildcache(mock_archive, tmpdir):
# tweak patchelf to only do a download
pspec = Spec("patchelf").concretized()
@@ -59,7 +58,7 @@ def test_buildcache(mock_archive, tmpdir):
fake_fetchify(pkg.fetcher, pkg)
mkdirp(os.path.join(pkg.prefix, "bin"))
patchelfscr = os.path.join(pkg.prefix, "bin", "patchelf")
- f = open(patchelfscr, 'w')
+ f = open(patchelfscr, "w")
body = """#!/bin/bash
echo $PATH"""
f.write(body)
@@ -68,13 +67,13 @@ echo $PATH"""
os.chmod(patchelfscr, st.st_mode | stat.S_IEXEC)
# Install the test package
- spec = Spec('trivial-install-test-package')
+ spec = Spec("trivial-install-test-package")
spec.concretize()
assert spec.concrete
pkg = spec.package
fake_fetchify(mock_archive.url, pkg)
pkg.do_install()
- pkghash = '/' + str(spec.dag_hash(7))
+ pkghash = "/" + str(spec.dag_hash(7))
# Put some non-relocatable file in there
filename = os.path.join(spec.prefix, "dummy.txt")
@@ -87,28 +86,27 @@ echo $PATH"""
# Create the build cache and
# put it directly into the mirror
- mirror_path = os.path.join(str(tmpdir), 'test-mirror')
+ mirror_path = os.path.join(str(tmpdir), "test-mirror")
spack.mirror.create(mirror_path, specs=[])
# register mirror with spack config
- mirrors = {'spack-mirror-test': 'file://' + mirror_path}
- spack.config.set('mirrors', mirrors)
+ mirrors = {"spack-mirror-test": "file://" + mirror_path}
+ spack.config.set("mirrors", mirrors)
- stage = spack.stage.Stage(
- mirrors['spack-mirror-test'], name="build_cache", keep=True)
+ stage = spack.stage.Stage(mirrors["spack-mirror-test"], name="build_cache", keep=True)
stage.create()
# setup argument parser
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
- create_args = ['create', '-a', '-f', '-d', mirror_path, pkghash]
+ create_args = ["create", "-a", "-f", "-d", mirror_path, pkghash]
# Create a private key to sign package with if gpg2 available
- spack.util.gpg.create(name='test key 1', expires='0',
- email='spack@googlegroups.com',
- comment='Spack test key')
+ spack.util.gpg.create(
+ name="test key 1", expires="0", email="spack@googlegroups.com", comment="Spack test key"
+ )
- create_args.insert(create_args.index('-a'), '--rebuild-index')
+ create_args.insert(create_args.index("-a"), "--rebuild-index")
args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)
@@ -118,24 +116,24 @@ echo $PATH"""
# Uninstall the package
pkg.do_uninstall(force=True)
- install_args = ['install', '-a', '-f', pkghash]
+ install_args = ["install", "-a", "-f", pkghash]
args = parser.parse_args(install_args)
# Test install
buildcache.buildcache(parser, args)
files = os.listdir(spec.prefix)
- assert 'link_to_dummy.txt' in files
- assert 'dummy.txt' in files
+ assert "link_to_dummy.txt" in files
+ assert "dummy.txt" in files
# Validate the relocation information
buildinfo = bindist.read_buildinfo_file(spec.prefix)
- assert(buildinfo['relocate_textfiles'] == ['dummy.txt'])
- assert(buildinfo['relocate_links'] == ['link_to_dummy.txt'])
+ assert buildinfo["relocate_textfiles"] == ["dummy.txt"]
+ assert buildinfo["relocate_links"] == ["link_to_dummy.txt"]
# create build cache with relative path
- create_args.insert(create_args.index('-a'), '-f')
- create_args.insert(create_args.index('-a'), '-r')
+ create_args.insert(create_args.index("-a"), "-f")
+ create_args.insert(create_args.index("-a"), "-r")
args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)
@@ -146,45 +144,44 @@ echo $PATH"""
buildcache.buildcache(parser, args)
# test overwrite install
- install_args.insert(install_args.index('-a'), '-f')
+ install_args.insert(install_args.index("-a"), "-f")
args = parser.parse_args(install_args)
buildcache.buildcache(parser, args)
files = os.listdir(spec.prefix)
- assert 'link_to_dummy.txt' in files
- assert 'dummy.txt' in files
-# assert os.path.realpath(
-# os.path.join(spec.prefix, 'link_to_dummy.txt')
-# ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
+ assert "link_to_dummy.txt" in files
+ assert "dummy.txt" in files
+ # assert os.path.realpath(
+ # os.path.join(spec.prefix, 'link_to_dummy.txt')
+ # ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
- args = parser.parse_args(['keys'])
+ args = parser.parse_args(["keys"])
buildcache.buildcache(parser, args)
- args = parser.parse_args(['list'])
+ args = parser.parse_args(["list"])
buildcache.buildcache(parser, args)
- args = parser.parse_args(['list'])
+ args = parser.parse_args(["list"])
buildcache.buildcache(parser, args)
- args = parser.parse_args(['list', 'trivial'])
+ args = parser.parse_args(["list", "trivial"])
buildcache.buildcache(parser, args)
# Copy a key to the mirror to have something to download
- shutil.copyfile(mock_gpg_keys_path + '/external.key',
- mirror_path + '/external.key')
+ shutil.copyfile(mock_gpg_keys_path + "/external.key", mirror_path + "/external.key")
- args = parser.parse_args(['keys'])
+ args = parser.parse_args(["keys"])
buildcache.buildcache(parser, args)
- args = parser.parse_args(['keys', '-f'])
+ args = parser.parse_args(["keys", "-f"])
buildcache.buildcache(parser, args)
- args = parser.parse_args(['keys', '-i', '-t'])
+ args = parser.parse_args(["keys", "-i", "-t"])
buildcache.buildcache(parser, args)
# unregister mirror with spack config
mirrors = {}
- spack.config.set('mirrors', mirrors)
+ spack.config.set("mirrors", mirrors)
shutil.rmtree(mirror_path)
stage.destroy()
@@ -192,380 +189,355 @@ echo $PATH"""
bindist._cached_specs = set()
-@pytest.mark.usefixtures('install_mockery')
+@pytest.mark.usefixtures("install_mockery")
def test_relocate_text(tmpdir):
- spec = Spec('trivial-install-test-package')
+ spec = Spec("trivial-install-test-package")
spec.concretize()
with tmpdir.as_cwd():
# Validate the text path replacement
- old_dir = '/home/spack/opt/spack'
- filename = 'dummy.txt'
+ old_dir = "/home/spack/opt/spack"
+ filename = "dummy.txt"
with open(filename, "w") as script:
script.write(old_dir)
script.close()
filenames = [filename]
- new_dir = '/opt/rh/devtoolset/'
+ new_dir = "/opt/rh/devtoolset/"
# Singleton dict doesn't matter if Ordered
relocate_text(filenames, {old_dir: new_dir})
- with open(filename, "r")as script:
+ with open(filename, "r") as script:
for line in script:
- assert(new_dir in line)
- assert(file_is_relocatable(os.path.realpath(filename)))
+ assert new_dir in line
+ assert file_is_relocatable(os.path.realpath(filename))
# Remove cached binary specs since we deleted the mirror
bindist._cached_specs = set()
def test_relocate_links(tmpdir):
with tmpdir.as_cwd():
- old_layout_root = os.path.join(
- '%s' % tmpdir, 'home', 'spack', 'opt', 'spack')
- old_install_prefix = os.path.join(
- '%s' % old_layout_root, 'debian6', 'test')
- old_binname = os.path.join(old_install_prefix, 'binfile')
+ old_layout_root = os.path.join("%s" % tmpdir, "home", "spack", "opt", "spack")
+ old_install_prefix = os.path.join("%s" % old_layout_root, "debian6", "test")
+ old_binname = os.path.join(old_install_prefix, "binfile")
placeholder = _placeholder(old_layout_root)
re.sub(old_layout_root, placeholder, old_binname)
- filenames = ['link.ln', 'outsideprefix.ln']
- new_layout_root = os.path.join(
- '%s' % tmpdir, 'opt', 'rh', 'devtoolset')
- new_install_prefix = os.path.join(
- '%s' % new_layout_root, 'test', 'debian6')
- new_linkname = os.path.join(new_install_prefix, 'link.ln')
- new_linkname2 = os.path.join(new_install_prefix, 'outsideprefix.ln')
- new_binname = os.path.join(new_install_prefix, 'binfile')
+ filenames = ["link.ln", "outsideprefix.ln"]
+ new_layout_root = os.path.join("%s" % tmpdir, "opt", "rh", "devtoolset")
+ new_install_prefix = os.path.join("%s" % new_layout_root, "test", "debian6")
+ new_linkname = os.path.join(new_install_prefix, "link.ln")
+ new_linkname2 = os.path.join(new_install_prefix, "outsideprefix.ln")
+ new_binname = os.path.join(new_install_prefix, "binfile")
mkdirp(new_install_prefix)
- with open(new_binname, 'w') as f:
- f.write('\n')
+ with open(new_binname, "w") as f:
+ f.write("\n")
os.utime(new_binname, None)
symlink(old_binname, new_linkname)
- symlink('/usr/lib/libc.so', new_linkname2)
- relocate_links(filenames, old_layout_root,
- old_install_prefix, new_install_prefix)
+ symlink("/usr/lib/libc.so", new_linkname2)
+ relocate_links(filenames, old_layout_root, old_install_prefix, new_install_prefix)
assert os.readlink(new_linkname) == new_binname
- assert os.readlink(new_linkname2) == '/usr/lib/libc.so'
+ assert os.readlink(new_linkname2) == "/usr/lib/libc.so"
def test_needs_relocation():
- assert needs_binary_relocation('application', 'x-sharedlib')
- assert needs_binary_relocation('application', 'x-executable')
- assert not needs_binary_relocation('application', 'x-octet-stream')
- assert not needs_binary_relocation('text', 'x-')
- assert needs_text_relocation('text', 'x-')
- assert not needs_text_relocation('symbolic link to', 'x-')
+ assert needs_binary_relocation("application", "x-sharedlib")
+ assert needs_binary_relocation("application", "x-executable")
+ assert not needs_binary_relocation("application", "x-octet-stream")
+ assert not needs_binary_relocation("text", "x-")
+ assert needs_text_relocation("text", "x-")
+ assert not needs_text_relocation("symbolic link to", "x-")
- assert needs_binary_relocation('application', 'x-mach-binary')
+ assert needs_binary_relocation("application", "x-mach-binary")
def test_replace_paths(tmpdir):
with tmpdir.as_cwd():
- suffix = 'dylib' if platform.system().lower() == 'darwin' else 'so'
- hash_a = '53moz6jwnw3xpiztxwhc4us26klribws'
- hash_b = 'tk62dzu62kd4oh3h3heelyw23hw2sfee'
- hash_c = 'hdkhduizmaddpog6ewdradpobnbjwsjl'
- hash_d = 'hukkosc7ahff7o65h6cdhvcoxm57d4bw'
- hash_loco = 'zy4oigsc4eovn5yhr2lk4aukwzoespob'
+ suffix = "dylib" if platform.system().lower() == "darwin" else "so"
+ hash_a = "53moz6jwnw3xpiztxwhc4us26klribws"
+ hash_b = "tk62dzu62kd4oh3h3heelyw23hw2sfee"
+ hash_c = "hdkhduizmaddpog6ewdradpobnbjwsjl"
+ hash_d = "hukkosc7ahff7o65h6cdhvcoxm57d4bw"
+ hash_loco = "zy4oigsc4eovn5yhr2lk4aukwzoespob"
prefix2hash = dict()
- old_spack_dir = os.path.join('%s' % tmpdir,
- 'Users', 'developer', 'spack')
+ old_spack_dir = os.path.join("%s" % tmpdir, "Users", "developer", "spack")
mkdirp(old_spack_dir)
- oldprefix_a = os.path.join('%s' % old_spack_dir, 'pkgA-%s' % hash_a)
- oldlibdir_a = os.path.join('%s' % oldprefix_a, 'lib')
+ oldprefix_a = os.path.join("%s" % old_spack_dir, "pkgA-%s" % hash_a)
+ oldlibdir_a = os.path.join("%s" % oldprefix_a, "lib")
mkdirp(oldlibdir_a)
prefix2hash[str(oldprefix_a)] = hash_a
- oldprefix_b = os.path.join('%s' % old_spack_dir, 'pkgB-%s' % hash_b)
- oldlibdir_b = os.path.join('%s' % oldprefix_b, 'lib')
+ oldprefix_b = os.path.join("%s" % old_spack_dir, "pkgB-%s" % hash_b)
+ oldlibdir_b = os.path.join("%s" % oldprefix_b, "lib")
mkdirp(oldlibdir_b)
prefix2hash[str(oldprefix_b)] = hash_b
- oldprefix_c = os.path.join('%s' % old_spack_dir, 'pkgC-%s' % hash_c)
- oldlibdir_c = os.path.join('%s' % oldprefix_c, 'lib')
- oldlibdir_cc = os.path.join('%s' % oldlibdir_c, 'C')
+ oldprefix_c = os.path.join("%s" % old_spack_dir, "pkgC-%s" % hash_c)
+ oldlibdir_c = os.path.join("%s" % oldprefix_c, "lib")
+ oldlibdir_cc = os.path.join("%s" % oldlibdir_c, "C")
mkdirp(oldlibdir_c)
prefix2hash[str(oldprefix_c)] = hash_c
- oldprefix_d = os.path.join('%s' % old_spack_dir, 'pkgD-%s' % hash_d)
- oldlibdir_d = os.path.join('%s' % oldprefix_d, 'lib')
+ oldprefix_d = os.path.join("%s" % old_spack_dir, "pkgD-%s" % hash_d)
+ oldlibdir_d = os.path.join("%s" % oldprefix_d, "lib")
mkdirp(oldlibdir_d)
prefix2hash[str(oldprefix_d)] = hash_d
- oldprefix_local = os.path.join('%s' % tmpdir, 'usr', 'local')
- oldlibdir_local = os.path.join('%s' % oldprefix_local, 'lib')
+ oldprefix_local = os.path.join("%s" % tmpdir, "usr", "local")
+ oldlibdir_local = os.path.join("%s" % oldprefix_local, "lib")
mkdirp(oldlibdir_local)
prefix2hash[str(oldprefix_local)] = hash_loco
- libfile_a = 'libA.%s' % suffix
- libfile_b = 'libB.%s' % suffix
- libfile_c = 'libC.%s' % suffix
- libfile_d = 'libD.%s' % suffix
- libfile_loco = 'libloco.%s' % suffix
- old_libnames = [os.path.join(oldlibdir_a, libfile_a),
- os.path.join(oldlibdir_b, libfile_b),
- os.path.join(oldlibdir_c, libfile_c),
- os.path.join(oldlibdir_d, libfile_d),
- os.path.join(oldlibdir_local, libfile_loco)]
+ libfile_a = "libA.%s" % suffix
+ libfile_b = "libB.%s" % suffix
+ libfile_c = "libC.%s" % suffix
+ libfile_d = "libD.%s" % suffix
+ libfile_loco = "libloco.%s" % suffix
+ old_libnames = [
+ os.path.join(oldlibdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b),
+ os.path.join(oldlibdir_c, libfile_c),
+ os.path.join(oldlibdir_d, libfile_d),
+ os.path.join(oldlibdir_local, libfile_loco),
+ ]
for old_libname in old_libnames:
- with open(old_libname, 'a'):
+ with open(old_libname, "a"):
os.utime(old_libname, None)
hash2prefix = dict()
- new_spack_dir = os.path.join('%s' % tmpdir, 'Users', 'Shared',
- 'spack')
+ new_spack_dir = os.path.join("%s" % tmpdir, "Users", "Shared", "spack")
mkdirp(new_spack_dir)
- prefix_a = os.path.join(new_spack_dir, 'pkgA-%s' % hash_a)
- libdir_a = os.path.join(prefix_a, 'lib')
+ prefix_a = os.path.join(new_spack_dir, "pkgA-%s" % hash_a)
+ libdir_a = os.path.join(prefix_a, "lib")
mkdirp(libdir_a)
hash2prefix[hash_a] = str(prefix_a)
- prefix_b = os.path.join(new_spack_dir, 'pkgB-%s' % hash_b)
- libdir_b = os.path.join(prefix_b, 'lib')
+ prefix_b = os.path.join(new_spack_dir, "pkgB-%s" % hash_b)
+ libdir_b = os.path.join(prefix_b, "lib")
mkdirp(libdir_b)
hash2prefix[hash_b] = str(prefix_b)
- prefix_c = os.path.join(new_spack_dir, 'pkgC-%s' % hash_c)
- libdir_c = os.path.join(prefix_c, 'lib')
- libdir_cc = os.path.join(libdir_c, 'C')
+ prefix_c = os.path.join(new_spack_dir, "pkgC-%s" % hash_c)
+ libdir_c = os.path.join(prefix_c, "lib")
+ libdir_cc = os.path.join(libdir_c, "C")
mkdirp(libdir_cc)
hash2prefix[hash_c] = str(prefix_c)
- prefix_d = os.path.join(new_spack_dir, 'pkgD-%s' % hash_d)
- libdir_d = os.path.join(prefix_d, 'lib')
+ prefix_d = os.path.join(new_spack_dir, "pkgD-%s" % hash_d)
+ libdir_d = os.path.join(prefix_d, "lib")
mkdirp(libdir_d)
hash2prefix[hash_d] = str(prefix_d)
- prefix_local = os.path.join('%s' % tmpdir, 'usr', 'local')
- libdir_local = os.path.join(prefix_local, 'lib')
+ prefix_local = os.path.join("%s" % tmpdir, "usr", "local")
+ libdir_local = os.path.join(prefix_local, "lib")
mkdirp(libdir_local)
hash2prefix[hash_loco] = str(prefix_local)
- new_libnames = [os.path.join(libdir_a, libfile_a),
- os.path.join(libdir_b, libfile_b),
- os.path.join(libdir_cc, libfile_c),
- os.path.join(libdir_d, libfile_d),
- os.path.join(libdir_local, libfile_loco)]
+ new_libnames = [
+ os.path.join(libdir_a, libfile_a),
+ os.path.join(libdir_b, libfile_b),
+ os.path.join(libdir_cc, libfile_c),
+ os.path.join(libdir_d, libfile_d),
+ os.path.join(libdir_local, libfile_loco),
+ ]
for new_libname in new_libnames:
- with open(new_libname, 'a'):
+ with open(new_libname, "a"):
os.utime(new_libname, None)
prefix2prefix = dict()
for prefix, hash in prefix2hash.items():
prefix2prefix[prefix] = hash2prefix[hash]
- out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
- oldlibdir_c,
- oldlibdir_cc, oldlibdir_local],
- [os.path.join(oldlibdir_a,
- libfile_a),
- os.path.join(oldlibdir_b,
- libfile_b),
- os.path.join(oldlibdir_local,
- libfile_loco)],
- os.path.join(oldlibdir_cc,
- libfile_c),
- old_spack_dir,
- prefix2prefix
- )
- assert out_dict == {oldlibdir_a: libdir_a,
- oldlibdir_b: libdir_b,
- oldlibdir_c: libdir_c,
- oldlibdir_cc: libdir_cc,
- libdir_local: libdir_local,
- os.path.join(oldlibdir_a, libfile_a):
- os.path.join(libdir_a, libfile_a),
- os.path.join(oldlibdir_b, libfile_b):
- os.path.join(libdir_b, libfile_b),
- os.path.join(oldlibdir_local, libfile_loco):
- os.path.join(libdir_local, libfile_loco),
- os.path.join(oldlibdir_cc, libfile_c):
- os.path.join(libdir_cc, libfile_c)}
-
- out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
- oldlibdir_c,
- oldlibdir_cc,
- oldlibdir_local],
- [os.path.join(oldlibdir_a,
- libfile_a),
- os.path.join(oldlibdir_b,
- libfile_b),
- os.path.join(oldlibdir_cc,
- libfile_c),
- os.path.join(oldlibdir_local,
- libfile_loco)],
- None,
- old_spack_dir,
- prefix2prefix
- )
- assert out_dict == {oldlibdir_a: libdir_a,
- oldlibdir_b: libdir_b,
- oldlibdir_c: libdir_c,
- oldlibdir_cc: libdir_cc,
- libdir_local: libdir_local,
- os.path.join(oldlibdir_a, libfile_a):
- os.path.join(libdir_a, libfile_a),
- os.path.join(oldlibdir_b, libfile_b):
- os.path.join(libdir_b, libfile_b),
- os.path.join(oldlibdir_local, libfile_loco):
- os.path.join(libdir_local, libfile_loco),
- os.path.join(oldlibdir_cc, libfile_c):
- os.path.join(libdir_cc, libfile_c)}
-
- out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
- oldlibdir_c, oldlibdir_cc,
- oldlibdir_local],
- ['@rpath/%s' % libfile_a,
- '@rpath/%s' % libfile_b,
- '@rpath/%s' % libfile_c,
- '@rpath/%s' % libfile_loco],
- None,
- old_spack_dir,
- prefix2prefix
- )
-
- assert out_dict == {'@rpath/%s' % libfile_a:
- '@rpath/%s' % libfile_a,
- '@rpath/%s' % libfile_b:
- '@rpath/%s' % libfile_b,
- '@rpath/%s' % libfile_c:
- '@rpath/%s' % libfile_c,
- '@rpath/%s' % libfile_loco:
- '@rpath/%s' % libfile_loco,
- oldlibdir_a: libdir_a,
- oldlibdir_b: libdir_b,
- oldlibdir_c: libdir_c,
- oldlibdir_cc: libdir_cc,
- libdir_local: libdir_local,
- }
-
- out_dict = macho_find_paths([oldlibdir_a,
- oldlibdir_b,
- oldlibdir_d,
- oldlibdir_local],
- ['@rpath/%s' % libfile_a,
- '@rpath/%s' % libfile_b,
- '@rpath/%s' % libfile_loco],
- None,
- old_spack_dir,
- prefix2prefix)
- assert out_dict == {'@rpath/%s' % libfile_a:
- '@rpath/%s' % libfile_a,
- '@rpath/%s' % libfile_b:
- '@rpath/%s' % libfile_b,
- '@rpath/%s' % libfile_loco:
- '@rpath/%s' % libfile_loco,
- oldlibdir_a: libdir_a,
- oldlibdir_b: libdir_b,
- oldlibdir_d: libdir_d,
- libdir_local: libdir_local,
- }
+ out_dict = macho_find_paths(
+ [oldlibdir_a, oldlibdir_b, oldlibdir_c, oldlibdir_cc, oldlibdir_local],
+ [
+ os.path.join(oldlibdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b),
+ os.path.join(oldlibdir_local, libfile_loco),
+ ],
+ os.path.join(oldlibdir_cc, libfile_c),
+ old_spack_dir,
+ prefix2prefix,
+ )
+ assert out_dict == {
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ os.path.join(oldlibdir_a, libfile_a): os.path.join(libdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b): os.path.join(libdir_b, libfile_b),
+ os.path.join(oldlibdir_local, libfile_loco): os.path.join(libdir_local, libfile_loco),
+ os.path.join(oldlibdir_cc, libfile_c): os.path.join(libdir_cc, libfile_c),
+ }
+
+ out_dict = macho_find_paths(
+ [oldlibdir_a, oldlibdir_b, oldlibdir_c, oldlibdir_cc, oldlibdir_local],
+ [
+ os.path.join(oldlibdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b),
+ os.path.join(oldlibdir_cc, libfile_c),
+ os.path.join(oldlibdir_local, libfile_loco),
+ ],
+ None,
+ old_spack_dir,
+ prefix2prefix,
+ )
+ assert out_dict == {
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ os.path.join(oldlibdir_a, libfile_a): os.path.join(libdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b): os.path.join(libdir_b, libfile_b),
+ os.path.join(oldlibdir_local, libfile_loco): os.path.join(libdir_local, libfile_loco),
+ os.path.join(oldlibdir_cc, libfile_c): os.path.join(libdir_cc, libfile_c),
+ }
+
+ out_dict = macho_find_paths(
+ [oldlibdir_a, oldlibdir_b, oldlibdir_c, oldlibdir_cc, oldlibdir_local],
+ [
+ "@rpath/%s" % libfile_a,
+ "@rpath/%s" % libfile_b,
+ "@rpath/%s" % libfile_c,
+ "@rpath/%s" % libfile_loco,
+ ],
+ None,
+ old_spack_dir,
+ prefix2prefix,
+ )
+
+ assert out_dict == {
+ "@rpath/%s" % libfile_a: "@rpath/%s" % libfile_a,
+ "@rpath/%s" % libfile_b: "@rpath/%s" % libfile_b,
+ "@rpath/%s" % libfile_c: "@rpath/%s" % libfile_c,
+ "@rpath/%s" % libfile_loco: "@rpath/%s" % libfile_loco,
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ }
+
+ out_dict = macho_find_paths(
+ [oldlibdir_a, oldlibdir_b, oldlibdir_d, oldlibdir_local],
+ ["@rpath/%s" % libfile_a, "@rpath/%s" % libfile_b, "@rpath/%s" % libfile_loco],
+ None,
+ old_spack_dir,
+ prefix2prefix,
+ )
+ assert out_dict == {
+ "@rpath/%s" % libfile_a: "@rpath/%s" % libfile_a,
+ "@rpath/%s" % libfile_b: "@rpath/%s" % libfile_b,
+ "@rpath/%s" % libfile_loco: "@rpath/%s" % libfile_loco,
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_d: libdir_d,
+ libdir_local: libdir_local,
+ }
def test_macho_make_paths():
- out = macho_make_paths_relative('/Users/Shared/spack/pkgC/lib/libC.dylib',
- '/Users/Shared/spack',
- ('/Users/Shared/spack/pkgA/lib',
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib'),
- ('/Users/Shared/spack/pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'),
- '/Users/Shared/spack/pkgC/lib/libC.dylib')
- assert out == {'/Users/Shared/spack/pkgA/lib':
- '@loader_path/../../pkgA/lib',
- '/Users/Shared/spack/pkgB/lib':
- '@loader_path/../../pkgB/lib',
- '/usr/local/lib': '/usr/local/lib',
- '/Users/Shared/spack/pkgA/libA.dylib':
- '@loader_path/../../pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib':
- '@loader_path/../../pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib':
- '/usr/local/lib/libloco.dylib',
- '/Users/Shared/spack/pkgC/lib/libC.dylib':
- '@rpath/libC.dylib'}
-
- out = macho_make_paths_normal('/Users/Shared/spack/pkgC/lib/libC.dylib',
- ('@loader_path/../../pkgA/lib',
- '@loader_path/../../pkgB/lib',
- '/usr/local/lib'),
- ('@loader_path/../../pkgA/libA.dylib',
- '@loader_path/../../pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'),
- '@rpath/libC.dylib')
-
- assert out == {'@rpath/libC.dylib':
- '/Users/Shared/spack/pkgC/lib/libC.dylib',
- '@loader_path/../../pkgA/lib':
- '/Users/Shared/spack/pkgA/lib',
- '@loader_path/../../pkgB/lib':
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib': '/usr/local/lib',
- '@loader_path/../../pkgA/libA.dylib':
- '/Users/Shared/spack/pkgA/libA.dylib',
- '@loader_path/../../pkgB/libB.dylib':
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib':
- '/usr/local/lib/libloco.dylib'
- }
-
- out = macho_make_paths_relative('/Users/Shared/spack/pkgC/bin/exeC',
- '/Users/Shared/spack',
- ('/Users/Shared/spack/pkgA/lib',
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib'),
- ('/Users/Shared/spack/pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'), None)
-
- assert out == {'/Users/Shared/spack/pkgA/lib':
- '@loader_path/../../pkgA/lib',
- '/Users/Shared/spack/pkgB/lib':
- '@loader_path/../../pkgB/lib',
- '/usr/local/lib': '/usr/local/lib',
- '/Users/Shared/spack/pkgA/libA.dylib':
- '@loader_path/../../pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib':
- '@loader_path/../../pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib':
- '/usr/local/lib/libloco.dylib'}
-
- out = macho_make_paths_normal('/Users/Shared/spack/pkgC/bin/exeC',
- ('@loader_path/../../pkgA/lib',
- '@loader_path/../../pkgB/lib',
- '/usr/local/lib'),
- ('@loader_path/../../pkgA/libA.dylib',
- '@loader_path/../../pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'),
- None)
-
- assert out == {'@loader_path/../../pkgA/lib':
- '/Users/Shared/spack/pkgA/lib',
- '@loader_path/../../pkgB/lib':
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib': '/usr/local/lib',
- '@loader_path/../../pkgA/libA.dylib':
- '/Users/Shared/spack/pkgA/libA.dylib',
- '@loader_path/../../pkgB/libB.dylib':
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib':
- '/usr/local/lib/libloco.dylib'}
+ out = macho_make_paths_relative(
+ "/Users/Shared/spack/pkgC/lib/libC.dylib",
+ "/Users/Shared/spack",
+ ("/Users/Shared/spack/pkgA/lib", "/Users/Shared/spack/pkgB/lib", "/usr/local/lib"),
+ (
+ "/Users/Shared/spack/pkgA/libA.dylib",
+ "/Users/Shared/spack/pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib",
+ ),
+ "/Users/Shared/spack/pkgC/lib/libC.dylib",
+ )
+ assert out == {
+ "/Users/Shared/spack/pkgA/lib": "@loader_path/../../pkgA/lib",
+ "/Users/Shared/spack/pkgB/lib": "@loader_path/../../pkgB/lib",
+ "/usr/local/lib": "/usr/local/lib",
+ "/Users/Shared/spack/pkgA/libA.dylib": "@loader_path/../../pkgA/libA.dylib",
+ "/Users/Shared/spack/pkgB/libB.dylib": "@loader_path/../../pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
+ "/Users/Shared/spack/pkgC/lib/libC.dylib": "@rpath/libC.dylib",
+ }
+
+ out = macho_make_paths_normal(
+ "/Users/Shared/spack/pkgC/lib/libC.dylib",
+ ("@loader_path/../../pkgA/lib", "@loader_path/../../pkgB/lib", "/usr/local/lib"),
+ (
+ "@loader_path/../../pkgA/libA.dylib",
+ "@loader_path/../../pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib",
+ ),
+ "@rpath/libC.dylib",
+ )
+
+ assert out == {
+ "@rpath/libC.dylib": "/Users/Shared/spack/pkgC/lib/libC.dylib",
+ "@loader_path/../../pkgA/lib": "/Users/Shared/spack/pkgA/lib",
+ "@loader_path/../../pkgB/lib": "/Users/Shared/spack/pkgB/lib",
+ "/usr/local/lib": "/usr/local/lib",
+ "@loader_path/../../pkgA/libA.dylib": "/Users/Shared/spack/pkgA/libA.dylib",
+ "@loader_path/../../pkgB/libB.dylib": "/Users/Shared/spack/pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
+ }
+
+ out = macho_make_paths_relative(
+ "/Users/Shared/spack/pkgC/bin/exeC",
+ "/Users/Shared/spack",
+ ("/Users/Shared/spack/pkgA/lib", "/Users/Shared/spack/pkgB/lib", "/usr/local/lib"),
+ (
+ "/Users/Shared/spack/pkgA/libA.dylib",
+ "/Users/Shared/spack/pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib",
+ ),
+ None,
+ )
+
+ assert out == {
+ "/Users/Shared/spack/pkgA/lib": "@loader_path/../../pkgA/lib",
+ "/Users/Shared/spack/pkgB/lib": "@loader_path/../../pkgB/lib",
+ "/usr/local/lib": "/usr/local/lib",
+ "/Users/Shared/spack/pkgA/libA.dylib": "@loader_path/../../pkgA/libA.dylib",
+ "/Users/Shared/spack/pkgB/libB.dylib": "@loader_path/../../pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
+ }
+
+ out = macho_make_paths_normal(
+ "/Users/Shared/spack/pkgC/bin/exeC",
+ ("@loader_path/../../pkgA/lib", "@loader_path/../../pkgB/lib", "/usr/local/lib"),
+ (
+ "@loader_path/../../pkgA/libA.dylib",
+ "@loader_path/../../pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib",
+ ),
+ None,
+ )
+
+ assert out == {
+ "@loader_path/../../pkgA/lib": "/Users/Shared/spack/pkgA/lib",
+ "@loader_path/../../pkgB/lib": "/Users/Shared/spack/pkgB/lib",
+ "/usr/local/lib": "/usr/local/lib",
+ "@loader_path/../../pkgA/libA.dylib": "/Users/Shared/spack/pkgA/libA.dylib",
+ "@loader_path/../../pkgB/libB.dylib": "/Users/Shared/spack/pkgB/libB.dylib",
+ "/usr/local/lib/libloco.dylib": "/usr/local/lib/libloco.dylib",
+ }
@pytest.fixture()
def mock_download():
"""Mock a failing download strategy."""
+
class FailedDownloadStrategy(spack.fetch_strategy.FetchStrategy):
def mirror_id(self):
return None
def fetch(self):
raise spack.fetch_strategy.FailedDownloadError(
- "<non-existent URL>", "This FetchStrategy always fails")
+ "<non-existent URL>", "This FetchStrategy always fails"
+ )
fetcher = FetchStrategyComposite()
fetcher.append(FailedDownloadStrategy())
@@ -580,27 +552,27 @@ def mock_download():
spack.package_base.PackageBase.fetcher = orig_fn
-@pytest.mark.parametrize("manual,instr", [(False, False), (False, True),
- (True, False), (True, True)])
+@pytest.mark.parametrize(
+ "manual,instr", [(False, False), (False, True), (True, False), (True, True)]
+)
@pytest.mark.disable_clean_stage_check
-def test_manual_download(install_mockery, mock_download, monkeypatch, manual,
- instr):
+def test_manual_download(install_mockery, mock_download, monkeypatch, manual, instr):
"""
Ensure expected fetcher fail message based on manual download and instr.
"""
+
@property
def _instr(pkg):
- return 'Download instructions for {0}'.format(pkg.spec.name)
+ return "Download instructions for {0}".format(pkg.spec.name)
- spec = Spec('a').concretized()
+ spec = Spec("a").concretized()
pkg = spec.package
pkg.manual_download = manual
if instr:
- monkeypatch.setattr(spack.package_base.PackageBase, 'download_instr',
- _instr)
+ monkeypatch.setattr(spack.package_base.PackageBase, "download_instr", _instr)
- expected = pkg.download_instr if manual else 'All fetchers failed'
+ expected = pkg.download_instr if manual else "All fetchers failed"
with pytest.raises(spack.fetch_strategy.FetchError, match=expected):
pkg.do_fetch()
@@ -613,21 +585,22 @@ def fetching_not_allowed(monkeypatch):
def fetch(self):
raise Exception("Sources are fetched but shouldn't have been")
+
fetcher = FetchStrategyComposite()
fetcher.append(FetchingNotAllowed())
- monkeypatch.setattr(spack.package_base.PackageBase, 'fetcher', fetcher)
+ monkeypatch.setattr(spack.package_base.PackageBase, "fetcher", fetcher)
def test_fetch_without_code_is_noop(install_mockery, fetching_not_allowed):
"""do_fetch for packages without code should be a no-op"""
- pkg = Spec('a').concretized().package
+ pkg = Spec("a").concretized().package
pkg.has_code = False
pkg.do_fetch()
def test_fetch_external_package_is_noop(install_mockery, fetching_not_allowed):
"""do_fetch for packages without code should be a no-op"""
- spec = Spec('a').concretized()
+ spec = Spec("a").concretized()
spec.external_path = "/some/where"
assert spec.external
spec.package.do_fetch()
diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py
index 33b49f2506..8b446146d0 100644
--- a/lib/spack/spack/test/patch.py
+++ b/lib/spack/spack/test/patch.py
@@ -26,50 +26,70 @@ from spack.util.path import is_windows
# due to the use of carriage returns ('\r\n') in Windows line endings
# files with contents 'foo', 'bar', and 'baz'
-foo_sha256 = 'b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c' if not is_windows else 'bf874c7dd3a83cf370fdc17e496e341de06cd596b5c66dbf3c9bb7f6c139e3ee'
-bar_sha256 = '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730' if not is_windows else '556ddc69a75d0be0ecafc82cd4657666c8063f13d762282059c39ff5dbf18116'
-baz_sha256 = 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c' if not is_windows else 'd30392e66c636a063769cbb1db08cd3455a424650d4494db6379d73ea799582b'
-biz_sha256 = 'a69b288d7393261e613c276c6d38a01461028291f6e381623acc58139d01f54d' if not is_windows else '2f2b087a8f84834fd03d4d1d5b43584011e869e4657504ef3f8b0a672a5c222e'
+foo_sha256 = (
+ "b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c"
+ if not is_windows
+ else "bf874c7dd3a83cf370fdc17e496e341de06cd596b5c66dbf3c9bb7f6c139e3ee"
+)
+bar_sha256 = (
+ "7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730"
+ if not is_windows
+ else "556ddc69a75d0be0ecafc82cd4657666c8063f13d762282059c39ff5dbf18116"
+)
+baz_sha256 = (
+ "bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c"
+ if not is_windows
+ else "d30392e66c636a063769cbb1db08cd3455a424650d4494db6379d73ea799582b"
+)
+biz_sha256 = (
+ "a69b288d7393261e613c276c6d38a01461028291f6e381623acc58139d01f54d"
+ if not is_windows
+ else "2f2b087a8f84834fd03d4d1d5b43584011e869e4657504ef3f8b0a672a5c222e"
+)
# url patches
# url shas are the same on Windows
-url1_sha256 = 'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'
-url2_sha256 = '1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd'
-url2_archive_sha256 = 'abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd'
+url1_sha256 = "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234"
+url2_sha256 = "1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd"
+url2_archive_sha256 = "abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd"
-platform_url_sha = '252c0af58be3d90e5dc5e0d16658434c9efa5d20a5df6c10bf72c2d77f780866' if not is_windows else 'ecf44a8244a486e9ef5f72c6cb622f99718dcd790707ac91af0b8c9a4ab7a2bb'
+platform_url_sha = (
+ "252c0af58be3d90e5dc5e0d16658434c9efa5d20a5df6c10bf72c2d77f780866"
+ if not is_windows
+ else "ecf44a8244a486e9ef5f72c6cb622f99718dcd790707ac91af0b8c9a4ab7a2bb"
+)
@pytest.fixture()
def mock_patch_stage(tmpdir_factory, monkeypatch):
# Don't disrupt the spack install directory with tests.
- mock_path = str(tmpdir_factory.mktemp('mock-patch-stage'))
- monkeypatch.setattr(spack.stage, '_stage_root', mock_path)
+ mock_path = str(tmpdir_factory.mktemp("mock-patch-stage"))
+ monkeypatch.setattr(spack.stage, "_stage_root", mock_path)
return mock_path
-data_path = os.path.join(spack.paths.test_path, 'data', 'patch')
+data_path = os.path.join(spack.paths.test_path, "data", "patch")
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Line ending conflict on Windows")
-@pytest.mark.parametrize('filename, sha256, archive_sha256', [
- # compressed patch -- needs sha256 and archive_256
- (os.path.join(data_path, 'foo.tgz'),
- '252c0af58be3d90e5dc5e0d16658434c9efa5d20a5df6c10bf72c2d77f780866',
- '4e8092a161ec6c3a1b5253176fcf33ce7ba23ee2ff27c75dbced589dabacd06e'),
- # uncompressed patch -- needs only sha256
- (os.path.join(data_path, 'foo.patch'),
- platform_url_sha,
- None)
-])
+@pytest.mark.skipif(sys.platform == "win32", reason="Line ending conflict on Windows")
+@pytest.mark.parametrize(
+ "filename, sha256, archive_sha256",
+ [
+        # compressed patch -- needs sha256 and archive_sha256
+ (
+ os.path.join(data_path, "foo.tgz"),
+ "252c0af58be3d90e5dc5e0d16658434c9efa5d20a5df6c10bf72c2d77f780866",
+ "4e8092a161ec6c3a1b5253176fcf33ce7ba23ee2ff27c75dbced589dabacd06e",
+ ),
+ # uncompressed patch -- needs only sha256
+ (os.path.join(data_path, "foo.patch"), platform_url_sha, None),
+ ],
+)
def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config):
# Make a patch object
- url = 'file://' + filename
- s = Spec('patch').concretized()
- patch = spack.patch.UrlPatch(
- s.package, url, sha256=sha256, archive_sha256=archive_sha256
- )
+ url = "file://" + filename
+ s = Spec("patch").concretized()
+ patch = spack.patch.UrlPatch(s.package, url, sha256=sha256, archive_sha256=archive_sha256)
# make a stage
with Stage(url) as stage: # TODO: url isn't used; maybe refactor Stage
@@ -78,83 +98,89 @@ def test_url_patch(mock_patch_stage, filename, sha256, archive_sha256, config):
mkdirp(stage.source_path)
with working_dir(stage.source_path):
# write a file to be patched
- with open('foo.txt', 'w') as f:
- f.write("""\
+ with open("foo.txt", "w") as f:
+ f.write(
+ """\
first line
second line
-""")
+"""
+ )
# write the expected result of patching.
- with open('foo-expected.txt', 'w') as f:
- f.write("""\
+ with open("foo-expected.txt", "w") as f:
+ f.write(
+ """\
zeroth line
first line
third line
-""")
+"""
+ )
# apply the patch and compare files
patch.fetch()
patch.apply(stage)
patch.clean()
with working_dir(stage.source_path):
- assert filecmp.cmp('foo.txt', 'foo-expected.txt')
+ assert filecmp.cmp("foo.txt", "foo-expected.txt")
def test_patch_in_spec(mock_packages, config):
"""Test whether patches in a package appear in the spec."""
- spec = Spec('patch')
+ spec = Spec("patch")
spec.concretize()
- assert 'patches' in list(spec.variants.keys())
+ assert "patches" in list(spec.variants.keys())
# Here the order is bar, foo, baz. Note that MV variants order
# lexicographically based on the hash, not on the position of the
# patch directive.
- assert ((bar_sha256,
- foo_sha256,
- baz_sha256) ==
- spec.variants['patches'].value)
+ assert (bar_sha256, foo_sha256, baz_sha256) == spec.variants["patches"].value
- assert ((foo_sha256, bar_sha256, baz_sha256) ==
- tuple(spec.variants['patches']._patches_in_order_of_appearance))
+ assert (foo_sha256, bar_sha256, baz_sha256) == tuple(
+ spec.variants["patches"]._patches_in_order_of_appearance
+ )
def test_patch_mixed_versions_subset_constraint(mock_packages, config):
"""If we have a package with mixed x.y and x.y.z versions, make sure that
- a patch applied to a version range of x.y.z versions is not applied to
- an x.y version.
+ a patch applied to a version range of x.y.z versions is not applied to
+ an x.y version.
"""
- spec1 = Spec('patch@1.0.1')
+ spec1 = Spec("patch@1.0.1")
spec1.concretize()
- assert biz_sha256 in spec1.variants['patches'].value
+ assert biz_sha256 in spec1.variants["patches"].value
- spec2 = Spec('patch@1.0')
+ spec2 = Spec("patch@1.0")
spec2.concretize()
- assert biz_sha256 not in spec2.variants['patches'].value
+ assert biz_sha256 not in spec2.variants["patches"].value
def test_patch_order(mock_packages, config):
- spec = Spec('dep-diamond-patch-top')
+ spec = Spec("dep-diamond-patch-top")
spec.concretize()
- mid2_sha256 = 'mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234' \
- if not is_windows \
- else 'mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'
- mid1_sha256 = '0b62284961dab49887e31319843431ee5b037382ac02c4fe436955abef11f094' \
- if not is_windows else 'aeb16c4dec1087e39f2330542d59d9b456dd26d791338ae6d80b6ffd10c89dfa'
- top_sha256 = 'f7de2947c64cb6435e15fb2bef359d1ed5f6356b2aebb7b20535e3772904e6db' \
- if not is_windows else 'ff34cb21271d16dbf928374f610bb5dd593d293d311036ddae86c4846ff79070'
+ mid2_sha256 = (
+ "mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234"
+ if not is_windows
+ else "mid21234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234"
+ )
+ mid1_sha256 = (
+ "0b62284961dab49887e31319843431ee5b037382ac02c4fe436955abef11f094"
+ if not is_windows
+ else "aeb16c4dec1087e39f2330542d59d9b456dd26d791338ae6d80b6ffd10c89dfa"
+ )
+ top_sha256 = (
+ "f7de2947c64cb6435e15fb2bef359d1ed5f6356b2aebb7b20535e3772904e6db"
+ if not is_windows
+ else "ff34cb21271d16dbf928374f610bb5dd593d293d311036ddae86c4846ff79070"
+ )
- dep = spec['patch']
- patch_order = dep.variants['patches']._patches_in_order_of_appearance
+ dep = spec["patch"]
+ patch_order = dep.variants["patches"]._patches_in_order_of_appearance
# 'mid2' comes after 'mid1' alphabetically
# 'top' comes after 'mid1'/'mid2' alphabetically
# 'patch' comes last of all specs in the dag, alphabetically, so the
# patches of 'patch' to itself are applied last. The patches applied by
# 'patch' are ordered based on their appearance in the package.py file
- expected_order = (
- mid1_sha256,
- mid2_sha256,
- top_sha256,
- foo_sha256, bar_sha256, baz_sha256)
+ expected_order = (mid1_sha256, mid2_sha256, top_sha256, foo_sha256, bar_sha256, baz_sha256)
assert expected_order == tuple(patch_order)
@@ -163,76 +189,76 @@ def test_nested_directives(mock_packages):
"""Ensure pkg data structures are set up properly by nested directives."""
# this ensures that the patch() directive results were removed
# properly from the DirectiveMeta._directives_to_be_executed list
- patcher = spack.repo.path.get_pkg_class('patch-several-dependencies')
+ patcher = spack.repo.path.get_pkg_class("patch-several-dependencies")
assert len(patcher.patches) == 0
# this ensures that results of dependency patches were properly added
# to Dependency objects.
- libelf_dep = next(iter(patcher.dependencies['libelf'].values()))
+ libelf_dep = next(iter(patcher.dependencies["libelf"].values()))
assert len(libelf_dep.patches) == 1
assert len(libelf_dep.patches[Spec()]) == 1
- libdwarf_dep = next(iter(patcher.dependencies['libdwarf'].values()))
+ libdwarf_dep = next(iter(patcher.dependencies["libdwarf"].values()))
assert len(libdwarf_dep.patches) == 2
assert len(libdwarf_dep.patches[Spec()]) == 1
- assert len(libdwarf_dep.patches[Spec('@20111030')]) == 1
+ assert len(libdwarf_dep.patches[Spec("@20111030")]) == 1
- fake_dep = next(iter(patcher.dependencies['fake'].values()))
+ fake_dep = next(iter(patcher.dependencies["fake"].values()))
assert len(fake_dep.patches) == 1
assert len(fake_dep.patches[Spec()]) == 2
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Test requires Autotools")
-def test_patched_dependency(
- mock_packages, config, install_mockery, mock_fetch):
+@pytest.mark.skipif(sys.platform == "win32", reason="Test requires Autotools")
+def test_patched_dependency(mock_packages, config, install_mockery, mock_fetch):
"""Test whether patched dependencies work."""
- spec = Spec('patch-a-dependency')
+ spec = Spec("patch-a-dependency")
spec.concretize()
- assert 'patches' in list(spec['libelf'].variants.keys())
+ assert "patches" in list(spec["libelf"].variants.keys())
# make sure the patch makes it into the dependency spec
- t_sha = 'c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8' \
- if not is_windows else '3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11'
- assert ((t_sha,) ==
- spec['libelf'].variants['patches'].value)
+ t_sha = (
+ "c45c1564f70def3fc1a6e22139f62cb21cd190cc3a7dbe6f4120fa59ce33dcb8"
+ if not is_windows
+ else "3c5b65abcd6a3b2c714dbf7c31ff65fe3748a1adc371f030c283007ca5534f11"
+ )
+ assert (t_sha,) == spec["libelf"].variants["patches"].value
# make sure the patch in the dependent's directory is applied to the
# dependency
- libelf = spec['libelf']
+ libelf = spec["libelf"]
pkg = libelf.package
pkg.do_patch()
with pkg.stage:
with working_dir(pkg.stage.source_path):
# output a Makefile with 'echo Patched!' as the default target
- configure = Executable('./configure')
+ configure = Executable("./configure")
configure()
# Make sure the Makefile contains the patched text
- with open('Makefile') as mf:
- assert 'Patched!' in mf.read()
+ with open("Makefile") as mf:
+ assert "Patched!" in mf.read()
def trigger_bad_patch(pkg):
if not os.path.isdir(pkg.stage.source_path):
os.makedirs(pkg.stage.source_path)
- bad_file = os.path.join(pkg.stage.source_path, '.spack_patch_failed')
+ bad_file = os.path.join(pkg.stage.source_path, ".spack_patch_failed")
touch(bad_file)
return bad_file
def test_patch_failure_develop_spec_exits_gracefully(
- mock_packages, config, install_mockery, mock_fetch, tmpdir):
+ mock_packages, config, install_mockery, mock_fetch, tmpdir
+):
"""
ensure that a failing patch does not trigger exceptions
for develop specs
"""
- spec = Spec('patch-a-dependency '
- '^libelf dev_path=%s' % str(tmpdir))
+ spec = Spec("patch-a-dependency " "^libelf dev_path=%s" % str(tmpdir))
spec.concretize()
- libelf = spec['libelf']
- assert 'patches' in list(libelf.variants.keys())
+ libelf = spec["libelf"]
+ assert "patches" in list(libelf.variants.keys())
pkg = libelf.package
with pkg.stage:
bad_patch_indicator = trigger_bad_patch(pkg)
@@ -241,15 +267,14 @@ def test_patch_failure_develop_spec_exits_gracefully(
# success if no exceptions raised
-def test_patch_failure_restages(
- mock_packages, config, install_mockery, mock_fetch):
+def test_patch_failure_restages(mock_packages, config, install_mockery, mock_fetch):
"""
ensure that a failing patch does not trigger exceptions
for non-develop specs and the source gets restaged
"""
- spec = Spec('patch-a-dependency')
+ spec = Spec("patch-a-dependency")
spec.concretize()
- pkg = spec['libelf'].package
+ pkg = spec["libelf"].package
with pkg.stage:
bad_patch_indicator = trigger_bad_patch(pkg)
assert os.path.isfile(bad_patch_indicator)
@@ -259,157 +284,147 @@ def test_patch_failure_restages(
def test_multiple_patched_dependencies(mock_packages, config):
"""Test whether multiple patched dependencies work."""
- spec = Spec('patch-several-dependencies')
+ spec = Spec("patch-several-dependencies")
spec.concretize()
# basic patch on libelf
- assert 'patches' in list(spec['libelf'].variants.keys())
+ assert "patches" in list(spec["libelf"].variants.keys())
# foo
- assert ((foo_sha256,) ==
- spec['libelf'].variants['patches'].value)
+ assert (foo_sha256,) == spec["libelf"].variants["patches"].value
# URL patches
- assert 'patches' in list(spec['fake'].variants.keys())
+ assert "patches" in list(spec["fake"].variants.keys())
# urlpatch.patch, urlpatch.patch.gz
- assert (
- (url2_sha256, url1_sha256) == spec['fake'].variants['patches'].value)
+ assert (url2_sha256, url1_sha256) == spec["fake"].variants["patches"].value
def test_conditional_patched_dependencies(mock_packages, config):
"""Test whether conditional patched dependencies work."""
- spec = Spec('patch-several-dependencies @1.0')
+ spec = Spec("patch-several-dependencies @1.0")
spec.concretize()
# basic patch on libelf
- assert 'patches' in list(spec['libelf'].variants.keys())
+ assert "patches" in list(spec["libelf"].variants.keys())
# foo
- assert ((foo_sha256,) ==
- spec['libelf'].variants['patches'].value)
+ assert (foo_sha256,) == spec["libelf"].variants["patches"].value
# conditional patch on libdwarf
- assert 'patches' in list(spec['libdwarf'].variants.keys())
+ assert "patches" in list(spec["libdwarf"].variants.keys())
# bar
- assert ((bar_sha256,) ==
- spec['libdwarf'].variants['patches'].value)
+ assert (bar_sha256,) == spec["libdwarf"].variants["patches"].value
# baz is conditional on libdwarf version
- assert (baz_sha256
- not in spec['libdwarf'].variants['patches'].value)
+ assert baz_sha256 not in spec["libdwarf"].variants["patches"].value
# URL patches
- assert 'patches' in list(spec['fake'].variants.keys())
+ assert "patches" in list(spec["fake"].variants.keys())
# urlpatch.patch, urlpatch.patch.gz
- assert (
- (url2_sha256, url1_sha256) == spec['fake'].variants['patches'].value)
+ assert (url2_sha256, url1_sha256) == spec["fake"].variants["patches"].value
def check_multi_dependency_patch_specs(
- libelf, libdwarf, fake, # specs
- owner, package_dir): # parent spec properties
+ libelf, libdwarf, fake, owner, package_dir # specs
+): # parent spec properties
"""Validate patches on dependencies of patch-several-dependencies."""
# basic patch on libelf
- assert 'patches' in list(libelf.variants.keys())
+ assert "patches" in list(libelf.variants.keys())
# foo
- assert (foo_sha256 in libelf.variants['patches'].value)
+ assert foo_sha256 in libelf.variants["patches"].value
# conditional patch on libdwarf
- assert 'patches' in list(libdwarf.variants.keys())
+ assert "patches" in list(libdwarf.variants.keys())
# bar
- assert (bar_sha256 in libdwarf.variants['patches'].value)
+ assert bar_sha256 in libdwarf.variants["patches"].value
# baz is conditional on libdwarf version (no guarantee on order w/conds)
- assert (baz_sha256 in libdwarf.variants['patches'].value)
+ assert baz_sha256 in libdwarf.variants["patches"].value
def get_patch(spec, ending):
return next(p for p in spec.patches if p.path_or_url.endswith(ending))
# make sure file patches are reconstructed properly
- foo_patch = get_patch(libelf, 'foo.patch')
- bar_patch = get_patch(libdwarf, 'bar.patch')
- baz_patch = get_patch(libdwarf, 'baz.patch')
+ foo_patch = get_patch(libelf, "foo.patch")
+ bar_patch = get_patch(libdwarf, "bar.patch")
+ baz_patch = get_patch(libdwarf, "baz.patch")
assert foo_patch.owner == owner
- assert foo_patch.path == os.path.join(package_dir, 'foo.patch')
+ assert foo_patch.path == os.path.join(package_dir, "foo.patch")
assert foo_patch.sha256 == foo_sha256
- assert bar_patch.owner == 'builtin.mock.patch-several-dependencies'
- assert bar_patch.path == os.path.join(package_dir, 'bar.patch')
+ assert bar_patch.owner == "builtin.mock.patch-several-dependencies"
+ assert bar_patch.path == os.path.join(package_dir, "bar.patch")
assert bar_patch.sha256 == bar_sha256
- assert baz_patch.owner == 'builtin.mock.patch-several-dependencies'
- assert baz_patch.path == os.path.join(package_dir, 'baz.patch')
+ assert baz_patch.owner == "builtin.mock.patch-several-dependencies"
+ assert baz_patch.path == os.path.join(package_dir, "baz.patch")
assert baz_patch.sha256 == baz_sha256
# URL patches
- assert 'patches' in list(fake.variants.keys())
+ assert "patches" in list(fake.variants.keys())
# urlpatch.patch, urlpatch.patch.gz
- assert (url2_sha256, url1_sha256) == fake.variants['patches'].value
+ assert (url2_sha256, url1_sha256) == fake.variants["patches"].value
- url1_patch = get_patch(fake, 'urlpatch.patch')
- url2_patch = get_patch(fake, 'urlpatch2.patch.gz')
+ url1_patch = get_patch(fake, "urlpatch.patch")
+ url2_patch = get_patch(fake, "urlpatch2.patch.gz")
- assert url1_patch.owner == 'builtin.mock.patch-several-dependencies'
- assert url1_patch.url == 'http://example.com/urlpatch.patch'
+ assert url1_patch.owner == "builtin.mock.patch-several-dependencies"
+ assert url1_patch.url == "http://example.com/urlpatch.patch"
assert url1_patch.sha256 == url1_sha256
- assert url2_patch.owner == 'builtin.mock.patch-several-dependencies'
- assert url2_patch.url == 'http://example.com/urlpatch2.patch.gz'
+ assert url2_patch.owner == "builtin.mock.patch-several-dependencies"
+ assert url2_patch.url == "http://example.com/urlpatch2.patch.gz"
assert url2_patch.sha256 == url2_sha256
assert url2_patch.archive_sha256 == url2_archive_sha256
def test_conditional_patched_deps_with_conditions(mock_packages, config):
"""Test whether conditional patched dependencies with conditions work."""
- spec = Spec('patch-several-dependencies @1.0 ^libdwarf@20111030')
+ spec = Spec("patch-several-dependencies @1.0 ^libdwarf@20111030")
spec.concretize()
- libelf = spec['libelf']
- libdwarf = spec['libdwarf']
- fake = spec['fake']
+ libelf = spec["libelf"]
+ libdwarf = spec["libdwarf"]
+ fake = spec["fake"]
check_multi_dependency_patch_specs(
- libelf, libdwarf, fake,
- 'builtin.mock.patch-several-dependencies',
- spec.package.package_dir)
+ libelf, libdwarf, fake, "builtin.mock.patch-several-dependencies", spec.package.package_dir
+ )
def test_write_and_read_sub_dags_with_patched_deps(mock_packages, config):
"""Test whether patched dependencies are still correct after writing and
- reading a sub-DAG of a concretized Spec.
+ reading a sub-DAG of a concretized Spec.
"""
- spec = Spec('patch-several-dependencies @1.0 ^libdwarf@20111030')
+ spec = Spec("patch-several-dependencies @1.0 ^libdwarf@20111030")
spec.concretize()
# write to YAML and read back in -- new specs will *only* contain
# their sub-DAGs, and won't contain the dependent that patched them
- libelf = spack.spec.Spec.from_yaml(spec['libelf'].to_yaml())
- libdwarf = spack.spec.Spec.from_yaml(spec['libdwarf'].to_yaml())
- fake = spack.spec.Spec.from_yaml(spec['fake'].to_yaml())
+ libelf = spack.spec.Spec.from_yaml(spec["libelf"].to_yaml())
+ libdwarf = spack.spec.Spec.from_yaml(spec["libdwarf"].to_yaml())
+ fake = spack.spec.Spec.from_yaml(spec["fake"].to_yaml())
# make sure we can still read patches correctly for these specs
check_multi_dependency_patch_specs(
- libelf, libdwarf, fake,
- 'builtin.mock.patch-several-dependencies',
- spec.package.package_dir)
+ libelf, libdwarf, fake, "builtin.mock.patch-several-dependencies", spec.package.package_dir
+ )
def test_patch_no_file():
# Give it the attributes we need to construct the error message
- FakePackage = collections.namedtuple(
- 'FakePackage', ['name', 'namespace', 'fullname'])
- fp = FakePackage('fake-package', 'test', 'fake-package')
- with pytest.raises(ValueError, match='FilePatch:'):
- spack.patch.FilePatch(fp, 'nonexistent_file', 0, '')
+ FakePackage = collections.namedtuple("FakePackage", ["name", "namespace", "fullname"])
+ fp = FakePackage("fake-package", "test", "fake-package")
+ with pytest.raises(ValueError, match="FilePatch:"):
+ spack.patch.FilePatch(fp, "nonexistent_file", 0, "")
- patch = spack.patch.Patch(fp, 'nonexistent_file', 0, '')
- patch.path = 'test'
- with pytest.raises(spack.patch.NoSuchPatchError, match='No such patch:'):
- patch.apply('')
+ patch = spack.patch.Patch(fp, "nonexistent_file", 0, "")
+ patch.path = "test"
+ with pytest.raises(spack.patch.NoSuchPatchError, match="No such patch:"):
+ patch.apply("")
-@pytest.mark.parametrize('level', [-1, 0.0, '1'])
+@pytest.mark.parametrize("level", [-1, 0.0, "1"])
def test_invalid_level(level):
# Give it the attributes we need to construct the error message
- FakePackage = collections.namedtuple('FakePackage', ['name', 'namespace'])
- fp = FakePackage('fake-package', 'test')
- with pytest.raises(ValueError,
- match='Patch level needs to be a non-negative integer.'):
- spack.patch.Patch(fp, 'nonexistent_file', level, '')
+ FakePackage = collections.namedtuple("FakePackage", ["name", "namespace"])
+ fp = FakePackage("fake-package", "test")
+ with pytest.raises(ValueError, match="Patch level needs to be a non-negative integer."):
+ spack.patch.Patch(fp, "nonexistent_file", level, "")
diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py
index 4e77fbd7c1..3d95f2f9d3 100644
--- a/lib/spack/spack/test/pattern.py
+++ b/lib/spack/spack/test/pattern.py
@@ -12,14 +12,15 @@ import spack.util.pattern as pattern
@pytest.fixture()
def interface():
"""Returns the interface class for the composite."""
+
class Base:
counter = 0
def add(self):
- raise NotImplementedError('add not implemented')
+ raise NotImplementedError("add not implemented")
def subtract(self):
- raise NotImplementedError('subtract not implemented')
+ raise NotImplementedError("subtract not implemented")
return Base
@@ -27,6 +28,7 @@ def interface():
@pytest.fixture()
def implementation(interface):
"""Returns an implementation of the interface"""
+
class Implementation(interface):
def __init__(self, value):
self.value = value
@@ -40,21 +42,20 @@ def implementation(interface):
return Implementation
-@pytest.fixture(params=[
- 'interface',
- 'method_list'
-])
+@pytest.fixture(params=["interface", "method_list"])
def composite(interface, implementation, request):
"""Returns a composite that contains an instance of `implementation(1)`
and one of `implementation(2)`.
"""
- if request.param == 'interface':
+ if request.param == "interface":
+
@pattern.composite(interface=interface)
class Composite:
pass
else:
- @pattern.composite(method_list=['add', 'subtract'])
+
+ @pattern.composite(method_list=["add", "subtract"])
class Composite:
pass
@@ -78,6 +79,7 @@ def test_composite_interface_calls(interface, composite):
def test_composite_wrong_container(interface):
with pytest.raises(TypeError):
+
@pattern.composite(interface=interface, container=2)
class CompositeFromInterface:
pass
@@ -86,6 +88,7 @@ def test_composite_wrong_container(interface):
def test_composite_no_methods():
with pytest.raises(TypeError):
+
@pattern.composite()
class CompositeFromInterface:
pass
diff --git a/lib/spack/spack/test/permissions.py b/lib/spack/spack/test/permissions.py
index e2667b9b56..06814695de 100644
--- a/lib/spack/spack/test/permissions.py
+++ b/lib/spack/spack/test/permissions.py
@@ -13,12 +13,11 @@ import llnl.util.filesystem as fs
from spack.util.file_permissions import InvalidPermissionsError, set_permissions
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="chmod unsupported on Windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="chmod unsupported on Windows")
def test_chmod_real_entries_ignores_suid_sgid(tmpdir):
- path = str(tmpdir.join('file').ensure())
+ path = str(tmpdir.join("file").ensure())
mode = stat.S_ISUID | stat.S_ISGID | stat.S_ISVTX
os.chmod(path, mode)
mode = os.stat(path).st_mode # adds a high bit we aren't concerned with
@@ -30,7 +29,7 @@ def test_chmod_real_entries_ignores_suid_sgid(tmpdir):
def test_chmod_rejects_group_writable_suid(tmpdir):
- path = str(tmpdir.join('file').ensure())
+ path = str(tmpdir.join("file").ensure())
mode = stat.S_ISUID
fs.chmod_x(path, mode)
@@ -40,7 +39,7 @@ def test_chmod_rejects_group_writable_suid(tmpdir):
def test_chmod_rejects_world_writable_suid(tmpdir):
- path = str(tmpdir.join('file').ensure())
+ path = str(tmpdir.join("file").ensure())
mode = stat.S_ISUID
fs.chmod_x(path, mode)
@@ -50,7 +49,7 @@ def test_chmod_rejects_world_writable_suid(tmpdir):
def test_chmod_rejects_world_writable_sgid(tmpdir):
- path = str(tmpdir.join('file').ensure())
+ path = str(tmpdir.join("file").ensure())
mode = stat.S_ISGID
fs.chmod_x(path, mode)
diff --git a/lib/spack/spack/test/provider_index.py b/lib/spack/spack/test/provider_index.py
index 609f0a0593..c342ca68b4 100644
--- a/lib/spack/spack/test/provider_index.py
+++ b/lib/spack/spack/test/provider_index.py
@@ -40,27 +40,27 @@ def test_provider_index_round_trip(mock_packages):
def test_providers_for_simple(mock_packages):
p = ProviderIndex(spack.repo.all_package_names())
- blas_providers = p.providers_for('blas')
- assert Spec('netlib-blas') in blas_providers
- assert Spec('openblas') in blas_providers
- assert Spec('openblas-with-lapack') in blas_providers
+ blas_providers = p.providers_for("blas")
+ assert Spec("netlib-blas") in blas_providers
+ assert Spec("openblas") in blas_providers
+ assert Spec("openblas-with-lapack") in blas_providers
- lapack_providers = p.providers_for('lapack')
- assert Spec('netlib-lapack') in lapack_providers
- assert Spec('openblas-with-lapack') in lapack_providers
+ lapack_providers = p.providers_for("lapack")
+ assert Spec("netlib-lapack") in lapack_providers
+ assert Spec("openblas-with-lapack") in lapack_providers
def test_mpi_providers(mock_packages):
p = ProviderIndex(spack.repo.all_package_names())
- mpi_2_providers = p.providers_for('mpi@2')
- assert Spec('mpich2') in mpi_2_providers
- assert Spec('mpich@3:') in mpi_2_providers
+ mpi_2_providers = p.providers_for("mpi@2")
+ assert Spec("mpich2") in mpi_2_providers
+ assert Spec("mpich@3:") in mpi_2_providers
- mpi_3_providers = p.providers_for('mpi@3')
- assert Spec('mpich2') not in mpi_3_providers
- assert Spec('mpich@3:') in mpi_3_providers
- assert Spec('zmpi') in mpi_3_providers
+ mpi_3_providers = p.providers_for("mpi@3")
+ assert Spec("mpich2") not in mpi_3_providers
+ assert Spec("mpich@3:") in mpi_3_providers
+ assert Spec("zmpi") in mpi_3_providers
def test_equal(mock_packages):
diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py
index b5c0151420..b79f0ba1a4 100644
--- a/lib/spack/spack/test/relocate.py
+++ b/lib/spack/spack/test/relocate.py
@@ -21,21 +21,20 @@ import spack.store
import spack.tengine
import spack.util.executable
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="Tests fail on Windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows")
def skip_unless_linux(f):
return pytest.mark.skipif(
- str(spack.platforms.real_host()) != 'linux',
- reason='implementation currently requires linux'
+ str(spack.platforms.real_host()) != "linux",
+ reason="implementation currently requires linux",
)(f)
def rpaths_for(new_binary):
"""Return the RPATHs or RUNPATHs of a binary."""
- patchelf = spack.util.executable.which('patchelf')
- output = patchelf('--print-rpath', str(new_binary), output=str)
+ patchelf = spack.util.executable.which("patchelf")
+ output = patchelf("--print-rpath", str(new_binary), output=str)
return output.strip()
@@ -43,7 +42,7 @@ def text_in_bin(text, binary):
with open(str(binary), "rb") as f:
data = f.read()
f.seek(0)
- pat = re.compile(text.encode('utf-8'))
+ pat = re.compile(text.encode("utf-8"))
if not pat.search(data):
return False
return True
@@ -58,20 +57,16 @@ def is_relocatable(request):
def source_file(tmpdir, is_relocatable):
"""Returns the path to a source file of a relocatable executable."""
if is_relocatable:
- template_src = os.path.join(
- spack.paths.test_path, 'data', 'templates', 'relocatable.c'
- )
- src = tmpdir.join('relocatable.c')
+ template_src = os.path.join(spack.paths.test_path, "data", "templates", "relocatable.c")
+ src = tmpdir.join("relocatable.c")
shutil.copy(template_src, str(src))
else:
- template_dirs = [
- os.path.join(spack.paths.test_path, 'data', 'templates')
- ]
+ template_dirs = [os.path.join(spack.paths.test_path, "data", "templates")]
env = spack.tengine.make_environment(template_dirs)
- template = env.get_template('non_relocatable.c')
- text = template.render({'prefix': spack.store.layout.root})
+ template = env.get_template("non_relocatable.c")
+ text = template.render({"prefix": spack.store.layout.root})
- src = tmpdir.join('non_relocatable.c')
+ src = tmpdir.join("non_relocatable.c")
src.write(text)
return src
@@ -80,7 +75,8 @@ def source_file(tmpdir, is_relocatable):
@pytest.fixture()
def mock_patchelf(tmpdir, mock_executable):
def _factory(output):
- return mock_executable('patchelf', output=output)
+ return mock_executable("patchelf", output=output)
+
return _factory
@@ -89,24 +85,30 @@ def hello_world(tmpdir):
"""Factory fixture that compiles an ELF binary setting its RPATH. Relative
paths are encoded with `$ORIGIN` prepended.
"""
+
def _factory(rpaths, message="Hello world!"):
- source = tmpdir.join('main.c')
- source.write("""
+ source = tmpdir.join("main.c")
+ source.write(
+ """
#include <stdio.h>
int main(){{
printf("{0}");
}}
- """.format(message))
- gcc = spack.util.executable.which('gcc')
- executable = source.dirpath('main.x')
+ """.format(
+ message
+ )
+ )
+ gcc = spack.util.executable.which("gcc")
+ executable = source.dirpath("main.x")
# Encode relative RPATHs using `$ORIGIN` as the root prefix
- rpaths = [x if os.path.isabs(x) else os.path.join('$ORIGIN', x)
- for x in rpaths]
- rpath_str = ':'.join(rpaths)
+ rpaths = [x if os.path.isabs(x) else os.path.join("$ORIGIN", x) for x in rpaths]
+ rpath_str = ":".join(rpaths)
opts = [
- '-Wl,--disable-new-dtags',
- '-Wl,-rpath={0}'.format(rpath_str),
- str(source), '-o', str(executable)
+ "-Wl,--disable-new-dtags",
+ "-Wl,-rpath={0}".format(rpath_str),
+ str(source),
+ "-o",
+ str(executable),
]
gcc(*opts)
return executable
@@ -121,31 +123,29 @@ def make_dylib(tmpdir_factory):
- Writes the same rpath twice
- Writes its install path as an absolute path
"""
- cc = spack.util.executable.which('cc')
+ cc = spack.util.executable.which("cc")
def _factory(abs_install_name="abs", extra_rpaths=[]):
assert all(extra_rpaths)
- tmpdir = tmpdir_factory.mktemp(
- abs_install_name + '-'.join(extra_rpaths).replace('/', '')
- )
- src = tmpdir.join('foo.c')
+ tmpdir = tmpdir_factory.mktemp(abs_install_name + "-".join(extra_rpaths).replace("/", ""))
+ src = tmpdir.join("foo.c")
src.write("int foo() { return 1; }\n")
- filename = 'foo.dylib'
+ filename = "foo.dylib"
lib = tmpdir.join(filename)
- args = ['-shared', str(src), '-o', str(lib)]
+ args = ["-shared", str(src), "-o", str(lib)]
rpaths = list(extra_rpaths)
- if abs_install_name.startswith('abs'):
- args += ['-install_name', str(lib)]
+ if abs_install_name.startswith("abs"):
+ args += ["-install_name", str(lib)]
else:
- args += ['-install_name', '@rpath/' + filename]
+ args += ["-install_name", "@rpath/" + filename]
- if abs_install_name.endswith('rpath'):
+ if abs_install_name.endswith("rpath"):
rpaths.append(str(tmpdir))
- args.extend('-Wl,-rpath,' + s for s in rpaths)
+ args.extend("-Wl,-rpath," + s for s in rpaths)
cc(*args)
@@ -156,16 +156,16 @@ def make_dylib(tmpdir_factory):
@pytest.fixture()
def make_object_file(tmpdir):
- cc = spack.util.executable.which('cc')
+ cc = spack.util.executable.which("cc")
def _factory():
- src = tmpdir.join('bar.c')
+ src = tmpdir.join("bar.c")
src.write("int bar() { return 2; }\n")
- filename = 'bar.o'
+ filename = "bar.o"
lib = tmpdir.join(filename)
- args = ['-c', str(src), '-o', str(lib)]
+ args = ["-c", str(src), "-o", str(lib)]
cc(*args)
@@ -179,31 +179,29 @@ def copy_binary():
"""Returns a function that copies a binary somewhere and
returns the new location.
"""
+
def _copy_somewhere(orig_binary):
new_root = orig_binary.mkdtemp()
- new_binary = new_root.join('main.x')
+ new_binary = new_root.join("main.x")
shutil.copy(str(orig_binary), str(new_binary))
return new_binary
+
return _copy_somewhere
-@pytest.mark.requires_executables(
- '/usr/bin/gcc', 'patchelf', 'strings', 'file'
-)
+@pytest.mark.requires_executables("/usr/bin/gcc", "patchelf", "strings", "file")
@skip_unless_linux
def test_file_is_relocatable(source_file, is_relocatable):
- compiler = spack.util.executable.Executable('/usr/bin/gcc')
- executable = str(source_file).replace('.c', '.x')
- compiler_env = {
- 'PATH': '/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
- }
- compiler(str(source_file), '-o', executable, env=compiler_env)
+ compiler = spack.util.executable.Executable("/usr/bin/gcc")
+ executable = str(source_file).replace(".c", ".x")
+ compiler_env = {"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"}
+ compiler(str(source_file), "-o", executable, env=compiler_env)
assert spack.relocate.is_binary(executable)
assert spack.relocate.file_is_relocatable(executable) is is_relocatable
-@pytest.mark.requires_executables('patchelf', 'strings', 'file')
+@pytest.mark.requires_executables("patchelf", "strings", "file")
@skip_unless_linux
def test_patchelf_is_relocatable():
patchelf = os.path.realpath(spack.relocate._patchelf())
@@ -215,24 +213,27 @@ def test_patchelf_is_relocatable():
def test_file_is_relocatable_errors(tmpdir):
# The file passed in as argument must exist...
with pytest.raises(ValueError) as exc_info:
- spack.relocate.file_is_relocatable('/usr/bin/does_not_exist')
- assert 'does not exist' in str(exc_info.value)
+ spack.relocate.file_is_relocatable("/usr/bin/does_not_exist")
+ assert "does not exist" in str(exc_info.value)
# ...and the argument must be an absolute path to it
- file = tmpdir.join('delete.me')
- file.write('foo')
+ file = tmpdir.join("delete.me")
+ file.write("foo")
with llnl.util.filesystem.working_dir(str(tmpdir)):
with pytest.raises(ValueError) as exc_info:
- spack.relocate.file_is_relocatable('delete.me')
- assert 'is not an absolute path' in str(exc_info.value)
+ spack.relocate.file_is_relocatable("delete.me")
+ assert "is not an absolute path" in str(exc_info.value)
-@pytest.mark.parametrize('patchelf_behavior,expected', [
- ('echo ', []),
- ('echo /opt/foo/lib:/opt/foo/lib64', ['/opt/foo/lib', '/opt/foo/lib64']),
- ('exit 1', [])
-])
+@pytest.mark.parametrize(
+ "patchelf_behavior,expected",
+ [
+ ("echo ", []),
+ ("echo /opt/foo/lib:/opt/foo/lib64", ["/opt/foo/lib", "/opt/foo/lib64"]),
+ ("exit 1", []),
+ ],
+)
def test_existing_rpaths(patchelf_behavior, expected, mock_patchelf):
# Here we are mocking an executable that is always called "patchelf"
# because that will skip the part where we try to build patchelf
@@ -243,122 +244,135 @@ def test_existing_rpaths(patchelf_behavior, expected, mock_patchelf):
assert rpaths == expected
-@pytest.mark.parametrize('start_path,path_root,paths,expected', [
- ('/usr/bin/test', '/usr', ['/usr/lib', '/usr/lib64', '/opt/local/lib'],
- [os.path.join('$ORIGIN', '..', 'lib'), os.path.join('$ORIGIN', '..', 'lib64'),
- '/opt/local/lib'])
-])
+@pytest.mark.parametrize(
+ "start_path,path_root,paths,expected",
+ [
+ (
+ "/usr/bin/test",
+ "/usr",
+ ["/usr/lib", "/usr/lib64", "/opt/local/lib"],
+ [
+ os.path.join("$ORIGIN", "..", "lib"),
+ os.path.join("$ORIGIN", "..", "lib64"),
+ "/opt/local/lib",
+ ],
+ )
+ ],
+)
def test_make_relative_paths(start_path, path_root, paths, expected):
relatives = spack.relocate._make_relative(start_path, path_root, paths)
assert relatives == expected
-@pytest.mark.parametrize('start_path,relative_paths,expected', [
- # $ORIGIN will be replaced with os.path.dirname('usr/bin/test')
- # and then normalized
- ('/usr/bin/test',
- ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib'],
- [os.sep + os.path.join('usr', 'lib'), os.sep + os.path.join('usr', 'lib64'),
- '/opt/local/lib']),
- # Relative path without $ORIGIN
- ('/usr/bin/test', ['../local/lib'], ['../local/lib']),
-])
+@pytest.mark.parametrize(
+ "start_path,relative_paths,expected",
+ [
+ # $ORIGIN will be replaced with os.path.dirname('usr/bin/test')
+ # and then normalized
+ (
+ "/usr/bin/test",
+ ["$ORIGIN/../lib", "$ORIGIN/../lib64", "/opt/local/lib"],
+ [
+ os.sep + os.path.join("usr", "lib"),
+ os.sep + os.path.join("usr", "lib64"),
+ "/opt/local/lib",
+ ],
+ ),
+ # Relative path without $ORIGIN
+ ("/usr/bin/test", ["../local/lib"], ["../local/lib"]),
+ ],
+)
def test_normalize_relative_paths(start_path, relative_paths, expected):
- normalized = spack.relocate._normalize_relative_paths(
- start_path, relative_paths
- )
+ normalized = spack.relocate._normalize_relative_paths(start_path, relative_paths)
assert normalized == expected
def test_set_elf_rpaths(mock_patchelf):
# Try to relocate a mock version of patchelf and check
# the call made to patchelf itself
- patchelf = mock_patchelf('echo $@')
- rpaths = ['/usr/lib', '/usr/lib64', '/opt/local/lib']
+ patchelf = mock_patchelf("echo $@")
+ rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
output = spack.relocate._set_elf_rpaths(patchelf, rpaths)
# Assert that the arguments of the call to patchelf are as expected
- assert '--force-rpath' in output
- assert '--set-rpath ' + ':'.join(rpaths) in output
+ assert "--force-rpath" in output
+ assert "--set-rpath " + ":".join(rpaths) in output
assert patchelf in output
@skip_unless_linux
def test_set_elf_rpaths_warning(mock_patchelf):
# Mock a failing patchelf command and ensure it warns users
- patchelf = mock_patchelf('exit 1')
- rpaths = ['/usr/lib', '/usr/lib64', '/opt/local/lib']
+ patchelf = mock_patchelf("exit 1")
+ rpaths = ["/usr/lib", "/usr/lib64", "/opt/local/lib"]
    # To avoid using capfd to check whether the warning was triggered,
    # here we just check that output is not set
output = spack.relocate._set_elf_rpaths(patchelf, rpaths)
assert output is None
-@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc')
+@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_replace_prefix_bin(hello_world):
    # Compile a "Hello world!" executable and set RPATHs
- executable = hello_world(rpaths=['/usr/lib', '/usr/lib64'])
+ executable = hello_world(rpaths=["/usr/lib", "/usr/lib64"])
# Relocate the RPATHs
- spack.relocate._replace_prefix_bin(str(executable), {b'/usr': b'/foo'})
+ spack.relocate._replace_prefix_bin(str(executable), {b"/usr": b"/foo"})
    # Some compilers add rpaths, so ensure the changes are included in the final result
- assert '/foo/lib:/foo/lib64' in rpaths_for(executable)
+ assert "/foo/lib:/foo/lib64" in rpaths_for(executable)
-@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc')
+@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
-def test_relocate_elf_binaries_absolute_paths(
- hello_world, copy_binary, tmpdir
-):
+def test_relocate_elf_binaries_absolute_paths(hello_world, copy_binary, tmpdir):
# Create an executable, set some RPATHs, copy it to another location
- orig_binary = hello_world(rpaths=[str(tmpdir.mkdir('lib')), '/usr/lib64'])
+ orig_binary = hello_world(rpaths=[str(tmpdir.mkdir("lib")), "/usr/lib64"])
new_binary = copy_binary(orig_binary)
spack.relocate.relocate_elf_binaries(
binaries=[str(new_binary)],
orig_root=str(orig_binary.dirpath()),
new_root=None, # Not needed when relocating absolute paths
- new_prefixes={
- str(tmpdir): '/foo'
- },
+ new_prefixes={str(tmpdir): "/foo"},
rel=False,
# Not needed when relocating absolute paths
- orig_prefix=None, new_prefix=None
+ orig_prefix=None,
+ new_prefix=None,
)
    # Some compilers add rpaths, so ensure the changes are included in the final result
- assert '/foo/lib:/usr/lib64' in rpaths_for(new_binary)
+ assert "/foo/lib:/usr/lib64" in rpaths_for(new_binary)
-@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc')
+@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_elf_binaries_relative_paths(hello_world, copy_binary):
# Create an executable, set some RPATHs, copy it to another location
- orig_binary = hello_world(rpaths=['lib', 'lib64', '/opt/local/lib'])
+ orig_binary = hello_world(rpaths=["lib", "lib64", "/opt/local/lib"])
new_binary = copy_binary(orig_binary)
spack.relocate.relocate_elf_binaries(
binaries=[str(new_binary)],
orig_root=str(orig_binary.dirpath()),
new_root=str(new_binary.dirpath()),
- new_prefixes={str(orig_binary.dirpath()): '/foo'},
+ new_prefixes={str(orig_binary.dirpath()): "/foo"},
rel=True,
orig_prefix=str(orig_binary.dirpath()),
- new_prefix=str(new_binary.dirpath())
+ new_prefix=str(new_binary.dirpath()),
)
    # Some compilers add rpaths, so ensure the changes are included in the final result
- assert '/foo/lib:/foo/lib64:/opt/local/lib' in rpaths_for(new_binary)
+ assert "/foo/lib:/foo/lib64:/opt/local/lib" in rpaths_for(new_binary)
-@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc')
+@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_make_elf_binaries_relative(hello_world, copy_binary, tmpdir):
- orig_binary = hello_world(rpaths=[
- str(tmpdir.mkdir('lib')), str(tmpdir.mkdir('lib64')), '/opt/local/lib'
- ])
+ orig_binary = hello_world(
+ rpaths=[str(tmpdir.mkdir("lib")), str(tmpdir.mkdir("lib64")), "/opt/local/lib"]
+ )
new_binary = copy_binary(orig_binary)
spack.relocate.make_elf_binaries_relative(
@@ -366,23 +380,22 @@ def test_make_elf_binaries_relative(hello_world, copy_binary, tmpdir):
)
    # Some compilers add rpaths, so ensure the changes are included in the final result
- assert '$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib' in rpaths_for(new_binary)
+ assert "$ORIGIN/lib:$ORIGIN/lib64:/opt/local/lib" in rpaths_for(new_binary)
def test_raise_if_not_relocatable(monkeypatch):
- monkeypatch.setattr(spack.relocate, 'file_is_relocatable', lambda x: False)
+ monkeypatch.setattr(spack.relocate, "file_is_relocatable", lambda x: False)
with pytest.raises(spack.relocate.InstallRootStringError):
- spack.relocate.raise_if_not_relocatable(
- ['an_executable'], allow_root=False
- )
+ spack.relocate.raise_if_not_relocatable(["an_executable"], allow_root=False)
-@pytest.mark.requires_executables('patchelf', 'strings', 'file', 'gcc')
+@pytest.mark.requires_executables("patchelf", "strings", "file", "gcc")
@skip_unless_linux
def test_relocate_text_bin(hello_world, copy_binary, tmpdir):
- orig_binary = hello_world(rpaths=[
- str(tmpdir.mkdir('lib')), str(tmpdir.mkdir('lib64')), '/opt/local/lib'
- ], message=str(tmpdir))
+ orig_binary = hello_world(
+ rpaths=[str(tmpdir.mkdir("lib")), str(tmpdir.mkdir("lib64")), "/opt/local/lib"],
+ message=str(tmpdir),
+ )
new_binary = copy_binary(orig_binary)
    # Check the original directory is in the executable and the new one is not
@@ -390,13 +403,10 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir):
assert not text_in_bin(str(new_binary.dirpath()), new_binary)
    # Check this call succeeds
- orig_path_bytes = str(orig_binary.dirpath()).encode('utf-8')
- new_path_bytes = str(new_binary.dirpath()).encode('utf-8')
+ orig_path_bytes = str(orig_binary.dirpath()).encode("utf-8")
+ new_path_bytes = str(new_binary.dirpath()).encode("utf-8")
- spack.relocate.relocate_text_bin(
- [str(new_binary)],
- {orig_path_bytes: new_path_bytes}
- )
+ spack.relocate.relocate_text_bin([str(new_binary)], {orig_path_bytes: new_path_bytes})
    # Check the original directory is not there anymore and that it was
    # replaced by the new one
@@ -405,26 +415,24 @@ def test_relocate_text_bin(hello_world, copy_binary, tmpdir):
def test_relocate_text_bin_raise_if_new_prefix_is_longer(tmpdir):
- short_prefix = b'/short'
- long_prefix = b'/much/longer'
- fpath = str(tmpdir.join('fakebin'))
- with open(fpath, 'w') as f:
- f.write('/short')
+ short_prefix = b"/short"
+ long_prefix = b"/much/longer"
+ fpath = str(tmpdir.join("fakebin"))
+ with open(fpath, "w") as f:
+ f.write("/short")
with pytest.raises(spack.relocate.BinaryTextReplaceError):
- spack.relocate.relocate_text_bin(
- [fpath], {short_prefix: long_prefix}
- )
+ spack.relocate.relocate_text_bin([fpath], {short_prefix: long_prefix})
-@pytest.mark.requires_executables('install_name_tool', 'file', 'cc')
+@pytest.mark.requires_executables("install_name_tool", "file", "cc")
def test_fixup_macos_rpaths(make_dylib, make_object_file):
# For each of these tests except for the "correct" case, the first fixup
# should make changes, and the second fixup should be a null-op.
fixup_rpath = spack.relocate.fixup_macos_rpath
no_rpath = []
- duplicate_rpaths = ['/usr', '/usr']
- bad_rpath = ['/nonexistent/path']
+ duplicate_rpaths = ["/usr", "/usr"]
+ bad_rpath = ["/nonexistent/path"]
# Non-relocatable library id and duplicate rpaths
(root, filename) = make_dylib("abs", duplicate_rpaths)
@@ -449,8 +457,7 @@ def test_fixup_macos_rpaths(make_dylib, make_object_file):
assert not fixup_rpath(root, filename)
# Relocatable with executable paths and loader paths
- (root, filename) = make_dylib("rpath", ['@executable_path/../lib',
- '@loader_path'])
+ (root, filename) = make_dylib("rpath", ["@executable_path/../lib", "@loader_path"])
assert not fixup_rpath(root, filename)
# Non-relocatable library id but nonexistent rpath
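
The relocation tests above revolve around two transformations on RPATH entries: absolute paths under the old install root are rewritten as $ORIGIN-relative entries, and $ORIGIN entries are expanded back into absolute paths relative to the binary's own directory (in-place binary patching additionally requires the replacement prefix to be no longer than the original, which is what BinaryTextReplaceError guards). The snippet below is a minimal, self-contained sketch of that round trip for illustration only; it is not the actual spack.relocate._make_relative / _normalize_relative_paths implementation.

    import os

    def make_relative(start_path, path_root, paths):
        # Illustrative only: rewrite paths under path_root as $ORIGIN-relative.
        start_dir = os.path.dirname(start_path)
        result = []
        for p in paths:
            if p.startswith(path_root):
                result.append(os.path.join("$ORIGIN", os.path.relpath(p, start_dir)))
            else:
                result.append(p)
        return result

    def normalize_relative(start_path, relative_paths):
        # Illustrative only: expand $ORIGIN back into the binary's directory.
        origin = os.path.dirname(start_path)
        return [
            os.path.normpath(p.replace("$ORIGIN", origin)) if "$ORIGIN" in p else p
            for p in relative_paths
        ]

    # Mirrors the parametrized expectations in the tests above.
    print(make_relative("/usr/bin/test", "/usr", ["/usr/lib", "/usr/lib64", "/opt/local/lib"]))
    print(normalize_relative("/usr/bin/test", ["$ORIGIN/../lib", "$ORIGIN/../lib64", "/opt/local/lib"]))
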
diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py
index 5b3ebceb10..1ab0a7dd5c 100644
--- a/lib/spack/spack/test/repo.py
+++ b/lib/spack/spack/test/repo.py
@@ -14,43 +14,45 @@ import spack.repo
@pytest.fixture()
def extra_repo(tmpdir_factory):
- repo_namespace = 'extra_test_repo'
+ repo_namespace = "extra_test_repo"
repo_dir = tmpdir_factory.mktemp(repo_namespace)
- repo_dir.ensure('packages', dir=True)
+ repo_dir.ensure("packages", dir=True)
- with open(str(repo_dir.join('repo.yaml')), 'w') as f:
- f.write("""
+ with open(str(repo_dir.join("repo.yaml")), "w") as f:
+ f.write(
+ """
repo:
namespace: extra_test_repo
-""")
+"""
+ )
return spack.repo.Repo(str(repo_dir))
def test_repo_getpkg(mutable_mock_repo):
- mutable_mock_repo.get_pkg_class('a')
- mutable_mock_repo.get_pkg_class('builtin.mock.a')
+ mutable_mock_repo.get_pkg_class("a")
+ mutable_mock_repo.get_pkg_class("builtin.mock.a")
def test_repo_multi_getpkg(mutable_mock_repo, extra_repo):
mutable_mock_repo.put_first(extra_repo)
- mutable_mock_repo.get_pkg_class('a')
- mutable_mock_repo.get_pkg_class('builtin.mock.a')
+ mutable_mock_repo.get_pkg_class("a")
+ mutable_mock_repo.get_pkg_class("builtin.mock.a")
def test_repo_multi_getpkgclass(mutable_mock_repo, extra_repo):
mutable_mock_repo.put_first(extra_repo)
- mutable_mock_repo.get_pkg_class('a')
- mutable_mock_repo.get_pkg_class('builtin.mock.a')
+ mutable_mock_repo.get_pkg_class("a")
+ mutable_mock_repo.get_pkg_class("builtin.mock.a")
def test_repo_pkg_with_unknown_namespace(mutable_mock_repo):
with pytest.raises(spack.repo.UnknownNamespaceError):
- mutable_mock_repo.get_pkg_class('unknown.a')
+ mutable_mock_repo.get_pkg_class("unknown.a")
def test_repo_unknown_pkg(mutable_mock_repo):
with pytest.raises(spack.repo.UnknownPackageError):
- mutable_mock_repo.get_pkg_class('builtin.mock.nonexistentpackage')
+ mutable_mock_repo.get_pkg_class("builtin.mock.nonexistentpackage")
@pytest.mark.maybeslow
@@ -58,49 +60,48 @@ def test_repo_unknown_pkg(mutable_mock_repo):
sys.version_info < (3, 5), reason="Test started failing spuriously on Python 2.7"
)
def test_repo_last_mtime():
- latest_mtime = max(os.path.getmtime(p.module.__file__)
- for p in spack.repo.path.all_package_classes())
+ latest_mtime = max(
+ os.path.getmtime(p.module.__file__) for p in spack.repo.path.all_package_classes()
+ )
assert spack.repo.path.last_mtime() == latest_mtime
def test_repo_invisibles(mutable_mock_repo, extra_repo):
- with open(os.path.join(extra_repo.root, 'packages', '.invisible'), 'w'):
+ with open(os.path.join(extra_repo.root, "packages", ".invisible"), "w"):
pass
extra_repo.all_package_names()
-@pytest.mark.parametrize('attr_name,exists', [
- ('cmake', True),
- ('__sphinx_mock__', False)
-])
-@pytest.mark.regression('20661')
+@pytest.mark.parametrize("attr_name,exists", [("cmake", True), ("__sphinx_mock__", False)])
+@pytest.mark.regression("20661")
def test_namespace_hasattr(attr_name, exists, mutable_mock_repo):
# Check that we don't fail on 'hasattr' checks because
# of a custom __getattr__ implementation
- nms = spack.repo.SpackNamespace('spack.pkg.builtin.mock')
+ nms = spack.repo.SpackNamespace("spack.pkg.builtin.mock")
assert hasattr(nms, attr_name) == exists
-@pytest.mark.regression('24552')
+@pytest.mark.regression("24552")
def test_all_package_names_is_cached_correctly():
- assert 'mpi' in spack.repo.all_package_names(include_virtuals=True)
- assert 'mpi' not in spack.repo.all_package_names(include_virtuals=False)
+ assert "mpi" in spack.repo.all_package_names(include_virtuals=True)
+ assert "mpi" not in spack.repo.all_package_names(include_virtuals=False)
-@pytest.mark.regression('29203')
+@pytest.mark.regression("29203")
def test_use_repositories_doesnt_change_class():
"""Test that we don't create the same package module and class multiple times
when swapping repositories.
"""
- zlib_cls_outer = spack.repo.path.get_pkg_class('zlib')
+ zlib_cls_outer = spack.repo.path.get_pkg_class("zlib")
current_paths = [r.root for r in spack.repo.path.repos]
with spack.repo.use_repositories(*current_paths):
- zlib_cls_inner = spack.repo.path.get_pkg_class('zlib')
+ zlib_cls_inner = spack.repo.path.get_pkg_class("zlib")
assert id(zlib_cls_inner) == id(zlib_cls_outer)
def test_import_repo_prefixes_as_python_modules(mock_packages):
import spack.pkg.builtin.mock
+
assert isinstance(spack.pkg, spack.repo.SpackNamespace)
assert isinstance(spack.pkg.builtin, spack.repo.SpackNamespace)
assert isinstance(spack.pkg.builtin.mock, spack.repo.SpackNamespace)
@@ -108,15 +109,15 @@ def test_import_repo_prefixes_as_python_modules(mock_packages):
def test_absolute_import_spack_packages_as_python_modules(mock_packages):
import spack.pkg.builtin.mock.mpileaks
- assert hasattr(spack.pkg.builtin.mock, 'mpileaks')
- assert hasattr(spack.pkg.builtin.mock.mpileaks, 'Mpileaks')
- assert isinstance(spack.pkg.builtin.mock.mpileaks.Mpileaks,
- spack.package_base.PackageMeta)
- assert issubclass(spack.pkg.builtin.mock.mpileaks.Mpileaks,
- spack.package_base.Package)
+
+ assert hasattr(spack.pkg.builtin.mock, "mpileaks")
+ assert hasattr(spack.pkg.builtin.mock.mpileaks, "Mpileaks")
+ assert isinstance(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package_base.PackageMeta)
+ assert issubclass(spack.pkg.builtin.mock.mpileaks.Mpileaks, spack.package_base.Package)
def test_relative_import_spack_packages_as_python_modules(mock_packages):
from spack.pkg.builtin.mock.mpileaks import Mpileaks
+
assert isinstance(Mpileaks, spack.package_base.PackageMeta)
assert issubclass(Mpileaks, spack.package_base.Package)
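
The repository tests above rely on two guarantees from the package repository layer: spack.repo.use_repositories is a context manager that swaps the active repositories, and get_pkg_class returns a cached class object, so the same package class is not re-created while repositories are swapped (the point of the regression test for issue 29203). A short usage sketch restating those guarantees, assuming a Spack checkout is importable:

    import spack.repo

    # The class object for a package is created once and cached ...
    zlib_before = spack.repo.path.get_pkg_class("zlib")

    # ... even when the same repositories are re-activated through the
    # use_repositories() context manager.
    current_roots = [r.root for r in spack.repo.path.repos]
    with spack.repo.use_repositories(*current_roots):
        zlib_inside = spack.repo.path.get_pkg_class("zlib")

    assert zlib_inside is zlib_before  # same class object, not a fresh copy
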
diff --git a/lib/spack/spack/test/rewiring.py b/lib/spack/spack/test/rewiring.py
index f883c222a5..936ba1e78a 100644
--- a/lib/spack/spack/test/rewiring.py
+++ b/lib/spack/spack/test/rewiring.py
@@ -14,19 +14,19 @@ import spack.store
from spack.spec import Spec
from spack.test.relocate import text_in_bin
-args = ['strings', 'file']
-if sys.platform == 'darwin':
- args.extend(['/usr/bin/clang++', 'install_name_tool'])
+args = ["strings", "file"]
+if sys.platform == "darwin":
+ args.extend(["/usr/bin/clang++", "install_name_tool"])
else:
- args.extend(['/usr/bin/g++', 'patchelf'])
+ args.extend(["/usr/bin/g++", "patchelf"])
@pytest.mark.requires_executables(*args)
-@pytest.mark.parametrize('transitive', [True, False])
+@pytest.mark.parametrize("transitive", [True, False])
def test_rewire_db(mock_fetch, install_mockery, transitive):
"""Tests basic rewiring without binary executables."""
- spec = Spec('splice-t^splice-h~foo').concretized()
- dep = Spec('splice-h+foo').concretized()
+ spec = Spec("splice-t^splice-h~foo").concretized()
+ dep = Spec("splice-h+foo").concretized()
spec.package.do_install()
dep.package.do_install()
spliced_spec = spec.splice(dep, transitive=transitive)
@@ -45,18 +45,18 @@ def test_rewire_db(mock_fetch, install_mockery, transitive):
# check the file in the prefix has the correct paths
for node in spliced_spec.traverse(root=True):
text_file_path = os.path.join(node.prefix, node.name)
- with open(text_file_path, 'r') as f:
+ with open(text_file_path, "r") as f:
text = f.read()
for modded_spec in node.traverse(root=True):
assert modded_spec.prefix in text
@pytest.mark.requires_executables(*args)
-@pytest.mark.parametrize('transitive', [True, False])
+@pytest.mark.parametrize("transitive", [True, False])
def test_rewire_bin(mock_fetch, install_mockery, transitive):
"""Tests basic rewiring with binary executables."""
- spec = Spec('quux').concretized()
- dep = Spec('garply cflags=-g').concretized()
+ spec = Spec("quux").concretized()
+ dep = Spec("garply cflags=-g").concretized()
spec.package.do_install()
dep.package.do_install()
spliced_spec = spec.splice(dep, transitive=transitive)
@@ -73,9 +73,7 @@ def test_rewire_bin(mock_fetch, install_mockery, transitive):
assert installed_in_db
# check the file in the prefix has the correct paths
- bin_names = {'garply': 'garplinator',
- 'corge': 'corgegator',
- 'quux': 'quuxifier'}
+ bin_names = {"garply": "garplinator", "corge": "corgegator", "quux": "quuxifier"}
for node in spliced_spec.traverse(root=True):
for dep in node.traverse(root=True):
bin_file_path = os.path.join(dep.prefix.bin, bin_names[dep.name])
@@ -86,8 +84,8 @@ def test_rewire_bin(mock_fetch, install_mockery, transitive):
def test_rewire_writes_new_metadata(mock_fetch, install_mockery):
"""Tests that new metadata was written during a rewire.
Accuracy of metadata is left to other tests."""
- spec = Spec('quux').concretized()
- dep = Spec('garply cflags=-g').concretized()
+ spec = Spec("quux").concretized()
+ dep = Spec("garply cflags=-g").concretized()
spec.package.do_install()
dep.package.do_install()
spliced_spec = spec.splice(dep, transitive=True)
@@ -96,35 +94,35 @@ def test_rewire_writes_new_metadata(mock_fetch, install_mockery):
# test install manifests
for node in spliced_spec.traverse(root=True):
spack.store.layout.ensure_installed(node)
- manifest_file_path = os.path.join(node.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ manifest_file_path = os.path.join(
+ node.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
assert os.path.exists(manifest_file_path)
orig_node = spec[node.name]
- orig_manifest_file_path = os.path.join(orig_node.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ orig_manifest_file_path = os.path.join(
+ orig_node.prefix,
+ spack.store.layout.metadata_dir,
+ spack.store.layout.manifest_file_name,
+ )
assert os.path.exists(orig_manifest_file_path)
- assert not filecmp.cmp(orig_manifest_file_path, manifest_file_path,
- shallow=False)
- specfile_path = os.path.join(node.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.spec_file_name)
+ assert not filecmp.cmp(orig_manifest_file_path, manifest_file_path, shallow=False)
+ specfile_path = os.path.join(
+ node.prefix, spack.store.layout.metadata_dir, spack.store.layout.spec_file_name
+ )
assert os.path.exists(specfile_path)
- orig_specfile_path = os.path.join(orig_node.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.spec_file_name)
+ orig_specfile_path = os.path.join(
+ orig_node.prefix, spack.store.layout.metadata_dir, spack.store.layout.spec_file_name
+ )
assert os.path.exists(orig_specfile_path)
- assert not filecmp.cmp(orig_specfile_path, specfile_path,
- shallow=False)
+ assert not filecmp.cmp(orig_specfile_path, specfile_path, shallow=False)
@pytest.mark.requires_executables(*args)
-@pytest.mark.parametrize('transitive', [True, False])
+@pytest.mark.parametrize("transitive", [True, False])
def test_uninstall_rewired_spec(mock_fetch, install_mockery, transitive):
"""Test that rewired packages can be uninstalled as normal."""
- spec = Spec('quux').concretized()
- dep = Spec('garply cflags=-g').concretized()
+ spec = Spec("quux").concretized()
+ dep = Spec("garply cflags=-g").concretized()
spec.package.do_install()
dep.package.do_install()
spliced_spec = spec.splice(dep, transitive=transitive)
@@ -138,9 +136,11 @@ def test_uninstall_rewired_spec(mock_fetch, install_mockery, transitive):
def test_rewire_not_installed_fails(mock_fetch, install_mockery):
"""Tests error when an attempt is made to rewire a package that was not
previously installed."""
- spec = Spec('quux').concretized()
- dep = Spec('garply cflags=-g').concretized()
+ spec = Spec("quux").concretized()
+ dep = Spec("garply cflags=-g").concretized()
spliced_spec = spec.splice(dep, False)
- with pytest.raises(spack.rewiring.PackageNotInstalledError,
- match="failed due to missing install of build spec"):
+ with pytest.raises(
+ spack.rewiring.PackageNotInstalledError,
+ match="failed due to missing install of build spec",
+ ):
spack.rewiring.rewire(spliced_spec)
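
All of the rewiring tests above follow the same flow: concretize a spec and a replacement dependency, install both, splice the dependency into the spec's DAG, and then rewire the spliced spec so its prefixes and metadata point at the replacement. A condensed sketch of that flow, using only the calls exercised by the tests and assuming an environment comparable to the test fixtures (mock packages, install mockery):

    import spack.rewiring
    from spack.spec import Spec

    # Concretize the target spec and the dependency to splice in.
    spec = Spec("quux").concretized()
    dep = Spec("garply cflags=-g").concretized()

    # Both must be installed first; rewiring an uninstalled build spec
    # raises spack.rewiring.PackageNotInstalledError.
    spec.package.do_install()
    dep.package.do_install()

    # Build the spliced DAG and rewire it into place.
    spliced = spec.splice(dep, transitive=True)
    spack.rewiring.rewire(spliced)
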
diff --git a/lib/spack/spack/test/s3_fetch.py b/lib/spack/spack/test/s3_fetch.py
index df918ab669..4c14390112 100644
--- a/lib/spack/spack/test/s3_fetch.py
+++ b/lib/spack/spack/test/s3_fetch.py
@@ -12,21 +12,21 @@ import spack.fetch_strategy as spack_fs
import spack.stage as spack_stage
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_s3fetchstrategy_sans_url(_fetch_method):
"""Ensure constructor with no URL fails."""
- with spack_config.override('config:url_fetch_method', _fetch_method):
+ with spack_config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(ValueError):
spack_fs.S3FetchStrategy(None)
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_s3fetchstrategy_bad_url(tmpdir, _fetch_method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
- with spack_config.override('config:url_fetch_method', _fetch_method):
- fetcher = spack_fs.S3FetchStrategy(url='file:///does-not-exist')
+ with spack_config.override("config:url_fetch_method", _fetch_method):
+ fetcher = spack_fs.S3FetchStrategy(url="file:///does-not-exist")
assert fetcher is not None
with spack_stage.Stage(fetcher, path=testpath) as stage:
@@ -36,19 +36,20 @@ def test_s3fetchstrategy_bad_url(tmpdir, _fetch_method):
fetcher.fetch()
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_s3fetchstrategy_downloaded(tmpdir, _fetch_method):
"""Ensure fetch with archive file already downloaded is a noop."""
testpath = str(tmpdir)
- archive = os.path.join(testpath, 's3.tar.gz')
+ archive = os.path.join(testpath, "s3.tar.gz")
+
+ with spack_config.override("config:url_fetch_method", _fetch_method):
- with spack_config.override('config:url_fetch_method', _fetch_method):
class Archived_S3FS(spack_fs.S3FetchStrategy):
@property
def archive_file(self):
return archive
- url = 's3:///{0}'.format(archive)
+ url = "s3:///{0}".format(archive)
fetcher = Archived_S3FS(url=url)
with spack_stage.Stage(fetcher, path=testpath):
fetcher.fetch()
diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py
index 2a4189e7f8..ebc9a85347 100644
--- a/lib/spack/spack/test/sbang.py
+++ b/lib/spack/spack/test/sbang.py
@@ -23,146 +23,146 @@ import spack.store
import spack.util.spack_yaml as syaml
from spack.util.executable import which
-if sys.platform != 'win32':
+if sys.platform != "win32":
import grp
-pytestmark = pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows")
too_long = sbang.system_shebang_limit + 1
-short_line = "#!/this/is/short/bin/bash\n"
-long_line = "#!/this/" + ('x' * too_long) + "/is/long\n"
+short_line = "#!/this/is/short/bin/bash\n"
+long_line = "#!/this/" + ("x" * too_long) + "/is/long\n"
-lua_line = "#!/this/" + ('x' * too_long) + "/is/lua\n"
-lua_in_text = ("line\n") * 100 + "lua\n" + ("line\n" * 100)
-lua_line_patched = "--!/this/" + ('x' * too_long) + "/is/lua\n"
+lua_line = "#!/this/" + ("x" * too_long) + "/is/lua\n"
+lua_in_text = ("line\n") * 100 + "lua\n" + ("line\n" * 100)
+lua_line_patched = "--!/this/" + ("x" * too_long) + "/is/lua\n"
-luajit_line = "#!/this/" + ('x' * too_long) + "/is/luajit\n"
-luajit_in_text = ("line\n") * 100 + "lua\n" + ("line\n" * 100)
-luajit_line_patched = "--!/this/" + ('x' * too_long) + "/is/luajit\n"
+luajit_line = "#!/this/" + ("x" * too_long) + "/is/luajit\n"
+luajit_in_text = ("line\n") * 100 + "lua\n" + ("line\n" * 100)
+luajit_line_patched = "--!/this/" + ("x" * too_long) + "/is/luajit\n"
-node_line = "#!/this/" + ('x' * too_long) + "/is/node\n"
-node_in_text = ("line\n") * 100 + "lua\n" + ("line\n" * 100)
-node_line_patched = "//!/this/" + ('x' * too_long) + "/is/node\n"
+node_line = "#!/this/" + ("x" * too_long) + "/is/node\n"
+node_in_text = ("line\n") * 100 + "lua\n" + ("line\n" * 100)
+node_line_patched = "//!/this/" + ("x" * too_long) + "/is/node\n"
-php_line = "#!/this/" + ('x' * too_long) + "/is/php\n"
-php_in_text = ("line\n") * 100 + "php\n" + ("line\n" * 100)
-php_line_patched = "<?php #!/this/" + ('x' * too_long) + "/is/php\n"
+php_line = "#!/this/" + ("x" * too_long) + "/is/php\n"
+php_in_text = ("line\n") * 100 + "php\n" + ("line\n" * 100)
+php_line_patched = "<?php #!/this/" + ("x" * too_long) + "/is/php\n"
php_line_patched2 = "?>\n"
-sbang_line = '#!/bin/sh %s/bin/sbang\n' % spack.store.store.unpadded_root
-last_line = "last!\n"
+sbang_line = "#!/bin/sh %s/bin/sbang\n" % spack.store.store.unpadded_root
+last_line = "last!\n"
@pytest.fixture # type: ignore[no-redef]
def sbang_line():
- yield '#!/bin/sh %s/bin/sbang\n' % spack.store.layout.root
+ yield "#!/bin/sh %s/bin/sbang\n" % spack.store.layout.root
class ScriptDirectory(object):
"""Directory full of test scripts to run sbang instrumentation on."""
+
def __init__(self, sbang_line):
self.tempdir = tempfile.mkdtemp()
- self.directory = os.path.join(self.tempdir, 'dir')
+ self.directory = os.path.join(self.tempdir, "dir")
fs.mkdirp(self.directory)
# Script with short shebang
- self.short_shebang = os.path.join(self.tempdir, 'short')
- with open(self.short_shebang, 'w') as f:
+ self.short_shebang = os.path.join(self.tempdir, "short")
+ with open(self.short_shebang, "w") as f:
f.write(short_line)
f.write(last_line)
self.make_executable(self.short_shebang)
# Script with long shebang
- self.long_shebang = os.path.join(self.tempdir, 'long')
- with open(self.long_shebang, 'w') as f:
+ self.long_shebang = os.path.join(self.tempdir, "long")
+ with open(self.long_shebang, "w") as f:
f.write(long_line)
f.write(last_line)
self.make_executable(self.long_shebang)
# Non-executable script with long shebang
- self.nonexec_long_shebang = os.path.join(self.tempdir, 'nonexec_long')
- with open(self.nonexec_long_shebang, 'w') as f:
+ self.nonexec_long_shebang = os.path.join(self.tempdir, "nonexec_long")
+ with open(self.nonexec_long_shebang, "w") as f:
f.write(long_line)
f.write(last_line)
# Lua script with long shebang
- self.lua_shebang = os.path.join(self.tempdir, 'lua')
- with open(self.lua_shebang, 'w') as f:
+ self.lua_shebang = os.path.join(self.tempdir, "lua")
+ with open(self.lua_shebang, "w") as f:
f.write(lua_line)
f.write(last_line)
self.make_executable(self.lua_shebang)
# Lua occurring in text, not in shebang
- self.lua_textbang = os.path.join(self.tempdir, 'lua_in_text')
- with open(self.lua_textbang, 'w') as f:
+ self.lua_textbang = os.path.join(self.tempdir, "lua_in_text")
+ with open(self.lua_textbang, "w") as f:
f.write(short_line)
f.write(lua_in_text)
f.write(last_line)
self.make_executable(self.lua_textbang)
# Luajit script with long shebang
- self.luajit_shebang = os.path.join(self.tempdir, 'luajit')
- with open(self.luajit_shebang, 'w') as f:
+ self.luajit_shebang = os.path.join(self.tempdir, "luajit")
+ with open(self.luajit_shebang, "w") as f:
f.write(luajit_line)
f.write(last_line)
self.make_executable(self.luajit_shebang)
    # Luajit occurring in text, not in shebang
- self.luajit_textbang = os.path.join(self.tempdir, 'luajit_in_text')
- with open(self.luajit_textbang, 'w') as f:
+ self.luajit_textbang = os.path.join(self.tempdir, "luajit_in_text")
+ with open(self.luajit_textbang, "w") as f:
f.write(short_line)
f.write(luajit_in_text)
f.write(last_line)
self.make_executable(self.luajit_textbang)
# Node script with long shebang
- self.node_shebang = os.path.join(self.tempdir, 'node')
- with open(self.node_shebang, 'w') as f:
+ self.node_shebang = os.path.join(self.tempdir, "node")
+ with open(self.node_shebang, "w") as f:
f.write(node_line)
f.write(last_line)
self.make_executable(self.node_shebang)
    # Node occurring in text, not in shebang
- self.node_textbang = os.path.join(self.tempdir, 'node_in_text')
- with open(self.node_textbang, 'w') as f:
+ self.node_textbang = os.path.join(self.tempdir, "node_in_text")
+ with open(self.node_textbang, "w") as f:
f.write(short_line)
f.write(node_in_text)
f.write(last_line)
self.make_executable(self.node_textbang)
# php script with long shebang
- self.php_shebang = os.path.join(self.tempdir, 'php')
- with open(self.php_shebang, 'w') as f:
+ self.php_shebang = os.path.join(self.tempdir, "php")
+ with open(self.php_shebang, "w") as f:
f.write(php_line)
f.write(last_line)
self.make_executable(self.php_shebang)
    # php occurring in text, not in shebang
- self.php_textbang = os.path.join(self.tempdir, 'php_in_text')
- with open(self.php_textbang, 'w') as f:
+ self.php_textbang = os.path.join(self.tempdir, "php_in_text")
+ with open(self.php_textbang, "w") as f:
f.write(short_line)
f.write(php_in_text)
f.write(last_line)
self.make_executable(self.php_textbang)
# Script already using sbang.
- self.has_sbang = os.path.join(self.tempdir, 'shebang')
- with open(self.has_sbang, 'w') as f:
+ self.has_sbang = os.path.join(self.tempdir, "shebang")
+ with open(self.has_sbang, "w") as f:
f.write(sbang_line)
f.write(long_line)
f.write(last_line)
self.make_executable(self.has_sbang)
# Fake binary file.
- self.binary = os.path.join(self.tempdir, 'binary')
- tar = which('tar', required=True)
- tar('czf', self.binary, self.has_sbang)
+ self.binary = os.path.join(self.tempdir, "binary")
+ tar = which("tar", required=True)
+ tar("czf", self.binary, self.has_sbang)
self.make_executable(self.binary)
def destroy(self):
@@ -171,8 +171,7 @@ class ScriptDirectory(object):
def make_executable(self, path):
# make a file executable
st = os.stat(path)
- executable_mode = st.st_mode \
- | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+ executable_mode = st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
os.chmod(path, executable_mode)
st = os.stat(path)
@@ -186,17 +185,20 @@ def script_dir(sbang_line):
sdir.destroy()
-@pytest.mark.parametrize('shebang,interpreter', [
- (b'#!/path/to/interpreter argument\n', b'/path/to/interpreter'),
- (b'#! /path/to/interpreter truncated-argum', b'/path/to/interpreter'),
- (b'#! \t \t/path/to/interpreter\t \targument', b'/path/to/interpreter'),
- (b'#! \t \t /path/to/interpreter', b'/path/to/interpreter'),
- (b'#!/path/to/interpreter\0', b'/path/to/interpreter'),
- (b'#!/path/to/interpreter multiple args\n', b'/path/to/interpreter'),
- (b'#!\0/path/to/interpreter arg\n', None),
- (b'#!\n/path/to/interpreter arg\n', None),
- (b'#!', None)
-])
+@pytest.mark.parametrize(
+ "shebang,interpreter",
+ [
+ (b"#!/path/to/interpreter argument\n", b"/path/to/interpreter"),
+ (b"#! /path/to/interpreter truncated-argum", b"/path/to/interpreter"),
+ (b"#! \t \t/path/to/interpreter\t \targument", b"/path/to/interpreter"),
+ (b"#! \t \t /path/to/interpreter", b"/path/to/interpreter"),
+ (b"#!/path/to/interpreter\0", b"/path/to/interpreter"),
+ (b"#!/path/to/interpreter multiple args\n", b"/path/to/interpreter"),
+ (b"#!\0/path/to/interpreter arg\n", None),
+ (b"#!\n/path/to/interpreter arg\n", None),
+ (b"#!", None),
+ ],
+)
def test_shebang_interpreter_regex(shebang, interpreter):
    assert sbang.get_interpreter(shebang) == interpreter
@@ -205,50 +207,48 @@ def test_shebang_handling(script_dir, sbang_line):
sbang.filter_shebangs_in_directory(script_dir.tempdir)
# Make sure this is untouched
- with open(script_dir.short_shebang, 'r') as f:
+ with open(script_dir.short_shebang, "r") as f:
assert f.readline() == short_line
assert f.readline() == last_line
# Make sure this got patched.
- with open(script_dir.long_shebang, 'r') as f:
+ with open(script_dir.long_shebang, "r") as f:
assert f.readline() == sbang_line
assert f.readline() == long_line
assert f.readline() == last_line
# Make sure this is untouched
- with open(script_dir.nonexec_long_shebang, 'r') as f:
+ with open(script_dir.nonexec_long_shebang, "r") as f:
assert f.readline() == long_line
assert f.readline() == last_line
# Make sure this got patched.
- with open(script_dir.lua_shebang, 'r') as f:
+ with open(script_dir.lua_shebang, "r") as f:
assert f.readline() == sbang_line
assert f.readline() == lua_line_patched
assert f.readline() == last_line
# Make sure this got patched.
- with open(script_dir.luajit_shebang, 'r') as f:
+ with open(script_dir.luajit_shebang, "r") as f:
assert f.readline() == sbang_line
assert f.readline() == luajit_line_patched
assert f.readline() == last_line
# Make sure this got patched.
- with open(script_dir.node_shebang, 'r') as f:
+ with open(script_dir.node_shebang, "r") as f:
assert f.readline() == sbang_line
assert f.readline() == node_line_patched
assert f.readline() == last_line
- assert filecmp.cmp(script_dir.lua_textbang,
- os.path.join(script_dir.tempdir, 'lua_in_text'))
- assert filecmp.cmp(script_dir.luajit_textbang,
- os.path.join(script_dir.tempdir, 'luajit_in_text'))
- assert filecmp.cmp(script_dir.node_textbang,
- os.path.join(script_dir.tempdir, 'node_in_text'))
- assert filecmp.cmp(script_dir.php_textbang,
- os.path.join(script_dir.tempdir, 'php_in_text'))
+ assert filecmp.cmp(script_dir.lua_textbang, os.path.join(script_dir.tempdir, "lua_in_text"))
+ assert filecmp.cmp(
+ script_dir.luajit_textbang, os.path.join(script_dir.tempdir, "luajit_in_text")
+ )
+ assert filecmp.cmp(script_dir.node_textbang, os.path.join(script_dir.tempdir, "node_in_text"))
+ assert filecmp.cmp(script_dir.php_textbang, os.path.join(script_dir.tempdir, "php_in_text"))
# Make sure this is untouched
- with open(script_dir.has_sbang, 'r') as f:
+ with open(script_dir.has_sbang, "r") as f:
assert f.readline() == sbang_line
assert f.readline() == long_line
assert f.readline() == last_line
@@ -266,29 +266,35 @@ def test_shebang_handles_non_writable_files(script_dir, sbang_line):
assert oct(not_writable_mode) == oct(st.st_mode)
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def configure_group_perms():
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
permissions:
read: world
write: group
group: {0}
-""".format(grp.getgrgid(os.getegid()).gr_name))
- spack.config.set('packages', conf, scope='user')
+""".format(
+ grp.getgrgid(os.getegid()).gr_name
+ )
+ )
+ spack.config.set("packages", conf, scope="user")
yield
-@pytest.fixture(scope='function')
+@pytest.fixture(scope="function")
def configure_user_perms():
- conf = syaml.load_config("""\
+ conf = syaml.load_config(
+ """\
all:
permissions:
read: world
write: user
-""")
- spack.config.set('packages', conf, scope='user')
+"""
+ )
+ spack.config.set("packages", conf, scope="user")
yield
@@ -302,18 +308,18 @@ def check_sbang_installation(group=False):
assert fs.is_exe(sbang_path)
status = os.stat(sbang_bin_dir)
- mode = (status.st_mode & 0o777)
+ mode = status.st_mode & 0o777
if group:
- assert mode == 0o775, 'Unexpected {0}'.format(oct(mode))
+ assert mode == 0o775, "Unexpected {0}".format(oct(mode))
else:
- assert mode == 0o755, 'Unexpected {0}'.format(oct(mode))
+ assert mode == 0o755, "Unexpected {0}".format(oct(mode))
status = os.stat(sbang_path)
- mode = (status.st_mode & 0o777)
+ mode = status.st_mode & 0o777
if group:
- assert mode == 0o775, 'Unexpected {0}'.format(oct(mode))
+ assert mode == 0o775, "Unexpected {0}".format(oct(mode))
else:
- assert mode == 0o755, 'Unexpected {0}'.format(oct(mode))
+ assert mode == 0o755, "Unexpected {0}".format(oct(mode))
def run_test_install_sbang(group):
@@ -349,20 +355,20 @@ def test_install_user_sbang(install_mockery, configure_user_perms):
def test_install_sbang_too_long(tmpdir):
root = str(tmpdir)
- num_extend = sbang.system_shebang_limit - len(root) - len('/bin/sbang')
+ num_extend = sbang.system_shebang_limit - len(root) - len("/bin/sbang")
long_path = root
while num_extend > 1:
add = min(num_extend, 255)
- long_path = os.path.join(long_path, 'e' * add)
+ long_path = os.path.join(long_path, "e" * add)
num_extend -= add
with spack.store.use_store(spack.store.Store(long_path)):
with pytest.raises(sbang.SbangPathError) as exc_info:
sbang.sbang_install_path()
err = str(exc_info.value)
- assert 'root is too long' in err
- assert 'exceeds limit' in err
- assert 'cannot patch' in err
+ assert "root is too long" in err
+ assert "exceeds limit" in err
+ assert "cannot patch" in err
def test_sbang_hook_skips_nonexecutable_blobs(tmpdir):
@@ -370,41 +376,40 @@ def test_sbang_hook_skips_nonexecutable_blobs(tmpdir):
# consisting of invalid UTF-8. The latter is technically not really necessary for
# the test, but binary blobs accidentally starting with b'#!' usually do not contain
# valid UTF-8, so we also ensure that Spack does not attempt to decode as UTF-8.
- contents = b'#!' + b'\x80' * sbang.system_shebang_limit
- file = str(tmpdir.join('non-executable.sh'))
- with open(file, 'wb') as f:
+ contents = b"#!" + b"\x80" * sbang.system_shebang_limit
+ file = str(tmpdir.join("non-executable.sh"))
+ with open(file, "wb") as f:
f.write(contents)
sbang.filter_shebangs_in_directory(str(tmpdir))
# Make sure there is no sbang shebang.
- with open(file, 'rb') as f:
- assert b'sbang' not in f.readline()
+ with open(file, "rb") as f:
+ assert b"sbang" not in f.readline()
def test_sbang_handles_non_utf8_files(tmpdir):
    # We have an executable whose contents include a copyright sign
- contents = (b'#!' + b'\xa9' * sbang.system_shebang_limit +
- b'\nand another symbol: \xa9')
+ contents = b"#!" + b"\xa9" * sbang.system_shebang_limit + b"\nand another symbol: \xa9"
# Make sure it's indeed valid latin1 but invalid utf-8.
- assert contents.decode('latin1')
+ assert contents.decode("latin1")
with pytest.raises(UnicodeDecodeError):
- contents.decode('utf-8')
+ contents.decode("utf-8")
# Put it in an executable file
- file = str(tmpdir.join('latin1.sh'))
- with open(file, 'wb') as f:
+ file = str(tmpdir.join("latin1.sh"))
+ with open(file, "wb") as f:
f.write(contents)
# Run sbang
assert sbang.filter_shebang(file)
- with open(file, 'rb') as f:
+ with open(file, "rb") as f:
new_contents = f.read()
assert contents in new_contents
- assert b'sbang' in new_contents
+ assert b"sbang" in new_contents
@pytest.fixture
@@ -418,23 +423,23 @@ def shebang_limits_system_8_spack_16():
def test_shebang_exceeds_spack_shebang_limit(shebang_limits_system_8_spack_16, tmpdir):
"""Tests whether shebangs longer than Spack's limit are skipped"""
- file = str(tmpdir.join('longer_than_spack_limit.sh'))
- with open(file, 'wb') as f:
- f.write(b'#!' + b'x' * sbang.spack_shebang_limit)
+ file = str(tmpdir.join("longer_than_spack_limit.sh"))
+ with open(file, "wb") as f:
+ f.write(b"#!" + b"x" * sbang.spack_shebang_limit)
# Then Spack shouldn't try to add a shebang
assert not sbang.filter_shebang(file)
- with open(file, 'rb') as f:
- assert b'sbang' not in f.read()
+ with open(file, "rb") as f:
+ assert b"sbang" not in f.read()
def test_sbang_hook_handles_non_writable_files_preserving_permissions(tmpdir):
- path = str(tmpdir.join('file.sh'))
- with open(path, 'w') as f:
+ path = str(tmpdir.join("file.sh"))
+ with open(path, "w") as f:
f.write(long_line)
os.chmod(path, 0o555)
sbang.filter_shebang(path)
- with open(path, 'r') as f:
- assert 'sbang' in f.readline()
+ with open(path, "r") as f:
+ assert "sbang" in f.readline()
assert os.stat(path).st_mode & 0o777 == 0o555
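
The sbang tests above all deal with the operating system's limit on shebang line length: executable scripts whose #! line exceeds sbang.system_shebang_limit get a short "#!/bin/sh <store>/bin/sbang" line prepended, with the original interpreter line kept (and commented appropriately for lua, luajit, node and php), while short shebangs, non-executable files and binary blobs are left untouched. Below is a minimal sketch of the core rewrite only; it is not the actual sbang.filter_shebang implementation and ignores permissions, binary detection and the per-interpreter comment styles. The limit and install root used here are illustrative values.

    SYSTEM_SHEBANG_LIMIT = 127                        # illustrative; the real limit is platform-dependent
    SBANG_LINE = "#!/bin/sh /opt/spack/bin/sbang\n"   # hypothetical store root

    def patch_long_shebang(path):
        # Prepend an sbang line when the original shebang exceeds the limit.
        with open(path, "r") as f:
            first = f.readline()
            rest = f.read()
        if not first.startswith("#!") or len(first) <= SYSTEM_SHEBANG_LIMIT:
            return False                              # nothing to patch
        with open(path, "w") as f:
            f.write(SBANG_LINE + first + rest)        # sbang re-execs the real interpreter
        return True
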
diff --git a/lib/spack/spack/test/schema.py b/lib/spack/spack/test/schema.py
index 8f9d0132ad..214a2e52fd 100644
--- a/lib/spack/spack/test/schema.py
+++ b/lib/spack/spack/test/schema.py
@@ -17,100 +17,89 @@ import spack.schema
@pytest.fixture()
def validate_spec_schema():
return {
- 'type': 'object',
- 'validate_spec': True,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'string'
- }
- }
+ "type": "object",
+ "validate_spec": True,
+ "patternProperties": {r"\w[\w-]*": {"type": "string"}},
}
@pytest.fixture()
def module_suffixes_schema():
return {
- 'type': 'object',
- 'properties': {
- 'tcl': {
- 'type': 'object',
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'object',
- 'properties': {
- 'suffixes': {
- 'validate_spec': True,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'string',
+ "type": "object",
+ "properties": {
+ "tcl": {
+ "type": "object",
+ "patternProperties": {
+ r"\w[\w-]*": {
+ "type": "object",
+ "properties": {
+ "suffixes": {
+ "validate_spec": True,
+ "patternProperties": {
+ r"\w[\w-]*": {
+ "type": "string",
}
- }
+ },
}
- }
+ },
}
- }
+ },
}
- }
+ },
}
-@pytest.fixture(scope='module')
+@pytest.fixture(scope="module")
def meta_schema():
"""Meta schema for JSON schema validation (Draft 4)"""
- meta_schema_file = os.path.join(
- spack.paths.test_path, 'data', 'jsonschema_meta.json'
- )
+ meta_schema_file = os.path.join(spack.paths.test_path, "data", "jsonschema_meta.json")
with open(meta_schema_file) as f:
ms = json.load(f)
return ms
-@pytest.mark.regression('9857')
+@pytest.mark.regression("9857")
def test_validate_spec(validate_spec_schema):
v = spack.schema.Validator(validate_spec_schema)
- data = {'foo@3.7': 'bar'}
+ data = {"foo@3.7": "bar"}
# Validate good data (the key is a spec)
v.validate(data)
# Check that invalid data throws
- data['^python@3.7@'] = 'baz'
+ data["^python@3.7@"] = "baz"
with pytest.raises(jsonschema.ValidationError) as exc_err:
v.validate(data)
- assert 'is an invalid spec' in str(exc_err.value)
+ assert "is an invalid spec" in str(exc_err.value)
-@pytest.mark.regression('9857')
+@pytest.mark.regression("9857")
def test_module_suffixes(module_suffixes_schema):
v = spack.schema.Validator(module_suffixes_schema)
- data = {'tcl': {'all': {'suffixes': {'^python@2.7@': 'py2.7'}}}}
+ data = {"tcl": {"all": {"suffixes": {"^python@2.7@": "py2.7"}}}}
with pytest.raises(jsonschema.ValidationError) as exc_err:
v.validate(data)
- assert 'is an invalid spec' in str(exc_err.value)
+ assert "is an invalid spec" in str(exc_err.value)
-@pytest.mark.regression('10246')
+@pytest.mark.regression("10246")
@pytest.mark.skipif(
- sys.version_info < (2, 7),
- reason='requires python2.7 or higher because of importlib')
-@pytest.mark.parametrize('config_name', [
- 'compilers',
- 'config',
- 'env',
- 'merged',
- 'mirrors',
- 'modules',
- 'packages',
- 'repos'
-])
+ sys.version_info < (2, 7), reason="requires python2.7 or higher because of importlib"
+)
+@pytest.mark.parametrize(
+ "config_name",
+ ["compilers", "config", "env", "merged", "mirrors", "modules", "packages", "repos"],
+)
def test_schema_validation(meta_schema, config_name):
import importlib # novm
- module_name = 'spack.schema.{0}'.format(config_name)
+
+ module_name = "spack.schema.{0}".format(config_name)
module = importlib.import_module(module_name)
- schema = getattr(module, 'schema')
+ schema = getattr(module, "schema")
# If this validation throws the test won't pass
jsonschema.validate(schema, meta_schema)
@@ -118,26 +107,26 @@ def test_schema_validation(meta_schema, config_name):
def test_deprecated_properties(module_suffixes_schema):
# Test that an error is reported when 'error: True'
- msg_fmt = r'deprecated properties detected [properties={properties}]'
- module_suffixes_schema['deprecatedProperties'] = {
- 'properties': ['tcl'],
- 'message': msg_fmt,
- 'error': True
+ msg_fmt = r"deprecated properties detected [properties={properties}]"
+ module_suffixes_schema["deprecatedProperties"] = {
+ "properties": ["tcl"],
+ "message": msg_fmt,
+ "error": True,
}
v = spack.schema.Validator(module_suffixes_schema)
- data = {'tcl': {'all': {'suffixes': {'^python': 'py'}}}}
+ data = {"tcl": {"all": {"suffixes": {"^python": "py"}}}}
- expected_match = 'deprecated properties detected'
+ expected_match = "deprecated properties detected"
with pytest.raises(jsonschema.ValidationError, match=expected_match):
v.validate(data)
# Test that just a warning is reported when 'error: False'
- module_suffixes_schema['deprecatedProperties'] = {
- 'properties': ['tcl'],
- 'message': msg_fmt,
- 'error': False
+ module_suffixes_schema["deprecatedProperties"] = {
+ "properties": ["tcl"],
+ "message": msg_fmt,
+ "error": False,
}
v = spack.schema.Validator(module_suffixes_schema)
- data = {'tcl': {'all': {'suffixes': {'^python': 'py'}}}}
+ data = {"tcl": {"all": {"suffixes": {"^python": "py"}}}}
# The next validation doesn't raise anymore
v.validate(data)
diff --git a/lib/spack/spack/test/spack_yaml.py b/lib/spack/spack/test/spack_yaml.py
index 358460a059..d664cb4263 100644
--- a/lib/spack/spack/test/spack_yaml.py
+++ b/lib/spack/spack/test/spack_yaml.py
@@ -33,26 +33,31 @@ config_file:
def test_parse(data):
expected = {
- 'config_file': syaml.syaml_dict([
- ('x86_64', syaml.syaml_dict([
- ('foo', '/path/to/foo'),
- ('bar', '/path/to/bar'),
- ('baz', '/path/to/baz')])),
- ('some_list', ['item 1', 'item 2', 'item 3']),
- ('another_list', [1, 2, 3]),
- ('some_key', 'some_string')
- ])}
+ "config_file": syaml.syaml_dict(
+ [
+ (
+ "x86_64",
+ syaml.syaml_dict(
+ [("foo", "/path/to/foo"), ("bar", "/path/to/bar"), ("baz", "/path/to/baz")]
+ ),
+ ),
+ ("some_list", ["item 1", "item 2", "item 3"]),
+ ("another_list", [1, 2, 3]),
+ ("some_key", "some_string"),
+ ]
+ )
+ }
assert data == expected
def test_dict_order(data):
- expected_order = ['x86_64', 'some_list', 'another_list', 'some_key']
- assert list(data['config_file'].keys()) == expected_order
+ expected_order = ["x86_64", "some_list", "another_list", "some_key"]
+ assert list(data["config_file"].keys()) == expected_order
- expected_order = ['foo', 'bar', 'baz']
- assert list(data['config_file']['x86_64'].keys()) == expected_order
+ expected_order = ["foo", "bar", "baz"]
+ assert list(data["config_file"]["x86_64"].keys()) == expected_order
def test_line_numbers(data):
@@ -60,32 +65,32 @@ def test_line_numbers(data):
assert obj._start_mark.line == start_line
assert obj._end_mark.line == end_line
- check(data, 0, 12)
- check(data['config_file'], 1, 12)
- check(data['config_file']['x86_64'], 2, 5)
- check(data['config_file']['x86_64']['foo'], 2, 2)
- check(data['config_file']['x86_64']['bar'], 3, 3)
- check(data['config_file']['x86_64']['baz'], 4, 4)
- check(data['config_file']['some_list'], 6, 9)
- check(data['config_file']['some_list'][0], 6, 6)
- check(data['config_file']['some_list'][1], 7, 7)
- check(data['config_file']['some_list'][2], 8, 8)
- check(data['config_file']['another_list'], 10, 10)
- check(data['config_file']['some_key'], 11, 11)
+ check(data, 0, 12)
+ check(data["config_file"], 1, 12)
+ check(data["config_file"]["x86_64"], 2, 5)
+ check(data["config_file"]["x86_64"]["foo"], 2, 2)
+ check(data["config_file"]["x86_64"]["bar"], 3, 3)
+ check(data["config_file"]["x86_64"]["baz"], 4, 4)
+ check(data["config_file"]["some_list"], 6, 9)
+ check(data["config_file"]["some_list"][0], 6, 6)
+ check(data["config_file"]["some_list"][1], 7, 7)
+ check(data["config_file"]["some_list"][2], 8, 8)
+ check(data["config_file"]["another_list"], 10, 10)
+ check(data["config_file"]["some_key"], 11, 11)
def test_yaml_aliases():
- aliased_list_1 = ['foo']
+ aliased_list_1 = ["foo"]
aliased_list_2 = []
dict_with_aliases = {
- 'a': aliased_list_1,
- 'b': aliased_list_1,
- 'c': aliased_list_1,
- 'd': aliased_list_2,
- 'e': aliased_list_2,
- 'f': aliased_list_2,
+ "a": aliased_list_1,
+ "b": aliased_list_1,
+ "c": aliased_list_1,
+ "d": aliased_list_2,
+ "e": aliased_list_2,
+ "f": aliased_list_2,
}
string = syaml.dump(dict_with_aliases)
# ensure no YAML aliases appear in syaml dumps.
- assert '*id' not in string
+ assert "*id" not in string
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index b7c0f95332..d7f9a750ee 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -35,6 +35,7 @@ def set_dependency(saved_deps, monkeypatch):
"""Returns a function that alters the dependency information
for a package in the ``saved_deps`` fixture.
"""
+
def _mock(pkg_name, spec, deptypes=all_deptypes):
"""Alters dependence information for a package.
@@ -49,41 +50,41 @@ def set_dependency(saved_deps, monkeypatch):
cond = Spec(pkg_cls.name)
dependency = Dependency(pkg_cls, spec, type=deptypes)
monkeypatch.setitem(pkg_cls.dependencies, spec.name, {cond: dependency})
+
return _mock
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_test_deptype():
"""Ensure that test-only dependencies are only included for specified
-packages in the following spec DAG::
+ packages in the following spec DAG::
- w
- /|
- x y
- |
- z
+ w
+ /|
+ x y
+ |
+ z
-w->y deptypes are (link, build), w->x and y->z deptypes are (test)
-
-"""
- default = ('build', 'link')
- test_only = ('test',)
+ w->y deptypes are (link, build), w->x and y->z deptypes are (test)
+ """
+ default = ("build", "link")
+ test_only = ("test",)
mock_repo = MockPackageMultiRepo()
- x = mock_repo.add_package('x', [], [])
- z = mock_repo.add_package('z', [], [])
- y = mock_repo.add_package('y', [z], [test_only])
- w = mock_repo.add_package('w', [x, y], [test_only, default])
+ x = mock_repo.add_package("x", [], [])
+ z = mock_repo.add_package("z", [], [])
+ y = mock_repo.add_package("y", [z], [test_only])
+ w = mock_repo.add_package("w", [x, y], [test_only, default])
with spack.repo.use_repositories(mock_repo):
- spec = Spec('w')
+ spec = Spec("w")
spec.concretize(tests=(w.name,))
- assert ('x' in spec)
- assert ('z' not in spec)
+ assert "x" in spec
+ assert "z" not in spec
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_installed_deps(monkeypatch, mock_packages):
"""Ensure that concrete specs and their build deps don't constrain solves.
@@ -96,8 +97,8 @@ def test_installed_deps(monkeypatch, mock_packages):
constrain ``a``'s dependency on ``d``.
"""
- if spack.config.get('config:concretizer') == 'original':
- pytest.xfail('fails with the original concretizer and full hashes')
+ if spack.config.get("config:concretizer") == "original":
+ pytest.xfail("fails with the original concretizer and full hashes")
# see installed-deps-[abcde] test packages.
# a
@@ -115,14 +116,14 @@ def test_installed_deps(monkeypatch, mock_packages):
# (concrete) installation.
c_spec = Spec(c)
c_spec.concretize()
- assert c_spec[d].version == spack.version.Version('2')
+ assert c_spec[d].version == spack.version.Version("2")
installed_names = [s.name for s in c_spec.traverse()]
def _mock_installed(self):
return self.name in installed_names
- monkeypatch.setattr(Spec, 'installed', _mock_installed)
+ monkeypatch.setattr(Spec, "installed", _mock_installed)
# install A, which depends on B, C, D, and E, and force A to
# use the installed C. It should *not* force A to use the installed D
@@ -130,43 +131,42 @@ def test_installed_deps(monkeypatch, mock_packages):
a_spec = Spec(a)
a_spec._add_dependency(c_spec, ("build", "link"))
a_spec.concretize()
- assert spack.version.Version('2') == a_spec[c][d].version
- assert spack.version.Version('2') == a_spec[e].version
- assert spack.version.Version('3') == a_spec[b][d].version
- assert spack.version.Version('3') == a_spec[d].version
+ assert spack.version.Version("2") == a_spec[c][d].version
+ assert spack.version.Version("2") == a_spec[e].version
+ assert spack.version.Version("3") == a_spec[b][d].version
+ assert spack.version.Version("3") == a_spec[d].version
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
def test_specify_preinstalled_dep():
"""Specify the use of a preinstalled package during concretization with a
transitive dependency that is only supplied by the preinstalled package.
"""
- default = ('build', 'link')
+ default = ("build", "link")
mock_repo = MockPackageMultiRepo()
- c = mock_repo.add_package('c', [], [])
- b = mock_repo.add_package('b', [c], [default])
- mock_repo.add_package('a', [b], [default])
+ c = mock_repo.add_package("c", [], [])
+ b = mock_repo.add_package("b", [c], [default])
+ mock_repo.add_package("a", [b], [default])
with spack.repo.use_repositories(mock_repo):
- b_spec = Spec('b')
+ b_spec = Spec("b")
b_spec.concretize()
for spec in b_spec.traverse():
- setattr(spec.package, 'installed', True)
+ setattr(spec.package, "installed", True)
- a_spec = Spec('a')
+ a_spec = Spec("a")
a_spec._add_dependency(b_spec, default)
a_spec.concretize()
- assert set(x.name for x in a_spec.traverse()) == set(['a', 'b', 'c'])
+ assert set(x.name for x in a_spec.traverse()) == set(["a", "b", "c"])
-@pytest.mark.usefixtures('config')
-@pytest.mark.parametrize('spec_str,expr_str,expected', [
- ('x ^y@2', 'y@2', True),
- ('x@1', 'y', False),
- ('x', 'y@3', True)
-])
+@pytest.mark.usefixtures("config")
+@pytest.mark.parametrize(
+ "spec_str,expr_str,expected",
+ [("x ^y@2", "y@2", True), ("x@1", "y", False), ("x", "y@3", True)],
+)
def test_conditional_dep_with_user_constraints(spec_str, expr_str, expected):
"""This sets up packages X->Y such that X depends on Y conditionally. It
then constructs a Spec with X but with no constraints on X, so that the
@@ -177,35 +177,31 @@ def test_conditional_dep_with_user_constraints(spec_str, expr_str, expected):
# FIXME: We need to tweak optimization rules to make this test
# FIXME: not prefer a DAG with fewer nodes wrt more recent
# FIXME: versions of the package
- if spack.config.get('config:concretizer') == 'clingo':
- pytest.xfail('Clingo optimization rules prefer to trim a node')
+ if spack.config.get("config:concretizer") == "clingo":
+ pytest.xfail("Clingo optimization rules prefer to trim a node")
- default = ('build', 'link')
+ default = ("build", "link")
mock_repo = MockPackageMultiRepo()
- y = mock_repo.add_package('y', [], [])
- x_on_y_conditions = {
- y.name: {
- 'x@2:': 'y'
- }
- }
- mock_repo.add_package('x', [y], [default], conditions=x_on_y_conditions)
+ y = mock_repo.add_package("y", [], [])
+ x_on_y_conditions = {y.name: {"x@2:": "y"}}
+ mock_repo.add_package("x", [y], [default], conditions=x_on_y_conditions)
with spack.repo.use_repositories(mock_repo):
spec = Spec(spec_str)
spec.concretize()
result = expr_str in spec
- assert result is expected, '{0} in {1}'.format(expr_str, spec)
+ assert result is expected, "{0} in {1}".format(expr_str, spec)
-@pytest.mark.usefixtures('mutable_mock_repo', 'config')
+@pytest.mark.usefixtures("mutable_mock_repo", "config")
class TestSpecDag(object):
def test_conflicting_package_constraints(self, set_dependency):
- set_dependency('mpileaks', 'mpich@1.0')
- set_dependency('callpath', 'mpich@2.0')
+ set_dependency("mpileaks", "mpich@1.0")
+ set_dependency("callpath", "mpich@2.0")
- spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
+ spec = Spec("mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf")
# TODO: try to do something to show that the issue was with
# TODO: the user's input or with package inconsistencies.
@@ -213,11 +209,10 @@ class TestSpecDag(object):
spec.normalize()
def test_preorder_node_traversal(self):
- dag = Spec('mpileaks ^zmpi')
+ dag = Spec("mpileaks ^zmpi")
dag.normalize()
- names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
- 'zmpi', 'fake']
+ names = ["mpileaks", "callpath", "dyninst", "libdwarf", "libelf", "zmpi", "fake"]
pairs = list(zip([0, 1, 2, 3, 4, 2, 3], names))
traversal = dag.traverse()
@@ -227,99 +222,132 @@ class TestSpecDag(object):
assert [(x, y.name) for x, y in traversal] == pairs
def test_preorder_edge_traversal(self):
- dag = Spec('mpileaks ^zmpi')
+ dag = Spec("mpileaks ^zmpi")
dag.normalize()
- names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
- 'libelf', 'zmpi', 'fake', 'zmpi']
+ names = [
+ "mpileaks",
+ "callpath",
+ "dyninst",
+ "libdwarf",
+ "libelf",
+ "libelf",
+ "zmpi",
+ "fake",
+ "zmpi",
+ ]
pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names))
- traversal = dag.traverse(cover='edges')
+ traversal = dag.traverse(cover="edges")
assert [x.name for x in traversal] == names
- traversal = dag.traverse(cover='edges', depth=True)
+ traversal = dag.traverse(cover="edges", depth=True)
assert [(x, y.name) for x, y in traversal] == pairs
def test_preorder_path_traversal(self):
- dag = Spec('mpileaks ^zmpi')
+ dag = Spec("mpileaks ^zmpi")
dag.normalize()
- names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
- 'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
+ names = [
+ "mpileaks",
+ "callpath",
+ "dyninst",
+ "libdwarf",
+ "libelf",
+ "libelf",
+ "zmpi",
+ "fake",
+ "zmpi",
+ "fake",
+ ]
pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names))
- traversal = dag.traverse(cover='paths')
+ traversal = dag.traverse(cover="paths")
assert [x.name for x in traversal] == names
- traversal = dag.traverse(cover='paths', depth=True)
+ traversal = dag.traverse(cover="paths", depth=True)
assert [(x, y.name) for x, y in traversal] == pairs
def test_postorder_node_traversal(self):
- dag = Spec('mpileaks ^zmpi')
+ dag = Spec("mpileaks ^zmpi")
dag.normalize()
- names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
- 'callpath', 'mpileaks']
+ names = ["libelf", "libdwarf", "dyninst", "fake", "zmpi", "callpath", "mpileaks"]
pairs = list(zip([4, 3, 2, 3, 2, 1, 0], names))
- traversal = dag.traverse(order='post')
+ traversal = dag.traverse(order="post")
assert [x.name for x in traversal] == names
- traversal = dag.traverse(depth=True, order='post')
+ traversal = dag.traverse(depth=True, order="post")
assert [(x, y.name) for x, y in traversal] == pairs
def test_postorder_edge_traversal(self):
- dag = Spec('mpileaks ^zmpi')
+ dag = Spec("mpileaks ^zmpi")
dag.normalize()
- names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
- 'callpath', 'zmpi', 'mpileaks']
+ names = [
+ "libelf",
+ "libdwarf",
+ "libelf",
+ "dyninst",
+ "fake",
+ "zmpi",
+ "callpath",
+ "zmpi",
+ "mpileaks",
+ ]
pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names))
- traversal = dag.traverse(cover='edges', order='post')
+ traversal = dag.traverse(cover="edges", order="post")
assert [x.name for x in traversal] == names
- traversal = dag.traverse(cover='edges', depth=True, order='post')
+ traversal = dag.traverse(cover="edges", depth=True, order="post")
assert [(x, y.name) for x, y in traversal] == pairs
def test_postorder_path_traversal(self):
- dag = Spec('mpileaks ^zmpi')
+ dag = Spec("mpileaks ^zmpi")
dag.normalize()
- names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
- 'callpath', 'fake', 'zmpi', 'mpileaks']
+ names = [
+ "libelf",
+ "libdwarf",
+ "libelf",
+ "dyninst",
+ "fake",
+ "zmpi",
+ "callpath",
+ "fake",
+ "zmpi",
+ "mpileaks",
+ ]
pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names))
- traversal = dag.traverse(cover='paths', order='post')
+ traversal = dag.traverse(cover="paths", order="post")
assert [x.name for x in traversal] == names
- traversal = dag.traverse(cover='paths', depth=True, order='post')
+ traversal = dag.traverse(cover="paths", depth=True, order="post")
assert [(x, y.name) for x, y in traversal] == pairs
def test_conflicting_spec_constraints(self):
- mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
+ mpileaks = Spec("mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf")
# Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway)
mpileaks.normalize()
- mpileaks.edges_to_dependencies(
- name='mpich'
- )[0].spec = Spec('mpich@1.0')
+ mpileaks.edges_to_dependencies(name="mpich")[0].spec = Spec("mpich@1.0")
- mpileaks.edges_to_dependencies(
- name='callpath'
- )[0].spec.edges_to_dependencies(
- name='mpich'
- )[0].spec = Spec('mpich@2.0')
+ mpileaks.edges_to_dependencies(name="callpath")[0].spec.edges_to_dependencies(
+ name="mpich"
+ )[0].spec = Spec("mpich@2.0")
with pytest.raises(spack.spec.InconsistentSpecError):
mpileaks.flat_dependencies(copy=False)
def test_normalize_twice(self):
"""Make sure normalize can be run twice on the same spec,
- and that it is idempotent."""
- spec = Spec('mpileaks')
+ and that it is idempotent."""
+ spec = Spec("mpileaks")
spec.normalize()
n1 = spec.copy()
@@ -327,25 +355,26 @@ class TestSpecDag(object):
assert n1 == spec
def test_normalize_a_lot(self):
- spec = Spec('mpileaks')
+ spec = Spec("mpileaks")
spec.normalize()
spec.normalize()
spec.normalize()
spec.normalize()
- def test_normalize_with_virtual_spec(self, ):
- dag = Spec.from_literal({
- 'mpileaks': {
- 'callpath': {
- 'dyninst': {
- 'libdwarf': {'libelf': None},
- 'libelf': None
+ def test_normalize_with_virtual_spec(
+ self,
+ ):
+ dag = Spec.from_literal(
+ {
+ "mpileaks": {
+ "callpath": {
+ "dyninst": {"libdwarf": {"libelf": None}, "libelf": None},
+ "mpi": None,
},
- 'mpi': None
- },
- 'mpi': None
+ "mpi": None,
+ }
}
- })
+ )
dag.normalize()
# make sure nothing with the same name occurs twice
@@ -359,53 +388,51 @@ class TestSpecDag(object):
assert counts[name] == 1
def test_dependents_and_dependencies_are_correct(self):
- spec = Spec.from_literal({
- 'mpileaks': {
- 'callpath': {
- 'dyninst': {
- 'libdwarf': {'libelf': None},
- 'libelf': None
+ spec = Spec.from_literal(
+ {
+ "mpileaks": {
+ "callpath": {
+ "dyninst": {"libdwarf": {"libelf": None}, "libelf": None},
+ "mpi": None,
},
- 'mpi': None
- },
- 'mpi': None
+ "mpi": None,
+ }
}
- })
+ )
check_links(spec)
spec.normalize()
check_links(spec)
def test_unsatisfiable_version(self, set_dependency):
- set_dependency('mpileaks', 'mpich@1.0')
- spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
+ set_dependency("mpileaks", "mpich@1.0")
+ spec = Spec("mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf")
with pytest.raises(spack.spec.UnsatisfiableVersionSpecError):
spec.normalize()
def test_unsatisfiable_compiler(self, set_dependency):
- set_dependency('mpileaks', 'mpich%gcc')
- spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf'
- ' ^libdwarf')
+ set_dependency("mpileaks", "mpich%gcc")
+ spec = Spec("mpileaks ^mpich%intel ^callpath ^dyninst ^libelf" " ^libdwarf")
with pytest.raises(spack.spec.UnsatisfiableCompilerSpecError):
spec.normalize()
def test_unsatisfiable_compiler_version(self, set_dependency):
- set_dependency('mpileaks', 'mpich%gcc@4.6')
- spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf'
- ' ^libdwarf')
+ set_dependency("mpileaks", "mpich%gcc@4.6")
+ spec = Spec("mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf" " ^libdwarf")
with pytest.raises(spack.spec.UnsatisfiableCompilerSpecError):
spec.normalize()
def test_unsatisfiable_architecture(self, set_dependency):
- set_dependency('mpileaks', 'mpich platform=test target=be')
- spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath'
- ' ^dyninst ^libelf ^libdwarf')
+ set_dependency("mpileaks", "mpich platform=test target=be")
+ spec = Spec(
+ "mpileaks ^mpich platform=test target=fe ^callpath" " ^dyninst ^libelf ^libdwarf"
+ )
with pytest.raises(spack.spec.UnsatisfiableArchitectureSpecError):
spec.normalize()
- @pytest.mark.parametrize('spec_str', [
- 'libelf ^mpich', 'libelf ^libdwarf', 'mpich ^dyninst ^libelf'
- ])
+ @pytest.mark.parametrize(
+ "spec_str", ["libelf ^mpich", "libelf ^libdwarf", "mpich ^dyninst ^libelf"]
+ )
def test_invalid_dep(self, spec_str):
spec = Spec(spec_str)
with pytest.raises(spack.error.SpecError):
@@ -413,45 +440,19 @@ class TestSpecDag(object):
def test_equal(self):
# Different spec structures to test for equality
- flat = Spec.from_literal(
- {'mpileaks ^callpath ^libelf ^libdwarf': None}
+ flat = Spec.from_literal({"mpileaks ^callpath ^libelf ^libdwarf": None})
+
+ flat_init = Spec.from_literal(
+ {"mpileaks": {"callpath": None, "libdwarf": None, "libelf": None}}
)
- flat_init = Spec.from_literal({
- 'mpileaks': {
- 'callpath': None,
- 'libdwarf': None,
- 'libelf': None
- }
- })
+ flip_flat = Spec.from_literal(
+ {"mpileaks": {"libelf": None, "libdwarf": None, "callpath": None}}
+ )
- flip_flat = Spec.from_literal({
- 'mpileaks': {
- 'libelf': None,
- 'libdwarf': None,
- 'callpath': None
- }
- })
-
- dag = Spec.from_literal({
- 'mpileaks': {
- 'callpath': {
- 'libdwarf': {
- 'libelf': None
- }
- }
- }
- })
-
- flip_dag = Spec.from_literal({
- 'mpileaks': {
- 'callpath': {
- 'libelf': {
- 'libdwarf': None
- }
- }
- }
- })
+ dag = Spec.from_literal({"mpileaks": {"callpath": {"libdwarf": {"libelf": None}}}})
+
+ flip_dag = Spec.from_literal({"mpileaks": {"callpath": {"libelf": {"libdwarf": None}}}})
# All these are equal to each other with regular ==
specs = (flat, flat_init, flip_flat, dag, flip_dag)
@@ -474,52 +475,53 @@ class TestSpecDag(object):
def test_normalize_mpileaks(self):
# Spec parsed in from a string
- spec = Spec.from_literal({
- 'mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf': None
- })
+ spec = Spec.from_literal(
+ {"mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf": None}
+ )
# What that spec should look like after parsing
- expected_flat = Spec.from_literal({
- 'mpileaks': {
- 'mpich': None,
- 'callpath': None,
- 'dyninst': None,
- 'libelf@1.8.11': None,
- 'libdwarf': None
+ expected_flat = Spec.from_literal(
+ {
+ "mpileaks": {
+ "mpich": None,
+ "callpath": None,
+ "dyninst": None,
+ "libelf@1.8.11": None,
+ "libdwarf": None,
+ }
}
- })
+ )
# What it should look like after normalization
- mpich = Spec('mpich')
- libelf = Spec('libelf@1.8.11')
- expected_normalized = Spec.from_literal({
- 'mpileaks': {
- 'callpath': {
- 'dyninst': {
- 'libdwarf': {libelf: None},
- libelf: None
+ mpich = Spec("mpich")
+ libelf = Spec("libelf@1.8.11")
+ expected_normalized = Spec.from_literal(
+ {
+ "mpileaks": {
+ "callpath": {
+ "dyninst": {"libdwarf": {libelf: None}, libelf: None},
+ mpich: None,
},
- mpich: None
+ mpich: None,
},
- mpich: None
- },
- })
+ }
+ )
# Similar to normalized spec, but now with copies of the same
# libelf node. Normalization should result in a single unique
# node for each package, so this is the wrong DAG.
- non_unique_nodes = Spec.from_literal({
- 'mpileaks': {
- 'callpath': {
- 'dyninst': {
- 'libdwarf': {'libelf@1.8.11': None},
- 'libelf@1.8.11': None
+ non_unique_nodes = Spec.from_literal(
+ {
+ "mpileaks": {
+ "callpath": {
+ "dyninst": {"libdwarf": {"libelf@1.8.11": None}, "libelf@1.8.11": None},
+ mpich: None,
},
- mpich: None
- },
- mpich: None
- }
- }, normal=False)
+ mpich: None,
+ }
+ },
+ normal=False,
+ )
# All specs here should be equal under regular equality
specs = (spec, expected_flat, expected_normalized, non_unique_nodes)
@@ -553,36 +555,35 @@ class TestSpecDag(object):
assert not spec.eq_dag(non_unique_nodes, deptypes=True)
def test_normalize_with_virtual_package(self):
- spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
+ spec = Spec("mpileaks ^mpi ^libelf@1.8.11 ^libdwarf")
spec.normalize()
- expected_normalized = Spec.from_literal({
- 'mpileaks': {
- 'callpath': {
- 'dyninst': {
- 'libdwarf': {'libelf@1.8.11': None},
- 'libelf@1.8.11': None
+ expected_normalized = Spec.from_literal(
+ {
+ "mpileaks": {
+ "callpath": {
+ "dyninst": {"libdwarf": {"libelf@1.8.11": None}, "libelf@1.8.11": None},
+ "mpi": None,
},
- 'mpi': None
- },
- 'mpi': None
+ "mpi": None,
+ }
}
- })
+ )
assert str(spec) == str(expected_normalized)
def test_contains(self):
- spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
- assert Spec('mpi') in spec
- assert Spec('libelf') in spec
- assert Spec('libelf@1.8.11') in spec
- assert Spec('libelf@1.8.12') not in spec
- assert Spec('libdwarf') in spec
- assert Spec('libgoblin') not in spec
- assert Spec('mpileaks') in spec
+ spec = Spec("mpileaks ^mpi ^libelf@1.8.11 ^libdwarf")
+ assert Spec("mpi") in spec
+ assert Spec("libelf") in spec
+ assert Spec("libelf@1.8.11") in spec
+ assert Spec("libelf@1.8.12") not in spec
+ assert Spec("libdwarf") in spec
+ assert Spec("libgoblin") not in spec
+ assert Spec("mpileaks") in spec
def test_copy_simple(self):
- orig = Spec('mpileaks')
+ orig = Spec("mpileaks")
copy = orig.copy()
check_links(copy)
@@ -597,7 +598,7 @@ class TestSpecDag(object):
assert not orig_ids.intersection(copy_ids)
def test_copy_normalized(self):
- orig = Spec('mpileaks')
+ orig = Spec("mpileaks")
orig.normalize()
copy = orig.copy()
check_links(copy)
@@ -611,7 +612,7 @@ class TestSpecDag(object):
assert not orig_ids.intersection(copy_ids)
def test_copy_concretized(self):
- orig = Spec('mpileaks')
+ orig = Spec("mpileaks")
orig.concretize()
copy = orig.copy()
@@ -631,17 +632,17 @@ class TestSpecDag(object):
"""Check that copying dependencies using id(node) as a fast identifier of the
node works when the spec is wrapped in a SpecBuildInterface object.
"""
- s = Spec('mpileaks').concretized()
+ s = Spec("mpileaks").concretized()
c0 = s.copy()
assert c0 == s
# Single indirection
- c1 = s['mpileaks'].copy()
+ c1 = s["mpileaks"].copy()
assert c0 == c1 == s
# Double indirection
- c2 = s['mpileaks']['mpileaks'].copy()
+ c2 = s["mpileaks"]["mpileaks"].copy()
assert c0 == c1 == c2 == s
"""
@@ -671,70 +672,88 @@ class TestSpecDag(object):
"""
def test_deptype_traversal(self):
- dag = Spec('dtuse')
+ dag = Spec("dtuse")
dag.normalize()
- names = ['dtuse', 'dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
- 'dtlink1', 'dtlink3', 'dtlink4']
-
- traversal = dag.traverse(deptype=('build', 'link'))
+ names = [
+ "dtuse",
+ "dttop",
+ "dtbuild1",
+ "dtbuild2",
+ "dtlink2",
+ "dtlink1",
+ "dtlink3",
+ "dtlink4",
+ ]
+
+ traversal = dag.traverse(deptype=("build", "link"))
assert [x.name for x in traversal] == names
def test_deptype_traversal_with_builddeps(self):
- dag = Spec('dttop')
+ dag = Spec("dttop")
dag.normalize()
- names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
- 'dtlink1', 'dtlink3', 'dtlink4']
+ names = ["dttop", "dtbuild1", "dtbuild2", "dtlink2", "dtlink1", "dtlink3", "dtlink4"]
- traversal = dag.traverse(deptype=('build', 'link'))
+ traversal = dag.traverse(deptype=("build", "link"))
assert [x.name for x in traversal] == names
def test_deptype_traversal_full(self):
- dag = Spec('dttop')
+ dag = Spec("dttop")
dag.normalize()
- names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', 'dtrun2',
- 'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
- 'dtrun3', 'dtbuild3']
+ names = [
+ "dttop",
+ "dtbuild1",
+ "dtbuild2",
+ "dtlink2",
+ "dtrun2",
+ "dtlink1",
+ "dtlink3",
+ "dtlink4",
+ "dtrun1",
+ "dtlink5",
+ "dtrun3",
+ "dtbuild3",
+ ]
traversal = dag.traverse(deptype=all)
assert [x.name for x in traversal] == names
def test_deptype_traversal_run(self):
- dag = Spec('dttop')
+ dag = Spec("dttop")
dag.normalize()
- names = ['dttop', 'dtrun1', 'dtrun3']
+ names = ["dttop", "dtrun1", "dtrun3"]
- traversal = dag.traverse(deptype='run')
+ traversal = dag.traverse(deptype="run")
assert [x.name for x in traversal] == names
def test_hash_bits(self):
"""Ensure getting first n bits of a base32-encoded DAG hash works."""
# RFC 4648 base32 decode table
- b32 = dict((j, i) for i, j in enumerate('abcdefghijklmnopqrstuvwxyz'))
- b32.update(dict((j, i) for i, j in enumerate('234567', 26)))
+ b32 = dict((j, i) for i, j in enumerate("abcdefghijklmnopqrstuvwxyz"))
+ b32.update(dict((j, i) for i, j in enumerate("234567", 26)))
# some package hashes
tests = [
- '35orsd4cenv743hg4i5vxha2lzayycby',
- '6kfqtj7dap3773rxog6kkmoweix5gpwo',
- 'e6h6ff3uvmjbq3azik2ckr6ckwm3depv',
- 'snz2juf4ij7sv77cq3vs467q6acftmur',
- '4eg47oedi5bbkhpoxw26v3oe6vamkfd7',
- 'vrwabwj6umeb5vjw6flx2rnft3j457rw']
+ "35orsd4cenv743hg4i5vxha2lzayycby",
+ "6kfqtj7dap3773rxog6kkmoweix5gpwo",
+ "e6h6ff3uvmjbq3azik2ckr6ckwm3depv",
+ "snz2juf4ij7sv77cq3vs467q6acftmur",
+ "4eg47oedi5bbkhpoxw26v3oe6vamkfd7",
+ "vrwabwj6umeb5vjw6flx2rnft3j457rw",
+ ]
for test_hash in tests:
# string containing raw bits of hash ('1' and '0')
- expected = ''.join([format(b32[c], '#07b').replace('0b', '')
- for c in test_hash])
+ expected = "".join([format(b32[c], "#07b").replace("0b", "") for c in test_hash])
for bits in (1, 2, 3, 4, 7, 8, 9, 16, 64, 117, 128, 160):
actual_int = hashutil.base32_prefix_bits(test_hash, bits)
fmt = "#0%sb" % (bits + 2)
- actual = format(actual_int, fmt).replace('0b', '')
+ actual = format(actual_int, fmt).replace("0b", "")
assert expected[:bits] == actual
@@ -747,95 +766,58 @@ class TestSpecDag(object):
def test_traversal_directions(self):
"""Make sure child and parent traversals of specs work."""
# Mock spec - d is used for a diamond dependency
- spec = Spec.from_literal({
- 'a': {
- 'b': {
- 'c': {'d': None},
- 'e': None
- },
- 'f': {
- 'g': {'d': None}
- }
- }
- })
+ spec = Spec.from_literal(
+ {"a": {"b": {"c": {"d": None}, "e": None}, "f": {"g": {"d": None}}}}
+ )
- assert (
- ['a', 'b', 'c', 'd', 'e', 'f', 'g'] ==
- [s.name for s in spec.traverse(direction='children')])
+ assert ["a", "b", "c", "d", "e", "f", "g"] == [
+ s.name for s in spec.traverse(direction="children")
+ ]
- assert (
- ['g', 'f', 'a'] ==
- [s.name for s in spec['g'].traverse(direction='parents')])
+ assert ["g", "f", "a"] == [s.name for s in spec["g"].traverse(direction="parents")]
- assert (
- ['d', 'c', 'b', 'a', 'g', 'f'] ==
- [s.name for s in spec['d'].traverse(direction='parents')])
+ assert ["d", "c", "b", "a", "g", "f"] == [
+ s.name for s in spec["d"].traverse(direction="parents")
+ ]
def test_edge_traversals(self):
"""Make sure child and parent traversals of specs work."""
# Mock spec - d is used for a diamond dependency
- spec = Spec.from_literal({
- 'a': {
- 'b': {
- 'c': {'d': None},
- 'e': None
- },
- 'f': {
- 'g': {'d': None}
- }
- }
- })
+ spec = Spec.from_literal(
+ {"a": {"b": {"c": {"d": None}, "e": None}, "f": {"g": {"d": None}}}}
+ )
- assert (
- ['a', 'b', 'c', 'd', 'e', 'f', 'g'] ==
- [s.name for s in spec.traverse(direction='children')])
+ assert ["a", "b", "c", "d", "e", "f", "g"] == [
+ s.name for s in spec.traverse(direction="children")
+ ]
- assert (
- ['g', 'f', 'a'] ==
- [s.name for s in spec['g'].traverse(direction='parents')])
+ assert ["g", "f", "a"] == [s.name for s in spec["g"].traverse(direction="parents")]
- assert (
- ['d', 'c', 'b', 'a', 'g', 'f'] ==
- [s.name for s in spec['d'].traverse(direction='parents')])
+ assert ["d", "c", "b", "a", "g", "f"] == [
+ s.name for s in spec["d"].traverse(direction="parents")
+ ]
def test_copy_dependencies(self):
- s1 = Spec('mpileaks ^mpich2@1.1')
+ s1 = Spec("mpileaks ^mpich2@1.1")
s2 = s1.copy()
- assert '^mpich2@1.1' in s2
- assert '^mpich2' in s2
+ assert "^mpich2@1.1" in s2
+ assert "^mpich2" in s2
def test_construct_spec_with_deptypes(self):
"""Ensure that it is possible to construct a spec with explicit
- dependency types."""
- s = Spec.from_literal({
- 'a': {
- 'b': {'c:build': None},
- 'd': {
- 'e:build,link': {'f:run': None}
- }
- }
- })
-
- assert s['b'].edges_to_dependencies(
- name='c'
- )[0].deptypes == ('build',)
- assert s['d'].edges_to_dependencies(
- name='e'
- )[0].deptypes == ('build', 'link')
- assert s['e'].edges_to_dependencies(
- name='f'
- )[0].deptypes == ('run',)
-
- assert s['c'].edges_from_dependents(
- name='b'
- )[0].deptypes == ('build',)
- assert s['e'].edges_from_dependents(
- name='d'
- )[0].deptypes == ('build', 'link')
- assert s['f'].edges_from_dependents(
- name='e'
- )[0].deptypes == ('run',)
+ dependency types."""
+ s = Spec.from_literal(
+ {"a": {"b": {"c:build": None}, "d": {"e:build,link": {"f:run": None}}}}
+ )
+
+ assert s["b"].edges_to_dependencies(name="c")[0].deptypes == ("build",)
+ assert s["d"].edges_to_dependencies(name="e")[0].deptypes == ("build", "link")
+ assert s["e"].edges_to_dependencies(name="f")[0].deptypes == ("run",)
+
+ assert s["c"].edges_from_dependents(name="b")[0].deptypes == ("build",)
+ assert s["e"].edges_from_dependents(name="d")[0].deptypes == ("build", "link")
+ assert s["f"].edges_from_dependents(name="e")[0].deptypes == ("run",)
def check_diamond_deptypes(self, spec):
"""Validate deptypes in dt-diamond spec.
@@ -844,42 +826,41 @@ class TestSpecDag(object):
depend on the same dependency in different ways.
"""
- assert spec['dt-diamond'].edges_to_dependencies(
- name='dt-diamond-left'
- )[0].deptypes == ('build', 'link')
+ assert spec["dt-diamond"].edges_to_dependencies(name="dt-diamond-left")[0].deptypes == (
+ "build",
+ "link",
+ )
- assert spec['dt-diamond'].edges_to_dependencies(
- name='dt-diamond-right'
- )[0].deptypes == ('build', 'link')
+ assert spec["dt-diamond"].edges_to_dependencies(name="dt-diamond-right")[0].deptypes == (
+ "build",
+ "link",
+ )
- assert spec['dt-diamond-left'].edges_to_dependencies(
- name='dt-diamond-bottom'
- )[0].deptypes == ('build',)
+ assert spec["dt-diamond-left"].edges_to_dependencies(name="dt-diamond-bottom")[
+ 0
+ ].deptypes == ("build",)
- assert spec['dt-diamond-right'].edges_to_dependencies(
- name='dt-diamond-bottom'
- )[0].deptypes == ('build', 'link', 'run')
+ assert spec["dt-diamond-right"].edges_to_dependencies(name="dt-diamond-bottom")[
+ 0
+ ].deptypes == ("build", "link", "run")
def check_diamond_normalized_dag(self, spec):
- dag = Spec.from_literal({
- 'dt-diamond': {
- 'dt-diamond-left:build,link': {
- 'dt-diamond-bottom:build': None
- },
- 'dt-diamond-right:build,link': {
- 'dt-diamond-bottom:build,link,run': None
- },
-
+ dag = Spec.from_literal(
+ {
+ "dt-diamond": {
+ "dt-diamond-left:build,link": {"dt-diamond-bottom:build": None},
+ "dt-diamond-right:build,link": {"dt-diamond-bottom:build,link,run": None},
+ }
}
- })
+ )
assert spec.eq_dag(dag)
def test_normalize_diamond_deptypes(self):
"""Ensure that dependency types are preserved even if the same thing is
- depended on in two different ways."""
- s = Spec('dt-diamond')
+ depended on in two different ways."""
+ s = Spec("dt-diamond")
s.normalize()
self.check_diamond_deptypes(s)
@@ -887,13 +868,13 @@ class TestSpecDag(object):
def test_concretize_deptypes(self):
"""Ensure that dependency types are preserved after concretization."""
- s = Spec('dt-diamond')
+ s = Spec("dt-diamond")
s.concretize()
self.check_diamond_deptypes(s)
def test_copy_deptypes(self):
"""Ensure that dependency types are preserved by spec copy."""
- s1 = Spec('dt-diamond')
+ s1 = Spec("dt-diamond")
s1.normalize()
self.check_diamond_deptypes(s1)
self.check_diamond_normalized_dag(s1)
@@ -902,7 +883,7 @@ class TestSpecDag(object):
self.check_diamond_normalized_dag(s2)
self.check_diamond_deptypes(s2)
- s3 = Spec('dt-diamond')
+ s3 = Spec("dt-diamond")
s3.concretize()
self.check_diamond_deptypes(s3)
@@ -910,45 +891,45 @@ class TestSpecDag(object):
self.check_diamond_deptypes(s4)
def test_getitem_query(self):
- s = Spec('mpileaks')
+ s = Spec("mpileaks")
s.concretize()
# Check a query to a non-virtual package
- a = s['callpath']
+ a = s["callpath"]
query = a.last_query
- assert query.name == 'callpath'
+ assert query.name == "callpath"
assert len(query.extra_parameters) == 0
assert not query.isvirtual
# Check a query to a virtual package
- a = s['mpi']
+ a = s["mpi"]
query = a.last_query
- assert query.name == 'mpi'
+ assert query.name == "mpi"
assert len(query.extra_parameters) == 0
assert query.isvirtual
# Check a query to a virtual package with
# extra parameters after query
- a = s['mpi:cxx,fortran']
+ a = s["mpi:cxx,fortran"]
query = a.last_query
- assert query.name == 'mpi'
+ assert query.name == "mpi"
assert len(query.extra_parameters) == 2
- assert 'cxx' in query.extra_parameters
- assert 'fortran' in query.extra_parameters
+ assert "cxx" in query.extra_parameters
+ assert "fortran" in query.extra_parameters
assert query.isvirtual
def test_getitem_exceptional_paths(self):
- s = Spec('mpileaks')
+ s = Spec("mpileaks")
s.concretize()
# Needed to get a proxy object
- q = s['mpileaks']
+ q = s["mpileaks"]
# Test that the attribute is read-only
with pytest.raises(AttributeError):
- q.libs = 'foo'
+ q.libs = "foo"
with pytest.raises(AttributeError):
q.libs
@@ -956,7 +937,7 @@ class TestSpecDag(object):
def test_canonical_deptype(self):
# special values
assert canonical_deptype(all) == all_deptypes
- assert canonical_deptype('all') == all_deptypes
+ assert canonical_deptype("all") == all_deptypes
with pytest.raises(ValueError):
canonical_deptype(None)
@@ -968,67 +949,55 @@ class TestSpecDag(object):
assert canonical_deptype(v) == (v,)
# tuples
- assert canonical_deptype(('build',)) == ('build',)
- assert canonical_deptype(
- ('build', 'link', 'run')) == ('build', 'link', 'run')
- assert canonical_deptype(
- ('build', 'link')) == ('build', 'link')
- assert canonical_deptype(
- ('build', 'run')) == ('build', 'run')
+ assert canonical_deptype(("build",)) == ("build",)
+ assert canonical_deptype(("build", "link", "run")) == ("build", "link", "run")
+ assert canonical_deptype(("build", "link")) == ("build", "link")
+ assert canonical_deptype(("build", "run")) == ("build", "run")
# lists
- assert canonical_deptype(
- ['build', 'link', 'run']) == ('build', 'link', 'run')
- assert canonical_deptype(
- ['build', 'link']) == ('build', 'link')
- assert canonical_deptype(
- ['build', 'run']) == ('build', 'run')
+ assert canonical_deptype(["build", "link", "run"]) == ("build", "link", "run")
+ assert canonical_deptype(["build", "link"]) == ("build", "link")
+ assert canonical_deptype(["build", "run"]) == ("build", "run")
# sorting
- assert canonical_deptype(
- ('run', 'build', 'link')) == ('build', 'link', 'run')
- assert canonical_deptype(
- ('run', 'link', 'build')) == ('build', 'link', 'run')
- assert canonical_deptype(
- ('run', 'link')) == ('link', 'run')
- assert canonical_deptype(
- ('link', 'build')) == ('build', 'link')
+ assert canonical_deptype(("run", "build", "link")) == ("build", "link", "run")
+ assert canonical_deptype(("run", "link", "build")) == ("build", "link", "run")
+ assert canonical_deptype(("run", "link")) == ("link", "run")
+ assert canonical_deptype(("link", "build")) == ("build", "link")
# can't put 'all' in tuple or list
with pytest.raises(ValueError):
- canonical_deptype(['all'])
+ canonical_deptype(["all"])
with pytest.raises(ValueError):
- canonical_deptype(('all',))
+ canonical_deptype(("all",))
# invalid values
with pytest.raises(ValueError):
- canonical_deptype('foo')
+ canonical_deptype("foo")
with pytest.raises(ValueError):
- canonical_deptype(('foo', 'bar'))
+ canonical_deptype(("foo", "bar"))
with pytest.raises(ValueError):
- canonical_deptype(('foo',))
+ canonical_deptype(("foo",))
def test_invalid_literal_spec(self):
# Can't give type 'build' to a top-level spec
with pytest.raises(spack.spec.SpecParseError):
- Spec.from_literal({'foo:build': None})
+ Spec.from_literal({"foo:build": None})
# Can't use more than one ':' separator
with pytest.raises(KeyError):
- Spec.from_literal({'foo': {'bar:build:link': None}})
+ Spec.from_literal({"foo": {"bar:build:link": None}})
def test_spec_tree_respect_deptypes(self):
# Version-test-root uses version-test-pkg as a build dependency
- s = Spec('version-test-root').concretized()
- out = s.tree(deptypes='all')
- assert 'version-test-pkg' in out
- out = s.tree(deptypes=('link', 'run'))
- assert 'version-test-pkg' not in out
+ s = Spec("version-test-root").concretized()
+ out = s.tree(deptypes="all")
+ assert "version-test-pkg" in out
+ out = s.tree(deptypes=("link", "run"))
+ assert "version-test-pkg" not in out
-def test_synthetic_construction_of_split_dependencies_from_same_package(
- mock_packages, config
-):
+def test_synthetic_construction_of_split_dependencies_from_same_package(mock_packages, config):
# Construct in a synthetic way (i.e. without using the solver)
# the following spec:
#
@@ -1038,20 +1007,20 @@ def test_synthetic_construction_of_split_dependencies_from_same_package(
#
# To demonstrate that a spec can now hold two direct
# dependencies from the same package
- root = Spec('b').concretized()
- link_run_spec = Spec('c@1.0').concretized()
- build_spec = Spec('c@2.0').concretized()
+ root = Spec("b").concretized()
+ link_run_spec = Spec("c@1.0").concretized()
+ build_spec = Spec("c@2.0").concretized()
- root.add_dependency_edge(link_run_spec, deptype='link')
- root.add_dependency_edge(link_run_spec, deptype='run')
- root.add_dependency_edge(build_spec, deptype='build')
+ root.add_dependency_edge(link_run_spec, deptype="link")
+ root.add_dependency_edge(link_run_spec, deptype="run")
+ root.add_dependency_edge(build_spec, deptype="build")
# Check dependencies from the perspective of root
assert len(root.dependencies()) == 2
- assert all(x.name == 'c' for x in root.dependencies())
+ assert all(x.name == "c" for x in root.dependencies())
- assert '@2.0' in root.dependencies(name='c', deptype='build')[0]
- assert '@1.0' in root.dependencies(name='c', deptype=('link', 'run'))[0]
+ assert "@2.0" in root.dependencies(name="c", deptype="build")[0]
+ assert "@1.0" in root.dependencies(name="c", deptype=("link", "run"))[0]
# Check parent from the perspective of the dependencies
assert len(build_spec.dependents()) == 1
@@ -1067,14 +1036,14 @@ def test_synthetic_construction_bootstrapping(mock_packages, config):
# | build
# b@1.0
#
- root = Spec('b@2.0').concretized()
- bootstrap = Spec('b@1.0').concretized()
+ root = Spec("b@2.0").concretized()
+ bootstrap = Spec("b@1.0").concretized()
- root.add_dependency_edge(bootstrap, deptype='build')
+ root.add_dependency_edge(bootstrap, deptype="build")
assert len(root.dependencies()) == 1
- assert root.dependencies()[0].name == 'b'
- assert root.name == 'b'
+ assert root.dependencies()[0].name == "b"
+ assert root.name == "b"
def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config):
@@ -1085,10 +1054,10 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
# b@1.0
#
# with three calls and check we always have a single edge
- root = Spec('b@2.0').concretized()
- bootstrap = Spec('b@1.0').concretized()
+ root = Spec("b@2.0").concretized()
+ bootstrap = Spec("b@1.0").concretized()
- for current_deptype in ('build', 'link', 'run'):
+ for current_deptype in ("build", "link", "run"):
root.add_dependency_edge(bootstrap, deptype=current_deptype)
# Check edges in dependencies
@@ -1106,16 +1075,15 @@ def test_addition_of_different_deptypes_in_multiple_calls(mock_packages, config)
assert id(backward_edge.spec) == id(bootstrap)
-@pytest.mark.parametrize('c1_deptypes,c2_deptypes', [
- ('link', ('build', 'link')),
- (('link', 'run'), ('build', 'link'))
-])
+@pytest.mark.parametrize(
+ "c1_deptypes,c2_deptypes", [("link", ("build", "link")), (("link", "run"), ("build", "link"))]
+)
def test_adding_same_deptype_with_the_same_name_raises(
- mock_packages, config, c1_deptypes, c2_deptypes
+ mock_packages, config, c1_deptypes, c2_deptypes
):
- p = Spec('b@2.0').concretized()
- c1 = Spec('b@1.0').concretized()
- c2 = Spec('b@2.0').concretized()
+ p = Spec("b@2.0").concretized()
+ c1 = Spec("b@1.0").concretized()
+ c2 = Spec("b@2.0").concretized()
p.add_dependency_edge(c1, deptype=c1_deptypes)
with pytest.raises(spack.error.SpackError):
diff --git a/lib/spack/spack/test/spec_list.py b/lib/spack/spack/test/spec_list.py
index d5a26cae2d..487417dff1 100644
--- a/lib/spack/spack/test/spec_list.py
+++ b/lib/spack/spack/test/spec_list.py
@@ -11,176 +11,183 @@ from spack.spec_list import SpecList
class TestSpecList(object):
- default_input = ['mpileaks', '$mpis',
- {'matrix': [['hypre'], ['$gccs', '$clangs']]},
- 'libelf']
-
- default_reference = {'gccs': SpecList('gccs', ['%gcc@4.5.0']),
- 'clangs': SpecList('clangs', ['%clang@3.3']),
- 'mpis': SpecList('mpis', ['zmpi@1.0', 'mpich@3.0'])}
-
- default_expansion = ['mpileaks', 'zmpi@1.0', 'mpich@3.0',
- {'matrix': [
- ['hypre'],
- ['%gcc@4.5.0', '%clang@3.3'],
- ]},
- 'libelf']
-
- default_constraints = [[Spec('mpileaks')],
- [Spec('zmpi@1.0')],
- [Spec('mpich@3.0')],
- [Spec('hypre'), Spec('%gcc@4.5.0')],
- [Spec('hypre'), Spec('%clang@3.3')],
- [Spec('libelf')]]
-
- default_specs = [Spec('mpileaks'), Spec('zmpi@1.0'),
- Spec('mpich@3.0'), Spec('hypre%gcc@4.5.0'),
- Spec('hypre%clang@3.3'), Spec('libelf')]
+ default_input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$gccs", "$clangs"]]}, "libelf"]
+
+ default_reference = {
+ "gccs": SpecList("gccs", ["%gcc@4.5.0"]),
+ "clangs": SpecList("clangs", ["%clang@3.3"]),
+ "mpis": SpecList("mpis", ["zmpi@1.0", "mpich@3.0"]),
+ }
+
+ default_expansion = [
+ "mpileaks",
+ "zmpi@1.0",
+ "mpich@3.0",
+ {
+ "matrix": [
+ ["hypre"],
+ ["%gcc@4.5.0", "%clang@3.3"],
+ ]
+ },
+ "libelf",
+ ]
+
+ default_constraints = [
+ [Spec("mpileaks")],
+ [Spec("zmpi@1.0")],
+ [Spec("mpich@3.0")],
+ [Spec("hypre"), Spec("%gcc@4.5.0")],
+ [Spec("hypre"), Spec("%clang@3.3")],
+ [Spec("libelf")],
+ ]
+
+ default_specs = [
+ Spec("mpileaks"),
+ Spec("zmpi@1.0"),
+ Spec("mpich@3.0"),
+ Spec("hypre%gcc@4.5.0"),
+ Spec("hypre%clang@3.3"),
+ Spec("libelf"),
+ ]
def test_spec_list_expansions(self):
- speclist = SpecList('specs', self.default_input,
- self.default_reference)
+ speclist = SpecList("specs", self.default_input, self.default_reference)
assert speclist.specs_as_yaml_list == self.default_expansion
assert speclist.specs_as_constraints == self.default_constraints
assert speclist.specs == self.default_specs
- @pytest.mark.regression('28749')
- @pytest.mark.parametrize('specs,expected', [
- # Constraints are ordered randomly
- ([{'matrix': [
- ['^zmpi'],
- ['%gcc@4.5.0'],
- ['hypre', 'libelf'],
- ['~shared'],
- ['cflags=-O3', 'cflags="-g -O0"'],
- ['^foo']
- ]}], [
- 'hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi',
- 'hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
- 'libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi',
- 'libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
- ]),
- # A constraint affects both the root and a dependency
- ([{'matrix': [
- ['gromacs'],
- ['%gcc'],
- ['+plumed ^plumed%gcc']
- ]}], [
- 'gromacs+plumed%gcc ^plumed%gcc'
- ])
- ])
+ @pytest.mark.regression("28749")
+ @pytest.mark.parametrize(
+ "specs,expected",
+ [
+ # Constraints are ordered randomly
+ (
+ [
+ {
+ "matrix": [
+ ["^zmpi"],
+ ["%gcc@4.5.0"],
+ ["hypre", "libelf"],
+ ["~shared"],
+ ["cflags=-O3", 'cflags="-g -O0"'],
+ ["^foo"],
+ ]
+ }
+ ],
+ [
+ "hypre cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi",
+ 'hypre cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
+ "libelf cflags=-O3 ~shared %gcc@4.5.0 ^foo ^zmpi",
+ 'libelf cflags="-g -O0" ~shared %gcc@4.5.0 ^foo ^zmpi',
+ ],
+ ),
+ # A constraint affects both the root and a dependency
+ (
+ [{"matrix": [["gromacs"], ["%gcc"], ["+plumed ^plumed%gcc"]]}],
+ ["gromacs+plumed%gcc ^plumed%gcc"],
+ ),
+ ],
+ )
def test_spec_list_constraint_ordering(self, specs, expected):
- speclist = SpecList('specs', specs)
+ speclist = SpecList("specs", specs)
expected_specs = [Spec(x) for x in expected]
assert speclist.specs == expected_specs
def test_spec_list_add(self):
- speclist = SpecList('specs', self.default_input,
- self.default_reference)
+ speclist = SpecList("specs", self.default_input, self.default_reference)
assert speclist.specs_as_yaml_list == self.default_expansion
assert speclist.specs_as_constraints == self.default_constraints
assert speclist.specs == self.default_specs
- speclist.add('libdwarf')
+ speclist.add("libdwarf")
- assert speclist.specs_as_yaml_list == self.default_expansion + [
- 'libdwarf']
- assert speclist.specs_as_constraints == self.default_constraints + [
- [Spec('libdwarf')]]
- assert speclist.specs == self.default_specs + [Spec('libdwarf')]
+ assert speclist.specs_as_yaml_list == self.default_expansion + ["libdwarf"]
+ assert speclist.specs_as_constraints == self.default_constraints + [[Spec("libdwarf")]]
+ assert speclist.specs == self.default_specs + [Spec("libdwarf")]
def test_spec_list_remove(self):
- speclist = SpecList('specs', self.default_input,
- self.default_reference)
+ speclist = SpecList("specs", self.default_input, self.default_reference)
assert speclist.specs_as_yaml_list == self.default_expansion
assert speclist.specs_as_constraints == self.default_constraints
assert speclist.specs == self.default_specs
- speclist.remove('libelf')
+ speclist.remove("libelf")
- assert speclist.specs_as_yaml_list + [
- 'libelf'
- ] == self.default_expansion
+ assert speclist.specs_as_yaml_list + ["libelf"] == self.default_expansion
- assert speclist.specs_as_constraints + [
- [Spec('libelf')]
- ] == self.default_constraints
+ assert speclist.specs_as_constraints + [[Spec("libelf")]] == self.default_constraints
- assert speclist.specs + [Spec('libelf')] == self.default_specs
+ assert speclist.specs + [Spec("libelf")] == self.default_specs
def test_spec_list_update_reference(self):
- speclist = SpecList('specs', self.default_input,
- self.default_reference)
+ speclist = SpecList("specs", self.default_input, self.default_reference)
assert speclist.specs_as_yaml_list == self.default_expansion
assert speclist.specs_as_constraints == self.default_constraints
assert speclist.specs == self.default_specs
- new_mpis = SpecList('mpis', self.default_reference['mpis'].yaml_list)
- new_mpis.add('mpich@3.3')
+ new_mpis = SpecList("mpis", self.default_reference["mpis"].yaml_list)
+ new_mpis.add("mpich@3.3")
new_reference = self.default_reference.copy()
- new_reference['mpis'] = new_mpis
+ new_reference["mpis"] = new_mpis
speclist.update_reference(new_reference)
expansion = list(self.default_expansion)
- expansion.insert(3, 'mpich@3.3')
+ expansion.insert(3, "mpich@3.3")
constraints = list(self.default_constraints)
- constraints.insert(3, [Spec('mpich@3.3')])
+ constraints.insert(3, [Spec("mpich@3.3")])
specs = list(self.default_specs)
- specs.insert(3, Spec('mpich@3.3'))
+ specs.insert(3, Spec("mpich@3.3"))
assert speclist.specs_as_yaml_list == expansion
assert speclist.specs_as_constraints == constraints
assert speclist.specs == specs
def test_spec_list_extension(self):
- speclist = SpecList('specs', self.default_input,
- self.default_reference)
+ speclist = SpecList("specs", self.default_input, self.default_reference)
assert speclist.specs_as_yaml_list == self.default_expansion
assert speclist.specs_as_constraints == self.default_constraints
assert speclist.specs == self.default_specs
new_ref = self.default_reference.copy()
- otherlist = SpecList('specs',
- ['zlib', {'matrix': [['callpath'],
- ['%intel@18']]}],
- new_ref)
+ otherlist = SpecList("specs", ["zlib", {"matrix": [["callpath"], ["%intel@18"]]}], new_ref)
speclist.extend(otherlist)
- assert speclist.specs_as_yaml_list == (self.default_expansion +
- otherlist.specs_as_yaml_list)
+ assert speclist.specs_as_yaml_list == (
+ self.default_expansion + otherlist.specs_as_yaml_list
+ )
assert speclist.specs == self.default_specs + otherlist.specs
assert speclist._reference is new_ref
def test_spec_list_nested_matrices(self):
- inner_matrix = [{'matrix': [['zlib', 'libelf'], ['%gcc', '%intel']]}]
- outer_addition = ['+shared', '~shared']
- outer_matrix = [{'matrix': [inner_matrix, outer_addition]}]
- speclist = SpecList('specs', outer_matrix)
-
- expected_components = itertools.product(['zlib', 'libelf'],
- ['%gcc', '%intel'],
- ['+shared', '~shared'])
- expected = [Spec(' '.join(combo)) for combo in expected_components]
+ inner_matrix = [{"matrix": [["zlib", "libelf"], ["%gcc", "%intel"]]}]
+ outer_addition = ["+shared", "~shared"]
+ outer_matrix = [{"matrix": [inner_matrix, outer_addition]}]
+ speclist = SpecList("specs", outer_matrix)
+
+ expected_components = itertools.product(
+ ["zlib", "libelf"], ["%gcc", "%intel"], ["+shared", "~shared"]
+ )
+ expected = [Spec(" ".join(combo)) for combo in expected_components]
assert set(speclist.specs) == set(expected)
- @pytest.mark.regression('16897')
+ @pytest.mark.regression("16897")
def test_spec_list_recursion_specs_as_constraints(self):
- input = ['mpileaks', '$mpis',
- {'matrix': [['hypre'], ['$%gccs', '$%clangs']]},
- 'libelf']
+ input = ["mpileaks", "$mpis", {"matrix": [["hypre"], ["$%gccs", "$%clangs"]]}, "libelf"]
- reference = {'gccs': SpecList('gccs', ['gcc@4.5.0']),
- 'clangs': SpecList('clangs', ['clang@3.3']),
- 'mpis': SpecList('mpis', ['zmpi@1.0', 'mpich@3.0'])}
+ reference = {
+ "gccs": SpecList("gccs", ["gcc@4.5.0"]),
+ "clangs": SpecList("clangs", ["clang@3.3"]),
+ "mpis": SpecList("mpis", ["zmpi@1.0", "mpich@3.0"]),
+ }
- speclist = SpecList('specs', input, reference)
+ speclist = SpecList("specs", input, reference)
assert speclist.specs_as_yaml_list == self.default_expansion
assert speclist.specs_as_constraints == self.default_constraints
@@ -188,7 +195,8 @@ class TestSpecList(object):
def test_spec_list_matrix_exclude(self, mock_packages):
        # Test on non-boolean variants; regression test for #16841
- matrix = [{'matrix': [['multivalue-variant'], ['foo=bar', 'foo=baz']],
- 'exclude': ['foo=bar']}]
- speclist = SpecList('specs', matrix)
+ matrix = [
+ {"matrix": [["multivalue-variant"], ["foo=bar", "foo=baz"]], "exclude": ["foo=bar"]}
+ ]
+ speclist = SpecList("specs", matrix)
assert len(speclist.specs) == 1
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index db1f68f396..d417adaa18 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -88,278 +88,242 @@ def check_constrain_not_changed(spec, constraint):
def check_invalid_constraint(spec, constraint):
spec = Spec(spec)
constraint = Spec(constraint)
- with pytest.raises((UnsatisfiableSpecError,
- UnconstrainableDependencySpecError)):
+ with pytest.raises((UnsatisfiableSpecError, UnconstrainableDependencySpecError)):
spec.constrain(constraint)
-@pytest.mark.usefixtures('config', 'mock_packages')
+@pytest.mark.usefixtures("config", "mock_packages")
class TestSpecSematics(object):
"""This tests satisfies(), constrain() and other semantic operations
on specs.
"""
+
def test_satisfies(self):
- check_satisfies('libelf@0.8.13', '@0:1')
- check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1')
+ check_satisfies("libelf@0.8.13", "@0:1")
+ check_satisfies("libdwarf^libelf@0.8.13", "^libelf@0:1")
def test_empty_satisfies(self):
# Basic satisfaction
- check_satisfies('libelf', Spec())
- check_satisfies('libdwarf', Spec())
- check_satisfies('%intel', Spec())
- check_satisfies('^mpi', Spec())
- check_satisfies('+debug', Spec())
- check_satisfies('@3:', Spec())
+ check_satisfies("libelf", Spec())
+ check_satisfies("libdwarf", Spec())
+ check_satisfies("%intel", Spec())
+ check_satisfies("^mpi", Spec())
+ check_satisfies("+debug", Spec())
+ check_satisfies("@3:", Spec())
# Concrete (strict) satisfaction
- check_satisfies('libelf', Spec(), True)
- check_satisfies('libdwarf', Spec(), True)
- check_satisfies('%intel', Spec(), True)
- check_satisfies('^mpi', Spec(), True)
+ check_satisfies("libelf", Spec(), True)
+ check_satisfies("libdwarf", Spec(), True)
+ check_satisfies("%intel", Spec(), True)
+ check_satisfies("^mpi", Spec(), True)
# TODO: Variants can't be called concrete while anonymous
# check_satisfies('+debug', Spec(), True)
- check_satisfies('@3:', Spec(), True)
+ check_satisfies("@3:", Spec(), True)
# Reverse (non-strict) satisfaction
- check_satisfies(Spec(), 'libelf')
- check_satisfies(Spec(), 'libdwarf')
- check_satisfies(Spec(), '%intel')
- check_satisfies(Spec(), '^mpi')
+ check_satisfies(Spec(), "libelf")
+ check_satisfies(Spec(), "libdwarf")
+ check_satisfies(Spec(), "%intel")
+ check_satisfies(Spec(), "^mpi")
# TODO: Variant matching is auto-strict
# we should rethink this
# check_satisfies(Spec(), '+debug')
- check_satisfies(Spec(), '@3:')
+ check_satisfies(Spec(), "@3:")
def test_satisfies_namespace(self):
- check_satisfies('builtin.mpich', 'mpich')
- check_satisfies('builtin.mock.mpich', 'mpich')
+ check_satisfies("builtin.mpich", "mpich")
+ check_satisfies("builtin.mock.mpich", "mpich")
# TODO: only works for deps now, but shouldn't we allow for root spec?
# check_satisfies('builtin.mock.mpich', 'mpi')
- check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich')
+ check_satisfies("builtin.mock.mpich", "builtin.mock.mpich")
- check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich')
+ check_unsatisfiable("builtin.mock.mpich", "builtin.mpich")
def test_satisfies_namespaced_dep(self):
"""Ensure spec from same or unspecified namespace satisfies namespace
- constraint."""
- check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
+ constraint."""
+ check_satisfies("mpileaks ^builtin.mock.mpich", "^mpich")
- check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
- check_satisfies(
- 'mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich')
+ check_satisfies("mpileaks ^builtin.mock.mpich", "^mpi")
+ check_satisfies("mpileaks ^builtin.mock.mpich", "^builtin.mock.mpich")
- check_unsatisfiable(
- 'mpileaks ^builtin.mock.mpich', '^builtin.mpich')
+ check_unsatisfiable("mpileaks ^builtin.mock.mpich", "^builtin.mpich")
def test_satisfies_compiler(self):
- check_satisfies('foo%gcc', '%gcc')
- check_satisfies('foo%intel', '%intel')
- check_unsatisfiable('foo%intel', '%gcc')
- check_unsatisfiable('foo%intel', '%pgi')
+ check_satisfies("foo%gcc", "%gcc")
+ check_satisfies("foo%intel", "%intel")
+ check_unsatisfiable("foo%intel", "%gcc")
+ check_unsatisfiable("foo%intel", "%pgi")
def test_satisfies_compiler_version(self):
- check_satisfies('foo%gcc', '%gcc@4.7.2')
- check_satisfies('foo%intel', '%intel@4.7.2')
+ check_satisfies("foo%gcc", "%gcc@4.7.2")
+ check_satisfies("foo%intel", "%intel@4.7.2")
- check_satisfies('foo%pgi@4.5', '%pgi@4.4:4.6')
- check_satisfies('foo@2.0%pgi@4.5', '@1:3%pgi@4.4:4.6')
+ check_satisfies("foo%pgi@4.5", "%pgi@4.4:4.6")
+ check_satisfies("foo@2.0%pgi@4.5", "@1:3%pgi@4.4:4.6")
- check_unsatisfiable('foo%pgi@4.3', '%pgi@4.4:4.6')
- check_unsatisfiable('foo@4.0%pgi', '@1:3%pgi')
- check_unsatisfiable('foo@4.0%pgi@4.5', '@1:3%pgi@4.4:4.6')
+ check_unsatisfiable("foo%pgi@4.3", "%pgi@4.4:4.6")
+ check_unsatisfiable("foo@4.0%pgi", "@1:3%pgi")
+ check_unsatisfiable("foo@4.0%pgi@4.5", "@1:3%pgi@4.4:4.6")
- check_satisfies('foo %gcc@4.7.3', '%gcc@4.7')
- check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3')
+ check_satisfies("foo %gcc@4.7.3", "%gcc@4.7")
+ check_unsatisfiable("foo %gcc@4.7", "%gcc@4.7.3")
def test_satisfies_architecture(self):
+ check_satisfies("foo platform=test", "platform=test")
+ check_satisfies("foo platform=linux", "platform=linux")
+ check_satisfies("foo platform=test", "platform=test target=frontend")
+ check_satisfies("foo platform=test", "platform=test os=frontend target=frontend")
+ check_satisfies("foo platform=test os=frontend target=frontend", "platform=test")
+
+ check_unsatisfiable("foo platform=linux", "platform=test os=redhat6 target=x86")
+ check_unsatisfiable("foo os=redhat6", "platform=test os=debian6 target=x86_64")
+ check_unsatisfiable("foo target=x86_64", "platform=test os=redhat6 target=x86")
+
+ check_satisfies("foo arch=test-None-None", "platform=test")
+ check_satisfies("foo arch=test-None-frontend", "platform=test target=frontend")
check_satisfies(
- 'foo platform=test',
- 'platform=test')
- check_satisfies(
- 'foo platform=linux',
- 'platform=linux')
- check_satisfies(
- 'foo platform=test',
- 'platform=test target=frontend')
- check_satisfies(
- 'foo platform=test',
- 'platform=test os=frontend target=frontend')
- check_satisfies(
- 'foo platform=test os=frontend target=frontend',
- 'platform=test')
-
- check_unsatisfiable(
- 'foo platform=linux',
- 'platform=test os=redhat6 target=x86')
- check_unsatisfiable(
- 'foo os=redhat6',
- 'platform=test os=debian6 target=x86_64')
- check_unsatisfiable(
- 'foo target=x86_64',
- 'platform=test os=redhat6 target=x86')
-
- check_satisfies(
- 'foo arch=test-None-None',
- 'platform=test')
- check_satisfies(
- 'foo arch=test-None-frontend',
- 'platform=test target=frontend')
- check_satisfies(
- 'foo arch=test-frontend-frontend',
- 'platform=test os=frontend target=frontend')
- check_satisfies(
- 'foo arch=test-frontend-frontend',
- 'platform=test')
+ "foo arch=test-frontend-frontend", "platform=test os=frontend target=frontend"
+ )
+ check_satisfies("foo arch=test-frontend-frontend", "platform=test")
check_unsatisfiable(
- 'foo arch=test-frontend-frontend',
- 'platform=test os=frontend target=backend')
+ "foo arch=test-frontend-frontend", "platform=test os=frontend target=backend"
+ )
check_satisfies(
- 'foo platform=test target=frontend os=frontend',
- 'platform=test target=frontend os=frontend')
+ "foo platform=test target=frontend os=frontend",
+ "platform=test target=frontend os=frontend",
+ )
check_satisfies(
- 'foo platform=test target=backend os=backend',
- 'platform=test target=backend os=backend')
+ "foo platform=test target=backend os=backend",
+ "platform=test target=backend os=backend",
+ )
check_satisfies(
- 'foo platform=test target=default_target os=default_os',
- 'platform=test os=default_os')
+ "foo platform=test target=default_target os=default_os", "platform=test os=default_os"
+ )
check_unsatisfiable(
- 'foo platform=test target=x86 os=redhat6',
- 'platform=linux target=x86 os=redhat6')
+ "foo platform=test target=x86 os=redhat6", "platform=linux target=x86 os=redhat6"
+ )
def test_satisfies_dependencies(self):
- check_satisfies('mpileaks^mpich', '^mpich')
- check_satisfies('mpileaks^zmpi', '^zmpi')
+ check_satisfies("mpileaks^mpich", "^mpich")
+ check_satisfies("mpileaks^zmpi", "^zmpi")
- check_unsatisfiable('mpileaks^mpich', '^zmpi')
- check_unsatisfiable('mpileaks^zmpi', '^mpich')
+ check_unsatisfiable("mpileaks^mpich", "^zmpi")
+ check_unsatisfiable("mpileaks^zmpi", "^mpich")
def test_satisfies_dependency_versions(self):
- check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3')
- check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0')
+ check_satisfies("mpileaks^mpich@2.0", "^mpich@1:3")
+ check_unsatisfiable("mpileaks^mpich@1.2", "^mpich@2.0")
- check_satisfies(
- 'mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- check_unsatisfiable(
- 'mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- check_unsatisfiable(
- 'mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
- check_unsatisfiable(
- 'mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
+ check_satisfies("mpileaks^mpich@2.0^callpath@1.5", "^mpich@1:3^callpath@1.4:1.6")
+ check_unsatisfiable("mpileaks^mpich@4.0^callpath@1.5", "^mpich@1:3^callpath@1.4:1.6")
+ check_unsatisfiable("mpileaks^mpich@2.0^callpath@1.7", "^mpich@1:3^callpath@1.4:1.6")
+ check_unsatisfiable("mpileaks^mpich@4.0^callpath@1.7", "^mpich@1:3^callpath@1.4:1.6")
def test_satisfies_virtual_dependencies(self):
- check_satisfies('mpileaks^mpi', '^mpi')
- check_satisfies('mpileaks^mpi', '^mpich')
+ check_satisfies("mpileaks^mpi", "^mpi")
+ check_satisfies("mpileaks^mpi", "^mpich")
- check_satisfies('mpileaks^mpi', '^zmpi')
- check_unsatisfiable('mpileaks^mpich', '^zmpi')
+ check_satisfies("mpileaks^mpi", "^zmpi")
+ check_unsatisfiable("mpileaks^mpich", "^zmpi")
def test_satisfies_virtual_dependency_versions(self):
- check_satisfies('mpileaks^mpi@1.5', '^mpi@1.2:1.6')
- check_unsatisfiable('mpileaks^mpi@3', '^mpi@1.2:1.6')
+ check_satisfies("mpileaks^mpi@1.5", "^mpi@1.2:1.6")
+ check_unsatisfiable("mpileaks^mpi@3", "^mpi@1.2:1.6")
- check_satisfies('mpileaks^mpi@2:', '^mpich')
- check_satisfies('mpileaks^mpi@2:', '^mpich@3.0.4')
- check_satisfies('mpileaks^mpi@2:', '^mpich2@1.4')
+ check_satisfies("mpileaks^mpi@2:", "^mpich")
+ check_satisfies("mpileaks^mpi@2:", "^mpich@3.0.4")
+ check_satisfies("mpileaks^mpi@2:", "^mpich2@1.4")
- check_satisfies('mpileaks^mpi@1:', '^mpich2')
- check_satisfies('mpileaks^mpi@2:', '^mpich2')
+ check_satisfies("mpileaks^mpi@1:", "^mpich2")
+ check_satisfies("mpileaks^mpi@2:", "^mpich2")
- check_unsatisfiable('mpileaks^mpi@3:', '^mpich2@1.4')
- check_unsatisfiable('mpileaks^mpi@3:', '^mpich2')
- check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0')
+ check_unsatisfiable("mpileaks^mpi@3:", "^mpich2@1.4")
+ check_unsatisfiable("mpileaks^mpi@3:", "^mpich2")
+ check_unsatisfiable("mpileaks^mpi@3:", "^mpich@1.0")
def test_satisfies_matching_variant(self):
- check_satisfies('mpich+foo', 'mpich+foo')
- check_satisfies('mpich~foo', 'mpich~foo')
- check_satisfies('mpich foo=1', 'mpich foo=1')
+ check_satisfies("mpich+foo", "mpich+foo")
+ check_satisfies("mpich~foo", "mpich~foo")
+ check_satisfies("mpich foo=1", "mpich foo=1")
# confirm that synonymous syntax works correctly
- check_satisfies('mpich+foo', 'mpich foo=True')
- check_satisfies('mpich foo=true', 'mpich+foo')
- check_satisfies('mpich~foo', 'mpich foo=FALSE')
- check_satisfies('mpich foo=False', 'mpich~foo')
- check_satisfies('mpich foo=*', 'mpich~foo')
- check_satisfies('mpich +foo', 'mpich foo=*')
+ check_satisfies("mpich+foo", "mpich foo=True")
+ check_satisfies("mpich foo=true", "mpich+foo")
+ check_satisfies("mpich~foo", "mpich foo=FALSE")
+ check_satisfies("mpich foo=False", "mpich~foo")
+ check_satisfies("mpich foo=*", "mpich~foo")
+ check_satisfies("mpich +foo", "mpich foo=*")
def test_satisfies_multi_value_variant(self):
# Check quoting
- check_satisfies('multivalue-variant foo="bar,baz"',
- 'multivalue-variant foo="bar,baz"')
- check_satisfies('multivalue-variant foo=bar,baz',
- 'multivalue-variant foo=bar,baz')
- check_satisfies('multivalue-variant foo="bar,baz"',
- 'multivalue-variant foo=bar,baz')
+ check_satisfies('multivalue-variant foo="bar,baz"', 'multivalue-variant foo="bar,baz"')
+ check_satisfies("multivalue-variant foo=bar,baz", "multivalue-variant foo=bar,baz")
+ check_satisfies('multivalue-variant foo="bar,baz"', "multivalue-variant foo=bar,baz")
# A more constrained spec satisfies a less constrained one
- check_satisfies('multivalue-variant foo="bar,baz"',
- 'multivalue-variant foo=*')
+ check_satisfies('multivalue-variant foo="bar,baz"', "multivalue-variant foo=*")
- check_satisfies('multivalue-variant foo=*',
- 'multivalue-variant foo="bar,baz"')
+ check_satisfies("multivalue-variant foo=*", 'multivalue-variant foo="bar,baz"')
- check_satisfies('multivalue-variant foo="bar,baz"',
- 'multivalue-variant foo="bar"')
+ check_satisfies('multivalue-variant foo="bar,baz"', 'multivalue-variant foo="bar"')
- check_satisfies('multivalue-variant foo="bar,baz"',
- 'multivalue-variant foo="baz"')
+ check_satisfies('multivalue-variant foo="bar,baz"', 'multivalue-variant foo="baz"')
- check_satisfies('multivalue-variant foo="bar,baz,barbaz"',
- 'multivalue-variant foo="bar,baz"')
+ check_satisfies(
+ 'multivalue-variant foo="bar,baz,barbaz"', 'multivalue-variant foo="bar,baz"'
+ )
- check_satisfies('multivalue-variant foo="bar,baz"',
- 'foo="bar,baz"')
+ check_satisfies('multivalue-variant foo="bar,baz"', 'foo="bar,baz"')
- check_satisfies('multivalue-variant foo="bar,baz"',
- 'foo="bar"')
+ check_satisfies('multivalue-variant foo="bar,baz"', 'foo="bar"')
def test_satisfies_single_valued_variant(self):
"""Tests that the case reported in
https://github.com/spack/spack/pull/2386#issuecomment-282147639
is handled correctly.
"""
- a = Spec('a foobar=bar')
+ a = Spec("a foobar=bar")
a.concretize()
- assert a.satisfies('foobar=bar')
- assert a.satisfies('foobar=*')
+ assert a.satisfies("foobar=bar")
+ assert a.satisfies("foobar=*")
# Assert that an autospec generated from a literal
# gives the right result for a single valued variant
- assert 'foobar=bar' in a
- assert 'foobar=baz' not in a
- assert 'foobar=fee' not in a
+ assert "foobar=bar" in a
+ assert "foobar=baz" not in a
+ assert "foobar=fee" not in a
# ... and for a multi valued variant
- assert 'foo=bar' in a
+ assert "foo=bar" in a
# Check that conditional dependencies are treated correctly
- assert '^b' in a
+ assert "^b" in a
def test_unsatisfied_single_valued_variant(self):
- a = Spec('a foobar=baz')
+ a = Spec("a foobar=baz")
a.concretize()
- assert '^b' not in a
+ assert "^b" not in a
- mv = Spec('multivalue-variant')
+ mv = Spec("multivalue-variant")
mv.concretize()
- assert 'a@1.0' not in mv
+ assert "a@1.0" not in mv
def test_indirect_unsatisfied_single_valued_variant(self):
- spec = Spec('singlevalue-variant-dependent')
+ spec = Spec("singlevalue-variant-dependent")
spec.concretize()
- assert 'a@1.0' not in spec
+ assert "a@1.0" not in spec
def test_unsatisfiable_multi_value_variant(self):
        # Semantics for a multi-valued variant are different
        # depending on whether the spec is concrete or not
- a = make_spec(
- 'multivalue-variant foo="bar"', concrete=True
- )
+ a = make_spec('multivalue-variant foo="bar"', concrete=True)
spec_str = 'multivalue-variant foo="bar,baz"'
b = Spec(spec_str)
assert not a.satisfies(b)
@@ -377,9 +341,7 @@ class TestSpecSematics(object):
# An abstract spec can instead be constrained
assert a.constrain(b)
- a = make_spec(
- 'multivalue-variant foo="bar,baz"', concrete=True
- )
+ a = make_spec('multivalue-variant foo="bar,baz"', concrete=True)
spec_str = 'multivalue-variant foo="bar,baz,quux"'
b = Spec(spec_str)
assert not a.satisfies(b)
@@ -430,62 +392,60 @@ class TestSpecSematics(object):
check_unsatisfiable(
target_spec='multivalue-variant foo="bar"',
- constraint_spec='multivalue-variant +foo',
- target_concrete=True
+ constraint_spec="multivalue-variant +foo",
+ target_concrete=True,
)
check_unsatisfiable(
target_spec='multivalue-variant foo="bar"',
- constraint_spec='multivalue-variant ~foo',
- target_concrete=True
+ constraint_spec="multivalue-variant ~foo",
+ target_concrete=True,
)
def test_satisfies_unconstrained_variant(self):
# only asked for mpich, no constraints. Either will do.
- check_satisfies('mpich+foo', 'mpich')
- check_satisfies('mpich~foo', 'mpich')
- check_satisfies('mpich foo=1', 'mpich')
+ check_satisfies("mpich+foo", "mpich")
+ check_satisfies("mpich~foo", "mpich")
+ check_satisfies("mpich foo=1", "mpich")
def test_unsatisfiable_variants(self):
# This case is different depending on whether the specs are concrete.
# 'mpich' is not concrete:
- check_satisfies('mpich', 'mpich+foo', False)
- check_satisfies('mpich', 'mpich~foo', False)
- check_satisfies('mpich', 'mpich foo=1', False)
+ check_satisfies("mpich", "mpich+foo", False)
+ check_satisfies("mpich", "mpich~foo", False)
+ check_satisfies("mpich", "mpich foo=1", False)
# 'mpich' is concrete:
- check_unsatisfiable('mpich', 'mpich+foo', True)
- check_unsatisfiable('mpich', 'mpich~foo', True)
- check_unsatisfiable('mpich', 'mpich foo=1', True)
+ check_unsatisfiable("mpich", "mpich+foo", True)
+ check_unsatisfiable("mpich", "mpich~foo", True)
+ check_unsatisfiable("mpich", "mpich foo=1", True)
def test_unsatisfiable_variant_mismatch(self):
# No match in specs
- check_unsatisfiable('mpich~foo', 'mpich+foo')
- check_unsatisfiable('mpich+foo', 'mpich~foo')
- check_unsatisfiable('mpich foo=True', 'mpich foo=False')
+ check_unsatisfiable("mpich~foo", "mpich+foo")
+ check_unsatisfiable("mpich+foo", "mpich~foo")
+ check_unsatisfiable("mpich foo=True", "mpich foo=False")
def test_satisfies_matching_compiler_flag(self):
check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"')
- check_satisfies(
- 'mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"'
- )
+ check_satisfies('mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"')
def test_satisfies_unconstrained_compiler_flag(self):
# only asked for mpich, no constraints. Any will do.
- check_satisfies('mpich cppflags="-O3"', 'mpich')
+ check_satisfies('mpich cppflags="-O3"', "mpich")
def test_unsatisfiable_compiler_flag(self):
# This case is different depending on whether the specs are concrete.
# 'mpich' is not concrete:
- check_satisfies('mpich', 'mpich cppflags="-O3"', False)
+ check_satisfies("mpich", 'mpich cppflags="-O3"', False)
# 'mpich' is concrete:
- check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True)
+ check_unsatisfiable("mpich", 'mpich cppflags="-O3"', True)
def test_copy_satisfies_transitive(self):
- spec = Spec('dttop')
+ spec = Spec("dttop")
spec.concretize()
copy = spec.copy()
for s in spec.traverse():
@@ -494,35 +454,26 @@ class TestSpecSematics(object):
def test_unsatisfiable_compiler_flag_mismatch(self):
# No match in specs
- check_unsatisfiable(
- 'mpich cppflags="-O3"', 'mpich cppflags="-O2"')
+ check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags="-O2"')
def test_satisfies_virtual(self):
# Don't use check_satisfies: it checks constrain() too, and
# you can't constrain a non-virtual by a virtual.
- assert Spec('mpich').satisfies(Spec('mpi'))
- assert Spec('mpich2').satisfies(Spec('mpi'))
- assert Spec('zmpi').satisfies(Spec('mpi'))
+ assert Spec("mpich").satisfies(Spec("mpi"))
+ assert Spec("mpich2").satisfies(Spec("mpi"))
+ assert Spec("zmpi").satisfies(Spec("mpi"))
def test_satisfies_virtual_dep_with_virtual_constraint(self):
"""Ensure we can satisfy virtual constraints when there are multiple
- vdep providers in the specs."""
- assert Spec('netlib-lapack ^openblas').satisfies(
- 'netlib-lapack ^openblas'
- )
- assert not Spec('netlib-lapack ^netlib-blas').satisfies(
- 'netlib-lapack ^openblas'
- )
- assert not Spec('netlib-lapack ^openblas').satisfies(
- 'netlib-lapack ^netlib-blas'
- )
- assert Spec('netlib-lapack ^netlib-blas').satisfies(
- 'netlib-lapack ^netlib-blas'
- )
+ vdep providers in the specs."""
+ assert Spec("netlib-lapack ^openblas").satisfies("netlib-lapack ^openblas")
+ assert not Spec("netlib-lapack ^netlib-blas").satisfies("netlib-lapack ^openblas")
+ assert not Spec("netlib-lapack ^openblas").satisfies("netlib-lapack ^netlib-blas")
+ assert Spec("netlib-lapack ^netlib-blas").satisfies("netlib-lapack ^netlib-blas")
def test_satisfies_same_spec_with_different_hash(self):
"""Ensure that concrete specs are matched *exactly* by hash."""
- s1 = Spec('mpileaks').concretized()
+ s1 = Spec("mpileaks").concretized()
s2 = s1.copy()
assert s1.satisfies(s2)
@@ -539,214 +490,184 @@ class TestSpecSematics(object):
# Indexing specs
# ========================================================================
def test_self_index(self):
- s = Spec('callpath')
- assert s['callpath'] == s
+ s = Spec("callpath")
+ assert s["callpath"] == s
def test_dep_index(self):
- s = Spec('callpath')
+ s = Spec("callpath")
s.normalize()
- assert s['callpath'] == s
- assert type(s['dyninst']) == Spec
- assert type(s['libdwarf']) == Spec
- assert type(s['libelf']) == Spec
- assert type(s['mpi']) == Spec
+ assert s["callpath"] == s
+ assert type(s["dyninst"]) == Spec
+ assert type(s["libdwarf"]) == Spec
+ assert type(s["libelf"]) == Spec
+ assert type(s["mpi"]) == Spec
- assert s['dyninst'].name == 'dyninst'
- assert s['libdwarf'].name == 'libdwarf'
- assert s['libelf'].name == 'libelf'
- assert s['mpi'].name == 'mpi'
+ assert s["dyninst"].name == "dyninst"
+ assert s["libdwarf"].name == "libdwarf"
+ assert s["libelf"].name == "libelf"
+ assert s["mpi"].name == "mpi"
def test_spec_contains_deps(self):
- s = Spec('callpath')
+ s = Spec("callpath")
s.normalize()
- assert 'dyninst' in s
- assert 'libdwarf' in s
- assert 'libelf' in s
- assert 'mpi' in s
+ assert "dyninst" in s
+ assert "libdwarf" in s
+ assert "libelf" in s
+ assert "mpi" in s
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_virtual_index(self):
- s = Spec('callpath')
+ s = Spec("callpath")
s.concretize()
- s_mpich = Spec('callpath ^mpich')
+ s_mpich = Spec("callpath ^mpich")
s_mpich.concretize()
- s_mpich2 = Spec('callpath ^mpich2')
+ s_mpich2 = Spec("callpath ^mpich2")
s_mpich2.concretize()
- s_zmpi = Spec('callpath ^zmpi')
+ s_zmpi = Spec("callpath ^zmpi")
s_zmpi.concretize()
- assert s['mpi'].name != 'mpi'
- assert s_mpich['mpi'].name == 'mpich'
- assert s_mpich2['mpi'].name == 'mpich2'
- assert s_zmpi['zmpi'].name == 'zmpi'
+ assert s["mpi"].name != "mpi"
+ assert s_mpich["mpi"].name == "mpich"
+ assert s_mpich2["mpi"].name == "mpich2"
+ assert s_zmpi["zmpi"].name == "zmpi"
for spec in [s, s_mpich, s_mpich2, s_zmpi]:
- assert 'mpi' in spec
+ assert "mpi" in spec
# ========================================================================
# Constraints
# ========================================================================
def test_constrain_variants(self):
- check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3')
- check_constrain(
- 'libelf@2.1:2.5%gcc@4.5:4.6',
- 'libelf@0:2.5%gcc@2:4.6',
- 'libelf@2.1:3%gcc@4.5:4.7'
- )
- check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
- check_constrain(
- 'libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo'
- )
+ check_constrain("libelf@2.1:2.5", "libelf@0:2.5", "libelf@2.1:3")
check_constrain(
- 'libelf debug=2 foo=1', 'libelf debug=2', 'libelf foo=1'
- )
- check_constrain(
- 'libelf debug=2 foo=1', 'libelf debug=2', 'libelf debug=2 foo=1'
+ "libelf@2.1:2.5%gcc@4.5:4.6", "libelf@0:2.5%gcc@2:4.6", "libelf@2.1:3%gcc@4.5:4.7"
)
+ check_constrain("libelf+debug+foo", "libelf+debug", "libelf+foo")
+ check_constrain("libelf+debug+foo", "libelf+debug", "libelf+debug+foo")
+ check_constrain("libelf debug=2 foo=1", "libelf debug=2", "libelf foo=1")
+ check_constrain("libelf debug=2 foo=1", "libelf debug=2", "libelf debug=2 foo=1")
- check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
- check_constrain(
- 'libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo'
- )
+ check_constrain("libelf+debug~foo", "libelf+debug", "libelf~foo")
+ check_constrain("libelf+debug~foo", "libelf+debug", "libelf+debug~foo")
def test_constrain_multi_value_variant(self):
check_constrain(
'multivalue-variant foo="bar,baz"',
'multivalue-variant foo="bar"',
- 'multivalue-variant foo="baz"'
+ 'multivalue-variant foo="baz"',
)
check_constrain(
'multivalue-variant foo="bar,baz,barbaz"',
'multivalue-variant foo="bar,barbaz"',
- 'multivalue-variant foo="baz"'
+ 'multivalue-variant foo="baz"',
)
- check_constrain(
- 'libelf foo=bar,baz', 'libelf foo=bar,baz', 'libelf foo=*')
- check_constrain(
- 'libelf foo=bar,baz', 'libelf foo=*', 'libelf foo=bar,baz')
+ check_constrain("libelf foo=bar,baz", "libelf foo=bar,baz", "libelf foo=*")
+ check_constrain("libelf foo=bar,baz", "libelf foo=*", "libelf foo=bar,baz")
def test_constrain_compiler_flags(self):
check_constrain(
'libelf cflags="-O3" cppflags="-Wall"',
'libelf cflags="-O3"',
- 'libelf cppflags="-Wall"'
+ 'libelf cppflags="-Wall"',
)
check_constrain(
'libelf cflags="-O3" cppflags="-Wall"',
'libelf cflags="-O3"',
- 'libelf cflags="-O3" cppflags="-Wall"'
+ 'libelf cflags="-O3" cppflags="-Wall"',
)
def test_constrain_architecture(self):
check_constrain(
- 'libelf target=default_target os=default_os',
- 'libelf target=default_target os=default_os',
- 'libelf target=default_target os=default_os'
+ "libelf target=default_target os=default_os",
+ "libelf target=default_target os=default_os",
+ "libelf target=default_target os=default_os",
)
check_constrain(
- 'libelf target=default_target os=default_os',
- 'libelf',
- 'libelf target=default_target os=default_os'
+ "libelf target=default_target os=default_os",
+ "libelf",
+ "libelf target=default_target os=default_os",
)
def test_constrain_compiler(self):
- check_constrain(
- 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7'
- )
- check_constrain(
- 'libelf %gcc@4.4.7', 'libelf', 'libelf %gcc@4.4.7'
- )
+ check_constrain("libelf %gcc@4.4.7", "libelf %gcc@4.4.7", "libelf %gcc@4.4.7")
+ check_constrain("libelf %gcc@4.4.7", "libelf", "libelf %gcc@4.4.7")
def test_invalid_constraint(self):
- check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3')
- check_invalid_constraint(
- 'libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7')
-
- check_invalid_constraint('libelf+debug', 'libelf~debug')
- check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
- check_invalid_constraint('libelf debug=True', 'libelf debug=False')
-
- check_invalid_constraint(
- 'libelf cppflags="-O3"', 'libelf cppflags="-O2"')
- check_invalid_constraint(
- 'libelf platform=test target=be os=be', 'libelf target=fe os=fe'
- )
- check_invalid_constraint('libdwarf', '^%gcc')
+ check_invalid_constraint("libelf@0:2.0", "libelf@2.1:3")
+ check_invalid_constraint("libelf@0:2.5%gcc@4.8:4.9", "libelf@2.1:3%gcc@4.5:4.7")
+
+ check_invalid_constraint("libelf+debug", "libelf~debug")
+ check_invalid_constraint("libelf+debug~foo", "libelf+debug+foo")
+ check_invalid_constraint("libelf debug=True", "libelf debug=False")
+
+ check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"')
+ check_invalid_constraint("libelf platform=test target=be os=be", "libelf target=fe os=fe")
+ check_invalid_constraint("libdwarf", "^%gcc")
def test_constrain_changed(self):
- check_constrain_changed('libelf', '@1.0')
- check_constrain_changed('libelf', '@1.0:5.0')
- check_constrain_changed('libelf', '%gcc')
- check_constrain_changed('libelf%gcc', '%gcc@4.5')
- check_constrain_changed('libelf', '+debug')
- check_constrain_changed('libelf', 'debug=*')
- check_constrain_changed('libelf', '~debug')
- check_constrain_changed('libelf', 'debug=2')
- check_constrain_changed('libelf', 'cppflags="-O3"')
+ check_constrain_changed("libelf", "@1.0")
+ check_constrain_changed("libelf", "@1.0:5.0")
+ check_constrain_changed("libelf", "%gcc")
+ check_constrain_changed("libelf%gcc", "%gcc@4.5")
+ check_constrain_changed("libelf", "+debug")
+ check_constrain_changed("libelf", "debug=*")
+ check_constrain_changed("libelf", "~debug")
+ check_constrain_changed("libelf", "debug=2")
+ check_constrain_changed("libelf", 'cppflags="-O3"')
platform = spack.platforms.host()
- check_constrain_changed(
- 'libelf', 'target=' + platform.target('default_target').name)
- check_constrain_changed(
- 'libelf', 'os=' + platform.operating_system('default_os').name)
+ check_constrain_changed("libelf", "target=" + platform.target("default_target").name)
+ check_constrain_changed("libelf", "os=" + platform.operating_system("default_os").name)
def test_constrain_not_changed(self):
- check_constrain_not_changed('libelf', 'libelf')
- check_constrain_not_changed('libelf@1.0', '@1.0')
- check_constrain_not_changed('libelf@1.0:5.0', '@1.0:5.0')
- check_constrain_not_changed('libelf%gcc', '%gcc')
- check_constrain_not_changed('libelf%gcc@4.5', '%gcc@4.5')
- check_constrain_not_changed('libelf+debug', '+debug')
- check_constrain_not_changed('libelf~debug', '~debug')
- check_constrain_not_changed('libelf debug=2', 'debug=2')
- check_constrain_not_changed('libelf debug=2', 'debug=*')
- check_constrain_not_changed(
- 'libelf cppflags="-O3"', 'cppflags="-O3"')
+ check_constrain_not_changed("libelf", "libelf")
+ check_constrain_not_changed("libelf@1.0", "@1.0")
+ check_constrain_not_changed("libelf@1.0:5.0", "@1.0:5.0")
+ check_constrain_not_changed("libelf%gcc", "%gcc")
+ check_constrain_not_changed("libelf%gcc@4.5", "%gcc@4.5")
+ check_constrain_not_changed("libelf+debug", "+debug")
+ check_constrain_not_changed("libelf~debug", "~debug")
+ check_constrain_not_changed("libelf debug=2", "debug=2")
+ check_constrain_not_changed("libelf debug=2", "debug=*")
+ check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"')
platform = spack.platforms.host()
- default_target = platform.target('default_target').name
- check_constrain_not_changed(
- 'libelf target=' + default_target, 'target=' + default_target)
+ default_target = platform.target("default_target").name
+ check_constrain_not_changed("libelf target=" + default_target, "target=" + default_target)
def test_constrain_dependency_changed(self):
- check_constrain_changed('libelf^foo', 'libelf^foo@1.0')
- check_constrain_changed('libelf^foo', 'libelf^foo@1.0:5.0')
- check_constrain_changed('libelf^foo', 'libelf^foo%gcc')
- check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
- check_constrain_changed('libelf^foo', 'libelf^foo+debug')
- check_constrain_changed('libelf^foo', 'libelf^foo~debug')
- check_constrain_changed('libelf', '^foo')
+ check_constrain_changed("libelf^foo", "libelf^foo@1.0")
+ check_constrain_changed("libelf^foo", "libelf^foo@1.0:5.0")
+ check_constrain_changed("libelf^foo", "libelf^foo%gcc")
+ check_constrain_changed("libelf^foo%gcc", "libelf^foo%gcc@4.5")
+ check_constrain_changed("libelf^foo", "libelf^foo+debug")
+ check_constrain_changed("libelf^foo", "libelf^foo~debug")
+ check_constrain_changed("libelf", "^foo")
platform = spack.platforms.host()
- default_target = platform.target('default_target').name
- check_constrain_changed(
- 'libelf^foo', 'libelf^foo target=' + default_target)
+ default_target = platform.target("default_target").name
+ check_constrain_changed("libelf^foo", "libelf^foo target=" + default_target)
def test_constrain_dependency_not_changed(self):
- check_constrain_not_changed('libelf^foo@1.0', 'libelf^foo@1.0')
- check_constrain_not_changed(
- 'libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0')
- check_constrain_not_changed('libelf^foo%gcc', 'libelf^foo%gcc')
- check_constrain_not_changed(
- 'libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
- check_constrain_not_changed(
- 'libelf^foo+debug', 'libelf^foo+debug')
- check_constrain_not_changed(
- 'libelf^foo~debug', 'libelf^foo~debug')
- check_constrain_not_changed(
- 'libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
+ check_constrain_not_changed("libelf^foo@1.0", "libelf^foo@1.0")
+ check_constrain_not_changed("libelf^foo@1.0:5.0", "libelf^foo@1.0:5.0")
+ check_constrain_not_changed("libelf^foo%gcc", "libelf^foo%gcc")
+ check_constrain_not_changed("libelf^foo%gcc@4.5", "libelf^foo%gcc@4.5")
+ check_constrain_not_changed("libelf^foo+debug", "libelf^foo+debug")
+ check_constrain_not_changed("libelf^foo~debug", "libelf^foo~debug")
+ check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
platform = spack.platforms.host()
- default_target = platform.target('default_target').name
+ default_target = platform.target("default_target").name
check_constrain_not_changed(
- 'libelf^foo target=' + default_target,
- 'libelf^foo target=' + default_target)
+ "libelf^foo target=" + default_target, "libelf^foo target=" + default_target
+ )
def test_exceptional_paths_for_constructor(self):
@@ -754,10 +675,10 @@ class TestSpecSematics(object):
Spec((1, 2))
with pytest.raises(ValueError):
- Spec('')
+ Spec("")
with pytest.raises(ValueError):
- Spec('libelf foo')
+ Spec("libelf foo")
def test_spec_formatting(self):
spec = Spec("multivalue-variant cflags=-O2")
@@ -767,40 +688,46 @@ class TestSpecSematics(object):
# spec is the same as the output of spec.format()
# ignoring whitespace (though should we?) and ignoring dependencies
spec_string = str(spec)
- idx = spec_string.index(' ^')
+ idx = spec_string.index(" ^")
assert spec_string[:idx] == spec.format().strip()
# Testing named strings ie {string} and whether we get
# the correct component
# Mixed case intentional to test both
- package_segments = [("{NAME}", "name"),
- ("{VERSION}", "versions"),
- ("{compiler}", "compiler"),
- ("{compiler_flags}", "compiler_flags"),
- ("{variants}", "variants"),
- ("{architecture}", "architecture")]
-
- sigil_package_segments = [("{@VERSIONS}", '@' + str(spec.version)),
- ("{%compiler}", '%' + str(spec.compiler)),
- ("{arch=architecture}",
- 'arch=' + str(spec.architecture))]
-
- compiler_segments = [("{compiler.name}", "name"),
- ("{compiler.version}", "versions")]
-
- sigil_compiler_segments = [("{%compiler.name}",
- '%' + spec.compiler.name),
- ("{@compiler.version}",
- '@' + str(spec.compiler.version))]
-
- architecture_segments = [("{architecture.platform}", "platform"),
- ("{architecture.os}", "os"),
- ("{architecture.target}", "target")]
-
- other_segments = [('{spack_root}', spack.paths.spack_root),
- ('{spack_install}', spack.store.layout.root),
- ('{hash:7}', spec.dag_hash(7)),
- ('{/hash}', '/' + spec.dag_hash())]
+ package_segments = [
+ ("{NAME}", "name"),
+ ("{VERSION}", "versions"),
+ ("{compiler}", "compiler"),
+ ("{compiler_flags}", "compiler_flags"),
+ ("{variants}", "variants"),
+ ("{architecture}", "architecture"),
+ ]
+
+ sigil_package_segments = [
+ ("{@VERSIONS}", "@" + str(spec.version)),
+ ("{%compiler}", "%" + str(spec.compiler)),
+ ("{arch=architecture}", "arch=" + str(spec.architecture)),
+ ]
+
+ compiler_segments = [("{compiler.name}", "name"), ("{compiler.version}", "versions")]
+
+ sigil_compiler_segments = [
+ ("{%compiler.name}", "%" + spec.compiler.name),
+ ("{@compiler.version}", "@" + str(spec.compiler.version)),
+ ]
+
+ architecture_segments = [
+ ("{architecture.platform}", "platform"),
+ ("{architecture.os}", "os"),
+ ("{architecture.target}", "target"),
+ ]
+
+ other_segments = [
+ ("{spack_root}", spack.paths.spack_root),
+ ("{spack_install}", spack.store.layout.root),
+ ("{hash:7}", spec.dag_hash(7)),
+ ("{/hash}", "/" + spec.dag_hash()),
+ ]
for named_str, prop in package_segments:
expected = getattr(spec, prop, "")
@@ -832,15 +759,15 @@ class TestSpecSematics(object):
assert expected == actual
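A hedged aside on the format tokens covered by the lists above (the output values are illustrative, and the spec only concretizes with the surrounding test fixtures):

from spack.spec import Spec

s = Spec("multivalue-variant cflags=-O2").concretized()

s.format("{name}")                      # package name
s.format("{name}-{version}-{hash:7}")   # a typical projection-style string
s.format("{compiler.name}@{compiler.version}")
s.format("{/hash}")                     # "/" followed by the full DAG hash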
def test_spec_formatting_escapes(self):
- spec = Spec('multivalue-variant cflags=-O2')
+ spec = Spec("multivalue-variant cflags=-O2")
spec.concretize()
sigil_mismatches = [
- '{@name}',
- '{@version.concrete}',
- '{%compiler.version}',
- '{/hashd}',
- '{arch=architecture.os}'
+ "{@name}",
+ "{@version.concrete}",
+ "{%compiler.version}",
+ "{/hashd}",
+ "{arch=architecture.os}",
]
for fmt_str in sigil_mismatches:
@@ -848,15 +775,15 @@ class TestSpecSematics(object):
spec.format(fmt_str)
bad_formats = [
- r'{}',
- r'name}',
- r'\{name}',
- r'{name',
- r'{name\}',
- r'{_concrete}',
- r'{dag_hash}',
- r'{foo}',
- r'{+variants.debug}'
+ r"{}",
+ r"name}",
+ r"\{name}",
+ r"{name",
+ r"{name\}",
+ r"{_concrete}",
+ r"{dag_hash}",
+ r"{foo}",
+ r"{+variants.debug}",
]
for fmt_str in bad_formats:
@@ -870,24 +797,27 @@ class TestSpecSematics(object):
# Since the default is the full spec see if the string rep of
# spec is the same as the output of spec.format()
# ignoring whitespace (though should we?)
- assert str(spec) == spec.format('$_$@$%@+$+$=').strip()
+ assert str(spec) == spec.format("$_$@$%@+$+$=").strip()
# Testing named strings ie {string} and whether we get
# the correct component
# Mixed case intentional for testing both
- package_segments = [("${PACKAGE}", "name"),
- ("${VERSION}", "versions"),
- ("${compiler}", "compiler"),
- ("${compilerflags}", "compiler_flags"),
- ("${options}", "variants"),
- ("${architecture}", "architecture")]
+ package_segments = [
+ ("${PACKAGE}", "name"),
+ ("${VERSION}", "versions"),
+ ("${compiler}", "compiler"),
+ ("${compilerflags}", "compiler_flags"),
+ ("${options}", "variants"),
+ ("${architecture}", "architecture"),
+ ]
- compiler_segments = [("${compilername}", "name"),
- ("${compilerver}", "versions")]
+ compiler_segments = [("${compilername}", "name"), ("${compilerver}", "versions")]
- architecture_segments = [("${PLATFORM}", "platform"),
- ("${OS}", "os"),
- ("${TARGET}", "target")]
+ architecture_segments = [
+ ("${PLATFORM}", "platform"),
+ ("${OS}", "os"),
+ ("${TARGET}", "target"),
+ ]
for named_str, prop in package_segments:
expected = getattr(spec, prop, "")
@@ -906,103 +836,95 @@ class TestSpecSematics(object):
actual = spec.format(named_str)
assert str(expected) == actual
- @pytest.mark.regression('9908')
+ @pytest.mark.regression("9908")
def test_spec_flags_maintain_order(self):
# Spack was assembling flags in a manner that could result in
# different orderings for repeated concretizations of the same
# spec and config
- spec_str = 'libelf %gcc@4.7.2 os=redhat6'
+ spec_str = "libelf %gcc@4.7.2 os=redhat6"
for _ in range(25):
s = Spec(spec_str).concretized()
assert all(
- s.compiler_flags[x] == ['-O0', '-g']
- for x in ('cflags', 'cxxflags', 'fflags')
+ s.compiler_flags[x] == ["-O0", "-g"] for x in ("cflags", "cxxflags", "fflags")
)
def test_combination_of_wildcard_or_none(self):
# Test that using 'none' and another value raises
with pytest.raises(spack.variant.InvalidVariantValueCombinationError):
- Spec('multivalue-variant foo=none,bar')
+ Spec("multivalue-variant foo=none,bar")
# Test that using wildcard and another value raises
with pytest.raises(spack.variant.InvalidVariantValueCombinationError):
- Spec('multivalue-variant foo=*,bar')
+ Spec("multivalue-variant foo=*,bar")
- @pytest.mark.skipif(
- sys.version_info[0] == 2, reason='__wrapped__ requires python 3'
- )
+ @pytest.mark.skipif(sys.version_info[0] == 2, reason="__wrapped__ requires python 3")
def test_errors_in_variant_directive(self):
variant = spack.directives.variant.__wrapped__
class Pkg(object):
- name = 'PKG'
+ name = "PKG"
# We can't use names that are reserved by Spack
- fn = variant('patches')
+ fn = variant("patches")
with pytest.raises(spack.directives.DirectiveError) as exc_info:
fn(Pkg())
assert "The name 'patches' is reserved" in str(exc_info.value)
# We can't have conflicting definitions for arguments
- fn = variant(
- 'foo', values=spack.variant.any_combination_of('fee', 'foom'),
- default='bar'
- )
+ fn = variant("foo", values=spack.variant.any_combination_of("fee", "foom"), default="bar")
with pytest.raises(spack.directives.DirectiveError) as exc_info:
fn(Pkg())
- assert " it is handled by an attribute of the 'values' " \
- "argument" in str(exc_info.value)
+ assert " it is handled by an attribute of the 'values' " "argument" in str(exc_info.value)
# We can't leave None as a default value
- fn = variant('foo', default=None)
+ fn = variant("foo", default=None)
with pytest.raises(spack.directives.DirectiveError) as exc_info:
fn(Pkg())
- assert "either a default was not explicitly set, or 'None' was used"\
- in str(exc_info.value)
+ assert "either a default was not explicitly set, or 'None' was used" in str(exc_info.value)
# We can't use an empty string as a default value
- fn = variant('foo', default='')
+ fn = variant("foo", default="")
with pytest.raises(spack.directives.DirectiveError) as exc_info:
fn(Pkg())
assert "the default cannot be an empty string" in str(exc_info.value)
def test_abstract_spec_prefix_error(self):
- spec = Spec('libelf')
+ spec = Spec("libelf")
with pytest.raises(SpecError):
spec.prefix
def test_forwarding_of_architecture_attributes(self):
- spec = Spec('libelf target=x86_64').concretized()
+ spec = Spec("libelf target=x86_64").concretized()
# Check that we can still access each member through
# the architecture attribute
- assert 'test' in spec.architecture
- assert 'debian' in spec.architecture
- assert 'x86_64' in spec.architecture
+ assert "test" in spec.architecture
+ assert "debian" in spec.architecture
+ assert "x86_64" in spec.architecture
# Check that we forward the platform and os attribute correctly
- assert spec.platform == 'test'
- assert spec.os == 'debian6'
+ assert spec.platform == "test"
+ assert spec.os == "debian6"
# Check that the target is also forwarded correctly and supports
# all the operators we expect
- assert spec.target == 'x86_64'
- assert spec.target.family == 'x86_64'
- assert 'avx512' not in spec.target
- assert spec.target < 'broadwell'
+ assert spec.target == "x86_64"
+ assert spec.target.family == "x86_64"
+ assert "avx512" not in spec.target
+ assert spec.target < "broadwell"
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice(self, transitive):
# Tests the new splice function in Spec using a somewhat simple case
# with a variant with a conditional dependency.
- spec = Spec('splice-t')
- dep = Spec('splice-h+foo')
+ spec = Spec("splice-t")
+ dep = Spec("splice-h+foo")
spec.concretize()
dep.concretize()
# Sanity checking that these are not the same thing.
- assert dep.dag_hash() != spec['splice-h'].dag_hash()
+ assert dep.dag_hash() != spec["splice-h"].dag_hash()
# Do the splice.
out = spec.splice(dep, transitive)
@@ -1017,46 +939,45 @@ class TestSpecSematics(object):
# If the splice worked, then the dag hash of the spliced dep should
# now match the dag hash of the build spec of the dependency from the
# returned spec.
- out_h_build = out['splice-h'].build_spec
+ out_h_build = out["splice-h"].build_spec
assert out_h_build.dag_hash() == dep.dag_hash()
# Transitivity should determine whether the transitive dependency was
# changed.
- expected_z = dep['splice-z'] if transitive else spec['splice-z']
- assert out['splice-z'].dag_hash() == expected_z.dag_hash()
+ expected_z = dep["splice-z"] if transitive else spec["splice-z"]
+ assert out["splice-z"].dag_hash() == expected_z.dag_hash()
# Sanity check build spec of out should be the original spec.
- assert (out['splice-t'].build_spec.dag_hash() ==
- spec['splice-t'].dag_hash())
+ assert out["splice-t"].build_spec.dag_hash() == spec["splice-t"].dag_hash()
# Finally, the spec should know it's been spliced:
assert out.spliced
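As a reading aid for the splice tests, a rough sketch of the workflow they exercise, assuming the mock splice-t/splice-h packages and the test fixtures; the call pattern, not the package names, is the point here:

from spack.spec import Spec

spec = Spec("splice-t").concretized()
dep = Spec("splice-h+foo").concretized()

out = spec.splice(dep, transitive=False)

assert out.spliced                                    # the result records that it was spliced
assert out.dag_hash() != spec.dag_hash()              # splicing changes the DAG hash
assert out.build_spec.dag_hash() == spec.dag_hash()   # ...while preserving build provenance
assert out["splice-h"].build_spec.dag_hash() == dep.dag_hash()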
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice_with_cached_hashes(self, transitive):
- spec = Spec('splice-t')
- dep = Spec('splice-h+foo')
+ spec = Spec("splice-t")
+ dep = Spec("splice-h+foo")
spec.concretize()
dep.concretize()
# monkeypatch hashes so we can test that they are cached
- spec._hash = 'aaaaaa'
- dep._hash = 'bbbbbb'
- spec['splice-h']._hash = 'cccccc'
- spec['splice-z']._hash = 'dddddd'
- dep['splice-z']._hash = 'eeeeee'
+ spec._hash = "aaaaaa"
+ dep._hash = "bbbbbb"
+ spec["splice-h"]._hash = "cccccc"
+ spec["splice-z"]._hash = "dddddd"
+ dep["splice-z"]._hash = "eeeeee"
out = spec.splice(dep, transitive=transitive)
- out_z_expected = (dep if transitive else spec)['splice-z']
+ out_z_expected = (dep if transitive else spec)["splice-z"]
assert out.dag_hash() != spec.dag_hash()
- assert (out['splice-h'].dag_hash() == dep.dag_hash()) == transitive
- assert out['splice-z'].dag_hash() == out_z_expected.dag_hash()
+ assert (out["splice-h"].dag_hash() == dep.dag_hash()) == transitive
+ assert out["splice-z"].dag_hash() == out_z_expected.dag_hash()
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice_input_unchanged(self, transitive):
- spec = Spec('splice-t').concretized()
- dep = Spec('splice-h+foo').concretized()
+ spec = Spec("splice-t").concretized()
+ dep = Spec("splice-h+foo").concretized()
orig_spec_hash = spec.dag_hash()
orig_dep_hash = dep.dag_hash()
spec.splice(dep, transitive)
@@ -1065,31 +986,30 @@ class TestSpecSematics(object):
assert spec.dag_hash() == orig_spec_hash
assert dep.dag_hash() == orig_dep_hash
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice_subsequent(self, transitive):
- spec = Spec('splice-t')
- dep = Spec('splice-h+foo')
+ spec = Spec("splice-t")
+ dep = Spec("splice-h+foo")
spec.concretize()
dep.concretize()
out = spec.splice(dep, transitive)
# Now we attempt a second splice.
- dep = Spec('splice-z+bar')
+ dep = Spec("splice-z+bar")
dep.concretize()
# Transitivity shouldn't matter since Splice Z has no dependencies.
out2 = out.splice(dep, transitive)
assert out2.concrete
- assert out2['splice-z'].dag_hash() != spec['splice-z'].dag_hash()
- assert out2['splice-z'].dag_hash() != out['splice-z'].dag_hash()
- assert (out2['splice-t'].build_spec.dag_hash() ==
- spec['splice-t'].dag_hash())
+ assert out2["splice-z"].dag_hash() != spec["splice-z"].dag_hash()
+ assert out2["splice-z"].dag_hash() != out["splice-z"].dag_hash()
+ assert out2["splice-t"].build_spec.dag_hash() == spec["splice-t"].dag_hash()
assert out2.spliced
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice_dict(self, transitive):
- spec = Spec('splice-t')
- dep = Spec('splice-h+foo')
+ spec = Spec("splice-t")
+ dep = Spec("splice-h+foo")
spec.concretize()
dep.concretize()
out = spec.splice(dep, transitive)
@@ -1098,17 +1018,17 @@ class TestSpecSematics(object):
assert spec.dag_hash() != dep.dag_hash()
assert out.dag_hash() != dep.dag_hash()
assert out.dag_hash() != spec.dag_hash()
- node_list = out.to_dict()['spec']['nodes']
- root_nodes = [n for n in node_list if n['hash'] == out.dag_hash()]
- build_spec_nodes = [n for n in node_list if n['hash'] == spec.dag_hash()]
+ node_list = out.to_dict()["spec"]["nodes"]
+ root_nodes = [n for n in node_list if n["hash"] == out.dag_hash()]
+ build_spec_nodes = [n for n in node_list if n["hash"] == spec.dag_hash()]
assert spec.dag_hash() == out.build_spec.dag_hash()
assert len(root_nodes) == 1
assert len(build_spec_nodes) == 1
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice_dict_roundtrip(self, transitive):
- spec = Spec('splice-t')
- dep = Spec('splice-h+foo')
+ spec = Spec("splice-t")
+ dep = Spec("splice-h+foo")
spec.concretize()
dep.concretize()
out = spec.splice(dep, transitive)
@@ -1120,8 +1040,8 @@ class TestSpecSematics(object):
out_rt_spec = Spec.from_dict(out.to_dict()) # rt is "round trip"
assert out_rt_spec.dag_hash() == out.dag_hash()
out_rt_spec_bld_hash = out_rt_spec.build_spec.dag_hash()
- out_rt_spec_h_bld_hash = out_rt_spec['splice-h'].build_spec.dag_hash()
- out_rt_spec_z_bld_hash = out_rt_spec['splice-z'].build_spec.dag_hash()
+ out_rt_spec_h_bld_hash = out_rt_spec["splice-h"].build_spec.dag_hash()
+ out_rt_spec_z_bld_hash = out_rt_spec["splice-z"].build_spec.dag_hash()
# In any case, the build spec for splice-t (root) should point to the
# original spec, preserving build provenance.
@@ -1130,69 +1050,70 @@ class TestSpecSematics(object):
# The build spec for splice-h should always point to the introduced
# spec, since that is the spec spliced in.
- assert dep['splice-h'].dag_hash() == out_rt_spec_h_bld_hash
+ assert dep["splice-h"].dag_hash() == out_rt_spec_h_bld_hash
# The build spec for splice-z will depend on whether or not the splice
# was transitive.
- expected_z_bld_hash = (dep['splice-z'].dag_hash() if transitive else
- spec['splice-z'].dag_hash())
+ expected_z_bld_hash = (
+ dep["splice-z"].dag_hash() if transitive else spec["splice-z"].dag_hash()
+ )
assert expected_z_bld_hash == out_rt_spec_z_bld_hash
- @pytest.mark.parametrize('spec,constraint,expected_result', [
- ('libelf target=haswell', 'target=broadwell', False),
- ('libelf target=haswell', 'target=haswell', True),
- ('libelf target=haswell', 'target=x86_64:', True),
- ('libelf target=haswell', 'target=:haswell', True),
- ('libelf target=haswell', 'target=icelake,:nocona', False),
- ('libelf target=haswell', 'target=haswell,:nocona', True),
- # Check that a single target is not treated as the start
- # or the end of an open range
- ('libelf target=haswell', 'target=x86_64', False),
- ('libelf target=x86_64', 'target=haswell', False),
- ])
- @pytest.mark.regression('13111')
+ @pytest.mark.parametrize(
+ "spec,constraint,expected_result",
+ [
+ ("libelf target=haswell", "target=broadwell", False),
+ ("libelf target=haswell", "target=haswell", True),
+ ("libelf target=haswell", "target=x86_64:", True),
+ ("libelf target=haswell", "target=:haswell", True),
+ ("libelf target=haswell", "target=icelake,:nocona", False),
+ ("libelf target=haswell", "target=haswell,:nocona", True),
+ # Check that a single target is not treated as the start
+ # or the end of an open range
+ ("libelf target=haswell", "target=x86_64", False),
+ ("libelf target=x86_64", "target=haswell", False),
+ ],
+ )
+ @pytest.mark.regression("13111")
def test_target_constraints(self, spec, constraint, expected_result):
s = Spec(spec)
assert s.satisfies(constraint) is expected_result
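A brief sketch of the target-range semantics the parametrized cases above encode (microarchitecture names come from archspec; ':' marks an open range, much like a version range):

from spack.spec import Spec

s = Spec("libelf target=haswell")

assert s.satisfies("target=x86_64:")     # open range starting at the x86_64 family
assert s.satisfies("target=:haswell")    # open range ending at haswell
assert not s.satisfies("target=x86_64")  # a bare target is an exact value, not a range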
- @pytest.mark.regression('13124')
+ @pytest.mark.regression("13124")
def test_error_message_unknown_variant(self):
- s = Spec('mpileaks +unknown')
- with pytest.raises(UnknownVariantError, match=r'package has no such'):
+ s = Spec("mpileaks +unknown")
+ with pytest.raises(UnknownVariantError, match=r"package has no such"):
s.concretize()
- @pytest.mark.regression('18527')
+ @pytest.mark.regression("18527")
def test_satisfies_dependencies_ordered(self):
- d = Spec('zmpi ^fake')
- s = Spec('mpileaks')
+ d = Spec("zmpi ^fake")
+ s = Spec("mpileaks")
s._add_dependency(d, ())
- assert s.satisfies('mpileaks ^zmpi ^fake', strict=True)
+ assert s.satisfies("mpileaks ^zmpi ^fake", strict=True)
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice_swap_names(self, transitive):
- spec = Spec('splice-t')
- dep = Spec('splice-a+foo')
+ spec = Spec("splice-t")
+ dep = Spec("splice-a+foo")
spec.concretize()
dep.concretize()
out = spec.splice(dep, transitive)
assert dep.name in out
- assert transitive == ('+foo' in out['splice-z'])
+ assert transitive == ("+foo" in out["splice-z"])
- @pytest.mark.parametrize('transitive', [True, False])
+ @pytest.mark.parametrize("transitive", [True, False])
def test_splice_swap_names_mismatch_virtuals(self, transitive):
- spec = Spec('splice-t')
- dep = Spec('splice-vh+foo')
+ spec = Spec("splice-t")
+ dep = Spec("splice-vh+foo")
spec.concretize()
dep.concretize()
- with pytest.raises(spack.spec.SpliceError,
- match='will not provide the same virtuals.'):
+ with pytest.raises(spack.spec.SpliceError, match="will not provide the same virtuals."):
spec.splice(dep, transitive)
-@pytest.mark.regression('3887')
-@pytest.mark.parametrize('spec_str', [
- 'git', 'hdf5', 'py-flake8'
-])
+@pytest.mark.regression("3887")
+@pytest.mark.parametrize("spec_str", ["git", "hdf5", "py-flake8"])
def test_is_extension_after_round_trip_to_dict(config, spec_str):
# x is constructed directly from string, y from a
# round-trip to dict representation
@@ -1206,52 +1127,50 @@ def test_is_extension_after_round_trip_to_dict(config, spec_str):
def test_malformed_spec_dict():
- with pytest.raises(SpecError, match='malformed'):
- Spec.from_dict({'spec': {'nodes': [{'dependencies': {'name': 'foo'}}]}})
+ with pytest.raises(SpecError, match="malformed"):
+ Spec.from_dict({"spec": {"nodes": [{"dependencies": {"name": "foo"}}]}})
def test_spec_dict_hashless_dep():
with pytest.raises(SpecError, match="Couldn't parse"):
Spec.from_dict(
{
- 'spec': {
- 'nodes': [
- {
- 'name': 'foo',
- 'hash': 'thehash',
- 'dependencies': [
- {
- 'name': 'bar'
- }
- ]
- }
+ "spec": {
+ "nodes": [
+ {"name": "foo", "hash": "thehash", "dependencies": [{"name": "bar"}]}
]
}
}
)
-@pytest.mark.parametrize('specs,expected', [
- # Anonymous specs without dependencies
- (['+baz', '+bar'], '+baz+bar'),
- (['@2.0:', '@:5.1', '+bar'], '@2.0:5.1 +bar'),
- # Anonymous specs with dependencies
- (['^mpich@3.2', '^mpich@:4.0+foo'], '^mpich@3.2 +foo'),
- # Mix a real package with a virtual one. This test
- # should fail if we start using the repository
- (['^mpich@3.2', '^mpi+foo'], '^mpich@3.2 ^mpi+foo'),
-])
+@pytest.mark.parametrize(
+ "specs,expected",
+ [
+ # Anonymous specs without dependencies
+ (["+baz", "+bar"], "+baz+bar"),
+ (["@2.0:", "@:5.1", "+bar"], "@2.0:5.1 +bar"),
+ # Anonymous specs with dependencies
+ (["^mpich@3.2", "^mpich@:4.0+foo"], "^mpich@3.2 +foo"),
+ # Mix a real package with a virtual one. This test
+ # should fail if we start using the repository
+ (["^mpich@3.2", "^mpi+foo"], "^mpich@3.2 ^mpi+foo"),
+ ],
+)
def test_merge_abstract_anonymous_specs(specs, expected):
specs = [Spec(x) for x in specs]
result = spack.spec.merge_abstract_anonymous_specs(*specs)
assert result == Spec(expected)
-@pytest.mark.parametrize('anonymous,named,expected', [
- ('+plumed', 'gromacs', 'gromacs+plumed'),
- ('+plumed ^plumed%gcc', 'gromacs', 'gromacs+plumed ^plumed%gcc'),
- ('+plumed', 'builtin.gromacs', 'builtin.gromacs+plumed')
-])
+@pytest.mark.parametrize(
+ "anonymous,named,expected",
+ [
+ ("+plumed", "gromacs", "gromacs+plumed"),
+ ("+plumed ^plumed%gcc", "gromacs", "gromacs+plumed ^plumed%gcc"),
+ ("+plumed", "builtin.gromacs", "builtin.gromacs+plumed"),
+ ],
+)
def test_merge_anonymous_spec_with_named_spec(anonymous, named, expected):
s = Spec(anonymous)
changed = s.constrain(named)
@@ -1276,13 +1195,11 @@ def test_spec_installed(install_mockery, database):
assert not spec.installed
-@pytest.mark.regression('30678')
+@pytest.mark.regression("30678")
def test_call_dag_hash_on_old_dag_hash_spec(mock_packages, config):
# create a concrete spec
a = Spec("a").concretized()
- dag_hashes = {
- spec.name: spec.dag_hash() for spec in a.traverse()
- }
+ dag_hashes = {spec.name: spec.dag_hash() for spec in a.traverse()}
# make it look like an old DAG hash spec with no package hash on the spec.
for spec in a.traverse():
@@ -1292,11 +1209,11 @@ def test_call_dag_hash_on_old_dag_hash_spec(mock_packages, config):
for spec in a.traverse():
assert dag_hashes[spec.name] == spec.dag_hash()
- with pytest.raises(ValueError, match='Cannot call package_hash()'):
+ with pytest.raises(ValueError, match="Cannot call package_hash()"):
spec.package_hash()
-@pytest.mark.regression('30861')
+@pytest.mark.regression("30861")
def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
# create an "old" spec with no package hash
bottom = Spec("dt-diamond-bottom").concretized()
@@ -1320,9 +1237,9 @@ def test_concretize_partial_old_dag_hash_spec(mock_packages, config):
assert spec["dt-diamond-bottom"]._hash == dummy_hash
# make sure package hash is NOT recomputed
- assert not getattr(spec["dt-diamond-bottom"], '_package_hash', None)
+ assert not getattr(spec["dt-diamond-bottom"], "_package_hash", None)
def test_unsupported_compiler():
with pytest.raises(UnsupportedCompilerError):
- Spec('gcc%fake-compiler').validate_or_raise()
+ Spec("gcc%fake-compiler").validate_or_raise()
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index 815a46847a..956593ceb8 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -32,57 +32,61 @@ from spack.spec import (
from spack.variant import DuplicateVariantError
# Sample output for a complex lexing.
-complex_lex = [Token(sp.ID, 'mvapich_foo'),
- Token(sp.DEP),
- Token(sp.ID, '_openmpi'),
- Token(sp.AT),
- Token(sp.ID, '1.2'),
- Token(sp.COLON),
- Token(sp.ID, '1.4'),
- Token(sp.COMMA),
- Token(sp.ID, '1.6'),
- Token(sp.PCT),
- Token(sp.ID, 'intel'),
- Token(sp.AT),
- Token(sp.ID, '12.1'),
- Token(sp.COLON),
- Token(sp.ID, '12.6'),
- Token(sp.ON),
- Token(sp.ID, 'debug'),
- Token(sp.OFF),
- Token(sp.ID, 'qt_4'),
- Token(sp.DEP),
- Token(sp.ID, 'stackwalker'),
- Token(sp.AT),
- Token(sp.ID, '8.1_1e')]
+complex_lex = [
+ Token(sp.ID, "mvapich_foo"),
+ Token(sp.DEP),
+ Token(sp.ID, "_openmpi"),
+ Token(sp.AT),
+ Token(sp.ID, "1.2"),
+ Token(sp.COLON),
+ Token(sp.ID, "1.4"),
+ Token(sp.COMMA),
+ Token(sp.ID, "1.6"),
+ Token(sp.PCT),
+ Token(sp.ID, "intel"),
+ Token(sp.AT),
+ Token(sp.ID, "12.1"),
+ Token(sp.COLON),
+ Token(sp.ID, "12.6"),
+ Token(sp.ON),
+ Token(sp.ID, "debug"),
+ Token(sp.OFF),
+ Token(sp.ID, "qt_4"),
+ Token(sp.DEP),
+ Token(sp.ID, "stackwalker"),
+ Token(sp.AT),
+ Token(sp.ID, "8.1_1e"),
+]
# Another sample lexer output with a kv pair.
-kv_lex = [Token(sp.ID, 'mvapich_foo'),
- Token(sp.ID, 'debug'),
- Token(sp.EQ),
- Token(sp.VAL, '4'),
- Token(sp.DEP),
- Token(sp.ID, '_openmpi'),
- Token(sp.AT),
- Token(sp.ID, '1.2'),
- Token(sp.COLON),
- Token(sp.ID, '1.4'),
- Token(sp.COMMA),
- Token(sp.ID, '1.6'),
- Token(sp.PCT),
- Token(sp.ID, 'intel'),
- Token(sp.AT),
- Token(sp.ID, '12.1'),
- Token(sp.COLON),
- Token(sp.ID, '12.6'),
- Token(sp.ON),
- Token(sp.ID, 'debug'),
- Token(sp.OFF),
- Token(sp.ID, 'qt_4'),
- Token(sp.DEP),
- Token(sp.ID, 'stackwalker'),
- Token(sp.AT),
- Token(sp.ID, '8.1_1e')]
+kv_lex = [
+ Token(sp.ID, "mvapich_foo"),
+ Token(sp.ID, "debug"),
+ Token(sp.EQ),
+ Token(sp.VAL, "4"),
+ Token(sp.DEP),
+ Token(sp.ID, "_openmpi"),
+ Token(sp.AT),
+ Token(sp.ID, "1.2"),
+ Token(sp.COLON),
+ Token(sp.ID, "1.4"),
+ Token(sp.COMMA),
+ Token(sp.ID, "1.6"),
+ Token(sp.PCT),
+ Token(sp.ID, "intel"),
+ Token(sp.AT),
+ Token(sp.ID, "12.1"),
+ Token(sp.COLON),
+ Token(sp.ID, "12.6"),
+ Token(sp.ON),
+ Token(sp.ID, "debug"),
+ Token(sp.OFF),
+ Token(sp.ID, "qt_4"),
+ Token(sp.DEP),
+ Token(sp.ID, "stackwalker"),
+ Token(sp.AT),
+ Token(sp.ID, "8.1_1e"),
+]
class TestSpecSyntax(object):
@@ -93,21 +97,21 @@ class TestSpecSyntax(object):
def check_parse(self, expected, spec=None):
"""Assert that the provided spec is able to be parsed.
- If this is called with one argument, it assumes that the
- string is canonical (i.e., no spaces and ~ instead of - for
- variants) and that it will convert back to the string it came
- from.
+ If this is called with one argument, it assumes that the
+ string is canonical (i.e., no spaces and ~ instead of - for
+ variants) and that it will convert back to the string it came
+ from.
- If this is called with two arguments, the first argument is
- the expected canonical form and the second is a non-canonical
- input to be parsed.
+ If this is called with two arguments, the first argument is
+ the expected canonical form and the second is a non-canonical
+ input to be parsed.
"""
if spec is None:
spec = expected
output = sp.parse(spec)
- parsed = (" ".join(str(spec) for spec in output))
+ parsed = " ".join(str(spec) for spec in output)
assert expected == parsed
def check_lex(self, tokens, spec):
@@ -141,23 +145,22 @@ class TestSpecSyntax(object):
self.check_parse("^zlib")
self.check_parse("+foo")
self.check_parse("arch=test-None-None", "platform=test")
- self.check_parse('@2.7:')
+ self.check_parse("@2.7:")
def test_anonymous_specs_with_multiple_parts(self):
# Parse anonymous spec with multiple tokens
- self.check_parse('@4.2: languages=go', 'languages=go @4.2:')
- self.check_parse('@4.2: languages=go')
+ self.check_parse("@4.2: languages=go", "languages=go @4.2:")
+ self.check_parse("@4.2: languages=go")
def test_simple_dependence(self):
self.check_parse("openmpi ^hwloc")
self.check_parse("openmpi ^hwloc", "openmpi^hwloc")
self.check_parse("openmpi ^hwloc ^libunwind")
- self.check_parse("openmpi ^hwloc ^libunwind",
- "openmpi^hwloc^libunwind")
+ self.check_parse("openmpi ^hwloc ^libunwind", "openmpi^hwloc^libunwind")
def test_version_after_compiler(self):
- self.check_parse('foo@2.0%bar@1.0', 'foo %bar@1.0 @2.0')
+ self.check_parse("foo@2.0%bar@1.0", "foo %bar@1.0 @2.0")
def test_dependencies_with_versions(self):
self.check_parse("openmpi ^hwloc@1.2e6")
@@ -170,120 +173,103 @@ class TestSpecSyntax(object):
def test_multiple_specs_after_kv(self):
self.check_parse('mvapich cppflags="-O3 -fPIC" emacs')
- self.check_parse('mvapich cflags="-O3" emacs',
- 'mvapich cflags=-O3 emacs')
+ self.check_parse('mvapich cflags="-O3" emacs', "mvapich cflags=-O3 emacs")
def test_multiple_specs_long_second(self):
- self.check_parse('mvapich emacs@1.1.1%intel cflags="-O3"',
- 'mvapich emacs @1.1.1 %intel cflags=-O3')
+ self.check_parse(
+ 'mvapich emacs@1.1.1%intel cflags="-O3"', "mvapich emacs @1.1.1 %intel cflags=-O3"
+ )
self.check_parse('mvapich cflags="-O3 -fPIC" emacs ^ncurses%intel')
- self.check_parse('mvapich cflags="-O3 -fPIC" emacs ^ncurses%intel',
- 'mvapich cflags="-O3 -fPIC" emacs^ncurses%intel')
+ self.check_parse(
+ 'mvapich cflags="-O3 -fPIC" emacs ^ncurses%intel',
+ 'mvapich cflags="-O3 -fPIC" emacs^ncurses%intel',
+ )
def test_full_specs(self):
self.check_parse(
- "mvapich_foo"
- " ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4"
- " ^stackwalker@8.1_1e")
+ "mvapich_foo" " ^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4" " ^stackwalker@8.1_1e"
+ )
self.check_parse(
- "mvapich_foo"
- " ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2"
- " ^stackwalker@8.1_1e")
+ "mvapich_foo" " ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2" " ^stackwalker@8.1_1e"
+ )
self.check_parse(
- 'mvapich_foo'
+ "mvapich_foo"
' ^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3" +debug~qt_4'
- ' ^stackwalker@8.1_1e')
+ " ^stackwalker@8.1_1e"
+ )
self.check_parse(
"mvapich_foo"
" ^_openmpi@1.2:1.4,1.6%intel@12.1~qt_4 debug=2"
- " ^stackwalker@8.1_1e arch=test-redhat6-x86")
+ " ^stackwalker@8.1_1e arch=test-redhat6-x86"
+ )
def test_yaml_specs(self):
- self.check_parse(
- "yaml-cpp@0.1.8%intel@12.1"
- " ^boost@3.1.4")
+ self.check_parse("yaml-cpp@0.1.8%intel@12.1" " ^boost@3.1.4")
tempspec = r"builtin.yaml-cpp%gcc"
- self.check_parse(
- tempspec.strip("builtin."),
- spec=tempspec)
+ self.check_parse(tempspec.strip("builtin."), spec=tempspec)
tempspec = r"testrepo.yaml-cpp%gcc"
- self.check_parse(
- tempspec.strip("testrepo."),
- spec=tempspec)
+ self.check_parse(tempspec.strip("testrepo."), spec=tempspec)
tempspec = r"builtin.yaml-cpp@0.1.8%gcc"
- self.check_parse(
- tempspec.strip("builtin."),
- spec=tempspec)
+ self.check_parse(tempspec.strip("builtin."), spec=tempspec)
tempspec = r"builtin.yaml-cpp@0.1.8%gcc@7.2.0"
- self.check_parse(
- tempspec.strip("builtin."),
- spec=tempspec)
- tempspec = r"builtin.yaml-cpp@0.1.8%gcc@7.2.0" \
- r" ^boost@3.1.4"
- self.check_parse(
- tempspec.strip("builtin."),
- spec=tempspec)
+ self.check_parse(tempspec.strip("builtin."), spec=tempspec)
+ tempspec = r"builtin.yaml-cpp@0.1.8%gcc@7.2.0" r" ^boost@3.1.4"
+ self.check_parse(tempspec.strip("builtin."), spec=tempspec)
def test_canonicalize(self):
self.check_parse(
"mvapich_foo"
" ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
" ^stackwalker@8.1_1e",
-
"mvapich_foo "
"^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 "
- "^stackwalker@8.1_1e")
+ "^stackwalker@8.1_1e",
+ )
self.check_parse(
"mvapich_foo"
" ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
" ^stackwalker@8.1_1e",
-
"mvapich_foo "
"^stackwalker@8.1_1e "
- "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug")
+ "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug",
+ )
self.check_parse(
- "x ^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f",
- "x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1")
+ "x ^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f", "x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1"
+ )
self.check_parse(
- "x arch=test-redhat6-None"
- " ^y arch=test-None-core2"
- " ^z arch=linux-None-None",
-
- "x os=fe "
- "^y target=be "
- "^z platform=linux")
+ "x arch=test-redhat6-None" " ^y arch=test-None-core2" " ^z arch=linux-None-None",
+ "x os=fe " "^y target=be " "^z platform=linux",
+ )
self.check_parse(
- "x arch=test-debian6-core2"
- " ^y arch=test-debian6-core2",
-
- "x os=default_os target=default_target"
- " ^y os=default_os target=default_target")
+ "x arch=test-debian6-core2" " ^y arch=test-debian6-core2",
+ "x os=default_os target=default_target" " ^y os=default_os target=default_target",
+ )
self.check_parse("x ^y", "x@: ^y@:")
def test_parse_errors(self):
- errors = ['x@@1.2', 'x ^y@@1.2', 'x@1.2::', 'x::']
+ errors = ["x@@1.2", "x ^y@@1.2", "x@1.2::", "x::"]
self._check_raises(SpecParseError, errors)
def _check_hash_parse(self, spec):
"""Check several ways to specify a spec by hash."""
# full hash
- self.check_parse(str(spec), '/' + spec.dag_hash())
+ self.check_parse(str(spec), "/" + spec.dag_hash())
# partial hash
- self.check_parse(str(spec), '/ ' + spec.dag_hash()[:5])
+ self.check_parse(str(spec), "/ " + spec.dag_hash()[:5])
# name + hash
- self.check_parse(str(spec), spec.name + '/' + spec.dag_hash())
+ self.check_parse(str(spec), spec.name + "/" + spec.dag_hash())
# name + version + space + partial hash
self.check_parse(
- str(spec), spec.name + '@' + str(spec.version) +
- ' /' + spec.dag_hash()[:6])
+ str(spec), spec.name + "@" + str(spec.version) + " /" + spec.dag_hash()[:6]
+ )
@pytest.mark.db
def test_spec_by_hash(self, database):
@@ -295,100 +281,103 @@ class TestSpecSyntax(object):
@pytest.mark.db
def test_dep_spec_by_hash(self, database):
- mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
- zmpi = database.query_one('zmpi')
- fake = database.query_one('fake')
+ mpileaks_zmpi = database.query_one("mpileaks ^zmpi")
+ zmpi = database.query_one("zmpi")
+ fake = database.query_one("fake")
- assert 'fake' in mpileaks_zmpi
- assert 'zmpi' in mpileaks_zmpi
+ assert "fake" in mpileaks_zmpi
+ assert "zmpi" in mpileaks_zmpi
- mpileaks_hash_fake = sp.Spec('mpileaks ^/' + fake.dag_hash())
- assert 'fake' in mpileaks_hash_fake
- assert mpileaks_hash_fake['fake'] == fake
+ mpileaks_hash_fake = sp.Spec("mpileaks ^/" + fake.dag_hash())
+ assert "fake" in mpileaks_hash_fake
+ assert mpileaks_hash_fake["fake"] == fake
mpileaks_hash_zmpi = sp.Spec(
- 'mpileaks %' + str(mpileaks_zmpi.compiler) +
- ' ^ / ' + zmpi.dag_hash())
- assert 'zmpi' in mpileaks_hash_zmpi
- assert mpileaks_hash_zmpi['zmpi'] == zmpi
+ "mpileaks %" + str(mpileaks_zmpi.compiler) + " ^ / " + zmpi.dag_hash()
+ )
+ assert "zmpi" in mpileaks_hash_zmpi
+ assert mpileaks_hash_zmpi["zmpi"] == zmpi
assert mpileaks_hash_zmpi.compiler == mpileaks_zmpi.compiler
mpileaks_hash_fake_and_zmpi = sp.Spec(
- 'mpileaks ^/' + fake.dag_hash()[:4] + '^ / ' + zmpi.dag_hash()[:5])
- assert 'zmpi' in mpileaks_hash_fake_and_zmpi
- assert mpileaks_hash_fake_and_zmpi['zmpi'] == zmpi
+ "mpileaks ^/" + fake.dag_hash()[:4] + "^ / " + zmpi.dag_hash()[:5]
+ )
+ assert "zmpi" in mpileaks_hash_fake_and_zmpi
+ assert mpileaks_hash_fake_and_zmpi["zmpi"] == zmpi
- assert 'fake' in mpileaks_hash_fake_and_zmpi
- assert mpileaks_hash_fake_and_zmpi['fake'] == fake
+ assert "fake" in mpileaks_hash_fake_and_zmpi
+ assert mpileaks_hash_fake_and_zmpi["fake"] == fake
@pytest.mark.db
def test_multiple_specs_with_hash(self, database):
- mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
- callpath_mpich2 = database.query_one('callpath ^mpich2')
+ mpileaks_zmpi = database.query_one("mpileaks ^zmpi")
+ callpath_mpich2 = database.query_one("callpath ^mpich2")
# name + hash + separate hash
- specs = sp.parse('mpileaks /' + mpileaks_zmpi.dag_hash() +
- '/' + callpath_mpich2.dag_hash())
+ specs = sp.parse(
+ "mpileaks /" + mpileaks_zmpi.dag_hash() + "/" + callpath_mpich2.dag_hash()
+ )
assert len(specs) == 2
# 2 separate hashes
- specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
- '/' + callpath_mpich2.dag_hash())
+ specs = sp.parse("/" + mpileaks_zmpi.dag_hash() + "/" + callpath_mpich2.dag_hash())
assert len(specs) == 2
# 2 separate hashes + name
- specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
- '/' + callpath_mpich2.dag_hash() +
- ' callpath')
+ specs = sp.parse(
+ "/" + mpileaks_zmpi.dag_hash() + "/" + callpath_mpich2.dag_hash() + " callpath"
+ )
assert len(specs) == 3
# hash + 2 names
- specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
- ' callpath' +
- ' callpath')
+ specs = sp.parse("/" + mpileaks_zmpi.dag_hash() + " callpath" + " callpath")
assert len(specs) == 3
# hash + name + hash
- specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
- ' callpath' +
- ' / ' + callpath_mpich2.dag_hash())
+ specs = sp.parse(
+ "/" + mpileaks_zmpi.dag_hash() + " callpath" + " / " + callpath_mpich2.dag_hash()
+ )
assert len(specs) == 2
@pytest.mark.db
def test_ambiguous_hash(self, mutable_database):
- x1 = Spec('a')
+ x1 = Spec("a")
x1.concretize()
- x1._hash = 'xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy'
- x2 = Spec('a')
+ x1._hash = "xyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"
+ x2 = Spec("a")
x2.concretize()
- x2._hash = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
+ x2._hash = "xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
mutable_database.add(x1, spack.store.layout)
mutable_database.add(x2, spack.store.layout)
# ambiguity in first hash character
- self._check_raises(AmbiguousHashError, ['/x'])
+ self._check_raises(AmbiguousHashError, ["/x"])
# ambiguity in first hash character AND spec name
- self._check_raises(AmbiguousHashError, ['a/x'])
+ self._check_raises(AmbiguousHashError, ["a/x"])
@pytest.mark.db
def test_invalid_hash(self, database):
- mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
- zmpi = database.query_one('zmpi')
+ mpileaks_zmpi = database.query_one("mpileaks ^zmpi")
+ zmpi = database.query_one("zmpi")
- mpileaks_mpich = database.query_one('mpileaks ^mpich')
- mpich = database.query_one('mpich')
+ mpileaks_mpich = database.query_one("mpileaks ^mpich")
+ mpich = database.query_one("mpich")
# name + incompatible hash
- self._check_raises(InvalidHashError, [
- 'zmpi /' + mpich.dag_hash(),
- 'mpich /' + zmpi.dag_hash()])
+ self._check_raises(
+ InvalidHashError, ["zmpi /" + mpich.dag_hash(), "mpich /" + zmpi.dag_hash()]
+ )
# name + dep + incompatible hash
- self._check_raises(InvalidHashError, [
- 'mpileaks ^mpich /' + mpileaks_zmpi.dag_hash(),
- 'mpileaks ^zmpi /' + mpileaks_mpich.dag_hash()])
+ self._check_raises(
+ InvalidHashError,
+ [
+ "mpileaks ^mpich /" + mpileaks_zmpi.dag_hash(),
+ "mpileaks ^zmpi /" + mpileaks_mpich.dag_hash(),
+ ],
+ )
@pytest.mark.db
def test_nonexistent_hash(self, database):
@@ -396,13 +385,11 @@ class TestSpecSyntax(object):
specs = database.query()
# This hash shouldn't be in the test DB. What are the odds :)
- no_such_hash = 'aaaaaaaaaaaaaaa'
+ no_such_hash = "aaaaaaaaaaaaaaa"
hashes = [s._hash for s in specs]
- assert no_such_hash not in [h[:len(no_such_hash)] for h in hashes]
+ assert no_such_hash not in [h[: len(no_such_hash)] for h in hashes]
- self._check_raises(NoSuchHashError, [
- '/' + no_such_hash,
- 'mpileaks /' + no_such_hash])
+ self._check_raises(NoSuchHashError, ["/" + no_such_hash, "mpileaks /" + no_such_hash])
@pytest.mark.db
def test_redundant_spec(self, database):
@@ -412,45 +399,42 @@ class TestSpecSyntax(object):
specs only raise errors if constraints cause a contradiction?
"""
- mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
- callpath_zmpi = database.query_one('callpath ^zmpi')
- dyninst = database.query_one('dyninst')
+ mpileaks_zmpi = database.query_one("mpileaks ^zmpi")
+ callpath_zmpi = database.query_one("callpath ^zmpi")
+ dyninst = database.query_one("dyninst")
- mpileaks_mpich2 = database.query_one('mpileaks ^mpich2')
+ mpileaks_mpich2 = database.query_one("mpileaks ^mpich2")
redundant_specs = [
# redundant compiler
- '/' + mpileaks_zmpi.dag_hash() + '%' + str(mpileaks_zmpi.compiler),
-
+ "/" + mpileaks_zmpi.dag_hash() + "%" + str(mpileaks_zmpi.compiler),
# redundant version
- 'mpileaks/' + mpileaks_mpich2.dag_hash() +
- '@' + str(mpileaks_mpich2.version),
-
+ "mpileaks/" + mpileaks_mpich2.dag_hash() + "@" + str(mpileaks_mpich2.version),
# redundant dependency
- 'callpath /' + callpath_zmpi.dag_hash() + '^ libelf',
-
+ "callpath /" + callpath_zmpi.dag_hash() + "^ libelf",
# redundant flags
- '/' + dyninst.dag_hash() + ' cflags="-O3 -fPIC"']
+ "/" + dyninst.dag_hash() + ' cflags="-O3 -fPIC"',
+ ]
self._check_raises(RedundantSpecError, redundant_specs)
def test_duplicate_variant(self):
duplicates = [
- 'x@1.2+debug+debug',
- 'x ^y@1.2+debug debug=true',
- 'x ^y@1.2 debug=false debug=true',
- 'x ^y@1.2 debug=false ~debug'
+ "x@1.2+debug+debug",
+ "x ^y@1.2+debug debug=true",
+ "x ^y@1.2 debug=false debug=true",
+ "x ^y@1.2 debug=false ~debug",
]
self._check_raises(DuplicateVariantError, duplicates)
def test_multiple_versions(self):
multiples = [
- 'x@1.2@2.3',
- 'x@1.2:2.3@1.4',
- 'x@1.2@2.3:2.4',
- 'x@1.2@2.3,2.4',
- 'x@1.2 +foo~bar @2.3',
- 'x@1.2%y@1.2@2.3:2.4',
+ "x@1.2@2.3",
+ "x@1.2:2.3@1.4",
+ "x@1.2@2.3:2.4",
+ "x@1.2@2.3,2.4",
+ "x@1.2 +foo~bar @2.3",
+ "x@1.2%y@1.2@2.3:2.4",
]
self._check_raises(MultipleVersionError, multiples)
@@ -464,7 +448,7 @@ class TestSpecSyntax(object):
"x%gcc%intel",
"x ^y%intel%intel",
"x ^y%intel%gcc",
- "x ^y%gcc%intel"
+ "x ^y%gcc%intel",
]
self._check_raises(DuplicateCompilerSpecError, duplicates)
@@ -474,7 +458,7 @@ class TestSpecSyntax(object):
"x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le",
"x arch=linux-rhel7-ppc64le arch=linux-rhel7-x86_64",
"y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64",
- "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le"
+ "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le",
]
self._check_raises(DuplicateArchitectureError, duplicates)
@@ -486,18 +470,18 @@ class TestSpecSyntax(object):
"x target=fe target=be",
"x platform=test platform=test",
"x os=fe platform=test target=fe os=fe",
- "x target=be platform=test os=be os=fe"
+ "x target=be platform=test os=be os=fe",
]
self._check_raises(DuplicateArchitectureError, duplicates)
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_parse_yaml_simple(self, mock_packages, tmpdir):
- s = Spec('libdwarf')
+ s = Spec("libdwarf")
s.concretize()
- specfile = tmpdir.join('libdwarf.yaml')
+ specfile = tmpdir.join("libdwarf.yaml")
- with specfile.open('w') as f:
+ with specfile.open("w") as f:
f.write(s.to_yaml(hash=ht.dag_hash))
# Check an absolute path to spec.yaml by itself:
@@ -507,20 +491,20 @@ class TestSpecSyntax(object):
# Check absolute path to spec.yaml mixed with a clispec, e.g.:
# "spack spec mvapich_foo /path/to/libdwarf.yaml"
- specs = sp.parse('mvapich_foo {0}'.format(specfile.strpath))
+ specs = sp.parse("mvapich_foo {0}".format(specfile.strpath))
assert len(specs) == 2
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_parse_filename_missing_slash_as_spec(self, mock_packages, tmpdir):
"""Ensure that libelf.yaml parses as a spec, NOT a file."""
- s = Spec('libelf')
+ s = Spec("libelf")
s.concretize()
- specfile = tmpdir.join('libelf.yaml')
+ specfile = tmpdir.join("libelf.yaml")
# write the file to the current directory to make sure it exists,
# and that we still do not parse the spec as a file.
- with specfile.open('w') as f:
+ with specfile.open("w") as f:
f.write(s.to_yaml(hash=ht.dag_hash))
# Check the spec `libelf.yaml` in the working directory, which
@@ -540,42 +524,41 @@ class TestSpecSyntax(object):
with pytest.raises(spack.repo.UnknownPackageError) as exc_info:
spec.concretize()
assert exc_info.value.long_message
- assert ("Did you mean to specify a filename with './libelf.yaml'?"
- in exc_info.value.long_message)
+ assert (
+ "Did you mean to specify a filename with './libelf.yaml'?"
+ in exc_info.value.long_message
+ )
# make sure that only happens when the spec ends in yaml
with pytest.raises(spack.repo.UnknownPackageError) as exc_info:
- Spec('builtin.mock.doesnotexist').concretize()
- assert (
- not exc_info.value.long_message or (
- "Did you mean to specify a filename with" not in
- exc_info.value.long_message
- )
+ Spec("builtin.mock.doesnotexist").concretize()
+ assert not exc_info.value.long_message or (
+ "Did you mean to specify a filename with" not in exc_info.value.long_message
)
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_parse_yaml_dependency(self, mock_packages, tmpdir):
- s = Spec('libdwarf')
+ s = Spec("libdwarf")
s.concretize()
- specfile = tmpdir.join('libelf.yaml')
+ specfile = tmpdir.join("libelf.yaml")
- with specfile.open('w') as f:
- f.write(s['libelf'].to_yaml(hash=ht.dag_hash))
+ with specfile.open("w") as f:
+ f.write(s["libelf"].to_yaml(hash=ht.dag_hash))
# Make sure we can use yaml path as dependency, e.g.:
# "spack spec libdwarf ^ /path/to/libelf.yaml"
- specs = sp.parse('libdwarf ^ {0}'.format(specfile.strpath))
+ specs = sp.parse("libdwarf ^ {0}".format(specfile.strpath))
assert len(specs) == 1
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_parse_yaml_relative_paths(self, mock_packages, tmpdir):
- s = Spec('libdwarf')
+ s = Spec("libdwarf")
s.concretize()
- specfile = tmpdir.join('libdwarf.yaml')
+ specfile = tmpdir.join("libdwarf.yaml")
- with specfile.open('w') as f:
+ with specfile.open("w") as f:
f.write(s.to_yaml(hash=ht.dag_hash))
file_name = specfile.basename
@@ -586,52 +569,51 @@ class TestSpecSyntax(object):
# Test for command like: "spack spec libelf.yaml"
# This should parse a single spec, but should not concretize.
# See test_parse_filename_missing_slash_as_spec()
- specs = sp.parse('{0}'.format(file_name))
+ specs = sp.parse("{0}".format(file_name))
assert len(specs) == 1
# Make sure this also works: "spack spec ./libelf.yaml"
- specs = sp.parse('./{0}'.format(file_name))
+ specs = sp.parse("./{0}".format(file_name))
assert len(specs) == 1
# Should also be accepted: "spack spec ../<cur-dir>/libelf.yaml"
- specs = sp.parse('../{0}/{1}'.format(parent_dir, file_name))
+ specs = sp.parse("../{0}/{1}".format(parent_dir, file_name))
assert len(specs) == 1
# Should also handle mixed clispecs and relative paths, e.g.:
# "spack spec mvapich_foo ../<cur-dir>/libelf.yaml"
- specs = sp.parse('mvapich_foo ../{0}/{1}'.format(
- parent_dir, file_name))
+ specs = sp.parse("mvapich_foo ../{0}/{1}".format(parent_dir, file_name))
assert len(specs) == 2
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_parse_yaml_relative_subdir_path(self, mock_packages, tmpdir):
- s = Spec('libdwarf')
+ s = Spec("libdwarf")
s.concretize()
- specfile = tmpdir.mkdir('subdir').join('libdwarf.yaml')
+ specfile = tmpdir.mkdir("subdir").join("libdwarf.yaml")
- with specfile.open('w') as f:
+ with specfile.open("w") as f:
f.write(s.to_yaml(hash=ht.dag_hash))
file_name = specfile.basename
# Relative path to specfile
with tmpdir.as_cwd():
- assert os.path.exists('subdir/{0}'.format(file_name))
+ assert os.path.exists("subdir/{0}".format(file_name))
# Test for command like: "spack spec libelf.yaml"
- specs = sp.parse('subdir/{0}'.format(file_name))
+ specs = sp.parse("subdir/{0}".format(file_name))
assert len(specs) == 1
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_parse_yaml_dependency_relative_paths(self, mock_packages, tmpdir):
- s = Spec('libdwarf')
+ s = Spec("libdwarf")
s.concretize()
- specfile = tmpdir.join('libelf.yaml')
+ specfile = tmpdir.join("libelf.yaml")
- with specfile.open('w') as f:
- f.write(s['libelf'].to_yaml(hash=ht.dag_hash))
+ with specfile.open("w") as f:
+ f.write(s["libelf"].to_yaml(hash=ht.dag_hash))
file_name = specfile.basename
parent_dir = os.path.basename(specfile.dirname)
@@ -639,62 +621,67 @@ class TestSpecSyntax(object):
# Relative path to specfile
with fs.working_dir(specfile.dirname):
# Test for command like: "spack spec libelf.yaml"
- specs = sp.parse('libdwarf^{0}'.format(file_name))
+ specs = sp.parse("libdwarf^{0}".format(file_name))
assert len(specs) == 1
# Make sure this also works: "spack spec ./libelf.yaml"
- specs = sp.parse('libdwarf^./{0}'.format(file_name))
+ specs = sp.parse("libdwarf^./{0}".format(file_name))
assert len(specs) == 1
# Should also be accepted: "spack spec ../<cur-dir>/libelf.yaml"
- specs = sp.parse('libdwarf^../{0}/{1}'.format(
- parent_dir, file_name))
+ specs = sp.parse("libdwarf^../{0}/{1}".format(parent_dir, file_name))
assert len(specs) == 1
def test_parse_yaml_error_handling(self):
- self._check_raises(NoSuchSpecFileError, [
- # Single spec that looks like a yaml path
- '/bogus/path/libdwarf.yaml',
- '../../libdwarf.yaml',
- './libdwarf.yaml',
- # Dependency spec that looks like a yaml path
- 'libdwarf^/bogus/path/libelf.yaml',
- 'libdwarf ^../../libelf.yaml',
- 'libdwarf^ ./libelf.yaml',
- # Multiple specs, one looks like a yaml path
- 'mvapich_foo /bogus/path/libelf.yaml',
- 'mvapich_foo ../../libelf.yaml',
- 'mvapich_foo ./libelf.yaml',
- ])
+ self._check_raises(
+ NoSuchSpecFileError,
+ [
+ # Single spec that looks like a yaml path
+ "/bogus/path/libdwarf.yaml",
+ "../../libdwarf.yaml",
+ "./libdwarf.yaml",
+ # Dependency spec that looks like a yaml path
+ "libdwarf^/bogus/path/libelf.yaml",
+ "libdwarf ^../../libelf.yaml",
+ "libdwarf^ ./libelf.yaml",
+ # Multiple specs, one looks like a yaml path
+ "mvapich_foo /bogus/path/libelf.yaml",
+ "mvapich_foo ../../libelf.yaml",
+ "mvapich_foo ./libelf.yaml",
+ ],
+ )
def test_nice_error_for_no_space_after_spec_filename(self):
"""Ensure that omitted spaces don't give weird errors about hashes."""
- self._check_raises(SpecFilenameError, [
- '/bogus/path/libdwarf.yamlfoobar',
- 'libdwarf^/bogus/path/libelf.yamlfoobar ^/path/to/bogus.yaml',
- ])
+ self._check_raises(
+ SpecFilenameError,
+ [
+ "/bogus/path/libdwarf.yamlfoobar",
+ "libdwarf^/bogus/path/libelf.yamlfoobar ^/path/to/bogus.yaml",
+ ],
+ )
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_yaml_spec_not_filename(self, mock_packages, tmpdir):
with pytest.raises(spack.repo.UnknownPackageError):
- Spec('builtin.mock.yaml').concretize()
+ Spec("builtin.mock.yaml").concretize()
with pytest.raises(spack.repo.UnknownPackageError):
- Spec('builtin.mock.yamlfoobar').concretize()
+ Spec("builtin.mock.yamlfoobar").concretize()
- @pytest.mark.usefixtures('config')
+ @pytest.mark.usefixtures("config")
def test_parse_yaml_variant_error(self, mock_packages, tmpdir):
- s = Spec('a')
+ s = Spec("a")
s.concretize()
- specfile = tmpdir.join('a.yaml')
+ specfile = tmpdir.join("a.yaml")
- with specfile.open('w') as f:
+ with specfile.open("w") as f:
f.write(s.to_yaml(hash=ht.dag_hash))
with pytest.raises(RedundantSpecError):
# Trying to change a variant on a concrete spec is an error
- sp.parse('{0} ~bvv'.format(specfile.strpath))
+ sp.parse("{0} ~bvv".format(specfile.strpath))
# ========================================================================
# Lex checks
@@ -707,7 +694,7 @@ class TestSpecSyntax(object):
complex_lex,
"mvapich_foo"
"^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4"
- "^stackwalker@8.1_1e"
+ "^stackwalker@8.1_1e",
)
# The following lexes are non-ambiguous (add a space before -qt_4)
@@ -717,99 +704,114 @@ class TestSpecSyntax(object):
complex_lex,
"mvapich_foo"
"^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4"
- "^stackwalker@8.1_1e")
+ "^stackwalker@8.1_1e",
+ )
self.check_lex(
complex_lex,
- "mvapich_foo"
- "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
- "^stackwalker@8.1_1e")
+ "mvapich_foo" "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4" "^stackwalker@8.1_1e",
+ )
def test_spaces_between_dependences(self):
self.check_lex(
complex_lex,
"mvapich_foo "
"^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 "
- "^stackwalker @ 8.1_1e")
+ "^stackwalker @ 8.1_1e",
+ )
self.check_lex(
complex_lex,
"mvapich_foo "
"^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 "
- "^stackwalker @ 8.1_1e")
+ "^stackwalker @ 8.1_1e",
+ )
def test_spaces_between_options(self):
self.check_lex(
complex_lex,
"mvapich_foo "
"^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 "
- "^stackwalker @8.1_1e")
+ "^stackwalker @8.1_1e",
+ )
def test_way_too_many_spaces(self):
self.check_lex(
complex_lex,
"mvapich_foo "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
self.check_lex(
complex_lex,
"mvapich_foo "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug ~ qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
def test_kv_with_quotes(self):
self.check_lex(
kv_lex,
"mvapich_foo debug='4' "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
self.check_lex(
kv_lex,
'mvapich_foo debug="4" '
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
self.check_lex(
kv_lex,
"mvapich_foo 'debug = 4' "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
def test_kv_without_quotes(self):
self.check_lex(
kv_lex,
"mvapich_foo debug=4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
def test_kv_with_spaces(self):
self.check_lex(
kv_lex,
"mvapich_foo debug = 4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
self.check_lex(
kv_lex,
"mvapich_foo debug =4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
+ "^ stackwalker @ 8.1_1e",
+ )
self.check_lex(
kv_lex,
"mvapich_foo debug= 4 "
"^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
- "^ stackwalker @ 8.1_1e")
-
- @pytest.mark.parametrize('expected_tokens,spec_string', [
- ([Token(sp.ID, 'target'),
- Token(sp.EQ, '='),
- Token(sp.VAL, 'broadwell')],
- 'target=broadwell'),
- ([Token(sp.ID, 'target'),
- Token(sp.EQ, '='),
- Token(sp.VAL, ':broadwell,icelake')],
- 'target=:broadwell,icelake')
- ])
+ "^ stackwalker @ 8.1_1e",
+ )
+
+ @pytest.mark.parametrize(
+ "expected_tokens,spec_string",
+ [
+ (
+ [Token(sp.ID, "target"), Token(sp.EQ, "="), Token(sp.VAL, "broadwell")],
+ "target=broadwell",
+ ),
+ (
+ [Token(sp.ID, "target"), Token(sp.EQ, "="), Token(sp.VAL, ":broadwell,icelake")],
+ "target=:broadwell,icelake",
+ ),
+ ],
+ )
def test_target_tokenization(self, expected_tokens, spec_string):
self.check_lex(expected_tokens, spec_string)
- @pytest.mark.regression('20310')
+ @pytest.mark.regression("20310")
def test_compare_abstract_specs(self):
"""Spec comparisons must be valid for abstract specs.
@@ -818,13 +820,13 @@ class TestSpecSyntax(object):
# Add fields in order they appear in `Spec._cmp_node`
constraints = [
None,
- 'foo',
- 'foo.foo',
- 'foo.foo@foo',
- 'foo.foo@foo+foo',
- 'foo.foo@foo+foo arch=foo-foo-foo',
- 'foo.foo@foo+foo arch=foo-foo-foo %foo',
- 'foo.foo@foo+foo arch=foo-foo-foo %foo cflags=foo',
+ "foo",
+ "foo.foo",
+ "foo.foo@foo",
+ "foo.foo@foo+foo",
+ "foo.foo@foo+foo arch=foo-foo-foo",
+ "foo.foo@foo+foo arch=foo-foo-foo %foo",
+ "foo.foo@foo+foo arch=foo-foo-foo %foo cflags=foo",
]
specs = [Spec(s) for s in constraints]
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index a13a22aa66..67aecfc698 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -44,22 +44,22 @@ def check_json_round_trip(spec):
def test_simple_spec():
- spec = Spec('mpileaks')
+ spec = Spec("mpileaks")
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_read_spec_from_signed_json():
- spec_dir = os.path.join(
- spack.paths.test_path, 'data', 'mirrors', 'signed_json')
+ spec_dir = os.path.join(spack.paths.test_path, "data", "mirrors", "signed_json")
file_name = (
- 'linux-ubuntu18.04-haswell-gcc-8.4.0-'
- 'zlib-1.2.12-g7otk5dra3hifqxej36m5qzm7uyghqgb.spec.json.sig')
+ "linux-ubuntu18.04-haswell-gcc-8.4.0-"
+ "zlib-1.2.12-g7otk5dra3hifqxej36m5qzm7uyghqgb.spec.json.sig"
+ )
spec_path = os.path.join(spec_dir, file_name)
def check_spec(spec_to_check):
- assert(spec_to_check.name == 'zlib')
- assert(spec_to_check._hash == 'g7otk5dra3hifqxej36m5qzm7uyghqgb')
+ assert spec_to_check.name == "zlib"
+ assert spec_to_check._hash == "g7otk5dra3hifqxej36m5qzm7uyghqgb"
with open(spec_path) as fd:
s = Spec.from_signed_json(fd)
@@ -71,17 +71,14 @@ def test_read_spec_from_signed_json():
def test_normal_spec(mock_packages):
- spec = Spec('mpileaks+debug~opt')
+ spec = Spec("mpileaks+debug~opt")
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
@pytest.mark.parametrize(
- "invalid_yaml",
- [
- "playing_playlist: {{ action }} playlist {{ playlist_name }}"
- ]
+ "invalid_yaml", ["playing_playlist: {{ action }} playlist {{ playlist_name }}"]
)
def test_invalid_yaml_spec(invalid_yaml):
with pytest.raises(SpackYAMLError) as e:
@@ -91,12 +88,7 @@ def test_invalid_yaml_spec(invalid_yaml):
assert invalid_yaml in exc_msg
-@pytest.mark.parametrize(
- "invalid_json, error_message",
- [
- ("{13:", "Expecting property name")
- ]
-)
+@pytest.mark.parametrize("invalid_json, error_message", [("{13:", "Expecting property name")])
def test_invalid_json_spec(invalid_json, error_message):
with pytest.raises(sjson.SpackJSONError) as e:
Spec.from_json(invalid_json)
@@ -106,26 +98,26 @@ def test_invalid_json_spec(invalid_json, error_message):
def test_external_spec(config, mock_packages):
- spec = Spec('externaltool')
+ spec = Spec("externaltool")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
- spec = Spec('externaltest')
+ spec = Spec("externaltest")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_ambiguous_version_spec(mock_packages):
- spec = Spec('mpileaks@1.0:5.0,6.1,7.3+debug~opt')
+ spec = Spec("mpileaks@1.0:5.0,6.1,7.3+debug~opt")
spec.normalize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
def test_concrete_spec(config, mock_packages):
- spec = Spec('mpileaks+debug~opt')
+ spec = Spec("mpileaks+debug~opt")
spec.concretize()
check_yaml_round_trip(spec)
check_json_round_trip(spec)
@@ -139,22 +131,23 @@ def test_yaml_multivalue(config, mock_packages):
def test_yaml_subdag(config, mock_packages):
- spec = Spec('mpileaks^mpich+debug')
+ spec = Spec("mpileaks^mpich+debug")
spec.concretize()
yaml_spec = Spec.from_yaml(spec.to_yaml())
json_spec = Spec.from_json(spec.to_json())
- for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
+ for dep in ("callpath", "mpich", "dyninst", "libdwarf", "libelf"):
assert spec[dep].eq_dag(yaml_spec[dep])
assert spec[dep].eq_dag(json_spec[dep])
def test_using_ordered_dict(mock_packages):
- """ Checks that dicts are ordered
+ """Checks that dicts are ordered
Necessary to make sure that dag_hash is stable across python
versions and processes.
"""
+
def descend_and_check(iterable, level=0):
if isinstance(iterable, Mapping):
assert isinstance(iterable, syaml_dict)
@@ -167,7 +160,7 @@ def test_using_ordered_dict(mock_packages):
max_level = nlevel
return max_level
- specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
+ specs = ["mpileaks ^zmpi", "dttop", "dtuse"]
for spec in specs:
dag = Spec(spec)
dag.normalize()
@@ -177,9 +170,7 @@ def test_using_ordered_dict(mock_packages):
assert level >= 5
-def test_ordered_read_not_required_for_consistent_dag_hash(
- config, mock_packages
-):
+def test_ordered_read_not_required_for_consistent_dag_hash(config, mock_packages):
"""Make sure ordered serialization isn't required to preserve hashes.
For consistent hashes, we require that YAML and json documents
@@ -187,7 +178,7 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
don't want to require them to be serialized in order. This
ensures that is not required.
"""
- specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
+ specs = ["mpileaks ^zmpi", "dttop", "dtuse"]
for spec in specs:
spec = Spec(spec)
spec.concretize()
@@ -209,8 +200,7 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
# Dump to YAML and JSON
#
yaml_string = syaml.dump(spec_dict, default_flow_style=False)
- reversed_yaml_string = syaml.dump(reversed_spec_dict,
- default_flow_style=False)
+ reversed_yaml_string = syaml.dump(reversed_spec_dict, default_flow_style=False)
json_string = sjson.dump(spec_dict)
reversed_json_string = sjson.dump(reversed_spec_dict)
@@ -230,12 +220,8 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
# build specs from the "wrongly" ordered data
round_trip_yaml_spec = Spec.from_yaml(yaml_string)
round_trip_json_spec = Spec.from_json(json_string)
- round_trip_reversed_yaml_spec = Spec.from_yaml(
- reversed_yaml_string
- )
- round_trip_reversed_json_spec = Spec.from_yaml(
- reversed_json_string
- )
+ round_trip_reversed_yaml_spec = Spec.from_yaml(reversed_yaml_string)
+ round_trip_reversed_json_spec = Spec.from_yaml(reversed_json_string)
# Strip spec if we stripped the yaml
spec = spec.copy(deps=ht.dag_hash.deptype)
@@ -266,10 +252,13 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
-@pytest.mark.parametrize("module", [
- spack.spec,
- spack.version,
-])
+@pytest.mark.parametrize(
+ "module",
+ [
+ spack.spec,
+ spack.version,
+ ],
+)
def test_hashes_use_no_python_dicts(module):
"""Coarse check to make sure we don't use dicts in Spec.to_node_dict().
@@ -285,8 +274,10 @@ def test_hashes_use_no_python_dicts(module):
prints out the line numbers where they occur.
"""
+
class FindFunctions(ast.NodeVisitor):
"""Find a function definition called to_node_dict."""
+
def __init__(self):
self.nodes = []
@@ -296,6 +287,7 @@ def test_hashes_use_no_python_dicts(module):
class FindDicts(ast.NodeVisitor):
"""Find source locations of dicts in an AST."""
+
def __init__(self, filename):
self.nodes = []
self.filename = filename
@@ -316,7 +308,7 @@ def test_hashes_use_no_python_dicts(module):
elif isinstance(node.func, ast.Attribute):
name = node.func.attr
- if name == 'dict':
+ if name == "dict":
self.add_error(node)
find_functions = FindFunctions()
@@ -334,9 +326,9 @@ def test_hashes_use_no_python_dicts(module):
def reverse_all_dicts(data):
"""Descend into data and reverse all the dictionaries"""
if isinstance(data, dict):
- return syaml_dict(reversed(
- [(reverse_all_dicts(k), reverse_all_dicts(v))
- for k, v in data.items()]))
+ return syaml_dict(
+ reversed([(reverse_all_dicts(k), reverse_all_dicts(v)) for k, v in data.items()])
+ )
elif isinstance(data, (list, tuple)):
return type(data)(reverse_all_dicts(elt) for elt in data)
else:
@@ -344,37 +336,37 @@ def reverse_all_dicts(data):
def check_specs_equal(original_spec, spec_yaml_path):
- with open(spec_yaml_path, 'r') as fd:
+ with open(spec_yaml_path, "r") as fd:
spec_yaml = fd.read()
spec_from_yaml = Spec.from_yaml(spec_yaml)
return original_spec.eq_dag(spec_from_yaml)
def test_save_dependency_spec_jsons_subset(tmpdir, config):
- output_path = str(tmpdir.mkdir('spec_jsons'))
+ output_path = str(tmpdir.mkdir("spec_jsons"))
- default = ('build', 'link')
+ default = ("build", "link")
mock_repo = MockPackageMultiRepo()
- g = mock_repo.add_package('g', [], [])
- f = mock_repo.add_package('f', [], [])
- e = mock_repo.add_package('e', [], [])
- d = mock_repo.add_package('d', [f, g], [default, default])
- c = mock_repo.add_package('c', [], [])
- b = mock_repo.add_package('b', [d, e], [default, default])
- mock_repo.add_package('a', [b, c], [default, default])
+ g = mock_repo.add_package("g", [], [])
+ f = mock_repo.add_package("f", [], [])
+ e = mock_repo.add_package("e", [], [])
+ d = mock_repo.add_package("d", [f, g], [default, default])
+ c = mock_repo.add_package("c", [], [])
+ b = mock_repo.add_package("b", [d, e], [default, default])
+ mock_repo.add_package("a", [b, c], [default, default])
with repo.use_repositories(mock_repo):
- spec_a = Spec('a')
+ spec_a = Spec("a")
spec_a.concretize()
- b_spec = spec_a['b']
- c_spec = spec_a['c']
+ b_spec = spec_a["b"]
+ c_spec = spec_a["c"]
spec_a_json = spec_a.to_json()
- save_dependency_specfiles(spec_a_json, output_path, ['b', 'c'])
+ save_dependency_specfiles(spec_a_json, output_path, ["b", "c"])
- assert check_specs_equal(b_spec, os.path.join(output_path, 'b.json'))
- assert check_specs_equal(c_spec, os.path.join(output_path, 'c.json'))
+ assert check_specs_equal(b_spec, os.path.join(output_path, "b.json"))
+ assert check_specs_equal(c_spec, os.path.join(output_path, "c.json"))
def test_legacy_yaml(tmpdir, install_mockery, mock_packages):
@@ -441,60 +433,85 @@ spec:
#: A well-ordered Spec dictionary, using ``OrderedDict``.
#: Any operation that transforms Spec dictionaries should
#: preserve this order.
-ordered_spec = collections.OrderedDict([
- ("arch", collections.OrderedDict([
- ("platform", "darwin"),
- ("platform_os", "bigsur"),
- ("target", collections.OrderedDict([
- ("features", [
- "adx",
- "aes",
- "avx",
- "avx2",
- "bmi1",
- "bmi2",
- "clflushopt",
- "f16c",
- "fma",
- "mmx",
- "movbe",
- "pclmulqdq",
- "popcnt",
- "rdrand",
- "rdseed",
- "sse",
- "sse2",
- "sse4_1",
- "sse4_2",
- "ssse3",
- "xsavec",
- "xsaveopt"
- ]),
- ("generation", 0),
- ("name", "skylake"),
- ("parents", ["broadwell"]),
- ("vendor", "GenuineIntel"),
- ])),
- ])),
- ("compiler", collections.OrderedDict([
- ("name", "apple-clang"),
- ("version", "13.0.0"),
- ])),
- ("name", "zlib"),
- ("namespace", "builtin"),
- ("parameters", collections.OrderedDict([
- ("cflags", []),
- ("cppflags", []),
- ("cxxflags", []),
- ("fflags", []),
- ("ldflags", []),
- ("ldlibs", []),
- ("optimize", True),
- ("pic", True),
- ("shared", True),
- ])),
- ("version", "1.2.11"),
-])
+ordered_spec = collections.OrderedDict(
+ [
+ (
+ "arch",
+ collections.OrderedDict(
+ [
+ ("platform", "darwin"),
+ ("platform_os", "bigsur"),
+ (
+ "target",
+ collections.OrderedDict(
+ [
+ (
+ "features",
+ [
+ "adx",
+ "aes",
+ "avx",
+ "avx2",
+ "bmi1",
+ "bmi2",
+ "clflushopt",
+ "f16c",
+ "fma",
+ "mmx",
+ "movbe",
+ "pclmulqdq",
+ "popcnt",
+ "rdrand",
+ "rdseed",
+ "sse",
+ "sse2",
+ "sse4_1",
+ "sse4_2",
+ "ssse3",
+ "xsavec",
+ "xsaveopt",
+ ],
+ ),
+ ("generation", 0),
+ ("name", "skylake"),
+ ("parents", ["broadwell"]),
+ ("vendor", "GenuineIntel"),
+ ]
+ ),
+ ),
+ ]
+ ),
+ ),
+ (
+ "compiler",
+ collections.OrderedDict(
+ [
+ ("name", "apple-clang"),
+ ("version", "13.0.0"),
+ ]
+ ),
+ ),
+ ("name", "zlib"),
+ ("namespace", "builtin"),
+ (
+ "parameters",
+ collections.OrderedDict(
+ [
+ ("cflags", []),
+ ("cppflags", []),
+ ("cxxflags", []),
+ ("fflags", []),
+ ("ldflags", []),
+ ("ldlibs", []),
+ ("optimize", True),
+ ("pic", True),
+ ("shared", True),
+ ]
+ ),
+ ),
+ ("version", "1.2.11"),
+ ]
+)
@pytest.mark.regression("31092")
diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py
index 61228dd40c..44294ff6bd 100644
--- a/lib/spack/spack/test/stage.py
+++ b/lib/spack/spack/test/stage.py
@@ -24,15 +24,15 @@ from spack.stage import DIYStage, ResourceStage, Stage, StageComposite
from spack.util.path import canonicalize_path
# The following values are used for common fetch and stage mocking fixtures:
-_archive_base = 'test-files'
-_archive_fn = '%s.tar.gz' % _archive_base
-_extra_fn = 'extra.sh'
-_hidden_fn = '.hidden'
-_readme_fn = 'README.txt'
+_archive_base = "test-files"
+_archive_fn = "%s.tar.gz" % _archive_base
+_extra_fn = "extra.sh"
+_hidden_fn = ".hidden"
+_readme_fn = "README.txt"
-_extra_contents = '#!/bin/sh\n'
-_hidden_contents = ''
-_readme_contents = 'hello world!\n'
+_extra_contents = "#!/bin/sh\n"
+_hidden_contents = ""
+_readme_contents = "hello world!\n"
# TODO: Replace the following with an enum once guarantee supported (or
# include enum34 for python versions < 3.4.
@@ -40,9 +40,9 @@ _include_readme = 1
_include_hidden = 2
_include_extra = 3
-_file_prefix = 'file://'
-if sys.platform == 'win32':
- _file_prefix += '/'
+_file_prefix = "file://"
+if sys.platform == "win32":
+ _file_prefix += "/"
# Mock fetch directories are expected to appear as follows:
@@ -74,7 +74,7 @@ if sys.platform == 'win32':
@pytest.fixture
def clear_stage_root(monkeypatch):
"""Ensure spack.stage._stage_root is not set at test start."""
- monkeypatch.setattr(spack.stage, '_stage_root', None)
+ monkeypatch.setattr(spack.stage, "_stage_root", None)
yield
@@ -190,8 +190,8 @@ def get_stage_path(stage, stage_name):
@pytest.fixture
def tmp_build_stage_dir(tmpdir, clear_stage_root):
"""Use a temporary test directory for the stage root."""
- test_path = str(tmpdir.join('stage'))
- with spack.config.override('config:build_stage', test_path):
+ test_path = str(tmpdir.join("stage"))
+ with spack.config.override("config:build_stage", test_path):
yield tmpdir, spack.stage.get_stage_root()
shutil.rmtree(test_path)
@@ -222,7 +222,7 @@ def mock_stage_archive(tmp_build_stage_dir):
# Create the optional files as requested and make sure expanded
# archive peers are included.
- tar_args = ['czf', str(_archive_fn), _archive_base]
+ tar_args = ["czf", str(_archive_fn), _archive_base]
for _include in expected_file_list:
if _include == _include_hidden:
# The hidden file case stands in for the way Mac OS X tar files
@@ -247,14 +247,13 @@ def mock_stage_archive(tmp_build_stage_dir):
# Create the archive file
with tmpdir.as_cwd():
- tar = spack.util.executable.which('tar', required=True)
+ tar = spack.util.executable.which("tar", required=True)
tar(*tar_args)
- Archive = collections.namedtuple(
- 'Archive', ['url', 'tmpdir', 'stage_path', 'archive_dir']
+ Archive = collections.namedtuple("Archive", ["url", "tmpdir", "stage_path", "archive_dir"])
+ return Archive(
+ url=archive_url, tmpdir=tmpdir, stage_path=test_stage_path, archive_dir=archive_dir
)
- return Archive(url=archive_url, tmpdir=tmpdir,
- stage_path=test_stage_path, archive_dir=archive_dir)
return create_stage_archive
@@ -262,7 +261,7 @@ def mock_stage_archive(tmp_build_stage_dir):
@pytest.fixture
def mock_noexpand_resource(tmpdir):
"""Set up a non-expandable resource in the tmpdir prior to staging."""
- test_resource = tmpdir.join('resource-no-expand.sh')
+ test_resource = tmpdir.join("resource-no-expand.sh")
test_resource.write("an example resource")
return str(test_resource)
@@ -277,45 +276,40 @@ def mock_expand_resource(tmpdir):
# resource-file.txt resource contents (contains 'test content')
# resource.tar.gz archive of resource content
#
- subdir = 'resource-expand'
+ subdir = "resource-expand"
resource_dir = tmpdir.join(subdir)
resource_dir.ensure(dir=True)
- archive_name = 'resource.tar.gz'
+ archive_name = "resource.tar.gz"
archive = tmpdir.join(archive_name)
archive_url = _file_prefix + str(archive)
- filename = 'resource-file.txt'
+ filename = "resource-file.txt"
test_file = resource_dir.join(filename)
- test_file.write('test content\n')
+ test_file.write("test content\n")
with tmpdir.as_cwd():
- tar = spack.util.executable.which('tar', required=True)
- tar('czf', str(archive_name), subdir)
+ tar = spack.util.executable.which("tar", required=True)
+ tar("czf", str(archive_name), subdir)
- MockResource = collections.namedtuple(
- 'MockResource', ['url', 'files'])
+ MockResource = collections.namedtuple("MockResource", ["url", "files"])
return MockResource(archive_url, [filename])
@pytest.fixture
-def composite_stage_with_expanding_resource(
- mock_stage_archive, mock_expand_resource):
+def composite_stage_with_expanding_resource(mock_stage_archive, mock_expand_resource):
"""Sets up a composite for expanding resources prior to staging."""
composite_stage = StageComposite()
archive = mock_stage_archive()
root_stage = Stage(archive.url)
composite_stage.append(root_stage)
- test_resource_fetcher = spack.fetch_strategy.from_kwargs(
- url=mock_expand_resource.url)
+ test_resource_fetcher = spack.fetch_strategy.from_kwargs(url=mock_expand_resource.url)
# Specify that the resource files are to be placed in the 'resource-dir'
# directory
- test_resource = Resource(
- 'test_resource', test_resource_fetcher, '', 'resource-dir')
- resource_stage = ResourceStage(
- test_resource_fetcher, root_stage, test_resource)
+ test_resource = Resource("test_resource", test_resource_fetcher, "", "resource-dir")
+ resource_stage = ResourceStage(test_resource_fetcher, root_stage, test_resource)
composite_stage.append(resource_stage)
return composite_stage, root_stage, resource_stage, mock_expand_resource
@@ -323,26 +317,30 @@ def composite_stage_with_expanding_resource(
@pytest.fixture
def failing_search_fn():
"""Returns a search function that fails! Always!"""
+
def _mock():
raise Exception("This should not have been called")
+
return _mock
@pytest.fixture
def failing_fetch_strategy():
"""Returns a fetch strategy that fails."""
+
class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy):
def fetch(self):
raise spack.fetch_strategy.FailedDownloadError(
- "<non-existent URL>",
- "This implementation of FetchStrategy always fails"
+ "<non-existent URL>", "This implementation of FetchStrategy always fails"
)
+
return FailingFetchStrategy()
@pytest.fixture
def search_fn():
"""Returns a search function that always succeeds."""
+
class _Mock(object):
performed_search = False
@@ -369,8 +367,7 @@ def check_stage_dir_perms(prefix, path):
# Skip processing prefix ancestors since no guarantee they will be in the
# required group (e.g. $TEMPDIR on HPC machines).
skip = prefix if prefix.endswith(os.sep) else prefix + os.sep
- group_paths, user_node, user_paths = partition_path(path.replace(skip, ""),
- user)
+ group_paths, user_node, user_paths = partition_path(path.replace(skip, ""), user)
for p in group_paths:
p_status = os.stat(os.path.join(prefix, p))
@@ -388,10 +385,10 @@ def check_stage_dir_perms(prefix, path):
assert p_status.st_mode & stat.S_IRWXU == stat.S_IRWXU
-@pytest.mark.usefixtures('mock_packages')
+@pytest.mark.usefixtures("mock_packages")
class TestStage(object):
- stage_name = 'spack-test-stage'
+ stage_name = "spack-test-stage"
def test_setup_and_destroy_name_with_tmp(self, mock_stage_archive):
archive = mock_stage_archive()
@@ -411,13 +408,13 @@ class TestStage(object):
check_setup(stage, None, archive)
check_destroy(stage, None)
- def test_noexpand_stage_file(
- self, mock_stage_archive, mock_noexpand_resource):
+ def test_noexpand_stage_file(self, mock_stage_archive, mock_noexpand_resource):
"""When creating a stage with a nonexpanding URL, the 'archive_file'
property of the stage should refer to the path of that file.
"""
test_noexpand_fetcher = spack.fetch_strategy.from_kwargs(
- url=_file_prefix + mock_noexpand_resource, expand=False)
+ url=_file_prefix + mock_noexpand_resource, expand=False
+ )
with Stage(test_noexpand_fetcher) as stage:
stage.fetch()
stage.expand_archive()
@@ -425,19 +422,19 @@ class TestStage(object):
@pytest.mark.disable_clean_stage_check
def test_composite_stage_with_noexpand_resource(
- self, mock_stage_archive, mock_noexpand_resource):
+ self, mock_stage_archive, mock_noexpand_resource
+ ):
archive = mock_stage_archive()
composite_stage = StageComposite()
root_stage = Stage(archive.url)
composite_stage.append(root_stage)
- resource_dst_name = 'resource-dst-name.sh'
+ resource_dst_name = "resource-dst-name.sh"
test_resource_fetcher = spack.fetch_strategy.from_kwargs(
- url=_file_prefix + mock_noexpand_resource, expand=False)
- test_resource = Resource(
- 'test_resource', test_resource_fetcher, resource_dst_name, None)
- resource_stage = ResourceStage(
- test_resource_fetcher, root_stage, test_resource)
+ url=_file_prefix + mock_noexpand_resource, expand=False
+ )
+ test_resource = Resource("test_resource", test_resource_fetcher, resource_dst_name, None)
+ resource_stage = ResourceStage(test_resource_fetcher, root_stage, test_resource)
composite_stage.append(resource_stage)
composite_stage.create()
@@ -445,15 +442,17 @@ class TestStage(object):
composite_stage.expand_archive()
assert composite_stage.expanded # Archive is expanded
- assert os.path.exists(
- os.path.join(composite_stage.source_path, resource_dst_name))
+ assert os.path.exists(os.path.join(composite_stage.source_path, resource_dst_name))
@pytest.mark.disable_clean_stage_check
- def test_composite_stage_with_expand_resource(
- self, composite_stage_with_expanding_resource):
+ def test_composite_stage_with_expand_resource(self, composite_stage_with_expanding_resource):
- composite_stage, root_stage, resource_stage, mock_resource = (
- composite_stage_with_expanding_resource)
+ (
+ composite_stage,
+ root_stage,
+ resource_stage,
+ mock_resource,
+ ) = composite_stage_with_expanding_resource
composite_stage.create()
composite_stage.fetch()
@@ -462,8 +461,7 @@ class TestStage(object):
assert composite_stage.expanded # Archive is expanded
for fname in mock_resource.files:
- file_path = os.path.join(
- root_stage.source_path, 'resource-dir', fname)
+ file_path = os.path.join(root_stage.source_path, "resource-dir", fname)
assert os.path.exists(file_path)
# Perform a little cleanup
@@ -471,15 +469,20 @@ class TestStage(object):
@pytest.mark.disable_clean_stage_check
def test_composite_stage_with_expand_resource_default_placement(
- self, composite_stage_with_expanding_resource):
+ self, composite_stage_with_expanding_resource
+ ):
"""For a resource which refers to a compressed archive which expands
to a directory, check that by default the resource is placed in
the source_path of the root stage with the name of the decompressed
directory.
"""
- composite_stage, root_stage, resource_stage, mock_resource = (
- composite_stage_with_expanding_resource)
+ (
+ composite_stage,
+ root_stage,
+ resource_stage,
+ mock_resource,
+ ) = composite_stage_with_expanding_resource
resource_stage.resource.placement = None
@@ -488,8 +491,7 @@ class TestStage(object):
composite_stage.expand_archive()
for fname in mock_resource.files:
- file_path = os.path.join(
- root_stage.source_path, 'resource-expand', fname)
+ file_path = os.path.join(root_stage.source_path, "resource-expand", fname)
assert os.path.exists(file_path)
# Perform a little cleanup
@@ -501,30 +503,25 @@ class TestStage(object):
check_setup(stage, None, archive)
check_destroy(stage, None)
- @pytest.mark.parametrize('debug', [False, True])
+ @pytest.mark.parametrize("debug", [False, True])
def test_fetch(self, mock_stage_archive, debug):
archive = mock_stage_archive()
- with spack.config.override('config:debug', debug):
+ with spack.config.override("config:debug", debug):
with Stage(archive.url, name=self.stage_name) as stage:
stage.fetch()
check_setup(stage, self.stage_name, archive)
check_fetch(stage, self.stage_name)
check_destroy(stage, self.stage_name)
- def test_no_search_if_default_succeeds(
- self, mock_stage_archive, failing_search_fn):
+ def test_no_search_if_default_succeeds(self, mock_stage_archive, failing_search_fn):
archive = mock_stage_archive()
- stage = Stage(archive.url, name=self.stage_name,
- search_fn=failing_search_fn)
+ stage = Stage(archive.url, name=self.stage_name, search_fn=failing_search_fn)
with stage:
stage.fetch()
check_destroy(stage, self.stage_name)
- def test_no_search_mirror_only(
- self, failing_fetch_strategy, failing_search_fn):
- stage = Stage(failing_fetch_strategy,
- name=self.stage_name,
- search_fn=failing_search_fn)
+ def test_no_search_mirror_only(self, failing_fetch_strategy, failing_search_fn):
+ stage = Stage(failing_fetch_strategy, name=self.stage_name, search_fn=failing_search_fn)
with stage:
try:
stage.fetch(mirror_only=True)
@@ -533,18 +530,17 @@ class TestStage(object):
check_destroy(stage, self.stage_name)
@pytest.mark.parametrize(
- "err_msg,expected", [('Fetch from fetch.test.com',
- 'Fetch from fetch.test.com'),
- (None, 'All fetchers failed')])
- def test_search_if_default_fails(self, failing_fetch_strategy, search_fn,
- err_msg, expected):
- stage = Stage(failing_fetch_strategy,
- name=self.stage_name,
- search_fn=search_fn)
+ "err_msg,expected",
+ [
+ ("Fetch from fetch.test.com", "Fetch from fetch.test.com"),
+ (None, "All fetchers failed"),
+ ],
+ )
+ def test_search_if_default_fails(self, failing_fetch_strategy, search_fn, err_msg, expected):
+ stage = Stage(failing_fetch_strategy, name=self.stage_name, search_fn=search_fn)
with stage:
- with pytest.raises(spack.fetch_strategy.FetchError,
- match=expected):
+ with pytest.raises(spack.fetch_strategy.FetchError, match=expected):
stage.fetch(mirror_only=False, err_msg=err_msg)
check_destroy(stage, self.stage_name)
@@ -558,11 +554,15 @@ class TestStage(object):
spack.fetch_strategy._ensure_one_stage_entry(stage_path)
check_destroy(stage, self.stage_name)
- @pytest.mark.parametrize("expected_file_list", [
- [],
- [_include_readme],
- [_include_extra, _include_readme],
- [_include_hidden, _include_readme]])
+ @pytest.mark.parametrize(
+ "expected_file_list",
+ [
+ [],
+ [_include_readme],
+ [_include_extra, _include_readme],
+ [_include_hidden, _include_readme],
+ ],
+ )
def test_expand_archive(self, expected_file_list, mock_stage_archive):
archive = mock_stage_archive(expected_file_list)
with Stage(archive.url, name=self.stage_name) as stage:
@@ -595,15 +595,15 @@ class TestStage(object):
check_expand_archive(stage, self.stage_name, [_include_readme])
# Try to make a file in the old archive dir
- with open('foobar', 'w') as file:
+ with open("foobar", "w") as file:
file.write("this file is to be destroyed.")
- assert 'foobar' in os.listdir(stage.source_path)
+ assert "foobar" in os.listdir(stage.source_path)
# Make sure the file is not there after restage.
stage.restage()
check_fetch(stage, self.stage_name)
- assert 'foobar' not in os.listdir(stage.source_path)
+ assert "foobar" not in os.listdir(stage.source_path)
check_destroy(stage, self.stage_name)
def test_no_keep_without_exceptions(self, mock_stage_archive):
@@ -662,14 +662,13 @@ class TestStage(object):
assert source_path.endswith(spack.stage._source_path_subdir)
assert not os.path.exists(source_path)
- @pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
- @pytest.mark.skipif(getuid() == 0, reason='user is root')
+ @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+ @pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_first_accessible_path(self, tmpdir):
"""Test _first_accessible_path names."""
- spack_dir = tmpdir.join('paths')
+ spack_dir = tmpdir.join("paths")
name = str(spack_dir)
- files = [os.path.join(os.path.sep, 'no', 'such', 'path'), name]
+ files = [os.path.join(os.path.sep, "no", "such", "path"), name]
# Ensure the tmpdir path is returned since the user should have access
path = spack.stage._first_accessible_path(files)
@@ -678,7 +677,7 @@ class TestStage(object):
check_stage_dir_perms(str(tmpdir), path)
# Ensure an existing path is returned
- spack_subdir = spack_dir.join('existing').ensure(dir=True)
+ spack_subdir = spack_dir.join("existing").ensure(dir=True)
subdir = str(spack_subdir)
path = spack.stage._first_accessible_path([subdir])
assert path == subdir
@@ -686,7 +685,7 @@ class TestStage(object):
# Ensure a path with a `$user` node has the right permissions
# for its subdirectories.
user = getpass.getuser()
- user_dir = spack_dir.join(user, 'has', 'paths')
+ user_dir = spack_dir.join(user, "has", "paths")
user_path = str(user_dir)
path = spack.stage._first_accessible_path([user_path])
assert path == user_path
@@ -695,11 +694,10 @@ class TestStage(object):
# Cleanup
shutil.rmtree(str(name))
- @pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+ @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_create_stage_root(self, tmpdir, no_path_access):
"""Test create_stage_root permissions."""
- test_dir = tmpdir.join('path')
+ test_dir = tmpdir.join("path")
test_path = str(test_dir)
try:
@@ -757,7 +755,7 @@ class TestStage(object):
# m.setattr(os, 'stat', _stat)
# spack.stage.create_stage_root(user_path)
# assert os.stat(user_path).st_uid != os.getuid()
- monkeypatch.setattr(os, 'stat', _stat)
+ monkeypatch.setattr(os, "stat", _stat)
spack.stage.create_stage_root(user_path)
# The following check depends on the patched os.stat as a poor
@@ -769,61 +767,65 @@ class TestStage(object):
assert spack.stage._resolve_paths([]) == []
# resolved path without user appends user
- paths = [os.path.join(os.path.sep, 'a', 'b', 'c')]
+ paths = [os.path.join(os.path.sep, "a", "b", "c")]
user = getpass.getuser()
can_paths = [os.path.join(paths[0], user)]
assert spack.stage._resolve_paths(paths) == can_paths
# resolved path with node including user does not append user
- paths = [os.path.join(os.path.sep, 'spack-{0}'.format(user), 'stage')]
+ paths = [os.path.join(os.path.sep, "spack-{0}".format(user), "stage")]
assert spack.stage._resolve_paths(paths) == paths
- tempdir = '$tempdir'
+ tempdir = "$tempdir"
can_tempdir = canonicalize_path(tempdir)
user = getpass.getuser()
temp_has_user = user in can_tempdir.split(os.sep)
- paths = [os.path.join(tempdir, 'stage'),
- os.path.join(tempdir, '$user'),
- os.path.join(tempdir, '$user', '$user'),
- os.path.join(tempdir, '$user', 'stage', '$user')]
+ paths = [
+ os.path.join(tempdir, "stage"),
+ os.path.join(tempdir, "$user"),
+ os.path.join(tempdir, "$user", "$user"),
+ os.path.join(tempdir, "$user", "stage", "$user"),
+ ]
res_paths = [canonicalize_path(p) for p in paths]
if temp_has_user:
res_paths[1] = can_tempdir
res_paths[2] = os.path.join(can_tempdir, user)
- res_paths[3] = os.path.join(can_tempdir, 'stage', user)
+ res_paths[3] = os.path.join(can_tempdir, "stage", user)
else:
res_paths[0] = os.path.join(res_paths[0], user)
assert spack.stage._resolve_paths(paths) == res_paths
- @pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
- @pytest.mark.skipif(getuid() == 0, reason='user is root')
+ @pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+ @pytest.mark.skipif(getuid() == 0, reason="user is root")
def test_get_stage_root_bad_path(self, clear_stage_root):
"""Ensure an invalid stage path root raises a StageError."""
- with spack.config.override('config:build_stage', '/no/such/path'):
- with pytest.raises(spack.stage.StageError,
- match="No accessible stage paths in"):
+ with spack.config.override("config:build_stage", "/no/such/path"):
+ with pytest.raises(spack.stage.StageError, match="No accessible stage paths in"):
spack.stage.get_stage_root()
# Make sure the cached stage path values are unchanged.
assert spack.stage._stage_root is None
@pytest.mark.parametrize(
- 'path,purged', [('spack-stage-1234567890abcdef1234567890abcdef', True),
- ('spack-stage-anything-goes-here', True),
- ('stage-spack', False)])
+ "path,purged",
+ [
+ ("spack-stage-1234567890abcdef1234567890abcdef", True),
+ ("spack-stage-anything-goes-here", True),
+ ("stage-spack", False),
+ ],
+ )
def test_stage_purge(self, tmpdir, clear_stage_root, path, purged):
"""Test purging of stage directories."""
- stage_dir = tmpdir.join('stage')
+ stage_dir = tmpdir.join("stage")
stage_path = str(stage_dir)
test_dir = stage_dir.join(path)
test_dir.ensure(dir=True)
test_path = str(test_dir)
- with spack.config.override('config:build_stage', stage_path):
+ with spack.config.override("config:build_stage", stage_path):
stage_root = spack.stage.get_stage_root()
assert stage_path == stage_root
@@ -844,7 +846,7 @@ class TestStage(object):
def test_stage_constructor_with_path(self, tmpdir):
"""Ensure Stage constructor with a path uses it."""
testpath = str(tmpdir)
- with Stage('file:///does-not-exist', path=testpath) as stage:
+ with Stage("file:///does-not-exist", path=testpath) as stage:
assert stage.path == testpath
def test_diystage_path_none(self):
@@ -855,7 +857,7 @@ class TestStage(object):
def test_diystage_path_invalid(self):
"""Ensure DIYStage for an invalid path behaves as expected."""
with pytest.raises(spack.stage.StagePathError):
- DIYStage('/path/does/not/exist')
+ DIYStage("/path/does/not/exist")
def test_diystage_path_valid(self, tmpdir):
"""Ensure DIYStage for a valid path behaves as expected."""
@@ -904,31 +906,29 @@ class TestStage(object):
_file.read() == _readme_contents
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_stage_create_replace_path(tmp_build_stage_dir):
"""Ensure stage creation replaces a non-directory path."""
_, test_stage_path = tmp_build_stage_dir
mkdirp(test_stage_path)
- nondir = os.path.join(test_stage_path, 'afile')
+ nondir = os.path.join(test_stage_path, "afile")
touch(nondir)
path = str(nondir)
- stage = Stage(path, name='')
+ stage = Stage(path, name="")
stage.create()
# Ensure the stage path is "converted" to a directory
assert os.path.isdir(stage.path)
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_cannot_access(capsys):
"""Ensure can_access dies with the expected error."""
with pytest.raises(SystemExit):
# It's far more portable to use a non-existent filename.
- spack.stage.ensure_access('/no/such/file')
+ spack.stage.ensure_access("/no/such/file")
captured = capsys.readouterr()
- assert 'Insufficient permissions' in str(captured)
+ assert "Insufficient permissions" in str(captured)
diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py
index 1e6bbb6ed0..4a5a9a5517 100644
--- a/lib/spack/spack/test/svn_fetch.py
+++ b/lib/spack/spack/test/svn_fetch.py
@@ -18,23 +18,17 @@ from spack.stage import Stage
from spack.util.executable import which
from spack.version import ver
-pytestmark = [pytest.mark.skipif(
- not which('svn') or not which('svnadmin'),
- reason='requires subversion to be installed'),
- pytest.mark.skipif(sys.platform == "win32",
- reason="does not run on windows")]
+pytestmark = [
+ pytest.mark.skipif(
+ not which("svn") or not which("svnadmin"), reason="requires subversion to be installed"
+ ),
+ pytest.mark.skipif(sys.platform == "win32", reason="does not run on windows"),
+]
-@pytest.mark.parametrize("type_of_test", ['default', 'rev0'])
+@pytest.mark.parametrize("type_of_test", ["default", "rev0"])
@pytest.mark.parametrize("secure", [True, False])
-def test_fetch(
- type_of_test,
- secure,
- mock_svn_repository,
- config,
- mutable_mock_repo,
- monkeypatch
-):
+def test_fetch(type_of_test, secure, mock_svn_repository, config, mutable_mock_repo, monkeypatch):
"""Tries to:
1. Fetch the repo using a fetch strategy constructed with
@@ -49,12 +43,12 @@ def test_fetch(
h = mock_svn_repository.hash
# Construct the package under test
- s = Spec('svn-test').concretized()
- monkeypatch.setitem(s.package.versions, ver('svn'), t.args)
+ s = Spec("svn-test").concretized()
+ monkeypatch.setitem(s.package.versions, ver("svn"), t.args)
# Enter the stage directory and check some properties
with s.package.stage:
- with spack.config.override('config:verify_ssl', secure):
+ with spack.config.override("config:verify_ssl", secure):
s.package.do_stage()
with working_dir(s.package.stage.source_path):
@@ -67,7 +61,7 @@ def test_fetch(
os.unlink(file_path)
assert not os.path.isfile(file_path)
- untracked_file = 'foobarbaz'
+ untracked_file = "foobarbaz"
touch(untracked_file)
assert os.path.isfile(untracked_file)
s.package.do_restage()
@@ -83,7 +77,7 @@ def test_svn_extra_fetch(tmpdir):
"""Ensure a fetch after downloading is effectively a no-op."""
testpath = str(tmpdir)
- fetcher = SvnFetchStrategy(svn='file:///not-a-real-svn-repo')
+ fetcher = SvnFetchStrategy(svn="file:///not-a-real-svn-repo")
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
diff --git a/lib/spack/spack/test/tag.py b/lib/spack/spack/test/tag.py
index 92445dd5cf..6e43ef7d1c 100644
--- a/lib/spack/spack/test/tag.py
+++ b/lib/spack/spack/test/tag.py
@@ -11,11 +11,10 @@ import spack.cmd.install
import spack.tag
from spack.main import SpackCommand
-install = SpackCommand('install')
+install = SpackCommand("install")
# Alternate representation
-tags_json = \
- """
+tags_json = """
{
"tags": {
"no-version": [
@@ -29,8 +28,7 @@ tags_json = \
}
"""
-more_tags_json = \
- """
+more_tags_json = """
{
"tags": {
"merge": [
@@ -51,9 +49,9 @@ def test_tag_copy(mock_packages):
def test_tag_get_all_available(mock_packages):
for skip in [False, True]:
all_pkgs = spack.tag.packages_with_tags(None, False, skip)
- assert sorted(all_pkgs['tag1']) == ['mpich', 'mpich2']
- assert all_pkgs['tag2'] == ['mpich']
- assert all_pkgs['tag3'] == ['mpich2']
+ assert sorted(all_pkgs["tag1"]) == ["mpich", "mpich2"]
+ assert all_pkgs["tag2"] == ["mpich"]
+ assert all_pkgs["tag3"] == ["mpich2"]
def ensure_tags_results_equal(results, expected):
@@ -65,12 +63,15 @@ def ensure_tags_results_equal(results, expected):
assert results == expected
-@pytest.mark.parametrize('tags,expected', [
- (['tag1'], {'tag1': ['mpich', 'mpich2']}),
- (['tag2'], {'tag2': ['mpich']}),
- (['tag3'], {'tag3': ['mpich2']}),
- (['nosuchpackage'], {'nosuchpackage': {}}),
-])
+@pytest.mark.parametrize(
+ "tags,expected",
+ [
+ (["tag1"], {"tag1": ["mpich", "mpich2"]}),
+ (["tag2"], {"tag2": ["mpich"]}),
+ (["tag3"], {"tag3": ["mpich2"]}),
+ (["nosuchpackage"], {"nosuchpackage": {}}),
+ ],
+)
def test_tag_get_available(tags, expected, mock_packages):
# Ensure results for all tags
all_tag_pkgs = spack.tag.packages_with_tags(tags, False, False)
@@ -84,15 +85,14 @@ def test_tag_get_available(tags, expected, mock_packages):
assert not only_pkgs
-def test_tag_get_installed_packages(
- mock_packages, mock_archive, mock_fetch, install_mockery):
- install('mpich')
+def test_tag_get_installed_packages(mock_packages, mock_archive, mock_fetch, install_mockery):
+ install("mpich")
for skip in [False, True]:
all_pkgs = spack.tag.packages_with_tags(None, True, skip)
- assert sorted(all_pkgs['tag1']) == ['mpich']
- assert all_pkgs['tag2'] == ['mpich']
- assert skip or all_pkgs['tag3'] == []
+ assert sorted(all_pkgs["tag1"]) == ["mpich"]
+ assert all_pkgs["tag2"] == ["mpich"]
+ assert skip or all_pkgs["tag3"] == []
def test_tag_index_round_trip(mock_packages):
@@ -144,7 +144,7 @@ def test_tag_not_dict():
def test_tag_no_tags():
- pkg_json = "{\"packages\": []}"
+ pkg_json = '{"packages": []}'
with pytest.raises(spack.tag.TagIndexError) as e:
spack.tag.TagIndex.from_json(StringIO(pkg_json))
assert "does not start with" in str(e)
diff --git a/lib/spack/spack/test/tengine.py b/lib/spack/spack/test/tengine.py
index af653c390a..801568b409 100644
--- a/lib/spack/spack/test/tengine.py
+++ b/lib/spack/spack/test/tengine.py
@@ -12,7 +12,6 @@ from spack.util.path import canonicalize_path
class TestContext(object):
-
class A(tengine.Context):
@tengine.context_property
def foo(self):
@@ -42,37 +41,36 @@ class TestContext(object):
d = a.to_dict()
assert len(d) == 1
- assert 'foo' in d
- assert d['foo'] == 1
+ assert "foo" in d
+ assert d["foo"] == 1
# So does B
b = TestContext.B()
d = b.to_dict()
assert len(d) == 1
- assert 'bar' in d
- assert d['bar'] == 2
+ assert "bar" in d
+ assert d["bar"] == 2
# C derives from both and overrides 'foo'
c = TestContext.C()
d = c.to_dict()
assert len(d) == 3
- for x in ('foo', 'bar', 'foobar'):
+ for x in ("foo", "bar", "foobar"):
assert x in d
- assert d['foo'] == 10
- assert d['bar'] == 2
- assert d['foobar'] == 3
+ assert d["foo"] == 10
+ assert d["bar"] == 2
+ assert d["foobar"] == 3
-@pytest.mark.usefixtures('config')
+@pytest.mark.usefixtures("config")
class TestTengineEnvironment(object):
-
def test_template_retrieval(self):
"""Tests the template retrieval mechanism hooked into config files"""
# Check the directories are correct
- template_dirs = spack.config.get('config:template_dirs')
+ template_dirs = spack.config.get("config:template_dirs")
template_dirs = [canonicalize_path(x) for x in template_dirs]
assert len(template_dirs) == 3
@@ -80,12 +78,12 @@ class TestTengineEnvironment(object):
# Retrieve a.txt, which resides in the second
# template directory specified in the mock configuration
- template = env.get_template('a.txt')
- text = template.render({'word': 'world'})
- assert 'Hello world!' == text
+ template = env.get_template("a.txt")
+ text = template.render({"word": "world"})
+ assert "Hello world!" == text
# Retrieve b.txt, which resides in the third
# template directory specified in the mock configuration
- template = env.get_template('b.txt')
- text = template.render({'word': 'world'})
- assert 'Howdy world!' == text
+ template = env.get_template("b.txt")
+ text = template.render({"word": "world"})
+ assert "Howdy world!" == text
diff --git a/lib/spack/spack/test/test_activations.py b/lib/spack/spack/test/test_activations.py
index a4439f9596..a3addccea7 100644
--- a/lib/spack/spack/test/test_activations.py
+++ b/lib/spack/spack/test/test_activations.py
@@ -20,9 +20,11 @@ from spack.directory_layout import DirectoryLayout
from spack.filesystem_view import YamlFilesystemView
from spack.repo import RepoPath
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="Python activation not \
- currently supported on Windows")
+pytestmark = pytest.mark.skipif(
+ sys.platform == "win32",
+ reason="Python activation not \
+ currently supported on Windows",
+)
def create_ext_pkg(name, prefix, extendee_spec, monkeypatch):
@@ -38,8 +40,7 @@ def create_ext_pkg(name, prefix, extendee_spec, monkeypatch):
return ext_pkg
-def create_python_ext_pkg(name, prefix, python_spec, monkeypatch,
- namespace=None):
+def create_python_ext_pkg(name, prefix, python_spec, monkeypatch, namespace=None):
ext_pkg = create_ext_pkg(name, prefix, python_spec, monkeypatch)
ext_pkg.py_namespace = namespace
return ext_pkg
@@ -47,7 +48,7 @@ def create_python_ext_pkg(name, prefix, python_spec, monkeypatch,
def create_dir_structure(tmpdir, dir_structure):
for fname, children in dir_structure.items():
- tmpdir.ensure(fname, dir=fname.endswith('/'))
+ tmpdir.ensure(fname, dir=fname.endswith("/"))
if children:
create_dir_structure(tmpdir.join(fname), children)
@@ -67,48 +68,31 @@ def builtin_and_mock_packages():
@pytest.fixture()
def python_and_extension_dirs(tmpdir, builtin_and_mock_packages):
- python_dirs = {
- 'bin/': {
- 'python': None
- },
- 'lib/': {
- 'python2.7/': {
- 'site-packages/': None
- }
- }
- }
+ python_dirs = {"bin/": {"python": None}, "lib/": {"python2.7/": {"site-packages/": None}}}
- python_name = 'python'
+ python_name = "python"
python_prefix = tmpdir.join(python_name)
create_dir_structure(python_prefix, python_dirs)
- python_spec = spack.spec.Spec('python@2.7.12')
+ python_spec = spack.spec.Spec("python@2.7.12")
python_spec._concrete = True
python_spec.package.spec.prefix = str(python_prefix)
ext_dirs = {
- 'bin/': {
- 'py-ext-tool': None
- },
- 'lib/': {
- 'python2.7/': {
- 'site-packages/': {
- 'py-extension1/': {
- 'sample.py': None
- }
- }
- }
- }
+ "bin/": {"py-ext-tool": None},
+ "lib/": {"python2.7/": {"site-packages/": {"py-extension1/": {"sample.py": None}}}},
}
- ext_name = 'py-extension1'
+ ext_name = "py-extension1"
ext_prefix = tmpdir.join(ext_name)
create_dir_structure(ext_prefix, ext_dirs)
- easy_install_location = 'lib/python2.7/site-packages/easy-install.pth'
- with open(str(ext_prefix.join(easy_install_location)), 'w') as f:
- f.write("""path/to/ext1.egg
-path/to/setuptools.egg""")
+ easy_install_location = "lib/python2.7/site-packages/easy-install.pth"
+ with open(str(ext_prefix.join(easy_install_location)), "w") as f:
+ f.write(
+ """path/to/ext1.egg
+path/to/setuptools.egg"""
+ )
return str(python_prefix), str(ext_prefix)
@@ -116,111 +100,104 @@ path/to/setuptools.egg""")
@pytest.fixture()
def namespace_extensions(tmpdir, builtin_and_mock_packages):
ext1_dirs = {
- 'bin/': {
- 'py-ext-tool1': None
- },
- 'lib/': {
- 'python2.7/': {
- 'site-packages/': {
- 'examplenamespace/': {
- '__init__.py': None,
- 'ext1_sample.py': None
- }
+ "bin/": {"py-ext-tool1": None},
+ "lib/": {
+ "python2.7/": {
+ "site-packages/": {
+ "examplenamespace/": {"__init__.py": None, "ext1_sample.py": None}
}
}
- }
+ },
}
ext2_dirs = {
- 'bin/': {
- 'py-ext-tool2': None
- },
- 'lib/': {
- 'python2.7/': {
- 'site-packages/': {
- 'examplenamespace/': {
- '__init__.py': None,
- 'ext2_sample.py': None
- }
+ "bin/": {"py-ext-tool2": None},
+ "lib/": {
+ "python2.7/": {
+ "site-packages/": {
+ "examplenamespace/": {"__init__.py": None, "ext2_sample.py": None}
}
}
- }
+ },
}
- ext1_name = 'py-extension1'
+ ext1_name = "py-extension1"
ext1_prefix = tmpdir.join(ext1_name)
create_dir_structure(ext1_prefix, ext1_dirs)
- ext2_name = 'py-extension2'
+ ext2_name = "py-extension2"
ext2_prefix = tmpdir.join(ext2_name)
create_dir_structure(ext2_prefix, ext2_dirs)
- return str(ext1_prefix), str(ext2_prefix), 'examplenamespace'
+ return str(ext1_prefix), str(ext2_prefix), "examplenamespace"
-def test_python_activation_with_files(tmpdir, python_and_extension_dirs,
- monkeypatch, builtin_and_mock_packages):
+def test_python_activation_with_files(
+ tmpdir, python_and_extension_dirs, monkeypatch, builtin_and_mock_packages
+):
python_prefix, ext_prefix = python_and_extension_dirs
- python_spec = spack.spec.Spec('python@2.7.12')
+ python_spec = spack.spec.Spec("python@2.7.12")
python_spec._concrete = True
python_spec.package.spec.prefix = python_prefix
- ext_pkg = create_python_ext_pkg(
- 'py-extension1', ext_prefix, python_spec, monkeypatch)
+ ext_pkg = create_python_ext_pkg("py-extension1", ext_prefix, python_spec, monkeypatch)
python_pkg = python_spec.package
python_pkg.activate(ext_pkg, python_pkg.view())
- assert os.path.exists(os.path.join(python_prefix, 'bin/py-ext-tool'))
+ assert os.path.exists(os.path.join(python_prefix, "bin/py-ext-tool"))
- easy_install_location = 'lib/python2.7/site-packages/easy-install.pth'
- with open(os.path.join(python_prefix, easy_install_location), 'r') as f:
+ easy_install_location = "lib/python2.7/site-packages/easy-install.pth"
+ with open(os.path.join(python_prefix, easy_install_location), "r") as f:
easy_install_contents = f.read()
- assert 'ext1.egg' in easy_install_contents
- assert 'setuptools.egg' not in easy_install_contents
+ assert "ext1.egg" in easy_install_contents
+ assert "setuptools.egg" not in easy_install_contents
-def test_python_activation_view(tmpdir, python_and_extension_dirs,
- builtin_and_mock_packages, monkeypatch):
+def test_python_activation_view(
+ tmpdir, python_and_extension_dirs, builtin_and_mock_packages, monkeypatch
+):
python_prefix, ext_prefix = python_and_extension_dirs
- python_spec = spack.spec.Spec('python@2.7.12')
+ python_spec = spack.spec.Spec("python@2.7.12")
python_spec._concrete = True
python_spec.package.spec.prefix = python_prefix
- ext_pkg = create_python_ext_pkg('py-extension1', ext_prefix, python_spec,
- monkeypatch)
+ ext_pkg = create_python_ext_pkg("py-extension1", ext_prefix, python_spec, monkeypatch)
- view_dir = str(tmpdir.join('view'))
+ view_dir = str(tmpdir.join("view"))
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)
python_pkg = python_spec.package
python_pkg.activate(ext_pkg, view)
- assert not os.path.exists(os.path.join(python_prefix, 'bin/py-ext-tool'))
+ assert not os.path.exists(os.path.join(python_prefix, "bin/py-ext-tool"))
- assert os.path.exists(os.path.join(view_dir, 'bin/py-ext-tool'))
+ assert os.path.exists(os.path.join(view_dir, "bin/py-ext-tool"))
def test_python_ignore_namespace_init_conflict(
- tmpdir, namespace_extensions, builtin_and_mock_packages, monkeypatch):
+ tmpdir, namespace_extensions, builtin_and_mock_packages, monkeypatch
+):
"""Test the view update logic in PythonPackage ignores conflicting
- instances of __init__ for packages which are in the same namespace.
+ instances of __init__ for packages which are in the same namespace.
"""
ext1_prefix, ext2_prefix, py_namespace = namespace_extensions
- python_spec = spack.spec.Spec('python@2.7.12')
+ python_spec = spack.spec.Spec("python@2.7.12")
python_spec._concrete = True
- ext1_pkg = create_python_ext_pkg('py-extension1', ext1_prefix, python_spec,
- monkeypatch, py_namespace)
- ext2_pkg = create_python_ext_pkg('py-extension2', ext2_prefix, python_spec,
- monkeypatch, py_namespace)
+ ext1_pkg = create_python_ext_pkg(
+ "py-extension1", ext1_prefix, python_spec, monkeypatch, py_namespace
+ )
+ ext2_pkg = create_python_ext_pkg(
+ "py-extension2", ext2_prefix, python_spec, monkeypatch, py_namespace
+ )
- view_dir = str(tmpdir.join('view'))
+ view_dir = str(tmpdir.join("view"))
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)
@@ -230,9 +207,9 @@ def test_python_ignore_namespace_init_conflict(
view.extensions_layout.add_extension(python_spec, ext1_pkg.spec)
python_pkg.activate(ext2_pkg, view)
- f1 = 'lib/python2.7/site-packages/examplenamespace/ext1_sample.py'
- f2 = 'lib/python2.7/site-packages/examplenamespace/ext2_sample.py'
- init_file = 'lib/python2.7/site-packages/examplenamespace/__init__.py'
+ f1 = "lib/python2.7/site-packages/examplenamespace/ext1_sample.py"
+ f2 = "lib/python2.7/site-packages/examplenamespace/ext2_sample.py"
+ init_file = "lib/python2.7/site-packages/examplenamespace/__init__.py"
assert os.path.exists(os.path.join(view_dir, f1))
assert os.path.exists(os.path.join(view_dir, f2))
@@ -240,22 +217,25 @@ def test_python_ignore_namespace_init_conflict(
def test_python_keep_namespace_init(
- tmpdir, namespace_extensions, builtin_and_mock_packages, monkeypatch):
+ tmpdir, namespace_extensions, builtin_and_mock_packages, monkeypatch
+):
"""Test the view update logic in PythonPackage keeps the namespace
- __init__ file as long as one package in the namespace still
- exists.
+ __init__ file as long as one package in the namespace still
+ exists.
"""
ext1_prefix, ext2_prefix, py_namespace = namespace_extensions
- python_spec = spack.spec.Spec('python@2.7.12')
+ python_spec = spack.spec.Spec("python@2.7.12")
python_spec._concrete = True
- ext1_pkg = create_python_ext_pkg('py-extension1', ext1_prefix, python_spec,
- monkeypatch, py_namespace)
- ext2_pkg = create_python_ext_pkg('py-extension2', ext2_prefix, python_spec,
- monkeypatch, py_namespace)
+ ext1_pkg = create_python_ext_pkg(
+ "py-extension1", ext1_prefix, python_spec, monkeypatch, py_namespace
+ )
+ ext2_pkg = create_python_ext_pkg(
+ "py-extension2", ext2_prefix, python_spec, monkeypatch, py_namespace
+ )
- view_dir = str(tmpdir.join('view'))
+ view_dir = str(tmpdir.join("view"))
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)
@@ -266,8 +246,8 @@ def test_python_keep_namespace_init(
python_pkg.activate(ext2_pkg, view)
view.extensions_layout.add_extension(python_spec, ext2_pkg.spec)
- f1 = 'lib/python2.7/site-packages/examplenamespace/ext1_sample.py'
- init_file = 'lib/python2.7/site-packages/examplenamespace/__init__.py'
+ f1 = "lib/python2.7/site-packages/examplenamespace/ext1_sample.py"
+ init_file = "lib/python2.7/site-packages/examplenamespace/__init__.py"
python_pkg.deactivate(ext1_pkg, view)
view.extensions_layout.remove_extension(python_spec, ext1_pkg.spec)
@@ -281,24 +261,27 @@ def test_python_keep_namespace_init(
assert not os.path.exists(os.path.join(view_dir, init_file))
-def test_python_namespace_conflict(tmpdir, namespace_extensions,
- monkeypatch, builtin_and_mock_packages):
+def test_python_namespace_conflict(
+ tmpdir, namespace_extensions, monkeypatch, builtin_and_mock_packages
+):
"""Test the view update logic in PythonPackage reports an error when two
- python extensions with different namespaces have a conflicting __init__
- file.
+ python extensions with different namespaces have a conflicting __init__
+ file.
"""
ext1_prefix, ext2_prefix, py_namespace = namespace_extensions
- other_namespace = py_namespace + 'other'
+ other_namespace = py_namespace + "other"
- python_spec = spack.spec.Spec('python@2.7.12')
+ python_spec = spack.spec.Spec("python@2.7.12")
python_spec._concrete = True
- ext1_pkg = create_python_ext_pkg('py-extension1', ext1_prefix, python_spec,
- monkeypatch, py_namespace)
- ext2_pkg = create_python_ext_pkg('py-extension2', ext2_prefix, python_spec,
- monkeypatch, other_namespace)
+ ext1_pkg = create_python_ext_pkg(
+ "py-extension1", ext1_prefix, python_spec, monkeypatch, py_namespace
+ )
+ ext2_pkg = create_python_ext_pkg(
+ "py-extension2", ext2_prefix, python_spec, monkeypatch, other_namespace
+ )
- view_dir = str(tmpdir.join('view'))
+ view_dir = str(tmpdir.join("view"))
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)
@@ -312,43 +295,24 @@ def test_python_namespace_conflict(tmpdir, namespace_extensions,
@pytest.fixture()
def perl_and_extension_dirs(tmpdir, builtin_and_mock_packages):
perl_dirs = {
- 'bin/': {
- 'perl': None
- },
- 'lib/': {
- 'site_perl/': {
- '5.24.1/': {
- 'x86_64-linux/': None
- }
- }
- }
+ "bin/": {"perl": None},
+ "lib/": {"site_perl/": {"5.24.1/": {"x86_64-linux/": None}}},
}
- perl_name = 'perl'
+ perl_name = "perl"
perl_prefix = tmpdir.join(perl_name)
create_dir_structure(perl_prefix, perl_dirs)
- perl_spec = spack.spec.Spec('perl@5.24.1')
+ perl_spec = spack.spec.Spec("perl@5.24.1")
perl_spec._concrete = True
perl_spec.package.spec.prefix = str(perl_prefix)
ext_dirs = {
- 'bin/': {
- 'perl-ext-tool': None
- },
- 'lib/': {
- 'site_perl/': {
- '5.24.1/': {
- 'x86_64-linux/': {
- 'TestExt/': {
- }
- }
- }
- }
- }
+ "bin/": {"perl-ext-tool": None},
+ "lib/": {"site_perl/": {"5.24.1/": {"x86_64-linux/": {"TestExt/": {}}}}},
}
- ext_name = 'perl-extension'
+ ext_name = "perl-extension"
ext_prefix = tmpdir.join(ext_name)
create_dir_structure(ext_prefix, ext_dirs)
@@ -357,10 +321,10 @@ def perl_and_extension_dirs(tmpdir, builtin_and_mock_packages):
def test_perl_activation(tmpdir, builtin_and_mock_packages, monkeypatch):
# Note the lib directory is based partly on the perl version
- perl_spec = spack.spec.Spec('perl@5.24.1')
+ perl_spec = spack.spec.Spec("perl@5.24.1")
perl_spec._concrete = True
- perl_name = 'perl'
+ perl_name = "perl"
tmpdir.ensure(perl_name, dir=True)
perl_prefix = str(tmpdir.join(perl_name))
@@ -368,76 +332,73 @@ def test_perl_activation(tmpdir, builtin_and_mock_packages, monkeypatch):
# the original spec
perl_spec.package.spec.prefix = perl_prefix
- ext_name = 'perl-extension'
+ ext_name = "perl-extension"
tmpdir.ensure(ext_name, dir=True)
- ext_pkg = create_ext_pkg(
- ext_name, str(tmpdir.join(ext_name)), perl_spec, monkeypatch)
+ ext_pkg = create_ext_pkg(ext_name, str(tmpdir.join(ext_name)), perl_spec, monkeypatch)
perl_pkg = perl_spec.package
perl_pkg.activate(ext_pkg, perl_pkg.view())
-def test_perl_activation_with_files(tmpdir, perl_and_extension_dirs,
- monkeypatch, builtin_and_mock_packages):
+def test_perl_activation_with_files(
+ tmpdir, perl_and_extension_dirs, monkeypatch, builtin_and_mock_packages
+):
perl_prefix, ext_prefix = perl_and_extension_dirs
- perl_spec = spack.spec.Spec('perl@5.24.1')
+ perl_spec = spack.spec.Spec("perl@5.24.1")
perl_spec._concrete = True
perl_spec.package.spec.prefix = perl_prefix
- ext_pkg = create_ext_pkg(
- 'perl-extension', ext_prefix, perl_spec, monkeypatch)
+ ext_pkg = create_ext_pkg("perl-extension", ext_prefix, perl_spec, monkeypatch)
perl_pkg = perl_spec.package
perl_pkg.activate(ext_pkg, perl_pkg.view())
- assert os.path.exists(os.path.join(perl_prefix, 'bin/perl-ext-tool'))
+ assert os.path.exists(os.path.join(perl_prefix, "bin/perl-ext-tool"))
-def test_perl_activation_view(tmpdir, perl_and_extension_dirs,
- monkeypatch, builtin_and_mock_packages):
+def test_perl_activation_view(
+ tmpdir, perl_and_extension_dirs, monkeypatch, builtin_and_mock_packages
+):
perl_prefix, ext_prefix = perl_and_extension_dirs
- perl_spec = spack.spec.Spec('perl@5.24.1')
+ perl_spec = spack.spec.Spec("perl@5.24.1")
perl_spec._concrete = True
perl_spec.package.spec.prefix = perl_prefix
- ext_pkg = create_ext_pkg(
- 'perl-extension', ext_prefix, perl_spec, monkeypatch)
+ ext_pkg = create_ext_pkg("perl-extension", ext_prefix, perl_spec, monkeypatch)
- view_dir = str(tmpdir.join('view'))
+ view_dir = str(tmpdir.join("view"))
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)
perl_pkg = perl_spec.package
perl_pkg.activate(ext_pkg, view)
- assert not os.path.exists(os.path.join(perl_prefix, 'bin/perl-ext-tool'))
+ assert not os.path.exists(os.path.join(perl_prefix, "bin/perl-ext-tool"))
- assert os.path.exists(os.path.join(view_dir, 'bin/perl-ext-tool'))
+ assert os.path.exists(os.path.join(view_dir, "bin/perl-ext-tool"))
-def test_is_activated_upstream_extendee(tmpdir, builtin_and_mock_packages,
- monkeypatch):
+def test_is_activated_upstream_extendee(tmpdir, builtin_and_mock_packages, monkeypatch):
"""When an extendee is installed upstream, make sure that the extension
spec is never considered to be globally activated for it.
"""
- extendee_spec = spack.spec.Spec('python')
+ extendee_spec = spack.spec.Spec("python")
extendee_spec._concrete = True
- python_name = 'python'
+ python_name = "python"
tmpdir.ensure(python_name, dir=True)
python_prefix = str(tmpdir.join(python_name))
# Set the prefix on the package's spec reference because that is a copy of
# the original spec
extendee_spec.package.spec.prefix = python_prefix
- monkeypatch.setattr(extendee_spec.__class__, 'installed_upstream', True)
+ monkeypatch.setattr(extendee_spec.__class__, "installed_upstream", True)
- ext_name = 'py-extension1'
+ ext_name = "py-extension1"
tmpdir.ensure(ext_name, dir=True)
- ext_pkg = create_ext_pkg(
- ext_name, str(tmpdir.join(ext_name)), extendee_spec, monkeypatch)
+ ext_pkg = create_ext_pkg(ext_name, str(tmpdir.join(ext_name)), extendee_spec, monkeypatch)
# The view should not be checked at all if the extendee is installed
# upstream, so use 'None' here
diff --git a/lib/spack/spack/test/test_suite.py b/lib/spack/spack/test/test_suite.py
index 20d7172eea..0f3fe97f90 100644
--- a/lib/spack/spack/test/test_suite.py
+++ b/lib/spack/spack/test/test_suite.py
@@ -12,15 +12,14 @@ import llnl.util.filesystem as fs
import spack.install_test
import spack.spec
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="Tests fail on Windows")
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Windows")
def test_test_log_pathname(mock_packages, config):
"""Ensure test log path is reasonable."""
- spec = spack.spec.Spec('libdwarf').concretized()
+ spec = spack.spec.Spec("libdwarf").concretized()
- test_name = 'test_name'
+ test_name = "test_name"
test_suite = spack.install_test.TestSuite([spec], test_name)
logfile = test_suite.log_file_for_spec(spec)
@@ -31,9 +30,9 @@ def test_test_log_pathname(mock_packages, config):
def test_test_ensure_stage(mock_test_stage):
"""Make sure test stage directory is properly set up."""
- spec = spack.spec.Spec('libdwarf').concretized()
+ spec = spack.spec.Spec("libdwarf").concretized()
- test_name = 'test_name'
+ test_name = "test_name"
test_suite = spack.install_test.TestSuite([spec], test_name)
test_suite.ensure_stage()
@@ -44,16 +43,16 @@ def test_test_ensure_stage(mock_test_stage):
def test_write_test_result(mock_packages, mock_test_stage):
"""Ensure test results written to a results file."""
- spec = spack.spec.Spec('libdwarf').concretized()
- result = 'TEST'
- test_name = 'write-test'
+ spec = spack.spec.Spec("libdwarf").concretized()
+ result = "TEST"
+ test_name = "write-test"
test_suite = spack.install_test.TestSuite([spec], test_name)
test_suite.ensure_stage()
results_file = test_suite.results_file
test_suite.write_test_result(spec, result)
- with open(results_file, 'r') as f:
+ with open(results_file, "r") as f:
lines = f.readlines()
assert len(lines) == 1
@@ -64,9 +63,9 @@ def test_write_test_result(mock_packages, mock_test_stage):
def test_do_test(mock_packages, install_mockery, mock_test_stage):
"""Perform a stand-alone test with files to copy."""
- spec = spack.spec.Spec('trivial-smoke-test').concretized()
- test_name = 'test_do_test'
- test_filename = 'test_file.in'
+ spec = spack.spec.Spec("trivial-smoke-test").concretized()
+ test_name = "test_do_test"
+ test_filename = "test_file.in"
pkg = spec.package
pkg.create_extra_test_source()
@@ -78,10 +77,8 @@ def test_do_test(mock_packages, install_mockery, mock_test_stage):
# Save off target paths for current spec since test suite processing
# assumes testing multiple specs.
- cached_filename = fs.join_path(test_suite.current_test_cache_dir,
- pkg.test_source_filename)
- data_filename = fs.join_path(test_suite.current_test_data_dir,
- test_filename)
+ cached_filename = fs.join_path(test_suite.current_test_cache_dir, pkg.test_source_filename)
+ data_filename = fs.join_path(test_suite.current_test_data_dir, test_filename)
# Run the test, making sure to retain the test stage directory
# so we can ensure the files were copied.
@@ -91,15 +88,17 @@ def test_do_test(mock_packages, install_mockery, mock_test_stage):
assert os.path.exists(data_filename)
-@pytest.mark.parametrize('arguments,status,msg', [
- ({}, 'SKIPPED', 'Skipped'),
- ({'externals': True}, 'NO-TESTS', 'No tests'),
-])
-def test_test_external(mock_packages, install_mockery, mock_test_stage,
- arguments, status, msg):
+@pytest.mark.parametrize(
+ "arguments,status,msg",
+ [
+ ({}, "SKIPPED", "Skipped"),
+ ({"externals": True}, "NO-TESTS", "No tests"),
+ ],
+)
+def test_test_external(mock_packages, install_mockery, mock_test_stage, arguments, status, msg):
def ensure_results(filename, expected):
assert os.path.exists(filename)
- with open(filename, 'r') as fd:
+ with open(filename, "r") as fd:
lines = fd.readlines()
have = False
for line in lines:
@@ -108,9 +107,9 @@ def test_test_external(mock_packages, install_mockery, mock_test_stage,
break
assert have
- name = 'trivial-smoke-test'
+ name = "trivial-smoke-test"
spec = spack.spec.Spec(name).concretized()
- spec.external_path = '/path/to/external/{0}'.format(name)
+ spec.external_path = "/path/to/external/{0}".format(name)
test_suite = spack.install_test.TestSuite([spec])
test_suite(**arguments)
@@ -127,8 +126,8 @@ def test_test_stage_caches(mock_packages, install_mockery, mock_test_stage):
with pytest.raises(spack.install_test.TestSuiteSpecError):
_ = test_suite.current_test_data_dir
- spec = spack.spec.Spec('libelf').concretized()
- test_suite = spack.install_test.TestSuite([spec], 'test-cache')
+ spec = spack.spec.Spec("libelf").concretized()
+ test_suite = spack.install_test.TestSuite([spec], "test-cache")
# Check no current specs yield failure
ensure_current_cache_fail(test_suite)
@@ -145,8 +144,8 @@ def test_test_stage_caches(mock_packages, install_mockery, mock_test_stage):
def test_test_spec_run_once(mock_packages, install_mockery, mock_test_stage):
- spec = spack.spec.Spec('libelf').concretized()
- test_suite = spack.install_test.TestSuite([spec], 'test-dups')
+ spec = spack.spec.Spec("libelf").concretized()
+ test_suite = spack.install_test.TestSuite([spec], "test-dups")
(test_suite.specs[0]).package.test_suite = test_suite
with pytest.raises(spack.install_test.TestSuiteFailure):
@@ -154,16 +153,16 @@ def test_test_spec_run_once(mock_packages, install_mockery, mock_test_stage):
def test_test_spec_verbose(mock_packages, install_mockery, mock_test_stage):
- spec = spack.spec.Spec('simple-standalone-test').concretized()
+ spec = spack.spec.Spec("simple-standalone-test").concretized()
test_suite = spack.install_test.TestSuite([spec])
test_suite(verbose=True)
passed, msg = False, False
- with open(test_suite.log_file_for_spec(spec), 'r') as fd:
+ with open(test_suite.log_file_for_spec(spec), "r") as fd:
for line in fd:
- if 'simple stand-alone test' in line:
+ if "simple stand-alone test" in line:
msg = True
- elif 'PASSED' in line:
+ elif "PASSED" in line:
passed = True
assert msg
@@ -171,19 +170,19 @@ def test_test_spec_verbose(mock_packages, install_mockery, mock_test_stage):
def test_get_test_suite():
- assert not spack.install_test.get_test_suite('nothing')
+ assert not spack.install_test.get_test_suite("nothing")
def test_get_test_suite_no_name(mock_packages, mock_test_stage):
with pytest.raises(spack.install_test.TestSuiteNameError) as exc_info:
- spack.install_test.get_test_suite('')
+ spack.install_test.get_test_suite("")
- assert 'name is required' in str(exc_info)
+ assert "name is required" in str(exc_info)
def test_get_test_suite_too_many(mock_packages, mock_test_stage):
test_suites = []
- name = 'duplicate-alias'
+ name = "duplicate-alias"
def add_suite(package):
spec = spack.spec.Spec(package).concretized()
@@ -192,11 +191,11 @@ def test_get_test_suite_too_many(mock_packages, mock_test_stage):
spack.install_test.write_test_suite_file(suite)
test_suites.append(suite)
- add_suite('libdwarf')
+ add_suite("libdwarf")
suite = spack.install_test.get_test_suite(name)
assert suite.alias == name
- add_suite('libelf')
+ add_suite("libelf")
with pytest.raises(spack.install_test.TestSuiteNameError) as exc_info:
spack.install_test.get_test_suite(name)
- assert 'many suites named' in str(exc_info)
+ assert "many suites named" in str(exc_info)
diff --git a/lib/spack/spack/test/url_fetch.py b/lib/spack/spack/test/url_fetch.py
index 9d66afb814..2703700233 100644
--- a/lib/spack/spack/test/url_fetch.py
+++ b/lib/spack/spack/test/url_fetch.py
@@ -31,7 +31,8 @@ def checksum_type(request):
@pytest.fixture
def pkg_factory():
Pkg = collections.namedtuple(
- "Pkg", [
+ "Pkg",
+ [
"url_for_version",
"all_urls_for_version",
"find_valid_url_for_version",
@@ -39,11 +40,10 @@ def pkg_factory():
"url",
"versions",
"fetch_options",
- ]
+ ],
)
def factory(url, urls, fetch_options={}):
-
def fn(v):
main_url = url or urls[0]
return spack.url.substitute_version(main_url, v)
@@ -59,28 +59,28 @@ def pkg_factory():
url=url,
urls=(urls,),
versions=collections.defaultdict(dict),
- fetch_options=fetch_options
+ fetch_options=fetch_options,
)
return factory
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_urlfetchstrategy_sans_url(_fetch_method):
"""Ensure constructor with no URL fails."""
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(ValueError):
with fs.URLFetchStrategy(None):
pass
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_urlfetchstrategy_bad_url(tmpdir, _fetch_method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
with pytest.raises(fs.FailedDownloadError):
- fetcher = fs.URLFetchStrategy(url='file:///does-not-exist')
+ fetcher = fs.URLFetchStrategy(url="file:///does-not-exist")
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
@@ -91,10 +91,10 @@ def test_urlfetchstrategy_bad_url(tmpdir, _fetch_method):
def test_fetch_options(tmpdir, mock_archive):
testpath = str(tmpdir)
- with spack.config.override('config:url_fetch_method', 'curl'):
- fetcher = fs.URLFetchStrategy(url=mock_archive.url,
- fetch_options={'cookie': 'True',
- 'timeout': 10})
+ with spack.config.override("config:url_fetch_method", "curl"):
+ fetcher = fs.URLFetchStrategy(
+ url=mock_archive.url, fetch_options={"cookie": "True", "timeout": 10}
+ )
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
@@ -103,11 +103,11 @@ def test_fetch_options(tmpdir, mock_archive):
fetcher.fetch()
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_archive_file_errors(tmpdir, mock_archive, _fetch_method):
"""Ensure FetchStrategy commands may only be used as intended"""
testpath = str(tmpdir)
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
fetcher = fs.URLFetchStrategy(url=mock_archive.url)
assert fetcher is not None
with pytest.raises(fs.FailedDownloadError):
@@ -124,117 +124,101 @@ def test_archive_file_errors(tmpdir, mock_archive, _fetch_method):
with pytest.raises(fs.NoDigestError):
fetcher.check()
assert fetcher.archive_file is not None
- fetcher._fetch_from_url('file:///does-not-exist')
+ fetcher._fetch_from_url("file:///does-not-exist")
-files = [('.tar.gz', 'z'), ('.tgz', 'z')]
+files = [(".tar.gz", "z"), (".tgz", "z")]
if sys.platform != "win32":
- files += [('.tar.bz2', 'j'), ('.tbz2', 'j'),
- ('.tar.xz', 'J'), ('.txz', 'J')]
-
-
-@pytest.mark.parametrize('secure', [True, False])
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
-@pytest.mark.parametrize('mock_archive',
- files,
- indirect=True)
-def test_fetch(
- mock_archive,
- secure,
- _fetch_method,
- checksum_type,
- config,
- mutable_mock_repo
-):
+ files += [(".tar.bz2", "j"), (".tbz2", "j"), (".tar.xz", "J"), (".txz", "J")]
+
+
+@pytest.mark.parametrize("secure", [True, False])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
+@pytest.mark.parametrize("mock_archive", files, indirect=True)
+def test_fetch(mock_archive, secure, _fetch_method, checksum_type, config, mutable_mock_repo):
"""Fetch an archive and make sure we can checksum it."""
mock_archive.url
mock_archive.path
algo = crypto.hash_fun_for_algo(checksum_type)()
- with open(mock_archive.archive_file, 'rb') as f:
+ with open(mock_archive.archive_file, "rb") as f:
algo.update(f.read())
checksum = algo.hexdigest()
# Get a spec and tweak the test package with new checksum params
- s = Spec('url-test').concretized()
+ s = Spec("url-test").concretized()
s.package.url = mock_archive.url
- s.package.versions[ver('test')] = {checksum_type: checksum, 'url': s.package.url}
+ s.package.versions[ver("test")] = {checksum_type: checksum, "url": s.package.url}
# Enter the stage directory and check some properties
with s.package.stage:
- with spack.config.override('config:verify_ssl', secure):
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:verify_ssl", secure):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
s.package.do_stage()
with working_dir(s.package.stage.source_path):
- assert os.path.exists('configure')
- assert is_exe('configure')
+ assert os.path.exists("configure")
+ assert is_exe("configure")
- with open('configure') as f:
+ with open("configure") as f:
contents = f.read()
- assert contents.startswith('#!/bin/sh')
- assert 'echo Building...' in contents
+ assert contents.startswith("#!/bin/sh")
+ assert "echo Building..." in contents
# TODO-27021
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-@pytest.mark.parametrize('spec,url,digest', [
- ('url-list-test @0.0.0', 'foo-0.0.0.tar.gz', '00000000000000000000000000000000'),
- ('url-list-test @1.0.0', 'foo-1.0.0.tar.gz', '00000000000000000000000000000100'),
- ('url-list-test @3.0', 'foo-3.0.tar.gz', '00000000000000000000000000000030'),
- ('url-list-test @4.5', 'foo-4.5.tar.gz', '00000000000000000000000000000450'),
- (
- 'url-list-test @2.0.0b2',
- 'foo-2.0.0b2.tar.gz',
- '000000000000000000000000000200b2'
- ),
- ('url-list-test @3.0a1', 'foo-3.0a1.tar.gz', '000000000000000000000000000030a1'),
- (
- 'url-list-test @4.5-rc5',
- 'foo-4.5-rc5.tar.gz',
- '000000000000000000000000000045c5'
- ),
-])
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+@pytest.mark.parametrize(
+ "spec,url,digest",
+ [
+ ("url-list-test @0.0.0", "foo-0.0.0.tar.gz", "00000000000000000000000000000000"),
+ ("url-list-test @1.0.0", "foo-1.0.0.tar.gz", "00000000000000000000000000000100"),
+ ("url-list-test @3.0", "foo-3.0.tar.gz", "00000000000000000000000000000030"),
+ ("url-list-test @4.5", "foo-4.5.tar.gz", "00000000000000000000000000000450"),
+ ("url-list-test @2.0.0b2", "foo-2.0.0b2.tar.gz", "000000000000000000000000000200b2"),
+ ("url-list-test @3.0a1", "foo-3.0a1.tar.gz", "000000000000000000000000000030a1"),
+ ("url-list-test @4.5-rc5", "foo-4.5-rc5.tar.gz", "000000000000000000000000000045c5"),
+ ],
+)
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_from_list_url(mock_packages, config, spec, url, digest, _fetch_method):
"""
Test URLs in the url-list-test package, which means they should
have checksums in the package.
"""
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
s = Spec(spec).concretized()
fetch_strategy = fs.from_list_url(s.package)
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == url
assert fetch_strategy.digest == digest
assert fetch_strategy.extra_options == {}
- s.package.fetch_options = {'timeout': 60}
+ s.package.fetch_options = {"timeout": 60}
fetch_strategy = fs.from_list_url(s.package)
- assert fetch_strategy.extra_options == {'timeout': 60}
+ assert fetch_strategy.extra_options == {"timeout": 60}
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
-@pytest.mark.parametrize("requested_version,tarball,digest", [
- # This version is in the web data path (test/data/web/4.html), but not in the
- # url-list-test package. We expect Spack to generate a URL with the new version.
- ("4.5.0", "foo-4.5.0.tar.gz", None),
- # This version is in web data path and not in the package file, BUT the 2.0.0b2
- # version in the package file satisfies 2.0.0, so Spack will use the known version.
- # TODO: this is *probably* not what the user wants, but it's here as an example
- # TODO: for that reason. We can't express "exactly 2.0.0" right now, and we don't
- # TODO: have special cases that would make 2.0.0b2 less than 2.0.0. We should
- # TODO: probably revisit this in our versioning scheme.
- ("2.0.0", "foo-2.0.0b2.tar.gz", "000000000000000000000000000200b2"),
-])
+@pytest.mark.parametrize(
+ "requested_version,tarball,digest",
+ [
+ # This version is in the web data path (test/data/web/4.html), but not in the
+ # url-list-test package. We expect Spack to generate a URL with the new version.
+ ("4.5.0", "foo-4.5.0.tar.gz", None),
+ # This version is in web data path and not in the package file, BUT the 2.0.0b2
+ # version in the package file satisfies 2.0.0, so Spack will use the known version.
+ # TODO: this is *probably* not what the user wants, but it's here as an example
+ # TODO: for that reason. We can't express "exactly 2.0.0" right now, and we don't
+ # TODO: have special cases that would make 2.0.0b2 less than 2.0.0. We should
+ # TODO: probably revisit this in our versioning scheme.
+ ("2.0.0", "foo-2.0.0b2.tar.gz", "000000000000000000000000000200b2"),
+ ],
+)
def test_new_version_from_list_url(
- mock_packages, config, _fetch_method, requested_version, tarball, digest
+ mock_packages, config, _fetch_method, requested_version, tarball, digest
):
- if spack.config.get('config:concretizer') == 'original':
- pytest.skip(
- "Original concretizer doesn't resolve concrete versions to known ones"
- )
+ if spack.config.get("config:concretizer") == "original":
+ pytest.skip("Original concretizer doesn't resolve concrete versions to known ones")
"""Test non-specific URLs from the url-list-test package."""
with spack.config.override("config:url_fetch_method", _fetch_method):
@@ -252,48 +236,48 @@ def test_new_version_from_list_url(
def test_nosource_from_list_url(mock_packages, config):
"""This test confirms BundlePackages do not have list url."""
- s = Spec('nosource').concretized()
+ s = Spec("nosource").concretized()
fetch_strategy = fs.from_list_url(s.package)
assert fetch_strategy is None
def test_hash_detection(checksum_type):
algo = crypto.hash_fun_for_algo(checksum_type)()
- h = 'f' * (algo.digest_size * 2) # hex -> bytes
+ h = "f" * (algo.digest_size * 2) # hex -> bytes
checker = crypto.Checker(h)
assert checker.hash_name == checksum_type
def test_unknown_hash(checksum_type):
with pytest.raises(ValueError):
- crypto.Checker('a')
+ crypto.Checker("a")
-@pytest.mark.skipif(which('curl') is None,
- reason='Urllib does not have built-in status bar')
+@pytest.mark.skipif(which("curl") is None, reason="Urllib does not have built-in status bar")
def test_url_with_status_bar(tmpdir, mock_archive, monkeypatch, capfd):
"""Ensure fetch with status bar option succeeds."""
+
def is_true():
return True
testpath = str(tmpdir)
- monkeypatch.setattr(sys.stdout, 'isatty', is_true)
- monkeypatch.setattr(tty, 'msg_enabled', is_true)
- with spack.config.override('config:url_fetch_method', 'curl'):
+ monkeypatch.setattr(sys.stdout, "isatty", is_true)
+ monkeypatch.setattr(tty, "msg_enabled", is_true)
+ with spack.config.override("config:url_fetch_method", "curl"):
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
assert fetcher.archive_file is None
stage.fetch()
status = capfd.readouterr()[1]
- assert '##### 100' in status
+ assert "##### 100" in status
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_url_extra_fetch(tmpdir, mock_archive, _fetch_method):
"""Ensure a fetch after downloading is effectively a no-op."""
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
testpath = str(tmpdir)
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
@@ -303,33 +287,42 @@ def test_url_extra_fetch(tmpdir, mock_archive, _fetch_method):
fetcher.fetch()
-@pytest.mark.parametrize('url,urls,version,expected', [
- (None,
- ['https://ftpmirror.gnu.org/autoconf/autoconf-2.69.tar.gz',
- 'https://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz'],
- '2.62',
- ['https://ftpmirror.gnu.org/autoconf/autoconf-2.62.tar.gz',
- 'https://ftp.gnu.org/gnu/autoconf/autoconf-2.62.tar.gz'])
-])
-@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+@pytest.mark.parametrize(
+ "url,urls,version,expected",
+ [
+ (
+ None,
+ [
+ "https://ftpmirror.gnu.org/autoconf/autoconf-2.69.tar.gz",
+ "https://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz",
+ ],
+ "2.62",
+ [
+ "https://ftpmirror.gnu.org/autoconf/autoconf-2.62.tar.gz",
+ "https://ftp.gnu.org/gnu/autoconf/autoconf-2.62.tar.gz",
+ ],
+ )
+ ],
+)
+@pytest.mark.parametrize("_fetch_method", ["curl", "urllib"])
def test_candidate_urls(pkg_factory, url, urls, version, expected, _fetch_method):
"""Tests that candidate urls include mirrors and that they go through
pattern matching and substitution for versions.
"""
- with spack.config.override('config:url_fetch_method', _fetch_method):
+ with spack.config.override("config:url_fetch_method", _fetch_method):
pkg = pkg_factory(url, urls)
f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
assert f.candidate_urls == expected
assert f.extra_options == {}
- pkg = pkg_factory(url, urls, fetch_options={'timeout': 60})
+ pkg = pkg_factory(url, urls, fetch_options={"timeout": 60})
f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
- assert f.extra_options == {'timeout': 60}
+ assert f.extra_options == {"timeout": 60}
-@pytest.mark.regression('19673')
+@pytest.mark.regression("19673")
def test_missing_curl(tmpdir, monkeypatch):
"""Ensure a fetch involving missing curl package reports the error."""
- err_fmt = 'No such command {0}'
+ err_fmt = "No such command {0}"
def _which(*args, **kwargs):
err_msg = err_fmt.format(args[0])
@@ -338,14 +331,14 @@ def test_missing_curl(tmpdir, monkeypatch):
# Patching the 'which' symbol imported by fetch_strategy works
# since it is too late in import processing to patch the defining
# (spack.util.executable) module's symbol.
- monkeypatch.setattr(fs, 'which', _which)
+ monkeypatch.setattr(fs, "which", _which)
testpath = str(tmpdir)
- url = 'http://github.com/spack/spack'
- with spack.config.override('config:url_fetch_method', 'curl'):
+ url = "http://github.com/spack/spack"
+ with spack.config.override("config:url_fetch_method", "curl"):
fetcher = fs.URLFetchStrategy(url=url)
assert fetcher is not None
- with pytest.raises(TypeError, match='object is not callable'):
+ with pytest.raises(TypeError, match="object is not callable"):
with Stage(fetcher, path=testpath) as stage:
out = stage.fetch()
- assert err_fmt.format('curl') in out
+ assert err_fmt.format("curl") in out
diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py
index 6175c5ad0f..37b7d9d869 100644
--- a/lib/spack/spack/test/url_parse.py
+++ b/lib/spack/spack/test/url_parse.py
@@ -23,182 +23,240 @@ from spack.url import (
from spack.version import Version
-@pytest.mark.parametrize('url,expected', [
- # No suffix
- ('rgb-1.0.6', 'rgb-1.0.6'),
- # Misleading prefix
- ('jpegsrc.v9b', 'jpegsrc.v9b'),
- ('turbolinux702', 'turbolinux702'),
- ('converge_install_2.3.16', 'converge_install_2.3.16'),
- # Download type - code, source
- ('cistem-1.0.0-beta-source-code', 'cistem-1.0.0-beta'),
- # Download type - src
- ('apache-ant-1.9.7-src', 'apache-ant-1.9.7'),
- ('go1.7.4.src', 'go1.7.4'),
- # Download type - source
- ('bowtie2-2.2.5-source', 'bowtie2-2.2.5'),
- ('grib_api-1.17.0-Source', 'grib_api-1.17.0'),
- # Download type - full
- ('julia-0.4.3-full', 'julia-0.4.3'),
- # Download type - bin
- ('apache-maven-3.3.9-bin', 'apache-maven-3.3.9'),
- # Download type - binary
- ('Jmol-14.8.0-binary', 'Jmol-14.8.0'),
- # Download type - gem
- ('rubysl-date-2.0.9.gem', 'rubysl-date-2.0.9'),
- # Download type - tar
- ('gromacs-4.6.1-tar', 'gromacs-4.6.1'),
- # Download type - sh
- ('Miniconda2-4.3.11-Linux-x86_64.sh', 'Miniconda2-4.3.11'),
- # Download version - release
- ('v1.0.4-release', 'v1.0.4'),
- # Download version - stable
- ('libevent-2.0.21-stable', 'libevent-2.0.21'),
- # Download version - final
- ('2.6.7-final', '2.6.7'),
- # Download version - rel
- ('v1.9.5.1rel', 'v1.9.5.1'),
- # Download version - orig
- ('dash_0.5.5.1.orig', 'dash_0.5.5.1'),
- # Download version - plus
- ('ncbi-blast-2.6.0+-src', 'ncbi-blast-2.6.0'),
- # License
- ('cppad-20170114.gpl', 'cppad-20170114'),
- # Arch
- ('pcraster-4.1.0_x86-64', 'pcraster-4.1.0'),
- ('dislin-11.0.linux.i586_64', 'dislin-11.0'),
- ('PAGIT.V1.01.64bit', 'PAGIT.V1.01'),
- # OS - linux
- ('astyle_2.04_linux', 'astyle_2.04'),
- # OS - unix
- ('install-tl-unx', 'install-tl'),
- # OS - macos
- ('astyle_1.23_macosx', 'astyle_1.23'),
- ('haxe-2.08-osx', 'haxe-2.08'),
- # PyPI - wheel
- ('entrypoints-0.2.2-py2.py3-none-any.whl', 'entrypoints-0.2.2'),
- (
- 'numpy-1.12.0-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.'
- 'macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl', 'numpy-1.12.0'
- ),
- # PyPI - exe
- ('PyYAML-3.12.win-amd64-py3.5.exe', 'PyYAML-3.12'),
- # Combinations of multiple patterns - bin, release
- ('rocketmq-all-4.5.2-bin-release', 'rocketmq-all-4.5.2'),
- # Combinations of multiple patterns - all
- ('p7zip_9.04_src_all', 'p7zip_9.04'),
- # Combinations of multiple patterns - run
- ('cuda_8.0.44_linux.run', 'cuda_8.0.44'),
- # Combinations of multiple patterns - file
- ('ack-2.14-single-file', 'ack-2.14'),
- # Combinations of multiple patterns - jar
- ('antlr-3.4-complete.jar', 'antlr-3.4'),
- # Combinations of multiple patterns - oss
- ('tbb44_20160128oss_src_0', 'tbb44_20160128'),
- # Combinations of multiple patterns - darwin
- ('ghc-7.0.4-x86_64-apple-darwin', 'ghc-7.0.4'),
- ('ghc-7.0.4-i386-apple-darwin', 'ghc-7.0.4'),
- # Combinations of multiple patterns - centos
- ('sratoolkit.2.8.2-1-centos_linux64', 'sratoolkit.2.8.2-1'),
- # Combinations of multiple patterns - arch
- ('VizGlow_v2.2alpha17-R21November2016-Linux-x86_64-Install',
- 'VizGlow_v2.2alpha17-R21November2016'),
- ('jdk-8u92-linux-x64', 'jdk-8u92'),
- ('cuda_6.5.14_linux_64.run', 'cuda_6.5.14'),
- ('Mathematica_12.0.0_LINUX.sh', 'Mathematica_12.0.0'),
- ('trf407b.linux64', 'trf407b'),
- # Combinations of multiple patterns - with
- ('mafft-7.221-with-extensions-src', 'mafft-7.221'),
- ('spark-2.0.0-bin-without-hadoop', 'spark-2.0.0'),
- ('conduit-v0.3.0-src-with-blt', 'conduit-v0.3.0'),
- # Combinations of multiple patterns - rock
- ('bitlib-23-2.src.rock', 'bitlib-23-2'),
- # Combinations of multiple patterns - public
- ('dakota-6.3-public.src', 'dakota-6.3'),
- # Combinations of multiple patterns - universal
- ('synergy-1.3.6p2-MacOSX-Universal', 'synergy-1.3.6p2'),
- # Combinations of multiple patterns - dynamic
- ('snptest_v2.5.2_linux_x86_64_dynamic', 'snptest_v2.5.2'),
- # Combinations of multiple patterns - other
- ('alglib-3.11.0.cpp.gpl', 'alglib-3.11.0'),
- ('hpcviewer-2019.08-linux.gtk.x86_64', 'hpcviewer-2019.08'),
- ('apache-mxnet-src-1.3.0-incubating', 'apache-mxnet-src-1.3.0'),
-])
+@pytest.mark.parametrize(
+ "url,expected",
+ [
+ # No suffix
+ ("rgb-1.0.6", "rgb-1.0.6"),
+ # Misleading prefix
+ ("jpegsrc.v9b", "jpegsrc.v9b"),
+ ("turbolinux702", "turbolinux702"),
+ ("converge_install_2.3.16", "converge_install_2.3.16"),
+ # Download type - code, source
+ ("cistem-1.0.0-beta-source-code", "cistem-1.0.0-beta"),
+ # Download type - src
+ ("apache-ant-1.9.7-src", "apache-ant-1.9.7"),
+ ("go1.7.4.src", "go1.7.4"),
+ # Download type - source
+ ("bowtie2-2.2.5-source", "bowtie2-2.2.5"),
+ ("grib_api-1.17.0-Source", "grib_api-1.17.0"),
+ # Download type - full
+ ("julia-0.4.3-full", "julia-0.4.3"),
+ # Download type - bin
+ ("apache-maven-3.3.9-bin", "apache-maven-3.3.9"),
+ # Download type - binary
+ ("Jmol-14.8.0-binary", "Jmol-14.8.0"),
+ # Download type - gem
+ ("rubysl-date-2.0.9.gem", "rubysl-date-2.0.9"),
+ # Download type - tar
+ ("gromacs-4.6.1-tar", "gromacs-4.6.1"),
+ # Download type - sh
+ ("Miniconda2-4.3.11-Linux-x86_64.sh", "Miniconda2-4.3.11"),
+ # Download version - release
+ ("v1.0.4-release", "v1.0.4"),
+ # Download version - stable
+ ("libevent-2.0.21-stable", "libevent-2.0.21"),
+ # Download version - final
+ ("2.6.7-final", "2.6.7"),
+ # Download version - rel
+ ("v1.9.5.1rel", "v1.9.5.1"),
+ # Download version - orig
+ ("dash_0.5.5.1.orig", "dash_0.5.5.1"),
+ # Download version - plus
+ ("ncbi-blast-2.6.0+-src", "ncbi-blast-2.6.0"),
+ # License
+ ("cppad-20170114.gpl", "cppad-20170114"),
+ # Arch
+ ("pcraster-4.1.0_x86-64", "pcraster-4.1.0"),
+ ("dislin-11.0.linux.i586_64", "dislin-11.0"),
+ ("PAGIT.V1.01.64bit", "PAGIT.V1.01"),
+ # OS - linux
+ ("astyle_2.04_linux", "astyle_2.04"),
+ # OS - unix
+ ("install-tl-unx", "install-tl"),
+ # OS - macos
+ ("astyle_1.23_macosx", "astyle_1.23"),
+ ("haxe-2.08-osx", "haxe-2.08"),
+ # PyPI - wheel
+ ("entrypoints-0.2.2-py2.py3-none-any.whl", "entrypoints-0.2.2"),
+ (
+ "numpy-1.12.0-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel."
+ "macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl",
+ "numpy-1.12.0",
+ ),
+ # PyPI - exe
+ ("PyYAML-3.12.win-amd64-py3.5.exe", "PyYAML-3.12"),
+ # Combinations of multiple patterns - bin, release
+ ("rocketmq-all-4.5.2-bin-release", "rocketmq-all-4.5.2"),
+ # Combinations of multiple patterns - all
+ ("p7zip_9.04_src_all", "p7zip_9.04"),
+ # Combinations of multiple patterns - run
+ ("cuda_8.0.44_linux.run", "cuda_8.0.44"),
+ # Combinations of multiple patterns - file
+ ("ack-2.14-single-file", "ack-2.14"),
+ # Combinations of multiple patterns - jar
+ ("antlr-3.4-complete.jar", "antlr-3.4"),
+ # Combinations of multiple patterns - oss
+ ("tbb44_20160128oss_src_0", "tbb44_20160128"),
+ # Combinations of multiple patterns - darwin
+ ("ghc-7.0.4-x86_64-apple-darwin", "ghc-7.0.4"),
+ ("ghc-7.0.4-i386-apple-darwin", "ghc-7.0.4"),
+ # Combinations of multiple patterns - centos
+ ("sratoolkit.2.8.2-1-centos_linux64", "sratoolkit.2.8.2-1"),
+ # Combinations of multiple patterns - arch
+ (
+ "VizGlow_v2.2alpha17-R21November2016-Linux-x86_64-Install",
+ "VizGlow_v2.2alpha17-R21November2016",
+ ),
+ ("jdk-8u92-linux-x64", "jdk-8u92"),
+ ("cuda_6.5.14_linux_64.run", "cuda_6.5.14"),
+ ("Mathematica_12.0.0_LINUX.sh", "Mathematica_12.0.0"),
+ ("trf407b.linux64", "trf407b"),
+ # Combinations of multiple patterns - with
+ ("mafft-7.221-with-extensions-src", "mafft-7.221"),
+ ("spark-2.0.0-bin-without-hadoop", "spark-2.0.0"),
+ ("conduit-v0.3.0-src-with-blt", "conduit-v0.3.0"),
+ # Combinations of multiple patterns - rock
+ ("bitlib-23-2.src.rock", "bitlib-23-2"),
+ # Combinations of multiple patterns - public
+ ("dakota-6.3-public.src", "dakota-6.3"),
+ # Combinations of multiple patterns - universal
+ ("synergy-1.3.6p2-MacOSX-Universal", "synergy-1.3.6p2"),
+ # Combinations of multiple patterns - dynamic
+ ("snptest_v2.5.2_linux_x86_64_dynamic", "snptest_v2.5.2"),
+ # Combinations of multiple patterns - other
+ ("alglib-3.11.0.cpp.gpl", "alglib-3.11.0"),
+ ("hpcviewer-2019.08-linux.gtk.x86_64", "hpcviewer-2019.08"),
+ ("apache-mxnet-src-1.3.0-incubating", "apache-mxnet-src-1.3.0"),
+ ],
+)
def test_url_strip_version_suffixes(url, expected):
stripped = strip_version_suffixes(url)
assert stripped == expected
-@pytest.mark.parametrize('url,version,expected', [
- # No suffix
- ('rgb-1.0.6', '1.0.6', 'rgb'),
- ('nauty26r7', '26r7', 'nauty'),
- ('PAGIT.V1.01', '1.01', 'PAGIT'),
- ('AmpliconNoiseV1.29', '1.29', 'AmpliconNoise'),
- # Download type - install
- ('converge_install_2.3.16', '2.3.16', 'converge'),
- # Download type - src
- ('jpegsrc.v9b', '9b', 'jpeg'),
- ('blatSrc35', '35', 'blat'),
- # Download type - open
- ('RepeatMasker-open-4-0-7', '4-0-7', 'RepeatMasker'),
- # Download type - archive
- ('coinhsl-archive-2014.01.17', '2014.01.17', 'coinhsl'),
- # Download type - std
- ('ghostscript-fonts-std-8.11', '8.11', 'ghostscript-fonts'),
- # Download type - bin
- ('GapCloser-bin-v1.12-r6', '1.12-r6', 'GapCloser'),
- # Download type - software
- ('orthomclSoftware-v2.0.9', '2.0.9', 'orthomcl'),
- # Download version - release
- ('cbench_release_1.3.0.tar.gz', '1.3.0', 'cbench'),
- # Download version - snapshot
- ('gts-snapshot-121130', '121130', 'gts'),
- # Download version - distrib
- ('zoltan_distrib_v3.83', '3.83', 'zoltan'),
- # Download version - latest
- ('Platypus-latest', 'N/A', 'Platypus'),
- # Download version - complex
- ('qt-everywhere-opensource-src-5.7.0', '5.7.0', 'qt'),
- # Arch
- ('VESTA-x86_64', '3.4.6', 'VESTA'),
- # VCS - bazaar
- ('libvterm-0+bzr681', '681', 'libvterm'),
- # License - gpl
- ('PyQt-x11-gpl-4.11.3', '4.11.3', 'PyQt'),
- ('PyQt4_gpl_x11-4.12.3', '4.12.3', 'PyQt4'),
-])
+@pytest.mark.parametrize(
+ "url,version,expected",
+ [
+ # No suffix
+ ("rgb-1.0.6", "1.0.6", "rgb"),
+ ("nauty26r7", "26r7", "nauty"),
+ ("PAGIT.V1.01", "1.01", "PAGIT"),
+ ("AmpliconNoiseV1.29", "1.29", "AmpliconNoise"),
+ # Download type - install
+ ("converge_install_2.3.16", "2.3.16", "converge"),
+ # Download type - src
+ ("jpegsrc.v9b", "9b", "jpeg"),
+ ("blatSrc35", "35", "blat"),
+ # Download type - open
+ ("RepeatMasker-open-4-0-7", "4-0-7", "RepeatMasker"),
+ # Download type - archive
+ ("coinhsl-archive-2014.01.17", "2014.01.17", "coinhsl"),
+ # Download type - std
+ ("ghostscript-fonts-std-8.11", "8.11", "ghostscript-fonts"),
+ # Download type - bin
+ ("GapCloser-bin-v1.12-r6", "1.12-r6", "GapCloser"),
+ # Download type - software
+ ("orthomclSoftware-v2.0.9", "2.0.9", "orthomcl"),
+ # Download version - release
+ ("cbench_release_1.3.0.tar.gz", "1.3.0", "cbench"),
+ # Download version - snapshot
+ ("gts-snapshot-121130", "121130", "gts"),
+ # Download version - distrib
+ ("zoltan_distrib_v3.83", "3.83", "zoltan"),
+ # Download version - latest
+ ("Platypus-latest", "N/A", "Platypus"),
+ # Download version - complex
+ ("qt-everywhere-opensource-src-5.7.0", "5.7.0", "qt"),
+ # Arch
+ ("VESTA-x86_64", "3.4.6", "VESTA"),
+ # VCS - bazaar
+ ("libvterm-0+bzr681", "681", "libvterm"),
+ # License - gpl
+ ("PyQt-x11-gpl-4.11.3", "4.11.3", "PyQt"),
+ ("PyQt4_gpl_x11-4.12.3", "4.12.3", "PyQt4"),
+ ],
+)
def test_url_strip_name_suffixes(url, version, expected):
stripped = strip_name_suffixes(url, version)
assert stripped == expected
-@pytest.mark.parametrize('name,noffset,ver,voffset,path', [
- # Name in path
- ('antlr', 25, '2.7.7', 40, 'https://github.com/antlr/antlr/tarball/v2.7.7'),
- # Name in stem
- ('gmp', 32, '6.0.0a', 36, 'https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2'),
- # Name in suffix
-
- # Don't think I've ever seen one of these before
- # We don't look for it, so it would probably fail anyway
-
- # Version in path
- ('nextflow', 31, '0.20.1', 59, 'https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow'),
- # Version in stem
- ('zlib', 24, '1.2.10', 29, 'http://zlib.net/fossils/zlib-1.2.10.tar.gz'),
- ('slepc', 51, '3.6.2', 57, 'http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz'),
- ('cloog', 61, '0.18.1', 67, 'http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz'),
- ('libxc', 58, '2.2.2', 64, 'http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz'),
- # Version in suffix
- ('swiftsim', 36, '0.3.0', 76, 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0'),
- ('swiftsim', 55, '0.3.0', 95, 'https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0'),
- ('sionlib', 30, '1.7.1', 59, 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1'),
- # Regex in name
- ('voro++', 40, '0.4.6', 47, 'http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz'),
- # SourceForge download
- ('glew', 55, '2.0.0', 60, 'https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download'),
-])
+@pytest.mark.parametrize(
+ "name,noffset,ver,voffset,path",
+ [
+ # Name in path
+ ("antlr", 25, "2.7.7", 40, "https://github.com/antlr/antlr/tarball/v2.7.7"),
+ # Name in stem
+ ("gmp", 32, "6.0.0a", 36, "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"),
+ # Name in suffix
+ # Don't think I've ever seen one of these before
+ # We don't look for it, so it would probably fail anyway
+ # Version in path
+ (
+ "nextflow",
+ 31,
+ "0.20.1",
+ 59,
+ "https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow",
+ ),
+ # Version in stem
+ ("zlib", 24, "1.2.10", 29, "http://zlib.net/fossils/zlib-1.2.10.tar.gz"),
+ (
+ "slepc",
+ 51,
+ "3.6.2",
+ 57,
+ "http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz",
+ ),
+ (
+ "cloog",
+ 61,
+ "0.18.1",
+ 67,
+ "http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz",
+ ),
+ (
+ "libxc",
+ 58,
+ "2.2.2",
+ 64,
+ "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz",
+ ),
+ # Version in suffix
+ (
+ "swiftsim",
+ 36,
+ "0.3.0",
+ 76,
+ "http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0",
+ ),
+ (
+ "swiftsim",
+ 55,
+ "0.3.0",
+ 95,
+ "https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0",
+ ),
+ (
+ "sionlib",
+ 30,
+ "1.7.1",
+ 59,
+ "http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1",
+ ),
+ # Regex in name
+ ("voro++", 40, "0.4.6", 47, "http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz"),
+ # SourceForge download
+ (
+ "glew",
+ 55,
+ "2.0.0",
+ 60,
+ "https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download",
+ ),
+ ],
+)
def test_url_parse_offset(name, noffset, ver, voffset, path):
"""Tests that the name, version and offsets are computed correctly.
@@ -219,335 +277,536 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
assert vstart == voffset
-@pytest.mark.parametrize('name,version,url', [
- # Common Repositories - github downloads
-
- # name/archive/ver.ver
- ('nco', '4.6.2', 'https://github.com/nco/nco/archive/4.6.2.tar.gz'),
- # name/archive/vver.ver
- ('vim', '8.0.0134', 'https://github.com/vim/vim/archive/v8.0.0134.tar.gz'),
- # name/archive/name-ver.ver
- ('oce', '0.18', 'https://github.com/tpaviot/oce/archive/OCE-0.18.tar.gz'),
- # name/releases/download/vver/name-ver.ver
- ('libmesh', '1.0.0', 'https://github.com/libMesh/libmesh/releases/download/v1.0.0/libmesh-1.0.0.tar.bz2'),
- # name/tarball/vver.ver
- ('git', '2.7.1', 'https://github.com/git/git/tarball/v2.7.1'),
- # name/zipball/vver.ver
- ('git', '2.7.1', 'https://github.com/git/git/zipball/v2.7.1'),
-
- # Common Repositories - gitlab downloads
-
- # name/repository/archive.ext?ref=vver.ver
- ('swiftsim', '0.3.0',
- 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0'),
- # /api/v4/projects/NAMESPACE%2Fname/repository/archive.ext?sha=vver.ver
- ('swiftsim', '0.3.0',
- 'https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0'),
- # name/repository/archive.ext?ref=name-ver.ver
- ('icet', '1.2.3',
- 'https://gitlab.kitware.com/icet/icet/repository/archive.tar.gz?ref=IceT-1.2.3'),
- # /api/v4/projects/NAMESPACE%2Fname/repository/archive.ext?sha=name-ver.ver
- ('icet', '1.2.3',
- 'https://gitlab.kitware.com/api/v4/projects/icet%2Ficet/repository/archive.tar.bz2?sha=IceT-1.2.3'),
-
- # Common Repositories - bitbucket downloads
-
- # name/get/ver.ver
- ('eigen', '3.2.7', 'https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2'),
- # name/get/vver.ver
- ('hoomd-blue', '1.3.3',
- 'https://bitbucket.org/glotzer/hoomd-blue/get/v1.3.3.tar.bz2'),
- # name/downloads/name-ver.ver
- ('dolfin', '2016.1.0',
- 'https://bitbucket.org/fenics-project/dolfin/downloads/dolfin-2016.1.0.tar.gz'),
-
- # Common Repositories - sourceforge downloads
-
- # name-ver.ver
- ('libpng', '1.6.27',
- 'http://download.sourceforge.net/libpng/libpng-1.6.27.tar.gz'),
- ('lcms2', '2.6',
- 'http://downloads.sourceforge.net/project/lcms/lcms/2.6/lcms2-2.6.tar.gz'),
- ('modules', '3.2.10',
- 'http://prdownloads.sourceforge.net/modules/modules-3.2.10.tar.gz'),
- # name-ver.ver.ext/download
- ('glew', '2.0.0',
- 'https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download'),
-
- # Common Repositories - cran downloads
-
- # name.name_ver.ver-ver.ver
- ('TH.data', '1.0-8', 'https://cran.r-project.org/src/contrib/TH.data_1.0-8.tar.gz'),
- ('knitr', '1.14', 'https://cran.rstudio.com/src/contrib/knitr_1.14.tar.gz'),
- ('devtools', '1.12.0', 'https://cloud.r-project.org/src/contrib/devtools_1.12.0.tar.gz'),
-
- # Common Repositories - pypi downloads
-
- # name.name_name-ver.ver
- ('3to2', '1.1.1', 'https://pypi.python.org/packages/source/3/3to2/3to2-1.1.1.zip'),
- ('mpmath', '0.19',
- 'https://pypi.python.org/packages/source/m/mpmath/mpmath-all-0.19.tar.gz'),
- ('pandas', '0.16.0',
- 'https://pypi.python.org/packages/source/p/pandas/pandas-0.16.0.tar.gz#md5=bfe311f05dc0c351f8955fbd1e296e73'),
- ('sphinx_rtd_theme', '0.1.10a0',
- 'https://pypi.python.org/packages/da/6b/1b75f13d8aa3333f19c6cdf1f0bc9f52ea739cae464fbee050307c121857/sphinx_rtd_theme-0.1.10a0.tar.gz'),
- ('backports.ssl_match_hostname', '3.5.0.1',
- 'https://pypi.io/packages/source/b/backports.ssl_match_hostname/backports.ssl_match_hostname-3.5.0.1.tar.gz'),
- # Common Repositories - bazaar downloads
- ('libvterm', '681', 'http://www.leonerd.org.uk/code/libvterm/libvterm-0+bzr681.tar.gz'),
-
- # Common Tarball Formats
-
- # 1st Pass: Simplest case
- # Assume name contains no digits and version contains no letters
-
- # name-ver.ver
- ('libpng', '1.6.37', 'http://download.sourceforge.net/libpng/libpng-1.6.37.tar.gz'),
-
- # 2nd Pass: Version only
- # Assume version contains no letters
-
- # ver.ver
- ('eigen', '3.2.7', 'https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2'),
- # ver.ver-ver
- ('ImageMagick', '7.0.2-7', 'https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz'),
- # vver.ver
- ('CGNS', '3.3.0', 'https://github.com/CGNS/CGNS/archive/v3.3.0.tar.gz'),
- # vver_ver
- ('luafilesystem', '1_6_3', 'https://github.com/keplerproject/luafilesystem/archive/v1_6_3.tar.gz'),
-
- # 3rd Pass: No separator characters are used
- # Assume name contains no digits
-
- # namever
- ('turbolinux', '702', 'file://{0}/turbolinux702.tar.gz'.format(os.getcwd())),
- ('nauty', '26r7', 'http://pallini.di.uniroma1.it/nauty26r7.tar.gz'),
-
- # 4th Pass: A single separator character is used
- # Assume name contains no digits
-
- # name-name-ver-ver
- ('Trilinos', '12-10-1',
- 'https://github.com/trilinos/Trilinos/archive/trilinos-release-12-10-1.tar.gz'),
- ('panda', '2016-03-07',
- 'http://comopt.ifi.uni-heidelberg.de/software/PANDA/downloads/panda-2016-03-07.tar'),
- ('gts', '121130',
- 'http://gts.sourceforge.net/tarballs/gts-snapshot-121130.tar.gz'),
- ('cdd', '061a',
- 'http://www.cs.mcgill.ca/~fukuda/download/cdd/cdd-061a.tar.gz'),
- # name_name_ver_ver
- ('tinyxml', '2_6_2',
- 'https://sourceforge.net/projects/tinyxml/files/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz'),
- ('boost', '1_55_0',
- 'http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2'),
- ('yorick', '2_2_04',
- 'https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz'),
- ('tbb', '44_20160413',
- 'https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160413oss_src.tgz'),
- # name.name.ver.ver
- ('prank', '150803', 'http://wasabiapp.org/download/prank/prank.source.150803.tgz'),
- ('jpeg', '9b', 'http://www.ijg.org/files/jpegsrc.v9b.tar.gz'),
- ('openjpeg', '2.1',
- 'https://github.com/uclouvain/openjpeg/archive/version.2.1.tar.gz'),
- # name.namever.ver
- ('atlas', '3.11.34',
- 'http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2'),
- ('visit', '2.10.1', 'http://portal.nersc.gov/project/visit/releases/2.10.1/visit2.10.1.tar.gz'),
- ('geant', '4.10.01.p03', 'http://geant4.cern.ch/support/source/geant4.10.01.p03.tar.gz'),
- ('tcl', '8.6.5', 'http://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz'),
-
- # 5th Pass: Two separator characters are used
- # Name may contain digits, version may contain letters
-
- # name-name-ver.ver
- ('m4', '1.4.17', 'https://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz'),
- ('gmp', '6.0.0a', 'https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2'),
- ('LaunchMON', '1.0.2',
- 'https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz'),
- # name-ver-ver.ver
- ('libedit', '20150325-3.1', 'http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz'),
- # name-name-ver_ver
- ('icu4c', '57_1', 'http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz'),
- # name_name_ver.ver
- ('superlu_dist', '4.1', 'http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz'),
- ('pexsi', '0.9.0', 'https://math.berkeley.edu/~linlin/pexsi/download/pexsi_v0.9.0.tar.gz'),
- # name_name.ver.ver
- ('fer', '696', 'ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.v696.tar.gz'),
- # name_name_ver-ver
- ('Bridger', '2014-12-01',
- 'https://downloads.sourceforge.net/project/rnaseqassembly/Bridger_r2014-12-01.tar.gz'),
- # name-name-ver.ver-ver.ver
- ('sowing', '1.1.23-p1', 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/sowing-1.1.23-p1.tar.gz'),
- ('bib2xhtml', '3.0-15-gf506', 'http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz'),
- # namever.ver-ver.ver
- ('go', '1.4-bootstrap-20161024', 'https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz'),
-
- # 6th Pass: All three separator characters are used
- # Name may contain digits, version may contain letters
-
- # name_name-ver.ver
- ('the_silver_searcher', '0.32.0', 'http://geoff.greer.fm/ag/releases/the_silver_searcher-0.32.0.tar.gz'),
- ('sphinx_rtd_theme', '0.1.10a0',
- 'https://pypi.python.org/packages/source/s/sphinx_rtd_theme/sphinx_rtd_theme-0.1.10a0.tar.gz'),
- # name.name_ver.ver-ver.ver
- ('TH.data', '1.0-8', 'https://cran.r-project.org/src/contrib/TH.data_1.0-8.tar.gz'),
- ('XML', '3.98-1.4', 'https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz'),
- # name-name-ver.ver_ver.ver
- ('pypar', '2.1.5_108',
- 'https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/pypar/pypar-2.1.5_108.tgz'),
- # name-namever.ver_ver.ver
- ('STAR-CCM+', '11.06.010_02',
- 'file://{0}/STAR-CCM+11.06.010_02_linux-x86_64.tar.gz'.format(os.getcwd())),
- # name-name_name-ver.ver
- ('PerlIO-utf8_strict', '0.002',
- 'http://search.cpan.org/CPAN/authors/id/L/LE/LEONT/PerlIO-utf8_strict-0.002.tar.gz'),
-
- # Various extensions
- # .tar.gz
- ('libXcursor', '1.1.14',
- 'https://www.x.org/archive/individual/lib/libXcursor-1.1.14.tar.gz'),
- # .tar.bz2
- ('mpfr', '4.0.1', 'https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.tar.bz2'),
- # .tar.xz
- ('pkgconf', '1.5.4',
- 'http://distfiles.dereferenced.org/pkgconf/pkgconf-1.5.4.tar.xz'),
- # .tar.Z
- ('Gblocks', '0.91b',
- 'http://molevol.cmima.csic.es/castresana/Gblocks/Gblocks_Linux64_0.91b.tar.Z'),
- # .tar.zip
- ('bcl2fastq2', '2.19.1.403',
- 'ftp://webdata2:webdata2@ussd-ftp.illumina.com/downloads/software/bcl2fastq/bcl2fastq2-v2.19.1.403-tar.zip'),
- # .tar, .TAR
- ('python-meep', '1.4.2',
- 'https://launchpad.net/python-meep/1.4/1.4/+download/python-meep-1.4.2.tar'),
- ('python-meep', '1.4.2',
- 'https://launchpad.net/python-meep/1.4/1.4/+download/python-meep-1.4.2.TAR'),
- # .gz
- ('libXcursor', '1.1.14',
- 'https://www.x.org/archive/individual/lib/libXcursor-1.1.14.gz'),
- # .bz2
- ('mpfr', '4.0.1', 'https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.bz2'),
- # .xz
- ('pkgconf', '1.5.4',
- 'http://distfiles.dereferenced.org/pkgconf/pkgconf-1.5.4.xz'),
- # .Z
- ('Gblocks', '0.91b',
- 'http://molevol.cmima.csic.es/castresana/Gblocks/Gblocks_Linux64_0.91b.Z'),
- # .zip
- ('bliss', '0.73', 'http://www.tcs.hut.fi/Software/bliss/bliss-0.73.zip'),
- # .tgz
- ('ADOL-C', '2.6.1',
- 'http://www.coin-or.org/download/source/ADOL-C/ADOL-C-2.6.1.tgz'),
- # .tbz
- ('mpfr', '4.0.1', 'https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.tbz'),
- # .tbz2
- ('mpfr', '4.0.1', 'https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.tbz2'),
- # .txz
- ('kim-api', '2.1.0', 'https://s3.openkim.org/kim-api/kim-api-2.1.0.txz'),
-
- # 8th Pass: Query strings
-
- # suffix queries
- ('swiftsim', '0.3.0', 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0'),
- ('swiftsim', '0.3.0',
- 'https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0'),
- ('sionlib', '1.7.1', 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1'),
- ('jube2', '2.2.2', 'https://apps.fz-juelich.de/jsc/jube/jube2/download.php?version=2.2.2'),
- ('archive', '1.0.0', 'https://code.ornl.gov/eck/papyrus/repository/archive.tar.bz2?ref=v1.0.0'),
- ('VecGeom', '0.3.rc',
- 'https://gitlab.cern.ch/api/v4/projects/VecGeom%2FVecGeom/repository/archive.tar.gz?sha=v0.3.rc'),
- ('parsplice', '1.1',
- 'https://gitlab.com/api/v4/projects/exaalt%2Fparsplice/repository/archive.tar.gz?sha=v1.1'),
- ('busco', '2.0.1', 'https://gitlab.com/api/v4/projects/ezlab%2Fbusco/repository/archive.tar.gz?sha=2.0.1'),
- ('libaec', '1.0.2',
- 'https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2'),
- ('icet', '2.1.1',
- 'https://gitlab.kitware.com/api/v4/projects/icet%2Ficet/repository/archive.tar.bz2?sha=IceT-2.1.1'),
- ('vtk-m', '1.3.0',
- 'https://gitlab.kitware.com/api/v4/projects/vtk%2Fvtk-m/repository/archive.tar.gz?sha=v1.3.0'),
- ('GATK', '3.8-1-0-gf15c1c3ef',
- 'https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef'),
- # stem queries
- ('slepc', '3.6.2', 'http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz'),
- ('otf', '1.12.5salmon',
- 'http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz'),
- ('eospac', '6.4.0beta.1',
- 'http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz'),
- ('vampirtrace', '5.14.4',
- 'http://wwwpub.zih.tu-dresden.de/~mlieber/dcount/dcount.php?package=vampirtrace&get=VampirTrace-5.14.4.tar.gz'),
- ('EvtGen', '01.07.00',
- 'https://evtgen.hepforge.org/downloads?f=EvtGen-01.07.00.tar.gz'),
- # (we don't actually look for these, they are picked up
- # during the preliminary stem parsing)
- ('octopus', '6.0', 'http://octopus-code.org/down.php?file=6.0/octopus-6.0.tar.gz'),
- ('cloog', '0.18.1', 'http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz'),
- ('libxc', '2.2.2', 'http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz'),
- ('cistem', '1.0.0-beta',
- 'https://cistem.org/system/tdf/upload3/cistem-1.0.0-beta-source-code.tar.gz?file=1&type=cistem_details&id=37&force=0'),
- ('Magics', '4.1.0',
- 'https://confluence.ecmwf.int/download/attachments/3473464/Magics-4.1.0-Source.tar.gz?api=v2'),
- ('grib_api', '1.17.0',
- 'https://software.ecmwf.int/wiki/download/attachments/3473437/grib_api-1.17.0-Source.tar.gz?api=v2'),
- ('eccodes', '2.2.0',
- 'https://software.ecmwf.int/wiki/download/attachments/45757960/eccodes-2.2.0-Source.tar.gz?api=v2'),
- ('SWFFT', '1.0',
- 'https://xgitlab.cels.anl.gov/api/v4/projects/hacc%2FSWFFT/repository/archive.tar.gz?sha=v1.0'),
-
- # 9th Pass: Version in path
-
- # github.com/repo/name/releases/download/name-vver/name
- ('nextflow', '0.20.1', 'https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow'),
- # ver/name
- ('ncbi', '2.2.26', 'ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz'),
-
- # Other tests for corner cases
-
- # single character name
- ('R', '3.3.2', 'https://cloud.r-project.org/src/base/R-3/R-3.3.2.tar.gz'),
- # name starts with digit
- ('3to2', '1.1.1', 'https://pypi.python.org/packages/source/3/3to2/3to2-1.1.1.zip'),
- # plus in name
- ('gtk+', '2.24.31', 'http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.31.tar.xz'),
- ('voro++', '0.4.6', 'http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz'),
- # Name comes before download.php
- ('sionlib', '1.7.1', 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1'),
- # Ignore download.php
- ('slepc', '3.6.2', 'http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz'),
- ('ScientificPython', '2.8.1',
- 'https://sourcesup.renater.fr/frs/download.php/file/4411/ScientificPython-2.8.1.tar.gz'),
- # gloox beta style
- ('gloox', '1.0-beta7', 'http://camaya.net/download/gloox-1.0-beta7.tar.bz2'),
- # sphinx beta style
- ('sphinx', '1.10-beta', 'http://sphinxsearch.com/downloads/sphinx-1.10-beta.tar.gz'),
- # ruby version style
- ('ruby', '1.9.1-p243', 'ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.1-p243.tar.gz'),
- # rc style
- ('libvorbis', '1.2.2rc1', 'http://downloads.xiph.org/releases/vorbis/libvorbis-1.2.2rc1.tar.bz2'),
- # dash rc style
- ('js', '1.8.0-rc1', 'http://ftp.mozilla.org/pub/mozilla.org/js/js-1.8.0-rc1.tar.gz'),
- # apache version style
- ('apache-cassandra', '1.2.0-rc2',
- 'http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz'),
- # xaw3d version
- ('Xaw3d', '1.5E', 'ftp://ftp.visi.com/users/hawkeyd/X/Xaw3d-1.5E.tar.gz'),
- # fann version
- ('fann', '2.1.0beta', 'http://downloads.sourceforge.net/project/fann/fann/2.1.0beta/fann-2.1.0beta.zip'),
- # imap version
- ('imap', '2007f', 'ftp://ftp.cac.washington.edu/imap/imap-2007f.tar.gz'),
- # suite3270 version
- ('suite3270', '3.3.12ga7',
- 'http://sourceforge.net/projects/x3270/files/x3270/3.3.12ga7/suite3270-3.3.12ga7-src.tgz'),
- # scalasca version
- ('cube', '4.2.3', 'http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz'),
- ('cube', '4.3-TP1', 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz'),
- # github raw url
- ('CLAMR', '2.0.7', 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true'),
- # luaposix version
- ('luaposix', '33.4.0', 'https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz'),
- # nco version
- ('nco', '4.6.2-beta03', 'https://github.com/nco/nco/archive/4.6.2-beta03.tar.gz'),
- ('nco', '4.6.3-alpha04', 'https://github.com/nco/nco/archive/4.6.3-alpha04.tar.gz'),
-])
+@pytest.mark.parametrize(
+ "name,version,url",
+ [
+ # Common Repositories - github downloads
+ # name/archive/ver.ver
+ ("nco", "4.6.2", "https://github.com/nco/nco/archive/4.6.2.tar.gz"),
+ # name/archive/vver.ver
+ ("vim", "8.0.0134", "https://github.com/vim/vim/archive/v8.0.0134.tar.gz"),
+ # name/archive/name-ver.ver
+ ("oce", "0.18", "https://github.com/tpaviot/oce/archive/OCE-0.18.tar.gz"),
+ # name/releases/download/vver/name-ver.ver
+ (
+ "libmesh",
+ "1.0.0",
+ "https://github.com/libMesh/libmesh/releases/download/v1.0.0/libmesh-1.0.0.tar.bz2",
+ ),
+ # name/tarball/vver.ver
+ ("git", "2.7.1", "https://github.com/git/git/tarball/v2.7.1"),
+ # name/zipball/vver.ver
+ ("git", "2.7.1", "https://github.com/git/git/zipball/v2.7.1"),
+ # Common Repositories - gitlab downloads
+ # name/repository/archive.ext?ref=vver.ver
+ (
+ "swiftsim",
+ "0.3.0",
+ "http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0",
+ ),
+ # /api/v4/projects/NAMESPACE%2Fname/repository/archive.ext?sha=vver.ver
+ (
+ "swiftsim",
+ "0.3.0",
+ "https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0",
+ ),
+ # name/repository/archive.ext?ref=name-ver.ver
+ (
+ "icet",
+ "1.2.3",
+ "https://gitlab.kitware.com/icet/icet/repository/archive.tar.gz?ref=IceT-1.2.3",
+ ),
+ # /api/v4/projects/NAMESPACE%2Fname/repository/archive.ext?sha=name-ver.ver
+ (
+ "icet",
+ "1.2.3",
+ "https://gitlab.kitware.com/api/v4/projects/icet%2Ficet/repository/archive.tar.bz2?sha=IceT-1.2.3",
+ ),
+ # Common Repositories - bitbucket downloads
+ # name/get/ver.ver
+ ("eigen", "3.2.7", "https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2"),
+ # name/get/vver.ver
+ ("hoomd-blue", "1.3.3", "https://bitbucket.org/glotzer/hoomd-blue/get/v1.3.3.tar.bz2"),
+ # name/downloads/name-ver.ver
+ (
+ "dolfin",
+ "2016.1.0",
+ "https://bitbucket.org/fenics-project/dolfin/downloads/dolfin-2016.1.0.tar.gz",
+ ),
+ # Common Repositories - sourceforge downloads
+ # name-ver.ver
+ ("libpng", "1.6.27", "http://download.sourceforge.net/libpng/libpng-1.6.27.tar.gz"),
+ (
+ "lcms2",
+ "2.6",
+ "http://downloads.sourceforge.net/project/lcms/lcms/2.6/lcms2-2.6.tar.gz",
+ ),
+ ("modules", "3.2.10", "http://prdownloads.sourceforge.net/modules/modules-3.2.10.tar.gz"),
+ # name-ver.ver.ext/download
+ (
+ "glew",
+ "2.0.0",
+ "https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download",
+ ),
+ # Common Repositories - cran downloads
+ # name.name_ver.ver-ver.ver
+ ("TH.data", "1.0-8", "https://cran.r-project.org/src/contrib/TH.data_1.0-8.tar.gz"),
+ ("knitr", "1.14", "https://cran.rstudio.com/src/contrib/knitr_1.14.tar.gz"),
+ ("devtools", "1.12.0", "https://cloud.r-project.org/src/contrib/devtools_1.12.0.tar.gz"),
+ # Common Repositories - pypi downloads
+ # name.name_name-ver.ver
+ ("3to2", "1.1.1", "https://pypi.python.org/packages/source/3/3to2/3to2-1.1.1.zip"),
+ (
+ "mpmath",
+ "0.19",
+ "https://pypi.python.org/packages/source/m/mpmath/mpmath-all-0.19.tar.gz",
+ ),
+ (
+ "pandas",
+ "0.16.0",
+ "https://pypi.python.org/packages/source/p/pandas/pandas-0.16.0.tar.gz#md5=bfe311f05dc0c351f8955fbd1e296e73",
+ ),
+ (
+ "sphinx_rtd_theme",
+ "0.1.10a0",
+ "https://pypi.python.org/packages/da/6b/1b75f13d8aa3333f19c6cdf1f0bc9f52ea739cae464fbee050307c121857/sphinx_rtd_theme-0.1.10a0.tar.gz",
+ ),
+ (
+ "backports.ssl_match_hostname",
+ "3.5.0.1",
+ "https://pypi.io/packages/source/b/backports.ssl_match_hostname/backports.ssl_match_hostname-3.5.0.1.tar.gz",
+ ),
+ # Common Repositories - bazaar downloads
+ ("libvterm", "681", "http://www.leonerd.org.uk/code/libvterm/libvterm-0+bzr681.tar.gz"),
+ # Common Tarball Formats
+ # 1st Pass: Simplest case
+ # Assume name contains no digits and version contains no letters
+ # name-ver.ver
+ ("libpng", "1.6.37", "http://download.sourceforge.net/libpng/libpng-1.6.37.tar.gz"),
+ # 2nd Pass: Version only
+ # Assume version contains no letters
+ # ver.ver
+ ("eigen", "3.2.7", "https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2"),
+ # ver.ver-ver
+ (
+ "ImageMagick",
+ "7.0.2-7",
+ "https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz",
+ ),
+ # vver.ver
+ ("CGNS", "3.3.0", "https://github.com/CGNS/CGNS/archive/v3.3.0.tar.gz"),
+ # vver_ver
+ (
+ "luafilesystem",
+ "1_6_3",
+ "https://github.com/keplerproject/luafilesystem/archive/v1_6_3.tar.gz",
+ ),
+ # 3rd Pass: No separator characters are used
+ # Assume name contains no digits
+ # namever
+ ("turbolinux", "702", "file://{0}/turbolinux702.tar.gz".format(os.getcwd())),
+ ("nauty", "26r7", "http://pallini.di.uniroma1.it/nauty26r7.tar.gz"),
+ # 4th Pass: A single separator character is used
+ # Assume name contains no digits
+ # name-name-ver-ver
+ (
+ "Trilinos",
+ "12-10-1",
+ "https://github.com/trilinos/Trilinos/archive/trilinos-release-12-10-1.tar.gz",
+ ),
+ (
+ "panda",
+ "2016-03-07",
+ "http://comopt.ifi.uni-heidelberg.de/software/PANDA/downloads/panda-2016-03-07.tar",
+ ),
+ ("gts", "121130", "http://gts.sourceforge.net/tarballs/gts-snapshot-121130.tar.gz"),
+ ("cdd", "061a", "http://www.cs.mcgill.ca/~fukuda/download/cdd/cdd-061a.tar.gz"),
+ # name_name_ver_ver
+ (
+ "tinyxml",
+ "2_6_2",
+ "https://sourceforge.net/projects/tinyxml/files/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz",
+ ),
+ (
+ "boost",
+ "1_55_0",
+ "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2",
+ ),
+ ("yorick", "2_2_04", "https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz"),
+ (
+ "tbb",
+ "44_20160413",
+ "https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160413oss_src.tgz",
+ ),
+ # name.name.ver.ver
+ ("prank", "150803", "http://wasabiapp.org/download/prank/prank.source.150803.tgz"),
+ ("jpeg", "9b", "http://www.ijg.org/files/jpegsrc.v9b.tar.gz"),
+ ("openjpeg", "2.1", "https://github.com/uclouvain/openjpeg/archive/version.2.1.tar.gz"),
+ # name.namever.ver
+ (
+ "atlas",
+ "3.11.34",
+ "http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2",
+ ),
+ (
+ "visit",
+ "2.10.1",
+ "http://portal.nersc.gov/project/visit/releases/2.10.1/visit2.10.1.tar.gz",
+ ),
+ ("geant", "4.10.01.p03", "http://geant4.cern.ch/support/source/geant4.10.01.p03.tar.gz"),
+ ("tcl", "8.6.5", "http://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz"),
+ # 5th Pass: Two separator characters are used
+ # Name may contain digits, version may contain letters
+ # name-name-ver.ver
+ ("m4", "1.4.17", "https://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"),
+ ("gmp", "6.0.0a", "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"),
+ (
+ "LaunchMON",
+ "1.0.2",
+ "https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz",
+ ),
+ # name-ver-ver.ver
+ ("libedit", "20150325-3.1", "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz"),
+ # name-name-ver_ver
+ ("icu4c", "57_1", "http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz"),
+ # name_name_ver.ver
+ (
+ "superlu_dist",
+ "4.1",
+ "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz",
+ ),
+ ("pexsi", "0.9.0", "https://math.berkeley.edu/~linlin/pexsi/download/pexsi_v0.9.0.tar.gz"),
+ # name_name.ver.ver
+ ("fer", "696", "ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.v696.tar.gz"),
+ # name_name_ver-ver
+ (
+ "Bridger",
+ "2014-12-01",
+ "https://downloads.sourceforge.net/project/rnaseqassembly/Bridger_r2014-12-01.tar.gz",
+ ),
+ # name-name-ver.ver-ver.ver
+ (
+ "sowing",
+ "1.1.23-p1",
+ "http://ftp.mcs.anl.gov/pub/petsc/externalpackages/sowing-1.1.23-p1.tar.gz",
+ ),
+ (
+ "bib2xhtml",
+ "3.0-15-gf506",
+ "http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz",
+ ),
+ # namever.ver-ver.ver
+ (
+ "go",
+ "1.4-bootstrap-20161024",
+ "https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz",
+ ),
+ # 6th Pass: All three separator characters are used
+ # Name may contain digits, version may contain letters
+ # name_name-ver.ver
+ (
+ "the_silver_searcher",
+ "0.32.0",
+ "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.32.0.tar.gz",
+ ),
+ (
+ "sphinx_rtd_theme",
+ "0.1.10a0",
+ "https://pypi.python.org/packages/source/s/sphinx_rtd_theme/sphinx_rtd_theme-0.1.10a0.tar.gz",
+ ),
+ # name.name_ver.ver-ver.ver
+ ("TH.data", "1.0-8", "https://cran.r-project.org/src/contrib/TH.data_1.0-8.tar.gz"),
+ ("XML", "3.98-1.4", "https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz"),
+ # name-name-ver.ver_ver.ver
+ (
+ "pypar",
+ "2.1.5_108",
+ "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/pypar/pypar-2.1.5_108.tgz",
+ ),
+ # name-namever.ver_ver.ver
+ (
+ "STAR-CCM+",
+ "11.06.010_02",
+ "file://{0}/STAR-CCM+11.06.010_02_linux-x86_64.tar.gz".format(os.getcwd()),
+ ),
+ # name-name_name-ver.ver
+ (
+ "PerlIO-utf8_strict",
+ "0.002",
+ "http://search.cpan.org/CPAN/authors/id/L/LE/LEONT/PerlIO-utf8_strict-0.002.tar.gz",
+ ),
+ # Various extensions
+ # .tar.gz
+ (
+ "libXcursor",
+ "1.1.14",
+ "https://www.x.org/archive/individual/lib/libXcursor-1.1.14.tar.gz",
+ ),
+ # .tar.bz2
+ ("mpfr", "4.0.1", "https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.tar.bz2"),
+ # .tar.xz
+ ("pkgconf", "1.5.4", "http://distfiles.dereferenced.org/pkgconf/pkgconf-1.5.4.tar.xz"),
+ # .tar.Z
+ (
+ "Gblocks",
+ "0.91b",
+ "http://molevol.cmima.csic.es/castresana/Gblocks/Gblocks_Linux64_0.91b.tar.Z",
+ ),
+ # .tar.zip
+ (
+ "bcl2fastq2",
+ "2.19.1.403",
+ "ftp://webdata2:webdata2@ussd-ftp.illumina.com/downloads/software/bcl2fastq/bcl2fastq2-v2.19.1.403-tar.zip",
+ ),
+ # .tar, .TAR
+ (
+ "python-meep",
+ "1.4.2",
+ "https://launchpad.net/python-meep/1.4/1.4/+download/python-meep-1.4.2.tar",
+ ),
+ (
+ "python-meep",
+ "1.4.2",
+ "https://launchpad.net/python-meep/1.4/1.4/+download/python-meep-1.4.2.TAR",
+ ),
+ # .gz
+ ("libXcursor", "1.1.14", "https://www.x.org/archive/individual/lib/libXcursor-1.1.14.gz"),
+ # .bz2
+ ("mpfr", "4.0.1", "https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.bz2"),
+ # .xz
+ ("pkgconf", "1.5.4", "http://distfiles.dereferenced.org/pkgconf/pkgconf-1.5.4.xz"),
+ # .Z
+ (
+ "Gblocks",
+ "0.91b",
+ "http://molevol.cmima.csic.es/castresana/Gblocks/Gblocks_Linux64_0.91b.Z",
+ ),
+ # .zip
+ ("bliss", "0.73", "http://www.tcs.hut.fi/Software/bliss/bliss-0.73.zip"),
+ # .tgz
+ ("ADOL-C", "2.6.1", "http://www.coin-or.org/download/source/ADOL-C/ADOL-C-2.6.1.tgz"),
+ # .tbz
+ ("mpfr", "4.0.1", "https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.tbz"),
+ # .tbz2
+ ("mpfr", "4.0.1", "https://ftpmirror.gnu.org/mpfr/mpfr-4.0.1.tbz2"),
+ # .txz
+ ("kim-api", "2.1.0", "https://s3.openkim.org/kim-api/kim-api-2.1.0.txz"),
+ # 8th Pass: Query strings
+ # suffix queries
+ (
+ "swiftsim",
+ "0.3.0",
+ "http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0",
+ ),
+ (
+ "swiftsim",
+ "0.3.0",
+ "https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0",
+ ),
+ ("sionlib", "1.7.1", "http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1"),
+ ("jube2", "2.2.2", "https://apps.fz-juelich.de/jsc/jube/jube2/download.php?version=2.2.2"),
+ (
+ "archive",
+ "1.0.0",
+ "https://code.ornl.gov/eck/papyrus/repository/archive.tar.bz2?ref=v1.0.0",
+ ),
+ (
+ "VecGeom",
+ "0.3.rc",
+ "https://gitlab.cern.ch/api/v4/projects/VecGeom%2FVecGeom/repository/archive.tar.gz?sha=v0.3.rc",
+ ),
+ (
+ "parsplice",
+ "1.1",
+ "https://gitlab.com/api/v4/projects/exaalt%2Fparsplice/repository/archive.tar.gz?sha=v1.1",
+ ),
+ (
+ "busco",
+ "2.0.1",
+ "https://gitlab.com/api/v4/projects/ezlab%2Fbusco/repository/archive.tar.gz?sha=2.0.1",
+ ),
+ (
+ "libaec",
+ "1.0.2",
+ "https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2",
+ ),
+ (
+ "icet",
+ "2.1.1",
+ "https://gitlab.kitware.com/api/v4/projects/icet%2Ficet/repository/archive.tar.bz2?sha=IceT-2.1.1",
+ ),
+ (
+ "vtk-m",
+ "1.3.0",
+ "https://gitlab.kitware.com/api/v4/projects/vtk%2Fvtk-m/repository/archive.tar.gz?sha=v1.3.0",
+ ),
+ (
+ "GATK",
+ "3.8-1-0-gf15c1c3ef",
+ "https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef",
+ ),
+ # stem queries
+ (
+ "slepc",
+ "3.6.2",
+ "http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz",
+ ),
+ (
+ "otf",
+ "1.12.5salmon",
+ "http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz",
+ ),
+ (
+ "eospac",
+ "6.4.0beta.1",
+ "http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz",
+ ),
+ (
+ "vampirtrace",
+ "5.14.4",
+ "http://wwwpub.zih.tu-dresden.de/~mlieber/dcount/dcount.php?package=vampirtrace&get=VampirTrace-5.14.4.tar.gz",
+ ),
+ ("EvtGen", "01.07.00", "https://evtgen.hepforge.org/downloads?f=EvtGen-01.07.00.tar.gz"),
+ # (we don't actually look for these, they are picked up
+ # during the preliminary stem parsing)
+ ("octopus", "6.0", "http://octopus-code.org/down.php?file=6.0/octopus-6.0.tar.gz"),
+ (
+ "cloog",
+ "0.18.1",
+ "http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz",
+ ),
+ (
+ "libxc",
+ "2.2.2",
+ "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz",
+ ),
+ (
+ "cistem",
+ "1.0.0-beta",
+ "https://cistem.org/system/tdf/upload3/cistem-1.0.0-beta-source-code.tar.gz?file=1&type=cistem_details&id=37&force=0",
+ ),
+ (
+ "Magics",
+ "4.1.0",
+ "https://confluence.ecmwf.int/download/attachments/3473464/Magics-4.1.0-Source.tar.gz?api=v2",
+ ),
+ (
+ "grib_api",
+ "1.17.0",
+ "https://software.ecmwf.int/wiki/download/attachments/3473437/grib_api-1.17.0-Source.tar.gz?api=v2",
+ ),
+ (
+ "eccodes",
+ "2.2.0",
+ "https://software.ecmwf.int/wiki/download/attachments/45757960/eccodes-2.2.0-Source.tar.gz?api=v2",
+ ),
+ (
+ "SWFFT",
+ "1.0",
+ "https://xgitlab.cels.anl.gov/api/v4/projects/hacc%2FSWFFT/repository/archive.tar.gz?sha=v1.0",
+ ),
+ # 9th Pass: Version in path
+ # github.com/repo/name/releases/download/name-vver/name
+ (
+ "nextflow",
+ "0.20.1",
+ "https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow",
+ ),
+ # ver/name
+ (
+ "ncbi",
+ "2.2.26",
+ "ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz",
+ ),
+ # Other tests for corner cases
+ # single character name
+ ("R", "3.3.2", "https://cloud.r-project.org/src/base/R-3/R-3.3.2.tar.gz"),
+ # name starts with digit
+ ("3to2", "1.1.1", "https://pypi.python.org/packages/source/3/3to2/3to2-1.1.1.zip"),
+ # plus in name
+ (
+ "gtk+",
+ "2.24.31",
+ "http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.31.tar.xz",
+ ),
+ ("voro++", "0.4.6", "http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz"),
+ # Name comes before download.php
+ ("sionlib", "1.7.1", "http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1"),
+ # Ignore download.php
+ (
+ "slepc",
+ "3.6.2",
+ "http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz",
+ ),
+ (
+ "ScientificPython",
+ "2.8.1",
+ "https://sourcesup.renater.fr/frs/download.php/file/4411/ScientificPython-2.8.1.tar.gz",
+ ),
+ # gloox beta style
+ ("gloox", "1.0-beta7", "http://camaya.net/download/gloox-1.0-beta7.tar.bz2"),
+ # sphinx beta style
+ ("sphinx", "1.10-beta", "http://sphinxsearch.com/downloads/sphinx-1.10-beta.tar.gz"),
+ # ruby version style
+ ("ruby", "1.9.1-p243", "ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.1-p243.tar.gz"),
+ # rc style
+ (
+ "libvorbis",
+ "1.2.2rc1",
+ "http://downloads.xiph.org/releases/vorbis/libvorbis-1.2.2rc1.tar.bz2",
+ ),
+ # dash rc style
+ ("js", "1.8.0-rc1", "http://ftp.mozilla.org/pub/mozilla.org/js/js-1.8.0-rc1.tar.gz"),
+ # apache version style
+ (
+ "apache-cassandra",
+ "1.2.0-rc2",
+ "http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz",
+ ),
+ # xaw3d version
+ ("Xaw3d", "1.5E", "ftp://ftp.visi.com/users/hawkeyd/X/Xaw3d-1.5E.tar.gz"),
+ # fann version
+ (
+ "fann",
+ "2.1.0beta",
+ "http://downloads.sourceforge.net/project/fann/fann/2.1.0beta/fann-2.1.0beta.zip",
+ ),
+ # imap version
+ ("imap", "2007f", "ftp://ftp.cac.washington.edu/imap/imap-2007f.tar.gz"),
+ # suite3270 version
+ (
+ "suite3270",
+ "3.3.12ga7",
+ "http://sourceforge.net/projects/x3270/files/x3270/3.3.12ga7/suite3270-3.3.12ga7-src.tgz",
+ ),
+ # scalasca version
+ (
+ "cube",
+ "4.2.3",
+ "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz",
+ ),
+ (
+ "cube",
+ "4.3-TP1",
+ "http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz",
+ ),
+ # github raw url
+ (
+ "CLAMR",
+ "2.0.7",
+ "https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true",
+ ),
+ # luaposix version
+ (
+ "luaposix",
+ "33.4.0",
+ "https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz",
+ ),
+ # nco version
+ ("nco", "4.6.2-beta03", "https://github.com/nco/nco/archive/4.6.2-beta03.tar.gz"),
+ ("nco", "4.6.3-alpha04", "https://github.com/nco/nco/archive/4.6.3-alpha04.tar.gz"),
+ ],
+)
def test_url_parse_name_and_version(name, version, url):
# Make sure correct name and version are extracted.
parsed_name, parsed_version = parse_name_and_version(url)
@@ -559,10 +818,13 @@ def test_url_parse_name_and_version(name, version, url):
assert url == substitute_version(url, version)
-@pytest.mark.parametrize('not_detectable_url', [
- 'http://www.netlib.org/blas/blast-forum/cblas.tgz',
- 'http://www.netlib.org/voronoi/triangle.zip',
-])
+@pytest.mark.parametrize(
+ "not_detectable_url",
+ [
+ "http://www.netlib.org/blas/blast-forum/cblas.tgz",
+ "http://www.netlib.org/voronoi/triangle.zip",
+ ],
+)
def test_no_version(not_detectable_url):
with pytest.raises(UndetectableVersionError):
parse_name_and_version(not_detectable_url)
diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py
index 50d8eea715..2c18f7c7bc 100644
--- a/lib/spack/spack/test/url_substitution.py
+++ b/lib/spack/spack/test/url_substitution.py
@@ -12,36 +12,65 @@ import pytest
import spack.url
-@pytest.mark.parametrize('base_url,version,expected', [
- # Ensures that substituting the same version results in the same URL
- ('http://www.mr511.de/software/libelf-0.8.13.tar.gz', '0.8.13',
- 'http://www.mr511.de/software/libelf-0.8.13.tar.gz'),
- # Test a completely different version syntax
- ('http://www.prevanders.net/libdwarf-20130729.tar.gz', '8.12',
- 'http://www.prevanders.net/libdwarf-8.12.tar.gz'),
- # Test a URL where the version appears twice
- # It should get substituted both times
- ('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
- 'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz'),
- # Test now with a partial prefix earlier in the URL
- # This is hard to figure out so Spack only substitutes
- # the last instance of the version
- ('https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2', '2.2.0',
- 'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.0.tar.bz2'),
- ('https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2', '2.2',
- 'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.tar.bz2'),
- # No separator between the name and version of the package
- ('file://{0}/turbolinux702.tar.gz'.format(os.getcwd()), '703',
- 'file://{0}/turbolinux703.tar.gz'.format(os.getcwd())),
-
- ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7',
- 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true'),
- ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '4.7',
- 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v4.7.tgz?raw=true'),
- # Package name contains regex characters
- ('http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz', '1.2.3',
- 'http://math.lbl.gov/voro++/download/dir/voro++-1.2.3.tar.gz'),
-])
+@pytest.mark.parametrize(
+ "base_url,version,expected",
+ [
+ # Ensures that substituting the same version results in the same URL
+ (
+ "http://www.mr511.de/software/libelf-0.8.13.tar.gz",
+ "0.8.13",
+ "http://www.mr511.de/software/libelf-0.8.13.tar.gz",
+ ),
+ # Test a completely different version syntax
+ (
+ "http://www.prevanders.net/libdwarf-20130729.tar.gz",
+ "8.12",
+ "http://www.prevanders.net/libdwarf-8.12.tar.gz",
+ ),
+ # Test a URL where the version appears twice
+ # It should get substituted both times
+ (
+ "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz",
+ "2.1.3",
+ "https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz",
+ ),
+ # Test now with a partial prefix earlier in the URL
+ # This is hard to figure out so Spack only substitutes
+ # the last instance of the version
+ (
+ "https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2",
+ "2.2.0",
+ "https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.0.tar.bz2",
+ ),
+ (
+ "https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2",
+ "2.2",
+ "https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.tar.bz2",
+ ),
+ # No separator between the name and version of the package
+ (
+ "file://{0}/turbolinux702.tar.gz".format(os.getcwd()),
+ "703",
+ "file://{0}/turbolinux703.tar.gz".format(os.getcwd()),
+ ),
+ (
+ "https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true",
+ "2.0.7",
+ "https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true",
+ ),
+ (
+ "https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true",
+ "4.7",
+ "https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v4.7.tgz?raw=true",
+ ),
+ # Package name contains regex characters
+ (
+ "http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz",
+ "1.2.3",
+ "http://math.lbl.gov/voro++/download/dir/voro++-1.2.3.tar.gz",
+ ),
+ ],
+)
def test_url_substitution(base_url, version, expected):
computed = spack.url.substitute_version(base_url, version)
assert computed == expected
diff --git a/lib/spack/spack/test/util/compression.py b/lib/spack/spack/test/util/compression.py
index ed2cf1fa51..13d1a44a73 100644
--- a/lib/spack/spack/test/util/compression.py
+++ b/lib/spack/spack/test/util/compression.py
@@ -14,12 +14,14 @@ from spack.paths import spack_root
from spack.util import compression as scomp
from spack.util.executable import CommandNotFoundError
-datadir = os.path.join(spack_root, 'lib', 'spack',
- 'spack', 'test', 'data', 'compression')
+datadir = os.path.join(spack_root, "lib", "spack", "spack", "test", "data", "compression")
ext_archive = {}
-[ext_archive.update({ext: '.'.join(['Foo', ext])}) for
- ext in scomp.ALLOWED_ARCHIVE_TYPES if 'TAR' not in ext]
+[
+ ext_archive.update({ext: ".".join(["Foo", ext])})
+ for ext in scomp.ALLOWED_ARCHIVE_TYPES
+ if "TAR" not in ext
+]
def support_stub():
@@ -28,23 +30,23 @@ def support_stub():
@pytest.fixture
def compr_support_check(monkeypatch):
- monkeypatch.setattr(scomp, 'lzma_support', support_stub)
- monkeypatch.setattr(scomp, 'tar_support', support_stub)
- monkeypatch.setattr(scomp, 'gzip_support', support_stub)
- monkeypatch.setattr(scomp, 'bz2_support', support_stub)
+ monkeypatch.setattr(scomp, "lzma_support", support_stub)
+ monkeypatch.setattr(scomp, "tar_support", support_stub)
+ monkeypatch.setattr(scomp, "gzip_support", support_stub)
+ monkeypatch.setattr(scomp, "bz2_support", support_stub)
@pytest.fixture
def archive_file(tmpdir_factory, request):
"""Copy example archive to temp directory for test"""
- archive_file_stub = os.path.join(datadir, 'Foo')
+ archive_file_stub = os.path.join(datadir, "Foo")
extension = request.param
- tmpdir = tmpdir_factory.mktemp('compression')
- shutil.copy(archive_file_stub + '.' + extension, str(tmpdir))
- return os.path.join(str(tmpdir), 'Foo.%s' % extension)
+ tmpdir = tmpdir_factory.mktemp("compression")
+ shutil.copy(archive_file_stub + "." + extension, str(tmpdir))
+ return os.path.join(str(tmpdir), "Foo.%s" % extension)
-@pytest.mark.parametrize('archive_file', ext_archive.keys(), indirect=True)
+@pytest.mark.parametrize("archive_file", ext_archive.keys(), indirect=True)
def test_native_unpacking(tmpdir_factory, archive_file):
extension = scomp.extension(archive_file)
util = scomp.decompressor_for(archive_file, extension)
@@ -54,12 +56,12 @@ def test_native_unpacking(tmpdir_factory, archive_file):
util(archive_file)
files = os.listdir(os.getcwd())
assert len(files) == 1
- with open(files[0], 'r') as f:
+ with open(files[0], "r") as f:
contents = f.read()
- assert 'TEST' in contents
+ assert "TEST" in contents
-@pytest.mark.parametrize('archive_file', ext_archive.keys(), indirect=True)
+@pytest.mark.parametrize("archive_file", ext_archive.keys(), indirect=True)
def test_system_unpacking(tmpdir_factory, archive_file, compr_support_check):
extension = scomp.extension(archive_file)
# actually run test
@@ -70,29 +72,29 @@ def test_system_unpacking(tmpdir_factory, archive_file, compr_support_check):
util(archive_file)
files = os.listdir(os.getcwd())
assert len(files) == 1
- with open(files[0], 'r') as f:
+ with open(files[0], "r") as f:
contents = f.read()
- assert 'TEST' in contents
+ assert "TEST" in contents
def test_unallowed_extension():
- bad_ext_archive = 'Foo.py'
+ bad_ext_archive = "Foo.py"
with pytest.raises(CommandNotFoundError):
- scomp.decompressor_for(bad_ext_archive, 'py')
+ scomp.decompressor_for(bad_ext_archive, "py")
-@pytest.mark.parametrize('archive', ext_archive.values())
+@pytest.mark.parametrize("archive", ext_archive.values())
def test_get_extension(archive):
ext = scomp.extension(archive)
assert ext_archive[ext] == archive
def test_get_bad_extension():
- archive = 'Foo.py'
+ archive = "Foo.py"
ext = scomp.extension(archive)
assert ext is None
-@pytest.mark.parametrize('path', ext_archive.values())
+@pytest.mark.parametrize("path", ext_archive.values())
def test_allowed_archvie(path):
assert scomp.allowed_archive(path)
diff --git a/lib/spack/spack/test/util/editor.py b/lib/spack/spack/test/util/editor.py
index 54b1c251fe..2266d96a57 100644
--- a/lib/spack/spack/test/util/editor.py
+++ b/lib/spack/spack/test/util/editor.py
@@ -12,137 +12,136 @@ from llnl.util.filesystem import set_executable
import spack.util.editor as ed
-pytestmark = [pytest.mark.usefixtures('working_env'),
- pytest.mark.skipif(sys.platform == 'win32',
- reason="editor not implemented on windows")]
+pytestmark = [
+ pytest.mark.usefixtures("working_env"),
+ pytest.mark.skipif(sys.platform == "win32", reason="editor not implemented on windows"),
+]
def _make_exe(tmpdir_factory, name, contents=None):
if sys.platform == "win32":
- name += '.exe'
- path = str(tmpdir_factory.mktemp('%s_exe' % name).join(name))
+ name += ".exe"
+ path = str(tmpdir_factory.mktemp("%s_exe" % name).join(name))
if contents is not None:
- with open(path, 'w') as f:
- f.write('#!/bin/sh\n%s\n' % contents)
+ with open(path, "w") as f:
+ f.write("#!/bin/sh\n%s\n" % contents)
set_executable(path)
return path
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def good_exe(tmpdir_factory):
- return _make_exe(tmpdir_factory, 'good', 'exit 0')
+ return _make_exe(tmpdir_factory, "good", "exit 0")
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def bad_exe(tmpdir_factory):
- return _make_exe(tmpdir_factory, 'bad', 'exit 1')
+ return _make_exe(tmpdir_factory, "bad", "exit 1")
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def nosuch_exe(tmpdir_factory):
- return _make_exe(tmpdir_factory, 'nosuch')
+ return _make_exe(tmpdir_factory, "nosuch")
-@pytest.fixture(scope='session')
+@pytest.fixture(scope="session")
def vim_exe(tmpdir_factory):
- return _make_exe(tmpdir_factory, 'vim', 'exit 0')
+ return _make_exe(tmpdir_factory, "vim", "exit 0")
def test_find_exe_from_env_var(good_exe):
- os.environ['EDITOR'] = good_exe
- assert ed._find_exe_from_env_var('EDITOR') == (good_exe, [good_exe])
+ os.environ["EDITOR"] = good_exe
+ assert ed._find_exe_from_env_var("EDITOR") == (good_exe, [good_exe])
def test_find_exe_from_env_var_with_args(good_exe):
- os.environ['EDITOR'] = good_exe + ' a b c'
- assert ed._find_exe_from_env_var('EDITOR') == (
- good_exe, [good_exe, 'a', 'b', 'c'])
+ os.environ["EDITOR"] = good_exe + " a b c"
+ assert ed._find_exe_from_env_var("EDITOR") == (good_exe, [good_exe, "a", "b", "c"])
def test_find_exe_from_env_var_bad_path(nosuch_exe):
- os.environ['EDITOR'] = nosuch_exe
- assert ed._find_exe_from_env_var('FOO') == (None, [])
+ os.environ["EDITOR"] = nosuch_exe
+ assert ed._find_exe_from_env_var("FOO") == (None, [])
def test_find_exe_from_env_var_no_editor():
- if 'FOO' in os.environ:
- os.environ.unset('FOO')
- assert ed._find_exe_from_env_var('FOO') == (None, [])
+ if "FOO" in os.environ:
+ os.environ.unset("FOO")
+ assert ed._find_exe_from_env_var("FOO") == (None, [])
def test_editor_visual(good_exe):
- os.environ['VISUAL'] = good_exe
+ os.environ["VISUAL"] = good_exe
def assert_exec(exe, args):
assert exe == good_exe
- assert args == [good_exe, '/path/to/file']
+ assert args == [good_exe, "/path/to/file"]
- ed.editor('/path/to/file', _exec_func=assert_exec)
+ ed.editor("/path/to/file", _exec_func=assert_exec)
def test_editor_visual_bad(good_exe, bad_exe):
- os.environ['VISUAL'] = bad_exe
- os.environ['EDITOR'] = good_exe
+ os.environ["VISUAL"] = bad_exe
+ os.environ["EDITOR"] = good_exe
def assert_exec(exe, args):
if exe == bad_exe:
raise OSError()
assert exe == good_exe
- assert args == [good_exe, '/path/to/file']
+ assert args == [good_exe, "/path/to/file"]
- ed.editor('/path/to/file', _exec_func=assert_exec)
+ ed.editor("/path/to/file", _exec_func=assert_exec)
def test_editor_no_visual(good_exe):
- if 'VISUAL' in os.environ:
- del os.environ['VISUAL']
- os.environ['EDITOR'] = good_exe
+ if "VISUAL" in os.environ:
+ del os.environ["VISUAL"]
+ os.environ["EDITOR"] = good_exe
def assert_exec(exe, args):
assert exe == good_exe
- assert args == [good_exe, '/path/to/file']
+ assert args == [good_exe, "/path/to/file"]
- ed.editor('/path/to/file', _exec_func=assert_exec)
+ ed.editor("/path/to/file", _exec_func=assert_exec)
def test_editor_no_visual_with_args(good_exe):
- if 'VISUAL' in os.environ:
- del os.environ['VISUAL']
+ if "VISUAL" in os.environ:
+ del os.environ["VISUAL"]
# editor has extra args in the var (e.g., emacs -nw)
- os.environ['EDITOR'] = good_exe + ' -nw --foo'
+ os.environ["EDITOR"] = good_exe + " -nw --foo"
def assert_exec(exe, args):
assert exe == good_exe
- assert args == [good_exe, '-nw', '--foo', '/path/to/file']
+ assert args == [good_exe, "-nw", "--foo", "/path/to/file"]
- ed.editor('/path/to/file', _exec_func=assert_exec)
+ ed.editor("/path/to/file", _exec_func=assert_exec)
def test_editor_both_bad(nosuch_exe, vim_exe):
- os.environ['VISUAL'] = nosuch_exe
- os.environ['EDITOR'] = nosuch_exe
+ os.environ["VISUAL"] = nosuch_exe
+ os.environ["EDITOR"] = nosuch_exe
- os.environ['PATH'] = '%s%s%s' % (
- os.path.dirname(vim_exe), os.pathsep, os.environ['PATH'])
+ os.environ["PATH"] = "%s%s%s" % (os.path.dirname(vim_exe), os.pathsep, os.environ["PATH"])
def assert_exec(exe, args):
assert exe == vim_exe
- assert args == [vim_exe, '/path/to/file']
+ assert args == [vim_exe, "/path/to/file"]
- ed.editor('/path/to/file', _exec_func=assert_exec)
+ ed.editor("/path/to/file", _exec_func=assert_exec)
def test_no_editor():
- if 'VISUAL' in os.environ:
- del os.environ['VISUAL']
- if 'EDITOR' in os.environ:
- del os.environ['EDITOR']
- os.environ['PATH'] = ''
+ if "VISUAL" in os.environ:
+ del os.environ["VISUAL"]
+ if "EDITOR" in os.environ:
+ del os.environ["EDITOR"]
+ os.environ["PATH"] = ""
def assert_exec(exe, args):
assert False
- with pytest.raises(EnvironmentError, match=r'No text editor found.*'):
- ed.editor('/path/to/file', _exec_func=assert_exec)
+ with pytest.raises(EnvironmentError, match=r"No text editor found.*"):
+ ed.editor("/path/to/file", _exec_func=assert_exec)
diff --git a/lib/spack/spack/test/util/environment.py b/lib/spack/spack/test/util/environment.py
index 1e28590e80..fba126058f 100644
--- a/lib/spack/spack/test/util/environment.py
+++ b/lib/spack/spack/test/util/environment.py
@@ -11,135 +11,135 @@ import pytest
import spack.util.environment as envutil
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
@pytest.fixture()
def prepare_environment_for_tests():
- if 'TEST_ENV_VAR' in os.environ:
- del os.environ['TEST_ENV_VAR']
+ if "TEST_ENV_VAR" in os.environ:
+ del os.environ["TEST_ENV_VAR"]
yield
- del os.environ['TEST_ENV_VAR']
+ del os.environ["TEST_ENV_VAR"]
def test_is_system_path():
- sys_path = 'C:\\Users' if is_windows else '/usr/bin'
- assert(envutil.is_system_path(sys_path))
- assert(not envutil.is_system_path('/nonsense_path/bin'))
- assert(not envutil.is_system_path(''))
- assert(not envutil.is_system_path(None))
+ sys_path = "C:\\Users" if is_windows else "/usr/bin"
+ assert envutil.is_system_path(sys_path)
+ assert not envutil.is_system_path("/nonsense_path/bin")
+ assert not envutil.is_system_path("")
+ assert not envutil.is_system_path(None)
if is_windows:
test_paths = [
- 'C:\\Users',
- 'C:\\',
- 'C:\\ProgramData',
- 'C:\\nonsense_path',
- 'C:\\Program Files',
- 'C:\\nonsense_path\\extra\\bin']
+ "C:\\Users",
+ "C:\\",
+ "C:\\ProgramData",
+ "C:\\nonsense_path",
+ "C:\\Program Files",
+ "C:\\nonsense_path\\extra\\bin",
+ ]
else:
- test_paths = ['/usr/bin',
- '/nonsense_path/lib',
- '/usr/local/lib',
- '/bin',
- '/nonsense_path/extra/bin',
- '/usr/lib64']
+ test_paths = [
+ "/usr/bin",
+ "/nonsense_path/lib",
+ "/usr/local/lib",
+ "/bin",
+ "/nonsense_path/extra/bin",
+ "/usr/lib64",
+ ]
def test_filter_system_paths():
- nonsense_prefix = 'C:\\nonsense_path' if is_windows else '/nonsense_path'
+ nonsense_prefix = "C:\\nonsense_path" if is_windows else "/nonsense_path"
expected = [p for p in test_paths if p.startswith(nonsense_prefix)]
filtered = envutil.filter_system_paths(test_paths)
- assert(expected == filtered)
+ assert expected == filtered
def deprioritize_system_paths():
- expected = [p for p in test_paths if p.startswith('/nonsense_path')]
- expected.extend([p for p in test_paths
- if not p.startswith('/nonsense_path')])
+ expected = [p for p in test_paths if p.startswith("/nonsense_path")]
+ expected.extend([p for p in test_paths if not p.startswith("/nonsense_path")])
filtered = envutil.deprioritize_system_paths(test_paths)
- assert(expected == filtered)
+ assert expected == filtered
def test_prune_duplicate_paths():
- test_paths = ['/a/b', '/a/c', '/a/b', '/a/a', '/a/c', '/a/a/..']
- expected = ['/a/b', '/a/c', '/a/a', '/a/a/..']
- assert(expected == envutil.prune_duplicate_paths(test_paths))
+ test_paths = ["/a/b", "/a/c", "/a/b", "/a/a", "/a/c", "/a/a/.."]
+ expected = ["/a/b", "/a/c", "/a/a", "/a/a/.."]
+ assert expected == envutil.prune_duplicate_paths(test_paths)
def test_get_path(prepare_environment_for_tests):
- os.environ['TEST_ENV_VAR'] = os.pathsep.join(['/a', '/b', '/c/d'])
- expected = ['/a', '/b', '/c/d']
- assert(envutil.get_path('TEST_ENV_VAR') == expected)
+ os.environ["TEST_ENV_VAR"] = os.pathsep.join(["/a", "/b", "/c/d"])
+ expected = ["/a", "/b", "/c/d"]
+ assert envutil.get_path("TEST_ENV_VAR") == expected
def test_env_flag(prepare_environment_for_tests):
- assert(not envutil.env_flag('TEST_NO_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = '1'
- assert(envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = 'TRUE'
- assert(envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = 'True'
- assert(envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = 'TRue'
- assert(envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = 'true'
- assert(envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = '27'
- assert(not envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = '-2.3'
- assert(not envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = '0'
- assert(not envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = 'False'
- assert(not envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = 'false'
- assert(not envutil.env_flag('TEST_ENV_VAR'))
- os.environ['TEST_ENV_VAR'] = 'garbage'
- assert(not envutil.env_flag('TEST_ENV_VAR'))
+ assert not envutil.env_flag("TEST_NO_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "1"
+ assert envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "TRUE"
+ assert envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "True"
+ assert envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "TRue"
+ assert envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "true"
+ assert envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "27"
+ assert not envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "-2.3"
+ assert not envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "0"
+ assert not envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "False"
+ assert not envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "false"
+ assert not envutil.env_flag("TEST_ENV_VAR")
+ os.environ["TEST_ENV_VAR"] = "garbage"
+ assert not envutil.env_flag("TEST_ENV_VAR")
def test_path_set(prepare_environment_for_tests):
- envutil.path_set('TEST_ENV_VAR', ['/a', '/a/b', '/a/a'])
- assert(os.environ['TEST_ENV_VAR'] == '/a' + os.pathsep
- + '/a/b' + os.pathsep + '/a/a')
+ envutil.path_set("TEST_ENV_VAR", ["/a", "/a/b", "/a/a"])
+ assert os.environ["TEST_ENV_VAR"] == "/a" + os.pathsep + "/a/b" + os.pathsep + "/a/a"
def test_path_put_first(prepare_environment_for_tests):
- envutil.path_set('TEST_ENV_VAR', test_paths)
- expected = ['/usr/bin', '/new_nonsense_path/a/b']
- expected.extend([p for p in test_paths if p != '/usr/bin'])
- envutil.path_put_first('TEST_ENV_VAR', expected)
- assert(envutil.get_path('TEST_ENV_VAR') == expected)
+ envutil.path_set("TEST_ENV_VAR", test_paths)
+ expected = ["/usr/bin", "/new_nonsense_path/a/b"]
+ expected.extend([p for p in test_paths if p != "/usr/bin"])
+ envutil.path_put_first("TEST_ENV_VAR", expected)
+ assert envutil.get_path("TEST_ENV_VAR") == expected
def test_dump_environment(prepare_environment_for_tests, tmpdir):
- test_paths = '/a:/b/x:/b/c'
- os.environ['TEST_ENV_VAR'] = test_paths
- dumpfile_path = str(tmpdir.join('envdump.txt'))
+ test_paths = "/a:/b/x:/b/c"
+ os.environ["TEST_ENV_VAR"] = test_paths
+ dumpfile_path = str(tmpdir.join("envdump.txt"))
envutil.dump_environment(dumpfile_path)
- with open(dumpfile_path, 'r') as dumpfile:
- assert('TEST_ENV_VAR={0}; export TEST_ENV_VAR\n'.format(test_paths)
- in list(dumpfile))
+ with open(dumpfile_path, "r") as dumpfile:
+ assert "TEST_ENV_VAR={0}; export TEST_ENV_VAR\n".format(test_paths) in list(dumpfile)
def test_reverse_environment_modifications(working_env):
start_env = {
- 'PREPEND_PATH': os.sep + os.path.join('path', 'to', 'prepend', 'to'),
- 'APPEND_PATH': os.sep + os.path.join('path', 'to', 'append', 'to'),
- 'UNSET': 'var_to_unset',
- 'APPEND_FLAGS': 'flags to append to',
+ "PREPEND_PATH": os.sep + os.path.join("path", "to", "prepend", "to"),
+ "APPEND_PATH": os.sep + os.path.join("path", "to", "append", "to"),
+ "UNSET": "var_to_unset",
+ "APPEND_FLAGS": "flags to append to",
}
to_reverse = envutil.EnvironmentModifications()
- to_reverse.prepend_path('PREPEND_PATH', '/new/path/prepended')
- to_reverse.append_path('APPEND_PATH', '/new/path/appended')
- to_reverse.set_path('SET_PATH', ['/one/set/path', '/two/set/path'])
- to_reverse.set('SET', 'a var')
- to_reverse.unset('UNSET')
- to_reverse.append_flags('APPEND_FLAGS', 'more_flags')
+ to_reverse.prepend_path("PREPEND_PATH", "/new/path/prepended")
+ to_reverse.append_path("APPEND_PATH", "/new/path/appended")
+ to_reverse.set_path("SET_PATH", ["/one/set/path", "/two/set/path"])
+ to_reverse.set("SET", "a var")
+ to_reverse.unset("UNSET")
+ to_reverse.append_flags("APPEND_FLAGS", "more_flags")
reversal = to_reverse.reversed()
@@ -151,5 +151,5 @@ def test_reverse_environment_modifications(working_env):
reversal.apply_modifications()
print(os.environ)
- start_env.pop('UNSET')
+ start_env.pop("UNSET")
assert os.environ == start_env
diff --git a/lib/spack/spack/test/util/executable.py b/lib/spack/spack/test/util/executable.py
index ea9213f063..8d360ea4e9 100644
--- a/lib/spack/spack/test/util/executable.py
+++ b/lib/spack/spack/test/util/executable.py
@@ -14,77 +14,81 @@ import spack
import spack.util.executable as ex
from spack.hooks.sbang import filter_shebangs_in_directory
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
def test_read_unicode(tmpdir, working_env):
- script_name = 'print_unicode.py'
+ script_name = "print_unicode.py"
# read the unicode back in and see whether things work
if is_windows:
- script = ex.Executable('%s %s' % (sys.executable, script_name))
+ script = ex.Executable("%s %s" % (sys.executable, script_name))
else:
- script = ex.Executable('./%s' % script_name)
+ script = ex.Executable("./%s" % script_name)
with tmpdir.as_cwd():
- os.environ['LD_LIBRARY_PATH'] = spack.main.spack_ld_library_path
+ os.environ["LD_LIBRARY_PATH"] = spack.main.spack_ld_library_path
# make a script that prints some unicode
- with open(script_name, 'w') as f:
- f.write('''#!{0}
+ with open(script_name, "w") as f:
+ f.write(
+ """#!{0}
from __future__ import print_function
import sys
if sys.version_info < (3, 0, 0):
reload(sys)
sys.setdefaultencoding('utf8')
print(u'\\xc3')
-'''.format(sys.executable))
+""".format(
+ sys.executable
+ )
+ )
# make it executable
fs.set_executable(script_name)
- filter_shebangs_in_directory('.', [script_name])
+ filter_shebangs_in_directory(".", [script_name])
- assert u'\xc3' == script(output=str).strip()
+ assert u"\xc3" == script(output=str).strip()
def test_which_relative_path_with_slash(tmpdir, working_env):
tmpdir.ensure("exe")
path = str(tmpdir.join("exe"))
- os.environ['PATH'] = ''
+ os.environ["PATH"] = ""
with tmpdir.as_cwd():
- no_exe = ex.which('.{0}exe'.format(os.path.sep))
+ no_exe = ex.which(".{0}exe".format(os.path.sep))
assert no_exe is None
if sys.platform == "win32":
# These checks are for 'executable' files, Windows
# determines this by file extension.
path += ".exe"
- tmpdir.ensure('exe.exe')
+ tmpdir.ensure("exe.exe")
else:
fs.set_executable(path)
- exe = ex.which('.{0}exe'.format(os.path.sep))
+ exe = ex.which(".{0}exe".format(os.path.sep))
assert exe.path == path
def test_which_with_slash_ignores_path(tmpdir, working_env):
- tmpdir.ensure('exe')
- tmpdir.ensure('bin{0}exe'.format(os.path.sep))
+ tmpdir.ensure("exe")
+ tmpdir.ensure("bin{0}exe".format(os.path.sep))
- path = str(tmpdir.join('exe'))
- wrong_path = str(tmpdir.join('bin', 'exe'))
- os.environ['PATH'] = os.path.dirname(wrong_path)
+ path = str(tmpdir.join("exe"))
+ wrong_path = str(tmpdir.join("bin", "exe"))
+ os.environ["PATH"] = os.path.dirname(wrong_path)
with tmpdir.as_cwd():
if sys.platform == "win32":
# For Windows, need to create files with .exe after any assert is none tests
- tmpdir.ensure('exe.exe')
- tmpdir.ensure('bin{0}exe.exe'.format(os.path.sep))
+ tmpdir.ensure("exe.exe")
+ tmpdir.ensure("bin{0}exe.exe".format(os.path.sep))
path = path + ".exe"
wrong_path = wrong_path + ".exe"
else:
fs.set_executable(path)
fs.set_executable(wrong_path)
- exe = ex.which('.{0}exe'.format(os.path.sep))
+ exe = ex.which(".{0}exe".format(os.path.sep))
assert exe.path == path
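
The two which() tests above hinge on one rule: a name containing a path separator is resolved against the current directory and ignores PATH entirely. A short usage sketch (the script name is hypothetical):

    import os

    import spack.util.executable as ex

    # "./hello.sh" contains a separator, so PATH is never consulted
    exe = ex.which(".{0}hello.sh".format(os.path.sep))
    if exe is None:
        print("no ./hello.sh in the current directory")
    else:
        print(exe.path)                  # absolute path to the script
        print(exe(output=str).strip())   # run it and capture stdout
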
diff --git a/lib/spack/spack/test/util/file_cache.py b/lib/spack/spack/test/util/file_cache.py
index 815b21fb38..024ce329b3 100644
--- a/lib/spack/spack/test/util/file_cache.py
+++ b/lib/spack/spack/test/util/file_cache.py
@@ -22,12 +22,12 @@ def file_cache(tmpdir):
def test_write_and_read_cache_file(file_cache):
"""Test writing then reading a cached file."""
- with file_cache.write_transaction('test.yaml') as (old, new):
+ with file_cache.write_transaction("test.yaml") as (old, new):
assert old is None
assert new is not None
new.write("foobar\n")
- with file_cache.read_transaction('test.yaml') as stream:
+ with file_cache.read_transaction("test.yaml") as stream:
text = stream.read()
assert text == "foobar\n"
@@ -37,70 +37,69 @@ def test_write_and_remove_cache_file(file_cache):
entry from it.
"""
- with file_cache.write_transaction('test.yaml') as (old, new):
+ with file_cache.write_transaction("test.yaml") as (old, new):
assert old is None
assert new is not None
new.write("foobar\n")
- with file_cache.write_transaction('test.yaml') as (old, new):
+ with file_cache.write_transaction("test.yaml") as (old, new):
assert old is not None
text = old.read()
assert text == "foobar\n"
assert new is not None
new.write("barbaz\n")
- with file_cache.read_transaction('test.yaml') as stream:
+ with file_cache.read_transaction("test.yaml") as stream:
text = stream.read()
assert text == "barbaz\n"
- file_cache.remove('test.yaml')
+ file_cache.remove("test.yaml")
# After removal the file should not exist
- assert not os.path.exists(file_cache.cache_path('test.yaml'))
+ assert not os.path.exists(file_cache.cache_path("test.yaml"))
    # Whether the lock file exists is more of an implementation detail: on Linux they
    # continue to exist, while on Windows they don't.
# assert os.path.exists(file_cache._lock_path('test.yaml'))
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_cache_init_entry_fails(file_cache):
"""Test init_entry failures."""
- relpath = fs.join_path('test-dir', 'read-only-file.txt')
+ relpath = fs.join_path("test-dir", "read-only-file.txt")
cachefile = file_cache.cache_path(relpath)
fs.touchp(cachefile)
# Ensure directory causes exception
- with pytest.raises(CacheError, match='not a file'):
+ with pytest.raises(CacheError, match="not a file"):
file_cache.init_entry(os.path.dirname(relpath))
# Ensure non-readable file causes exception
os.chmod(cachefile, 0o200)
- with pytest.raises(CacheError, match='Cannot access cache file'):
+ with pytest.raises(CacheError, match="Cannot access cache file"):
file_cache.init_entry(relpath)
# Ensure read-only parent causes exception
- relpath = fs.join_path('test-dir', 'another-file.txxt')
+ relpath = fs.join_path("test-dir", "another-file.txxt")
cachefile = file_cache.cache_path(relpath)
os.chmod(os.path.dirname(cachefile), 0o400)
- with pytest.raises(CacheError, match='Cannot access cache dir'):
+ with pytest.raises(CacheError, match="Cannot access cache dir"):
file_cache.init_entry(relpath)
def test_cache_write_readonly_cache_fails(file_cache):
"""Test writing a read-only cached file."""
- filename = 'read-only-file.txt'
+ filename = "read-only-file.txt"
path = file_cache.cache_path(filename)
fs.touch(path)
os.chmod(path, 0o400)
- with pytest.raises(CacheError, match='Insufficient permissions to write'):
+ with pytest.raises(CacheError, match="Insufficient permissions to write"):
file_cache.write_transaction(filename)
-@pytest.mark.regression('31475')
+@pytest.mark.regression("31475")
def test_delete_is_idempotent(file_cache):
"""Deleting a non-existent key should be idempotent, to simplify life when
running delete with multiple processes"""
- file_cache.remove('test.yaml')
+ file_cache.remove("test.yaml")
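
The file-cache tests revolve around three calls: write_transaction() yields an (old, new) pair of streams, read_transaction() yields a readable stream, and remove() may be called repeatedly. A minimal sketch, assuming the class is FileCache in spack.util.file_cache (the test builds it through a fixture, so the import path is an assumption):

    import tempfile

    from spack.util.file_cache import FileCache  # assumed import path

    cache = FileCache(tempfile.mkdtemp())

    with cache.write_transaction("test.yaml") as (old, new):
        # 'old' is None on the first write; 'new' is the stream to fill
        new.write("foobar\n")

    with cache.read_transaction("test.yaml") as stream:
        print(stream.read())    # -> "foobar\n"

    cache.remove("test.yaml")   # idempotent: a second call is harmless
    cache.remove("test.yaml")
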
diff --git a/lib/spack/spack/test/util/log_parser.py b/lib/spack/spack/test/util/log_parser.py
index c8cc30dd00..9ffd7d8958 100644
--- a/lib/spack/spack/test/util/log_parser.py
+++ b/lib/spack/spack/test/util/log_parser.py
@@ -7,10 +7,11 @@ from ctest_log_parser import CTestLogParser
def test_log_parser(tmpdir):
- log_file = tmpdir.join('log.txt')
+ log_file = tmpdir.join("log.txt")
- with log_file.open('w') as f:
- f.write("""#!/bin/sh\n
+ with log_file.open("w") as f:
+ f.write(
+ """#!/bin/sh\n
checking build system type... x86_64-apple-darwin16.6.0
checking host system type... x86_64-apple-darwin16.6.0
error: weird_error.c:145: something weird happened E
@@ -22,13 +23,14 @@ ld: fatal: linker thing happened E
checking for suffix of executables...
configure: error: in /path/to/some/file: E
configure: error: cannot run C compiled programs. E
-""")
+"""
+ )
parser = CTestLogParser()
errors, warnings = parser.parse(str(log_file))
assert len(errors) == 4
- assert all(e.text.endswith('E') for e in errors)
+ assert all(e.text.endswith("E") for e in errors)
assert len(warnings) == 1
- assert all(w.text.endswith('W') for w in warnings)
+ assert all(w.text.endswith("W") for w in warnings)
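
parse() splits a build log into error and warning events, each carrying the matched text, which is what the assertions above count and inspect. The same call outside the test (the log path is assumed to exist):

    from ctest_log_parser import CTestLogParser

    parser = CTestLogParser()
    errors, warnings = parser.parse("log.txt")  # hypothetical log file

    for event in errors:
        print("error:  ", event.text)
    for event in warnings:
        print("warning:", event.text)
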
diff --git a/lib/spack/spack/test/util/mock_package.py b/lib/spack/spack/test/util/mock_package.py
index 177b39cbec..9f8e8e297f 100644
--- a/lib/spack/spack/test/util/mock_package.py
+++ b/lib/spack/spack/test/util/mock_package.py
@@ -9,29 +9,24 @@ from spack.util.mock_package import MockPackageMultiRepo
def test_mock_package_possible_dependencies():
mock_repo = MockPackageMultiRepo()
- e = mock_repo.add_package('e')
- d = mock_repo.add_package('d', [e])
- c = mock_repo.add_package('c', [d])
- b = mock_repo.add_package('b', [d])
- a = mock_repo.add_package('a', [b, c])
+ e = mock_repo.add_package("e")
+ d = mock_repo.add_package("d", [e])
+ c = mock_repo.add_package("c", [d])
+ b = mock_repo.add_package("b", [d])
+ a = mock_repo.add_package("a", [b, c])
with spack.repo.use_repositories(mock_repo):
- assert set(a.possible_dependencies()) == set(['a', 'b', 'c', 'd', 'e'])
- assert set(b.possible_dependencies()) == set(['b', 'd', 'e'])
- assert set(c.possible_dependencies()) == set(['c', 'd', 'e'])
- assert set(d.possible_dependencies()) == set(['d', 'e'])
- assert set(e.possible_dependencies()) == set(['e'])
-
- assert set(
- a.possible_dependencies(transitive=False)) == set(['a', 'b', 'c'])
- assert set(
- b.possible_dependencies(transitive=False)) == set(['b', 'd'])
- assert set(
- c.possible_dependencies(transitive=False)) == set(['c', 'd'])
- assert set(
- d.possible_dependencies(transitive=False)) == set(['d', 'e'])
- assert set(
- e.possible_dependencies(transitive=False)) == set(['e'])
+ assert set(a.possible_dependencies()) == set(["a", "b", "c", "d", "e"])
+ assert set(b.possible_dependencies()) == set(["b", "d", "e"])
+ assert set(c.possible_dependencies()) == set(["c", "d", "e"])
+ assert set(d.possible_dependencies()) == set(["d", "e"])
+ assert set(e.possible_dependencies()) == set(["e"])
+
+ assert set(a.possible_dependencies(transitive=False)) == set(["a", "b", "c"])
+ assert set(b.possible_dependencies(transitive=False)) == set(["b", "d"])
+ assert set(c.possible_dependencies(transitive=False)) == set(["c", "d"])
+ assert set(d.possible_dependencies(transitive=False)) == set(["d", "e"])
+ assert set(e.possible_dependencies(transitive=False)) == set(["e"])
def test_mock_repo_is_virtual():
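
The mock-repo test builds a small dependency DAG and checks possible_dependencies() with and without transitivity: a package always includes itself, plus either everything reachable or only its direct dependencies. A smaller sketch of the same pattern:

    import spack.repo
    from spack.util.mock_package import MockPackageMultiRepo

    repo = MockPackageMultiRepo()
    e = repo.add_package("e")
    d = repo.add_package("d", [e])
    a = repo.add_package("a", [d])

    with spack.repo.use_repositories(repo):
        print(sorted(a.possible_dependencies()))                  # ['a', 'd', 'e']
        print(sorted(a.possible_dependencies(transitive=False)))  # ['a', 'd']
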
diff --git a/lib/spack/spack/test/util/package_hash.py b/lib/spack/spack/test/util/package_hash.py
index f8b1bd5067..c4c2083331 100644
--- a/lib/spack/spack/test/util/package_hash.py
+++ b/lib/spack/spack/test/util/package_hash.py
@@ -20,13 +20,9 @@ datadir = os.path.join(spack.paths.test_path, "data", "unparse")
def compare_sans_name(eq, spec1, spec2):
content1 = ph.canonical_source(spec1)
- content1 = content1.replace(
- spack.repo.path.get_pkg_class(spec1.name).__name__, 'TestPackage'
- )
+ content1 = content1.replace(spack.repo.path.get_pkg_class(spec1.name).__name__, "TestPackage")
content2 = ph.canonical_source(spec2)
- content2 = content2.replace(
- spack.repo.path.get_pkg_class(spec2.name).__name__, 'TestPackage'
- )
+ content2 = content2.replace(spack.repo.path.get_pkg_class(spec2.name).__name__, "TestPackage")
if eq:
assert content1 == content2
else:
@@ -36,12 +32,12 @@ def compare_sans_name(eq, spec1, spec2):
def compare_hash_sans_name(eq, spec1, spec2):
content1 = ph.canonical_source(spec1)
pkg_cls1 = spack.repo.path.get_pkg_class(spec1.name)
- content1 = content1.replace(pkg_cls1.__name__, 'TestPackage')
+ content1 = content1.replace(pkg_cls1.__name__, "TestPackage")
hash1 = pkg_cls1(spec1).content_hash(content=content1)
content2 = ph.canonical_source(spec2)
pkg_cls2 = spack.repo.path.get_pkg_class(spec2.name)
- content2 = content2.replace(pkg_cls2.__name__, 'TestPackage')
+ content2 = content2.replace(pkg_cls2.__name__, "TestPackage")
hash2 = pkg_cls2(spec2).content_hash(content=content2)
if eq:
@@ -131,13 +127,13 @@ def test_content_hash_different_variants(mock_packages, config):
def test_content_hash_cannot_get_details_from_ast(mock_packages, config):
"""Packages hash-test1 and hash-test3 would be considered the same
- except that hash-test3 conditionally executes a phase based on
- a "when" directive that Spack cannot evaluate by examining the
- AST. This test ensures that Spack can compute a content hash
- for hash-test3. If Spack cannot determine when a phase applies,
- it adds it by default, so the test also ensures that the hashes
- differ where Spack includes a phase on account of AST-examination
- failure.
+ except that hash-test3 conditionally executes a phase based on
+ a "when" directive that Spack cannot evaluate by examining the
+ AST. This test ensures that Spack can compute a content hash
+ for hash-test3. If Spack cannot determine when a phase applies,
+ it adds it by default, so the test also ensures that the hashes
+ differ where Spack includes a phase on account of AST-examination
+ failure.
"""
spec3 = Spec("hash-test1@1.7").concretized()
spec4 = Spec("hash-test3@1.7").concretized()
@@ -217,9 +213,9 @@ class HasManyDirectives:
pass
{directives}
-""".format(directives="\n".join(
- " %s()" % name for name in spack.directives.directive_names
-))
+""".format(
+ directives="\n".join(" %s()" % name for name in spack.directives.directive_names)
+)
def test_remove_all_directives():
@@ -337,23 +333,26 @@ def test_remove_complex_package_logic_filtered():
assert unparsed == complex_package_logic_filtered
-@pytest.mark.parametrize("package_spec,expected_hash", [
- ("amdfftw", "tivb752zddjgvfkogfs7cnnvp5olj6co"),
- ("grads", "rrlmwml3f2frdnqavmro3ias66h5b2ce"),
- ("llvm", "nufffum5dabmaf4l5tpfcblnbfjknvd3"),
- # has @when("@4.1.0") and raw unicode literals
- ("mfem", "tiiv7uq7v2xtv24vdij5ptcv76dpazrw"),
- ("mfem@4.0.0", "tiiv7uq7v2xtv24vdij5ptcv76dpazrw"),
- ("mfem@4.1.0", "gxastq64to74qt4he4knpyjfdhh5auel"),
- # has @when("@1.5.0:")
- ("py-torch", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
- ("py-torch@1.0", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
- ("py-torch@1.6", "p4ine4hc6f2ik2f2wyuwieslqbozll5w"),
- # has a print with multiple arguments
- ("legion", "zdpawm4avw3fllxcutvmqb5c3bj5twqt"),
- # has nested `with when()` blocks and loops
- ("trilinos", "vqrgscjrla4hi7bllink7v6v6dwxgc2p"),
-])
+@pytest.mark.parametrize(
+ "package_spec,expected_hash",
+ [
+ ("amdfftw", "tivb752zddjgvfkogfs7cnnvp5olj6co"),
+ ("grads", "rrlmwml3f2frdnqavmro3ias66h5b2ce"),
+ ("llvm", "nufffum5dabmaf4l5tpfcblnbfjknvd3"),
+ # has @when("@4.1.0") and raw unicode literals
+ ("mfem", "tiiv7uq7v2xtv24vdij5ptcv76dpazrw"),
+ ("mfem@4.0.0", "tiiv7uq7v2xtv24vdij5ptcv76dpazrw"),
+ ("mfem@4.1.0", "gxastq64to74qt4he4knpyjfdhh5auel"),
+ # has @when("@1.5.0:")
+ ("py-torch", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
+ ("py-torch@1.0", "qs7djgqn7dy7r3ps4g7hv2pjvjk4qkhd"),
+ ("py-torch@1.6", "p4ine4hc6f2ik2f2wyuwieslqbozll5w"),
+ # has a print with multiple arguments
+ ("legion", "zdpawm4avw3fllxcutvmqb5c3bj5twqt"),
+ # has nested `with when()` blocks and loops
+ ("trilinos", "vqrgscjrla4hi7bllink7v6v6dwxgc2p"),
+ ],
+)
def test_package_hash_consistency(package_spec, expected_hash):
"""Ensure that that package hash is consistent python version to version.
diff --git a/lib/spack/spack/test/util/path.py b/lib/spack/spack/test/util/path.py
index 11989517f9..ccefe8ff1f 100644
--- a/lib/spack/spack/test/util/path.py
+++ b/lib/spack/spack/test/util/path.py
@@ -13,7 +13,7 @@ import llnl.util.tty as tty
import spack.config
import spack.util.path as sup
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
#: Some lines with lots of placeholders
@@ -35,20 +35,19 @@ fixed_lines = [
def test_sanitze_file_path(tmpdir):
"""Test filtering illegal characters out of potential file paths"""
    # On *nix, the only illegal character in file names is '/'
- illegal_file_path = str(tmpdir) + '//' + 'abcdefghi.txt'
+ illegal_file_path = str(tmpdir) + "//" + "abcdefghi.txt"
if is_windows:
# Windows has a larger set of illegal characters
illegal_file_path = os.path.join(tmpdir, 'a<b>cd?e:f"g|h*i.txt')
real_path = sup.sanitize_file_path(illegal_file_path)
- assert real_path == os.path.join(str(tmpdir), 'abcdefghi.txt')
+ assert real_path == os.path.join(str(tmpdir), "abcdefghi.txt")
# This class pertains to path string padding manipulation specifically
# which is used for binary caching. This functionality is not supported
# on Windows as of yet.
-@pytest.mark.skipif(is_windows,
- reason='Padding funtionality unsupported on Windows')
-class TestPathPadding():
+@pytest.mark.skipif(is_windows, reason="Padding functionality unsupported on Windows")
+class TestPathPadding:
@pytest.mark.parametrize("padded,fixed", zip(padded_lines, fixed_lines))
def test_padding_substitution(self, padded, fixed):
"""Ensure that all padded lines are unpadded correctly."""
@@ -56,7 +55,7 @@ class TestPathPadding():
def test_no_substitution(self):
"""Ensure that a line not containing one full path placeholder
- is not modified."""
+ is not modified."""
partial = "--prefix=/Users/gamblin2/padding-log-test/opt/__spack_path_pla/darwin-bigsur-skylake/apple-clang-12.0.5/zlib-1.2.11-74mwnxgn6nujehpyyalhwizwojwn5zga'" # noqa: E501
assert sup.padding_filter(partial) == partial
@@ -74,12 +73,8 @@ class TestPathPadding():
def test_longest_prefix_re(self):
"""Test that longest_prefix_re generates correct regular expressions."""
- assert "(s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re(
- "string", capture=True
- )
- assert "(?:s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re(
- "string", capture=False
- )
+ assert "(s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re("string", capture=True)
+ assert "(?:s(?:t(?:r(?:i(?:ng?)?)?)?)?)" == sup.longest_prefix_re("string", capture=False)
def test_output_filtering(self, capfd, install_mockery, mutable_config):
"""Test filtering padding out of tty messages."""
@@ -87,7 +82,7 @@ class TestPathPadding():
padding_string = "[padded-to-%d-chars]" % len(long_path)
# test filtering when padding is enabled
- with spack.config.override('config:install_tree', {"padded_length": 256}):
+ with spack.config.override("config:install_tree", {"padded_length": 256}):
# tty.msg with filtering on the first argument
with sup.filter_padding():
tty.msg("here is a long path: %s/with/a/suffix" % long_path)
diff --git a/lib/spack/spack/test/util/prefix.py b/lib/spack/spack/test/util/prefix.py
index 8cd92306b5..479c165e2b 100644
--- a/lib/spack/spack/test/util/prefix.py
+++ b/lib/spack/spack/test/util/prefix.py
@@ -12,36 +12,36 @@ from spack.util.prefix import Prefix
def test_prefix_attributes():
"""Test normal prefix attributes like ``prefix.bin``"""
- prefix = Prefix(os.sep + 'usr')
+ prefix = Prefix(os.sep + "usr")
- assert prefix.bin == os.sep + os.path.join('usr', 'bin')
- assert prefix.lib == os.sep + os.path.join('usr', 'lib')
- assert prefix.include == os.sep + os.path.join('usr', 'include')
+ assert prefix.bin == os.sep + os.path.join("usr", "bin")
+ assert prefix.lib == os.sep + os.path.join("usr", "lib")
+ assert prefix.include == os.sep + os.path.join("usr", "include")
def test_prefix_join():
"""Test prefix join ``prefix.join(...)``"""
- prefix = Prefix(os.sep + 'usr')
+ prefix = Prefix(os.sep + "usr")
- a1 = prefix.join('a_{0}'.format(1)).lib64
- a2 = prefix.join('a-{0}'.format(1)).lib64
- a3 = prefix.join('a.{0}'.format(1)).lib64
+ a1 = prefix.join("a_{0}".format(1)).lib64
+ a2 = prefix.join("a-{0}".format(1)).lib64
+ a3 = prefix.join("a.{0}".format(1)).lib64
- assert a1 == os.sep + os.path.join('usr', 'a_1', 'lib64')
- assert a2 == os.sep + os.path.join('usr', 'a-1', 'lib64')
- assert a3 == os.sep + os.path.join('usr', 'a.1', 'lib64')
+ assert a1 == os.sep + os.path.join("usr", "a_1", "lib64")
+ assert a2 == os.sep + os.path.join("usr", "a-1", "lib64")
+ assert a3 == os.sep + os.path.join("usr", "a.1", "lib64")
assert isinstance(a1, Prefix)
assert isinstance(a2, Prefix)
assert isinstance(a3, Prefix)
- p1 = prefix.bin.join('executable.sh')
- p2 = prefix.share.join('pkg-config').join('foo.pc')
- p3 = prefix.join('dashed-directory').foo
+ p1 = prefix.bin.join("executable.sh")
+ p2 = prefix.share.join("pkg-config").join("foo.pc")
+ p3 = prefix.join("dashed-directory").foo
- assert p1 == os.sep + os.path.join('usr', 'bin', 'executable.sh')
- assert p2 == os.sep + os.path.join('usr', 'share', 'pkg-config', 'foo.pc')
- assert p3 == os.sep + os.path.join('usr', 'dashed-directory', 'foo')
+ assert p1 == os.sep + os.path.join("usr", "bin", "executable.sh")
+ assert p2 == os.sep + os.path.join("usr", "share", "pkg-config", "foo.pc")
+ assert p3 == os.sep + os.path.join("usr", "dashed-directory", "foo")
assert isinstance(p1, Prefix)
assert isinstance(p2, Prefix)
@@ -50,29 +50,29 @@ def test_prefix_join():
def test_multilevel_attributes():
"""Test attributes of attributes, like ``prefix.share.man``"""
- prefix = Prefix(os.sep + 'usr' + os.sep)
+ prefix = Prefix(os.sep + "usr" + os.sep)
- assert prefix.share.man == os.sep + os.path.join('usr', 'share', 'man')
- assert prefix.man.man8 == os.sep + os.path.join('usr', 'man', 'man8')
- assert prefix.foo.bar.baz == os.sep + os.path.join('usr', 'foo', 'bar', 'baz')
+ assert prefix.share.man == os.sep + os.path.join("usr", "share", "man")
+ assert prefix.man.man8 == os.sep + os.path.join("usr", "man", "man8")
+ assert prefix.foo.bar.baz == os.sep + os.path.join("usr", "foo", "bar", "baz")
share = prefix.share
assert isinstance(share, Prefix)
- assert share.man == os.sep + os.path.join('usr', 'share', 'man')
+ assert share.man == os.sep + os.path.join("usr", "share", "man")
def test_string_like_behavior():
"""Test string-like behavior of the prefix object"""
- prefix = Prefix('/usr')
+ prefix = Prefix("/usr")
- assert prefix == '/usr'
+ assert prefix == "/usr"
assert isinstance(prefix, str)
- assert prefix + '/bin' == '/usr/bin'
- assert '--prefix=%s' % prefix == '--prefix=/usr'
- assert '--prefix={0}'.format(prefix) == '--prefix=/usr'
+ assert prefix + "/bin" == "/usr/bin"
+ assert "--prefix=%s" % prefix == "--prefix=/usr"
+ assert "--prefix={0}".format(prefix) == "--prefix=/usr"
- assert prefix.find('u', 1)
- assert prefix.upper() == '/USR'
- assert prefix.lstrip('/') == 'usr'
+ assert prefix.find("u", 1)
+ assert prefix.upper() == "/USR"
+ assert prefix.lstrip("/") == "usr"
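
Prefix is a str subclass whose attribute accesses and join() calls return further Prefix objects, so deep install paths can be spelled as attribute chains while ordinary string operations keep working. A short usage sketch (the file names are made up):

    import os

    from spack.util.prefix import Prefix

    prefix = Prefix(os.sep + "usr")

    print(prefix.bin)                      # /usr/bin on POSIX systems
    print(prefix.share.man)                # /usr/share/man
    print(prefix.bin.join("tool.sh"))      # /usr/bin/tool.sh (hypothetical name)

    print("--prefix={0}".format(prefix))   # still a plain string: --prefix=/usr
    print(isinstance(prefix.lib, Prefix), isinstance(prefix, str))  # True True
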
diff --git a/lib/spack/spack/test/util/spack_lock_wrapper.py b/lib/spack/spack/test/util/spack_lock_wrapper.py
index 7dd0212279..a590e95804 100644
--- a/lib/spack/spack/test/util/spack_lock_wrapper.py
+++ b/lib/spack/spack/test/util/spack_lock_wrapper.py
@@ -16,11 +16,11 @@ import spack.util.lock as lk
def test_disable_locking(tmpdir):
"""Ensure that locks do no real locking when disabled."""
- lock_path = str(tmpdir.join('lockfile'))
+ lock_path = str(tmpdir.join("lockfile"))
- old_value = spack.config.get('config:locks')
+ old_value = spack.config.get("config:locks")
- with spack.config.override('config:locks', False):
+ with spack.config.override("config:locks", False):
lock = lk.Lock(lock_path)
lock.acquire_read()
@@ -35,7 +35,7 @@ def test_disable_locking(tmpdir):
lock.release_read()
assert not os.path.exists(lock_path)
- assert old_value == spack.config.get('config:locks')
+ assert old_value == spack.config.get("config:locks")
# "Disable" mock_stage fixture to avoid subdir permissions issues on cleanup.
diff --git a/lib/spack/spack/test/util/spack_yaml.py b/lib/spack/spack/test/util/spack_yaml.py
index 8cde827b62..600c6d5bef 100644
--- a/lib/spack/spack/test/util/spack_yaml.py
+++ b/lib/spack/spack/test/util/spack_yaml.py
@@ -8,13 +8,13 @@ import re
import spack.config
from spack.main import SpackCommand
-config_cmd = SpackCommand('config')
+config_cmd = SpackCommand("config")
def get_config_line(pattern, lines):
"""Get a configuration line that matches a particular pattern."""
line = next((x for x in lines if re.search(pattern, x)), None)
- assert line is not None, 'no such line!'
+ assert line is not None, "no such line!"
return line
@@ -29,42 +29,42 @@ def check_blame(element, file_name, line=None):
``file_name``, which may just be a name for a special config scope
like ``_builtin`` or ``command_line``.
"""
- output = config_cmd('blame', 'config')
+ output = config_cmd("blame", "config")
- blame_lines = output.rstrip().split('\n')
- element_line = get_config_line(element + ':', blame_lines)
+ blame_lines = output.rstrip().split("\n")
+ element_line = get_config_line(element + ":", blame_lines)
annotation = file_name
if line is not None:
- annotation += ':%d' % line
+ annotation += ":%d" % line
assert file_name in element_line
def test_config_blame(config):
"""check blame info for elements in mock configuration."""
- config_file = config.get_config_filename('site', 'config')
+ config_file = config.get_config_filename("site", "config")
- check_blame('install_tree', config_file, 2)
- check_blame('source_cache', config_file, 11)
- check_blame('misc_cache', config_file, 12)
- check_blame('verify_ssl', config_file, 13)
- check_blame('checksum', config_file, 14)
- check_blame('dirty', config_file, 15)
+ check_blame("install_tree", config_file, 2)
+ check_blame("source_cache", config_file, 11)
+ check_blame("misc_cache", config_file, 12)
+ check_blame("verify_ssl", config_file, 13)
+ check_blame("checksum", config_file, 14)
+ check_blame("dirty", config_file, 15)
def test_config_blame_with_override(config):
"""check blame for an element from an override scope"""
- config_file = config.get_config_filename('site', 'config')
+ config_file = config.get_config_filename("site", "config")
- with spack.config.override('config:install_tree', {'root': 'foobar'}):
- check_blame('install_tree', 'overrides')
+ with spack.config.override("config:install_tree", {"root": "foobar"}):
+ check_blame("install_tree", "overrides")
- check_blame('source_cache', config_file, 11)
- check_blame('misc_cache', config_file, 12)
- check_blame('verify_ssl', config_file, 13)
- check_blame('checksum', config_file, 14)
- check_blame('dirty', config_file, 15)
+ check_blame("source_cache", config_file, 11)
+ check_blame("misc_cache", config_file, 12)
+ check_blame("verify_ssl", config_file, 13)
+ check_blame("checksum", config_file, 14)
+ check_blame("dirty", config_file, 15)
def test_config_blame_defaults():
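
The blame tests drive "spack config blame config" through SpackCommand and grep its output for "element: file:line" annotations. The equivalent direct call:

    from spack.main import SpackCommand

    config_cmd = SpackCommand("config")

    # every configuration element is annotated with the file (and line) it came from
    output = config_cmd("blame", "config")
    for line in output.rstrip().split("\n"):
        if "install_tree" in line:
            print(line)
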
diff --git a/lib/spack/spack/test/util/unparse/unparse.py b/lib/spack/spack/test/util/unparse/unparse.py
index f3a74aa9b7..217f67f35d 100644
--- a/lib/spack/spack/test/util/unparse/unparse.py
+++ b/lib/spack/spack/test/util/unparse/unparse.py
@@ -17,8 +17,9 @@ else:
import spack.util.unparse
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason="Test module unsupported on Windows")
+pytestmark = pytest.mark.skipif(
+ sys.platform == "win32", reason="Test module unsupported on Windows"
+)
def read_pyfile(filename):
@@ -193,34 +194,28 @@ def test_core_lib_files():
"""Roundtrip source files from the Python core libs."""
test_directories = [
os.path.join(
- getattr(sys, 'real_prefix', sys.prefix),
- 'lib',
- 'python%s.%s' % sys.version_info[:2]
+ getattr(sys, "real_prefix", sys.prefix), "lib", "python%s.%s" % sys.version_info[:2]
)
]
names = []
for test_dir in test_directories:
for n in os.listdir(test_dir):
- if n.endswith('.py') and not n.startswith('bad'):
+ if n.endswith(".py") and not n.startswith("bad"):
names.append(os.path.join(test_dir, n))
for filename in names:
- print('Testing %s' % filename)
+ print("Testing %s" % filename)
source = read_pyfile(filename)
check_ast_roundtrip(source)
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater"
-)
+@pytest.mark.skipif(sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater")
def test_simple_fstring():
check_ast_roundtrip("f'{x}'")
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater"
-)
+@pytest.mark.skipif(sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater")
def test_fstrings():
# See issue 25180
check_ast_roundtrip(r"""f'{f"{0}"*3}'""")
@@ -229,23 +224,20 @@ def test_fstrings():
check_ast_roundtrip('''f"""'end' "quote\\""""''')
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater"
-)
+@pytest.mark.skipif(sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater")
def test_fstrings_complicated():
# See issue 28002
check_ast_roundtrip("""f'''{"'"}'''""")
check_ast_roundtrip('''f\'\'\'-{f"""*{f"+{f'.{x}.'}+"}*"""}-\'\'\'''')
- check_ast_roundtrip(
- '''f\'\'\'-{f"""*{f"+{f'.{x}.'}+"}*"""}-'single quote\\'\'\'\'''')
- check_ast_roundtrip('f"""{\'\'\'\n\'\'\'}"""')
- check_ast_roundtrip('f"""{g(\'\'\'\n\'\'\')}"""')
+ check_ast_roundtrip('''f\'\'\'-{f"""*{f"+{f'.{x}.'}+"}*"""}-'single quote\\'\'\'\'''')
+ check_ast_roundtrip("f\"\"\"{'''\n'''}\"\"\"")
+ check_ast_roundtrip("f\"\"\"{g('''\n''')}\"\"\"")
check_ast_roundtrip('''f"a\\r\\nb"''')
check_ast_roundtrip('''f"\\u2028{'x'}"''')
def test_parser_modes():
- for mode in ['exec', 'single', 'eval']:
+ for mode in ["exec", "single", "eval"]:
check_ast_roundtrip(code_parseable_in_all_parser_modes, mode=mode)
@@ -294,8 +286,8 @@ def test_min_int27():
@pytest.mark.skipif(not six.PY3, reason="Only works for Python 3")
def test_min_int30():
- check_ast_roundtrip(str(-2**31))
- check_ast_roundtrip(str(-2**63))
+ check_ast_roundtrip(str(-(2 ** 31)))
+ check_ast_roundtrip(str(-(2 ** 63)))
def test_imaginary_literals():
@@ -399,10 +391,7 @@ def test_repr():
check_ast_roundtrip(a_repr)
-@pytest.mark.skipif(
- sys.version_info[:2] < (3, 6),
- reason="Only for Python 3.6 or greater"
-)
+@pytest.mark.skipif(sys.version_info[:2] < (3, 6), reason="Only for Python 3.6 or greater")
def test_complex_f_string():
check_ast_roundtrip(complex_f_string)
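
The check_ast_roundtrip() helper these tests call presumably boils down to: parse the source, unparse it, re-parse the result, and compare ASTs. A standalone sketch of that idea; the unparse(tree) helper name and its string return value are assumptions about spack.util.unparse, which the module imports above.

    import ast

    import spack.util.unparse


    def roundtrip_ok(source):
        tree = ast.parse(source)
        regenerated = spack.util.unparse.unparse(tree)  # assumed to return a string
        return ast.dump(ast.parse(regenerated)) == ast.dump(tree)


    print(roundtrip_ok("f'{x}'"))          # the simple f-string case above
    print(roundtrip_ok("x = -(2 ** 31)"))
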
diff --git a/lib/spack/spack/test/util/util_gpg.py b/lib/spack/spack/test/util/util_gpg.py
index c37ae0ca4f..b8116561ee 100644
--- a/lib/spack/spack/test/util/util_gpg.py
+++ b/lib/spack/spack/test/util/util_gpg.py
@@ -30,8 +30,8 @@ ssb::2048:1:AAAAAAAAAAAAAAAA:AAAAAAAAAA::::::::::
keys = spack.util.gpg._parse_secret_keys_output(output)
assert len(keys) == 2
- assert keys[0] == 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
- assert keys[1] == 'YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY'
+ assert keys[0] == "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
+ assert keys[1] == "YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY"
def test_parse_gpg_output_case_two():
@@ -47,7 +47,7 @@ grp:::::::::AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA:
keys = spack.util.gpg._parse_secret_keys_output(output)
assert len(keys) == 1
- assert keys[0] == 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
+ assert keys[0] == "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
def test_parse_gpg_output_case_three():
@@ -66,28 +66,25 @@ fpr:::::::::ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ:"""
keys = spack.util.gpg._parse_secret_keys_output(output)
assert len(keys) == 2
- assert keys[0] == 'WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW'
- assert keys[1] == 'YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY'
+ assert keys[0] == "WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW"
+ assert keys[1] == "YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY"
-@pytest.mark.requires_executables('gpg2')
+@pytest.mark.requires_executables("gpg2")
def test_really_long_gnupghome_dir(tmpdir, has_socket_dir):
if not has_socket_dir:
- pytest.skip('This test requires /var/run/user/$(id -u)')
+ pytest.skip("This test requires /var/run/user/$(id -u)")
N = 960
tdir = str(tmpdir)
while len(tdir) < N:
- tdir = os.path.join(tdir, 'filler')
+ tdir = os.path.join(tdir, "filler")
tdir = tdir[:N].rstrip(os.sep)
- tdir += '0' * (N - len(tdir))
+ tdir += "0" * (N - len(tdir))
with spack.util.gpg.gnupghome_override(tdir):
spack.util.gpg.create(
- name='Spack testing 1',
- email='test@spack.io',
- comment='Spack testing key',
- expires='0'
+ name="Spack testing 1", email="test@spack.io", comment="Spack testing key", expires="0"
)
spack.util.gpg.list(True, True)
diff --git a/lib/spack/spack/test/util/util_string.py b/lib/spack/spack/test/util/util_string.py
index 411f026de6..a1f58600b6 100644
--- a/lib/spack/spack/test/util/util_string.py
+++ b/lib/spack/spack/test/util/util_string.py
@@ -7,8 +7,8 @@ from spack.util.string import plural
def test_plural():
- assert plural(0, 'thing') == '0 things'
- assert plural(1, 'thing') == '1 thing'
- assert plural(2, 'thing') == '2 things'
- assert plural(1, 'thing', 'wombats') == '1 thing'
- assert plural(2, 'thing', 'wombats') == '2 wombats'
+ assert plural(0, "thing") == "0 things"
+ assert plural(1, "thing") == "1 thing"
+ assert plural(2, "thing") == "2 things"
+ assert plural(1, "thing", "wombats") == "1 thing"
+ assert plural(2, "thing", "wombats") == "2 wombats"
diff --git a/lib/spack/spack/test/util/util_url.py b/lib/spack/spack/test/util/util_url.py
index 971375d9cc..38361fbf82 100644
--- a/lib/spack/spack/test/util/util_url.py
+++ b/lib/spack/spack/test/util/util_url.py
@@ -16,214 +16,203 @@ import spack.paths
import spack.util.url as url_util
from spack.util.path import convert_to_posix_path
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
if is_windows:
- drive_m = re.search(r'[A-Za-z]:', spack.paths.test_path)
+ drive_m = re.search(r"[A-Za-z]:", spack.paths.test_path)
drive = drive_m.group() if drive_m else None
def test_url_parse():
- parsed = url_util.parse('/path/to/resource', scheme='fake')
- assert(parsed.scheme == 'fake')
- assert(parsed.netloc == '')
- assert(parsed.path == '/path/to/resource')
-
- parsed = url_util.parse('file:///path/to/resource')
- assert(parsed.scheme == 'file')
- assert(parsed.netloc == '')
- assert(parsed.path == '/path/to/resource')
-
- parsed = url_util.parse('file:///path/to/resource', scheme='fake')
- assert(parsed.scheme == 'file')
- assert(parsed.netloc == '')
- assert(parsed.path == '/path/to/resource')
-
- parsed = url_util.parse('file://path/to/resource')
- assert(parsed.scheme == 'file')
- expected = convert_to_posix_path(
- os.path.abspath(
- posixpath.join('path', 'to', 'resource')))
+ parsed = url_util.parse("/path/to/resource", scheme="fake")
+ assert parsed.scheme == "fake"
+ assert parsed.netloc == ""
+ assert parsed.path == "/path/to/resource"
+
+ parsed = url_util.parse("file:///path/to/resource")
+ assert parsed.scheme == "file"
+ assert parsed.netloc == ""
+ assert parsed.path == "/path/to/resource"
+
+ parsed = url_util.parse("file:///path/to/resource", scheme="fake")
+ assert parsed.scheme == "file"
+ assert parsed.netloc == ""
+ assert parsed.path == "/path/to/resource"
+
+ parsed = url_util.parse("file://path/to/resource")
+ assert parsed.scheme == "file"
+ expected = convert_to_posix_path(os.path.abspath(posixpath.join("path", "to", "resource")))
if is_windows:
expected = expected.lstrip(drive)
- assert(parsed.path == expected)
+ assert parsed.path == expected
if is_windows:
- parsed = url_util.parse('file://%s\\path\\to\\resource' % drive)
- assert(parsed.scheme == 'file')
- expected = '/' + posixpath.join('path', 'to', 'resource')
+ parsed = url_util.parse("file://%s\\path\\to\\resource" % drive)
+ assert parsed.scheme == "file"
+ expected = "/" + posixpath.join("path", "to", "resource")
assert parsed.path == expected
- parsed = url_util.parse('https://path/to/resource')
- assert(parsed.scheme == 'https')
- assert(parsed.netloc == 'path')
- assert(parsed.path == '/to/resource')
+ parsed = url_util.parse("https://path/to/resource")
+ assert parsed.scheme == "https"
+ assert parsed.netloc == "path"
+ assert parsed.path == "/to/resource"
- parsed = url_util.parse('gs://path/to/resource')
- assert(parsed.scheme == 'gs')
- assert(parsed.netloc == 'path')
- assert(parsed.path == '/to/resource')
+ parsed = url_util.parse("gs://path/to/resource")
+ assert parsed.scheme == "gs"
+ assert parsed.netloc == "path"
+ assert parsed.path == "/to/resource"
spack_root = spack.paths.spack_root
- parsed = url_util.parse('file://$spack')
- assert(parsed.scheme == 'file')
+ parsed = url_util.parse("file://$spack")
+ assert parsed.scheme == "file"
if is_windows:
- spack_root = '/' + convert_to_posix_path(spack_root)
+ spack_root = "/" + convert_to_posix_path(spack_root)
- assert(parsed.netloc + parsed.path == spack_root)
+ assert parsed.netloc + parsed.path == spack_root
def test_url_local_file_path():
spack_root = spack.paths.spack_root
sep = os.path.sep
- lfp = url_util.local_file_path('/a/b/c.txt')
- assert(lfp == sep + os.path.join('a', 'b', 'c.txt'))
+ lfp = url_util.local_file_path("/a/b/c.txt")
+ assert lfp == sep + os.path.join("a", "b", "c.txt")
- lfp = url_util.local_file_path('file:///a/b/c.txt')
- assert(lfp == sep + os.path.join('a', 'b', 'c.txt'))
+ lfp = url_util.local_file_path("file:///a/b/c.txt")
+ assert lfp == sep + os.path.join("a", "b", "c.txt")
if is_windows:
- lfp = url_util.local_file_path('file://a/b/c.txt')
- expected = os.path.abspath(os.path.join('a', 'b', 'c.txt'))
- assert(lfp == expected)
+ lfp = url_util.local_file_path("file://a/b/c.txt")
+ expected = os.path.abspath(os.path.join("a", "b", "c.txt"))
+ assert lfp == expected
- lfp = url_util.local_file_path('file://$spack/a/b/c.txt')
- expected = os.path.abspath(os.path.join(spack_root, 'a', 'b', 'c.txt'))
- assert(lfp == expected)
+ lfp = url_util.local_file_path("file://$spack/a/b/c.txt")
+ expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt"))
+ assert lfp == expected
if is_windows:
- lfp = url_util.local_file_path('file:///$spack/a/b/c.txt')
- expected = os.path.abspath(os.path.join(spack_root, 'a', 'b', 'c.txt'))
- assert(lfp == expected)
+ lfp = url_util.local_file_path("file:///$spack/a/b/c.txt")
+ expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt"))
+ assert lfp == expected
- lfp = url_util.local_file_path('file://$spack/a/b/c.txt')
- expected = os.path.abspath(os.path.join(spack_root, 'a', 'b', 'c.txt'))
- assert(lfp == expected)
+ lfp = url_util.local_file_path("file://$spack/a/b/c.txt")
+ expected = os.path.abspath(os.path.join(spack_root, "a", "b", "c.txt"))
+ assert lfp == expected
# not a file:// URL - so no local file path
- lfp = url_util.local_file_path('http:///a/b/c.txt')
- assert(lfp is None)
+ lfp = url_util.local_file_path("http:///a/b/c.txt")
+ assert lfp is None
- lfp = url_util.local_file_path('http://a/b/c.txt')
- assert(lfp is None)
+ lfp = url_util.local_file_path("http://a/b/c.txt")
+ assert lfp is None
- lfp = url_util.local_file_path('http:///$spack/a/b/c.txt')
- assert(lfp is None)
+ lfp = url_util.local_file_path("http:///$spack/a/b/c.txt")
+ assert lfp is None
- lfp = url_util.local_file_path('http://$spack/a/b/c.txt')
- assert(lfp is None)
+ lfp = url_util.local_file_path("http://$spack/a/b/c.txt")
+ assert lfp is None
def test_url_join_local_paths():
# Resolve local link against page URL
# wrong:
- assert(
- url_util.join(
- 's3://bucket/index.html',
- '../other-bucket/document.txt')
- ==
- 's3://bucket/other-bucket/document.txt')
+ assert (
+ url_util.join("s3://bucket/index.html", "../other-bucket/document.txt")
+ == "s3://bucket/other-bucket/document.txt"
+ )
# correct - need to specify resolve_href=True:
- assert(
- url_util.join(
- 's3://bucket/index.html',
- '../other-bucket/document.txt',
- resolve_href=True)
- ==
- 's3://other-bucket/document.txt')
+ assert (
+ url_util.join("s3://bucket/index.html", "../other-bucket/document.txt", resolve_href=True)
+ == "s3://other-bucket/document.txt"
+ )
# same as above: make sure several components are joined together correctly
- assert(
+ assert (
url_util.join(
# with resolve_href=True, first arg is the base url; can not be
# broken up
- 's3://bucket/index.html',
-
+ "s3://bucket/index.html",
# with resolve_href=True, remaining arguments are the components of
# the local href that needs to be resolved
- '..', 'other-bucket', 'document.txt',
- resolve_href=True)
- ==
- 's3://other-bucket/document.txt')
+ "..",
+ "other-bucket",
+ "document.txt",
+ resolve_href=True,
+ )
+ == "s3://other-bucket/document.txt"
+ )
# Append local path components to prefix URL
# wrong:
- assert(
- url_util.join(
- 'https://mirror.spack.io/build_cache',
- 'my-package',
- resolve_href=True)
- ==
- 'https://mirror.spack.io/my-package')
+ assert (
+ url_util.join("https://mirror.spack.io/build_cache", "my-package", resolve_href=True)
+ == "https://mirror.spack.io/my-package"
+ )
# correct - Need to specify resolve_href=False:
- assert(
- url_util.join(
- 'https://mirror.spack.io/build_cache',
- 'my-package',
- resolve_href=False)
- ==
- 'https://mirror.spack.io/build_cache/my-package')
+ assert (
+ url_util.join("https://mirror.spack.io/build_cache", "my-package", resolve_href=False)
+ == "https://mirror.spack.io/build_cache/my-package"
+ )
# same as above; make sure resolve_href=False is default
- assert(
- url_util.join(
- 'https://mirror.spack.io/build_cache',
- 'my-package')
- ==
- 'https://mirror.spack.io/build_cache/my-package')
+ assert (
+ url_util.join("https://mirror.spack.io/build_cache", "my-package")
+ == "https://mirror.spack.io/build_cache/my-package"
+ )
# same as above: make sure several components are joined together correctly
- assert(
+ assert (
url_util.join(
# with resolve_href=False, first arg is just a prefix. No
# resolution is done. So, there should be no difference between
# join('/a/b/c', 'd/e'),
# join('/a/b', 'c', 'd/e'),
# join('/a', 'b/c', 'd', 'e'), etc.
- 'https://mirror.spack.io',
- 'build_cache',
- 'my-package')
- ==
- 'https://mirror.spack.io/build_cache/my-package')
+ "https://mirror.spack.io",
+ "build_cache",
+ "my-package",
+ )
+ == "https://mirror.spack.io/build_cache/my-package"
+ )
# file:// URL path components are *NOT* canonicalized
spack_root = spack.paths.spack_root
- if sys.platform != 'win32':
- join_result = url_util.join('/a/b/c', '$spack')
- assert(join_result == 'file:///a/b/c/$spack') # not canonicalized
+ if sys.platform != "win32":
+ join_result = url_util.join("/a/b/c", "$spack")
+ assert join_result == "file:///a/b/c/$spack" # not canonicalized
format_result = url_util.format(join_result)
        # canonicalize by hand
- expected = url_util.format(os.path.abspath(os.path.join(
- '/', 'a', 'b', 'c', '.' + spack_root)))
- assert(format_result == expected)
+ expected = url_util.format(
+ os.path.abspath(os.path.join("/", "a", "b", "c", "." + spack_root))
+ )
+ assert format_result == expected
# see test_url_join_absolute_paths() for more on absolute path components
- join_result = url_util.join('/a/b/c', '/$spack')
- assert(join_result == 'file:///$spack') # not canonicalized
+ join_result = url_util.join("/a/b/c", "/$spack")
+ assert join_result == "file:///$spack" # not canonicalized
format_result = url_util.format(join_result)
expected = url_util.format(spack_root)
- assert(format_result == expected)
+ assert format_result == expected
# For s3:// URLs, the "netloc" (bucket) is considered part of the path.
# Make sure join() can cross bucket boundaries in this case.
- args = ['s3://bucket/a/b', 'new-bucket', 'c']
- assert(url_util.join(*args) == 's3://bucket/a/b/new-bucket/c')
+ args = ["s3://bucket/a/b", "new-bucket", "c"]
+ assert url_util.join(*args) == "s3://bucket/a/b/new-bucket/c"
- args.insert(1, '..')
- assert(url_util.join(*args) == 's3://bucket/a/new-bucket/c')
+ args.insert(1, "..")
+ assert url_util.join(*args) == "s3://bucket/a/new-bucket/c"
- args.insert(1, '..')
- assert(url_util.join(*args) == 's3://bucket/new-bucket/c')
+ args.insert(1, "..")
+ assert url_util.join(*args) == "s3://bucket/new-bucket/c"
# new-bucket is now the "netloc" (bucket name)
- args.insert(1, '..')
- assert(url_util.join(*args) == 's3://new-bucket/c')
+ args.insert(1, "..")
+ assert url_util.join(*args) == "s3://new-bucket/c"
def test_url_join_absolute_paths():
@@ -239,19 +228,15 @@ def test_url_join_absolute_paths():
# such scheme is provided).
    # For example:
- p = '/path/to/resource'
+ p = "/path/to/resource"
# ...is an absolute path
# http:// URL
- assert(
- url_util.join('http://example.com/a/b/c', p)
- == 'http://example.com/path/to/resource')
+ assert url_util.join("http://example.com/a/b/c", p) == "http://example.com/path/to/resource"
# s3:// URL
# also notice how the netloc is treated as part of the path for s3:// URLs
- assert(
- url_util.join('s3://example.com/a/b/c', p)
- == 's3://path/to/resource')
+ assert url_util.join("s3://example.com/a/b/c", p) == "s3://path/to/resource"
# - URL components that specify a scheme are always absolute path
# components. Joining a base URL with these components effectively
@@ -259,28 +244,24 @@ def test_url_join_absolute_paths():
# component in question and using it as the new base URL.
    # For example:
- p = 'http://example.com/path/to'
+ p = "http://example.com/path/to"
# ...is an http:// URL
- join_result = url_util.join(p, 'resource')
- assert(join_result == 'http://example.com/path/to/resource')
+ join_result = url_util.join(p, "resource")
+ assert join_result == "http://example.com/path/to/resource"
# works as if everything before the http:// URL was left out
- assert(
- url_util.join(
- 'literally', 'does', 'not', 'matter',
- p, 'resource')
- == join_result)
+ assert url_util.join("literally", "does", "not", "matter", p, "resource") == join_result
# It's important to keep in mind that this logic applies even if the
# component's path is not an absolute path!
    # For example:
- p = './d'
+ p = "./d"
# ...is *NOT* an absolute path
# ...is also *NOT* an absolute path component
- u = 'file://./d'
+ u = "file://./d"
# ...is a URL
# The path of this URL is *NOT* an absolute path
# HOWEVER, the URL, itself, *is* an absolute path component
@@ -291,106 +272,124 @@ def test_url_join_absolute_paths():
if sys.platform == "win32":
convert_to_posix_path(cwd)
- cwd = '/' + cwd
+ cwd = "/" + cwd
# So, even though parse() assumes "file://" URL, the scheme is still
# significant in URL path components passed to join(), even if the base
# is a file:// URL.
- path_join_result = 'file:///a/b/c/d'
- assert(url_util.join('/a/b/c', p) == path_join_result)
- assert(url_util.join('file:///a/b/c', p) == path_join_result)
+ path_join_result = "file:///a/b/c/d"
+ assert url_util.join("/a/b/c", p) == path_join_result
+ assert url_util.join("file:///a/b/c", p) == path_join_result
- url_join_result = 'file://{CWD}/d'.format(CWD=cwd)
- assert(url_util.join('/a/b/c', u) == url_join_result)
- assert(url_util.join('file:///a/b/c', u) == url_join_result)
+ url_join_result = "file://{CWD}/d".format(CWD=cwd)
+ assert url_util.join("/a/b/c", u) == url_join_result
+ assert url_util.join("file:///a/b/c", u) == url_join_result
# Finally, resolve_href should have no effect for how absolute path
# components are handled because local hrefs can not be absolute path
# components.
- args = ['s3://does', 'not', 'matter',
- 'http://example.com',
- 'also', 'does', 'not', 'matter',
- '/path']
-
- expected = 'http://example.com/path'
- assert(url_util.join(*args, resolve_href=True) == expected)
- assert(url_util.join(*args, resolve_href=False) == expected)
+ args = [
+ "s3://does",
+ "not",
+ "matter",
+ "http://example.com",
+ "also",
+ "does",
+ "not",
+ "matter",
+ "/path",
+ ]
+
+ expected = "http://example.com/path"
+ assert url_util.join(*args, resolve_href=True) == expected
+ assert url_util.join(*args, resolve_href=False) == expected
# resolve_href only matters for the local path components at the end of the
# argument list.
- args[-1] = '/path/to/page'
- args.extend(('..', '..', 'resource'))
-
- assert(url_util.join(*args, resolve_href=True) ==
- 'http://example.com/resource')
-
- assert(url_util.join(*args, resolve_href=False) ==
- 'http://example.com/path/resource')
-
-
-@pytest.mark.parametrize("url,parts", [
- ("ssh://user@host.xz:500/path/to/repo.git/",
- ("ssh", "user", "host.xz", 500, "/path/to/repo.git")),
- ("ssh://user@host.xz/path/to/repo.git/",
- ("ssh", "user", "host.xz", None, "/path/to/repo.git")),
- ("ssh://host.xz:500/path/to/repo.git/",
- ("ssh", None, "host.xz", 500, "/path/to/repo.git")),
- ("ssh://host.xz/path/to/repo.git/",
- ("ssh", None, "host.xz", None, "/path/to/repo.git")),
- ("ssh://user@host.xz/path/to/repo.git/",
- ("ssh", "user", "host.xz", None, "/path/to/repo.git")),
- ("ssh://host.xz/path/to/repo.git/",
- ("ssh", None, "host.xz", None, "/path/to/repo.git")),
- ("ssh://user@host.xz/~user/path/to/repo.git/",
- ("ssh", "user", "host.xz", None, "~user/path/to/repo.git")),
- ("ssh://host.xz/~user/path/to/repo.git/",
- ("ssh", None, "host.xz", None, "~user/path/to/repo.git")),
- ("ssh://user@host.xz/~/path/to/repo.git",
- ("ssh", "user", "host.xz", None, "~/path/to/repo.git")),
- ("ssh://host.xz/~/path/to/repo.git",
- ("ssh", None, "host.xz", None, "~/path/to/repo.git")),
- ("git@github.com:spack/spack.git",
- (None, "git", "github.com", None, "spack/spack.git")),
- ("user@host.xz:/path/to/repo.git/",
- (None, "user", "host.xz", None, "/path/to/repo.git")),
- ("host.xz:/path/to/repo.git/",
- (None, None, "host.xz", None, "/path/to/repo.git")),
- ("user@host.xz:~user/path/to/repo.git/",
- (None, "user", "host.xz", None, "~user/path/to/repo.git")),
- ("host.xz:~user/path/to/repo.git/",
- (None, None, "host.xz", None, "~user/path/to/repo.git")),
- ("user@host.xz:path/to/repo.git",
- (None, "user", "host.xz", None, "path/to/repo.git")),
- ("host.xz:path/to/repo.git",
- (None, None, "host.xz", None, "path/to/repo.git")),
- ("rsync://host.xz/path/to/repo.git/",
- ("rsync", None, "host.xz", None, "/path/to/repo.git")),
- ("git://host.xz/path/to/repo.git/",
- ("git", None, "host.xz", None, "/path/to/repo.git")),
- ("git://host.xz/~user/path/to/repo.git/",
- ("git", None, "host.xz", None, "~user/path/to/repo.git")),
- ("http://host.xz/path/to/repo.git/",
- ("http", None, "host.xz", None, "/path/to/repo.git")),
- ("https://host.xz/path/to/repo.git/",
- ("https", None, "host.xz", None, "/path/to/repo.git")),
- ("https://github.com/spack/spack",
- ("https", None, "github.com", None, "/spack/spack")),
- ("https://github.com/spack/spack/",
- ("https", None, "github.com", None, "/spack/spack")),
- ("file:///path/to/repo.git/",
- ("file", None, None, None, "/path/to/repo.git")),
- ("file://~/path/to/repo.git/",
- ("file", None, None, None, "~/path/to/repo.git")),
- # bad ports should give us None
- ("ssh://host.xz:port/path/to/repo.git/", None),
- # bad ports should give us None
- ("ssh://host-foo.xz:port/path/to/repo.git/", None),
- # regular file paths should give us None
- ("/path/to/repo.git/", None),
- ("path/to/repo.git/", None),
- ("~/path/to/repo.git", None),
-])
+ args[-1] = "/path/to/page"
+ args.extend(("..", "..", "resource"))
+
+ assert url_util.join(*args, resolve_href=True) == "http://example.com/resource"
+
+ assert url_util.join(*args, resolve_href=False) == "http://example.com/path/resource"
+
+
+@pytest.mark.parametrize(
+ "url,parts",
+ [
+ (
+ "ssh://user@host.xz:500/path/to/repo.git/",
+ ("ssh", "user", "host.xz", 500, "/path/to/repo.git"),
+ ),
+ (
+ "ssh://user@host.xz/path/to/repo.git/",
+ ("ssh", "user", "host.xz", None, "/path/to/repo.git"),
+ ),
+ (
+ "ssh://host.xz:500/path/to/repo.git/",
+ ("ssh", None, "host.xz", 500, "/path/to/repo.git"),
+ ),
+ ("ssh://host.xz/path/to/repo.git/", ("ssh", None, "host.xz", None, "/path/to/repo.git")),
+ (
+ "ssh://user@host.xz/path/to/repo.git/",
+ ("ssh", "user", "host.xz", None, "/path/to/repo.git"),
+ ),
+ ("ssh://host.xz/path/to/repo.git/", ("ssh", None, "host.xz", None, "/path/to/repo.git")),
+ (
+ "ssh://user@host.xz/~user/path/to/repo.git/",
+ ("ssh", "user", "host.xz", None, "~user/path/to/repo.git"),
+ ),
+ (
+ "ssh://host.xz/~user/path/to/repo.git/",
+ ("ssh", None, "host.xz", None, "~user/path/to/repo.git"),
+ ),
+ (
+ "ssh://user@host.xz/~/path/to/repo.git",
+ ("ssh", "user", "host.xz", None, "~/path/to/repo.git"),
+ ),
+ ("ssh://host.xz/~/path/to/repo.git", ("ssh", None, "host.xz", None, "~/path/to/repo.git")),
+ ("git@github.com:spack/spack.git", (None, "git", "github.com", None, "spack/spack.git")),
+ ("user@host.xz:/path/to/repo.git/", (None, "user", "host.xz", None, "/path/to/repo.git")),
+ ("host.xz:/path/to/repo.git/", (None, None, "host.xz", None, "/path/to/repo.git")),
+ (
+ "user@host.xz:~user/path/to/repo.git/",
+ (None, "user", "host.xz", None, "~user/path/to/repo.git"),
+ ),
+ (
+ "host.xz:~user/path/to/repo.git/",
+ (None, None, "host.xz", None, "~user/path/to/repo.git"),
+ ),
+ ("user@host.xz:path/to/repo.git", (None, "user", "host.xz", None, "path/to/repo.git")),
+ ("host.xz:path/to/repo.git", (None, None, "host.xz", None, "path/to/repo.git")),
+ (
+ "rsync://host.xz/path/to/repo.git/",
+ ("rsync", None, "host.xz", None, "/path/to/repo.git"),
+ ),
+ ("git://host.xz/path/to/repo.git/", ("git", None, "host.xz", None, "/path/to/repo.git")),
+ (
+ "git://host.xz/~user/path/to/repo.git/",
+ ("git", None, "host.xz", None, "~user/path/to/repo.git"),
+ ),
+ ("http://host.xz/path/to/repo.git/", ("http", None, "host.xz", None, "/path/to/repo.git")),
+ (
+ "https://host.xz/path/to/repo.git/",
+ ("https", None, "host.xz", None, "/path/to/repo.git"),
+ ),
+ ("https://github.com/spack/spack", ("https", None, "github.com", None, "/spack/spack")),
+ ("https://github.com/spack/spack/", ("https", None, "github.com", None, "/spack/spack")),
+ ("file:///path/to/repo.git/", ("file", None, None, None, "/path/to/repo.git")),
+ ("file://~/path/to/repo.git/", ("file", None, None, None, "~/path/to/repo.git")),
+ # bad ports should give us None
+ ("ssh://host.xz:port/path/to/repo.git/", None),
+ # bad ports should give us None
+ ("ssh://host-foo.xz:port/path/to/repo.git/", None),
+ # regular file paths should give us None
+ ("/path/to/repo.git/", None),
+ ("path/to/repo.git/", None),
+ ("~/path/to/repo.git", None),
+ ],
+)
def test_git_url_parse(url, parts):
if parts is None:
with pytest.raises(ValueError):
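
The join() tests above capture the two modes spelled out in the comments: by default every extra argument is appended to the base URL as a path component, while resolve_href=True treats the first argument as a page URL and resolves the rest against it the way a browser would. Side by side:

    import spack.util.url as url_util

    # default mode: plain prefix + path components
    print(url_util.join("https://mirror.spack.io/build_cache", "my-package"))
    # -> https://mirror.spack.io/build_cache/my-package

    # resolve_href=True: resolve a relative href against a page URL
    print(url_util.join("s3://bucket/index.html", "../other-bucket/document.txt",
                        resolve_href=True))
    # -> s3://other-bucket/document.txt
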
diff --git a/lib/spack/spack/test/variant.py b/lib/spack/spack/test/variant.py
index b25215bbcc..204514b58d 100644
--- a/lib/spack/spack/test/variant.py
+++ b/lib/spack/spack/test/variant.py
@@ -25,36 +25,35 @@ from spack.variant import (
class TestMultiValuedVariant(object):
-
def test_initialization(self):
# Basic properties
- a = MultiValuedVariant('foo', 'bar,baz')
+ a = MultiValuedVariant("foo", "bar,baz")
assert repr(a) == "MultiValuedVariant('foo', 'bar,baz')"
- assert str(a) == 'foo=bar,baz'
- assert a.value == ('bar', 'baz')
- assert 'bar' in a
- assert 'baz' in a
+ assert str(a) == "foo=bar,baz"
+ assert a.value == ("bar", "baz")
+ assert "bar" in a
+ assert "baz" in a
assert eval(repr(a)) == a
# Spaces are trimmed
- b = MultiValuedVariant('foo', 'bar, baz')
+ b = MultiValuedVariant("foo", "bar, baz")
assert repr(b) == "MultiValuedVariant('foo', 'bar, baz')"
- assert str(b) == 'foo=bar,baz'
- assert b.value == ('bar', 'baz')
- assert 'bar' in b
- assert 'baz' in b
+ assert str(b) == "foo=bar,baz"
+ assert b.value == ("bar", "baz")
+ assert "bar" in b
+ assert "baz" in b
assert a == b
assert hash(a) == hash(b)
assert eval(repr(b)) == a
# Order is not important
- c = MultiValuedVariant('foo', 'baz, bar')
+ c = MultiValuedVariant("foo", "baz, bar")
assert repr(c) == "MultiValuedVariant('foo', 'baz, bar')"
- assert str(c) == 'foo=bar,baz'
- assert c.value == ('bar', 'baz')
- assert 'bar' in c
- assert 'baz' in c
+ assert str(c) == "foo=bar,baz"
+ assert c.value == ("bar", "baz")
+ assert "bar" in c
+ assert "baz" in c
assert a == c
assert hash(a) == hash(c)
assert eval(repr(c)) == a
@@ -63,9 +62,9 @@ class TestMultiValuedVariant(object):
d = a.copy()
assert repr(a) == repr(d)
assert str(a) == str(d)
- assert d.value == ('bar', 'baz')
- assert 'bar' in d
- assert 'baz' in d
+ assert d.value == ("bar", "baz")
+ assert "bar" in d
+ assert "baz" in d
assert a == d
assert a is not d
assert hash(a) == hash(d)
@@ -73,10 +72,10 @@ class TestMultiValuedVariant(object):
def test_satisfies(self):
- a = MultiValuedVariant('foo', 'bar,baz')
- b = MultiValuedVariant('foo', 'bar')
- c = MultiValuedVariant('fee', 'bar,baz')
- d = MultiValuedVariant('foo', 'True')
+ a = MultiValuedVariant("foo", "bar,baz")
+ b = MultiValuedVariant("foo", "bar")
+ c = MultiValuedVariant("fee", "bar,baz")
+ d = MultiValuedVariant("foo", "True")
# 'foo=bar,baz' satisfies 'foo=bar'
assert a.satisfies(b)
@@ -90,27 +89,27 @@ class TestMultiValuedVariant(object):
# Implicit type conversion for variants of other types
- b_sv = SingleValuedVariant('foo', 'bar')
+ b_sv = SingleValuedVariant("foo", "bar")
assert b.satisfies(b_sv)
- d_sv = SingleValuedVariant('foo', 'True')
+ d_sv = SingleValuedVariant("foo", "True")
assert d.satisfies(d_sv)
- almost_d_bv = SingleValuedVariant('foo', 'true')
+ almost_d_bv = SingleValuedVariant("foo", "true")
assert not d.satisfies(almost_d_bv)
- d_bv = BoolValuedVariant('foo', 'True')
+ d_bv = BoolValuedVariant("foo", "True")
assert d.satisfies(d_bv)
# This case is 'peculiar': the two BV instances are
# equivalent, but if converted to MV they are not
# as MV is case sensitive with respect to 'True' and 'False'
- almost_d_bv = BoolValuedVariant('foo', 'true')
+ almost_d_bv = BoolValuedVariant("foo", "true")
assert not d.satisfies(almost_d_bv)
def test_compatible(self):
- a = MultiValuedVariant('foo', 'bar,baz')
- b = MultiValuedVariant('foo', 'True')
- c = MultiValuedVariant('fee', 'bar,baz')
- d = MultiValuedVariant('foo', 'bar,barbaz')
+ a = MultiValuedVariant("foo", "bar,baz")
+ b = MultiValuedVariant("foo", "True")
+ c = MultiValuedVariant("fee", "bar,baz")
+ d = MultiValuedVariant("foo", "bar,barbaz")
# If the name of two multi-valued variants is the same,
# they are compatible
@@ -132,112 +131,111 @@ class TestMultiValuedVariant(object):
# Implicit type conversion for other types
- b_sv = SingleValuedVariant('foo', 'True')
+ b_sv = SingleValuedVariant("foo", "True")
assert b.compatible(b_sv)
assert not c.compatible(b_sv)
- b_bv = BoolValuedVariant('foo', 'True')
+ b_bv = BoolValuedVariant("foo", "True")
assert b.compatible(b_bv)
assert not c.compatible(b_bv)
def test_constrain(self):
# Try to constrain on a value with less constraints than self
- a = MultiValuedVariant('foo', 'bar,baz')
- b = MultiValuedVariant('foo', 'bar')
+ a = MultiValuedVariant("foo", "bar,baz")
+ b = MultiValuedVariant("foo", "bar")
changed = a.constrain(b)
assert not changed
- t = MultiValuedVariant('foo', 'bar,baz')
+ t = MultiValuedVariant("foo", "bar,baz")
assert a == t
# Try to constrain on a value with more constraints than self
- a = MultiValuedVariant('foo', 'bar,baz')
- b = MultiValuedVariant('foo', 'bar')
+ a = MultiValuedVariant("foo", "bar,baz")
+ b = MultiValuedVariant("foo", "bar")
changed = b.constrain(a)
assert changed
- t = MultiValuedVariant('foo', 'bar,baz')
+ t = MultiValuedVariant("foo", "bar,baz")
assert a == t
# Try to constrain on the same value
- a = MultiValuedVariant('foo', 'bar,baz')
+ a = MultiValuedVariant("foo", "bar,baz")
b = a.copy()
changed = a.constrain(b)
assert not changed
- t = MultiValuedVariant('foo', 'bar,baz')
+ t = MultiValuedVariant("foo", "bar,baz")
assert a == t
# Try to constrain on a different name
- a = MultiValuedVariant('foo', 'bar,baz')
- b = MultiValuedVariant('fee', 'bar')
+ a = MultiValuedVariant("foo", "bar,baz")
+ b = MultiValuedVariant("fee", "bar")
with pytest.raises(ValueError):
a.constrain(b)
# Implicit type conversion for variants of other types
- a = MultiValuedVariant('foo', 'bar,baz')
- b_sv = SingleValuedVariant('foo', 'bar')
- c_sv = SingleValuedVariant('foo', 'barbaz')
+ a = MultiValuedVariant("foo", "bar,baz")
+ b_sv = SingleValuedVariant("foo", "bar")
+ c_sv = SingleValuedVariant("foo", "barbaz")
assert not a.constrain(b_sv)
assert a.constrain(c_sv)
- d_bv = BoolValuedVariant('foo', 'True')
+ d_bv = BoolValuedVariant("foo", "True")
assert a.constrain(d_bv)
assert not a.constrain(d_bv)
def test_yaml_entry(self):
- a = MultiValuedVariant('foo', 'bar,baz,barbaz')
- b = MultiValuedVariant('foo', 'bar, baz, barbaz')
- expected = ('foo', sorted(['bar', 'baz', 'barbaz']))
+ a = MultiValuedVariant("foo", "bar,baz,barbaz")
+ b = MultiValuedVariant("foo", "bar, baz, barbaz")
+ expected = ("foo", sorted(["bar", "baz", "barbaz"]))
assert a.yaml_entry() == expected
assert b.yaml_entry() == expected
- a = MultiValuedVariant('foo', 'bar')
- expected = ('foo', sorted(['bar']))
+ a = MultiValuedVariant("foo", "bar")
+ expected = ("foo", sorted(["bar"]))
assert a.yaml_entry() == expected
class TestSingleValuedVariant(object):
-
def test_initialization(self):
# Basic properties
- a = SingleValuedVariant('foo', 'bar')
+ a = SingleValuedVariant("foo", "bar")
assert repr(a) == "SingleValuedVariant('foo', 'bar')"
- assert str(a) == 'foo=bar'
- assert a.value == 'bar'
- assert 'bar' in a
+ assert str(a) == "foo=bar"
+ assert a.value == "bar"
+ assert "bar" in a
assert eval(repr(a)) == a
# Raise if multiple values are passed
with pytest.raises(ValueError):
- SingleValuedVariant('foo', 'bar, baz')
+ SingleValuedVariant("foo", "bar, baz")
# Check the copy
b = a.copy()
assert repr(a) == repr(b)
assert str(a) == str(b)
- assert b.value == 'bar'
- assert 'bar' in b
+ assert b.value == "bar"
+ assert "bar" in b
assert a == b
assert a is not b
assert hash(a) == hash(b)
assert eval(repr(b)) == a
def test_satisfies(self):
- a = SingleValuedVariant('foo', 'bar')
- b = SingleValuedVariant('foo', 'bar')
- c = SingleValuedVariant('foo', 'baz')
- d = SingleValuedVariant('fee', 'bar')
- e = SingleValuedVariant('foo', 'True')
+ a = SingleValuedVariant("foo", "bar")
+ b = SingleValuedVariant("foo", "bar")
+ c = SingleValuedVariant("foo", "baz")
+ d = SingleValuedVariant("fee", "bar")
+ e = SingleValuedVariant("foo", "True")
# 'foo=bar' can only satisfy 'foo=bar'
assert a.satisfies(b)
@@ -254,22 +252,22 @@ class TestSingleValuedVariant(object):
# Implicit type conversion for variants of other types
- a_mv = MultiValuedVariant('foo', 'bar')
+ a_mv = MultiValuedVariant("foo", "bar")
assert a.satisfies(a_mv)
- multiple_values = MultiValuedVariant('foo', 'bar,baz')
+ multiple_values = MultiValuedVariant("foo", "bar,baz")
assert not a.satisfies(multiple_values)
- e_bv = BoolValuedVariant('foo', 'True')
+ e_bv = BoolValuedVariant("foo", "True")
assert e.satisfies(e_bv)
- almost_e_bv = BoolValuedVariant('foo', 'true')
+ almost_e_bv = BoolValuedVariant("foo", "true")
assert not e.satisfies(almost_e_bv)
def test_compatible(self):
- a = SingleValuedVariant('foo', 'bar')
- b = SingleValuedVariant('fee', 'bar')
- c = SingleValuedVariant('foo', 'baz')
- d = SingleValuedVariant('foo', 'bar')
+ a = SingleValuedVariant("foo", "bar")
+ b = SingleValuedVariant("fee", "bar")
+ c = SingleValuedVariant("foo", "baz")
+ d = SingleValuedVariant("foo", "bar")
# If the name of two single-valued variants is the same,
# they are compatible
@@ -291,10 +289,10 @@ class TestSingleValuedVariant(object):
# Implicit type conversion for variants of other types
- a_mv = MultiValuedVariant('foo', 'bar')
- b_mv = MultiValuedVariant('fee', 'bar')
- c_mv = MultiValuedVariant('foo', 'baz')
- d_mv = MultiValuedVariant('foo', 'bar')
+ a_mv = MultiValuedVariant("foo", "bar")
+ b_mv = MultiValuedVariant("fee", "bar")
+ c_mv = MultiValuedVariant("foo", "baz")
+ d_mv = MultiValuedVariant("foo", "bar")
assert not a.compatible(b_mv)
assert not a.compatible(c_mv)
@@ -312,9 +310,9 @@ class TestSingleValuedVariant(object):
assert not d.compatible(b_mv)
assert not d.compatible(c_mv)
- e = SingleValuedVariant('foo', 'True')
- e_bv = BoolValuedVariant('foo', 'True')
- almost_e_bv = BoolValuedVariant('foo', 'true')
+ e = SingleValuedVariant("foo", "True")
+ e_bv = BoolValuedVariant("foo", "True")
+ almost_e_bv = BoolValuedVariant("foo", "true")
assert e.compatible(e_bv)
assert not e.compatible(almost_e_bv)
@@ -322,59 +320,58 @@ class TestSingleValuedVariant(object):
def test_constrain(self):
# Try to constrain on a value equal to self
- a = SingleValuedVariant('foo', 'bar')
- b = SingleValuedVariant('foo', 'bar')
+ a = SingleValuedVariant("foo", "bar")
+ b = SingleValuedVariant("foo", "bar")
changed = a.constrain(b)
assert not changed
- t = SingleValuedVariant('foo', 'bar')
+ t = SingleValuedVariant("foo", "bar")
assert a == t
# Try to constrain on a value with a different value
- a = SingleValuedVariant('foo', 'bar')
- b = SingleValuedVariant('foo', 'baz')
+ a = SingleValuedVariant("foo", "bar")
+ b = SingleValuedVariant("foo", "baz")
with pytest.raises(UnsatisfiableVariantSpecError):
b.constrain(a)
# Try to constrain on a value with a different value
- a = SingleValuedVariant('foo', 'bar')
- b = SingleValuedVariant('fee', 'bar')
+ a = SingleValuedVariant("foo", "bar")
+ b = SingleValuedVariant("fee", "bar")
with pytest.raises(ValueError):
b.constrain(a)
# Try to constrain on the same value
- a = SingleValuedVariant('foo', 'bar')
+ a = SingleValuedVariant("foo", "bar")
b = a.copy()
changed = a.constrain(b)
assert not changed
- t = SingleValuedVariant('foo', 'bar')
+ t = SingleValuedVariant("foo", "bar")
assert a == t
# Implicit type conversion for variants of other types
- a = SingleValuedVariant('foo', 'True')
- mv = MultiValuedVariant('foo', 'True')
- bv = BoolValuedVariant('foo', 'True')
+ a = SingleValuedVariant("foo", "True")
+ mv = MultiValuedVariant("foo", "True")
+ bv = BoolValuedVariant("foo", "True")
for v in (mv, bv):
assert not a.constrain(v)
def test_yaml_entry(self):
- a = SingleValuedVariant('foo', 'bar')
- expected = ('foo', 'bar')
+ a = SingleValuedVariant("foo", "bar")
+ expected = ("foo", "bar")
assert a.yaml_entry() == expected
class TestBoolValuedVariant(object):
-
def test_initialization(self):
# Basic properties - True value
- for v in (True, 'True', 'TRUE', 'TrUe'):
- a = BoolValuedVariant('foo', v)
+ for v in (True, "True", "TRUE", "TrUe"):
+ a = BoolValuedVariant("foo", v)
assert repr(a) == "BoolValuedVariant('foo', {0})".format(repr(v))
- assert str(a) == '+foo'
+ assert str(a) == "+foo"
assert a.value is True
assert True in a
assert eval(repr(a)) == a
@@ -391,10 +388,10 @@ class TestBoolValuedVariant(object):
assert eval(repr(b)) == a
# Basic properties - False value
- for v in (False, 'False', 'FALSE', 'FaLsE'):
- a = BoolValuedVariant('foo', v)
+ for v in (False, "False", "FALSE", "FaLsE"):
+ a = BoolValuedVariant("foo", v)
assert repr(a) == "BoolValuedVariant('foo', {0})".format(repr(v))
- assert str(a) == '~foo'
+ assert str(a) == "~foo"
assert a.value is False
assert False in a
assert eval(repr(a)) == a
@@ -410,15 +407,15 @@ class TestBoolValuedVariant(object):
assert eval(repr(b)) == a
# Invalid values
- for v in ('bar', 'bar,baz'):
+ for v in ("bar", "bar,baz"):
with pytest.raises(ValueError):
- BoolValuedVariant('foo', v)
+ BoolValuedVariant("foo", v)
def test_satisfies(self):
- a = BoolValuedVariant('foo', True)
- b = BoolValuedVariant('foo', False)
- c = BoolValuedVariant('fee', False)
- d = BoolValuedVariant('foo', 'True')
+ a = BoolValuedVariant("foo", True)
+ b = BoolValuedVariant("foo", False)
+ c = BoolValuedVariant("fee", False)
+ d = BoolValuedVariant("foo", "True")
assert not a.satisfies(b)
assert not a.satisfies(c)
@@ -437,27 +434,27 @@ class TestBoolValuedVariant(object):
assert not d.satisfies(c)
# BV variants are case insensitive to 'True' or 'False'
- d_mv = MultiValuedVariant('foo', 'True')
+ d_mv = MultiValuedVariant("foo", "True")
assert d.satisfies(d_mv)
assert not b.satisfies(d_mv)
- d_mv = MultiValuedVariant('foo', 'FaLsE')
+ d_mv = MultiValuedVariant("foo", "FaLsE")
assert not d.satisfies(d_mv)
assert b.satisfies(d_mv)
- d_mv = MultiValuedVariant('foo', 'bar')
+ d_mv = MultiValuedVariant("foo", "bar")
assert not d.satisfies(d_mv)
assert not b.satisfies(d_mv)
- d_sv = SingleValuedVariant('foo', 'True')
+ d_sv = SingleValuedVariant("foo", "True")
assert d.satisfies(d_sv)
def test_compatible(self):
- a = BoolValuedVariant('foo', True)
- b = BoolValuedVariant('fee', True)
- c = BoolValuedVariant('foo', False)
- d = BoolValuedVariant('foo', 'True')
+ a = BoolValuedVariant("foo", True)
+ b = BoolValuedVariant("fee", True)
+ c = BoolValuedVariant("foo", False)
+ d = BoolValuedVariant("foo", "True")
# If the name of two bool-valued variants is the same,
# they are compatible
@@ -477,197 +474,180 @@ class TestBoolValuedVariant(object):
assert not d.compatible(b)
assert not d.compatible(c)
- for value in ('True', 'TrUe', 'TRUE'):
- d_mv = MultiValuedVariant('foo', value)
+ for value in ("True", "TrUe", "TRUE"):
+ d_mv = MultiValuedVariant("foo", value)
assert d.compatible(d_mv)
assert not c.compatible(d_mv)
- d_sv = SingleValuedVariant('foo', value)
+ d_sv = SingleValuedVariant("foo", value)
assert d.compatible(d_sv)
assert not c.compatible(d_sv)
def test_constrain(self):
# Try to constrain on a value equal to self
- a = BoolValuedVariant('foo', 'True')
- b = BoolValuedVariant('foo', True)
+ a = BoolValuedVariant("foo", "True")
+ b = BoolValuedVariant("foo", True)
changed = a.constrain(b)
assert not changed
- t = BoolValuedVariant('foo', True)
+ t = BoolValuedVariant("foo", True)
assert a == t
# Try to constrain on a value with a different value
- a = BoolValuedVariant('foo', True)
- b = BoolValuedVariant('foo', False)
+ a = BoolValuedVariant("foo", True)
+ b = BoolValuedVariant("foo", False)
with pytest.raises(UnsatisfiableVariantSpecError):
b.constrain(a)
# Try to constrain on a value with a different value
- a = BoolValuedVariant('foo', True)
- b = BoolValuedVariant('fee', True)
+ a = BoolValuedVariant("foo", True)
+ b = BoolValuedVariant("fee", True)
with pytest.raises(ValueError):
b.constrain(a)
# Try to constrain on the same value
- a = BoolValuedVariant('foo', True)
+ a = BoolValuedVariant("foo", True)
b = a.copy()
changed = a.constrain(b)
assert not changed
- t = BoolValuedVariant('foo', True)
+ t = BoolValuedVariant("foo", True)
assert a == t
# Try to constrain on other values
- a = BoolValuedVariant('foo', 'True')
- sv = SingleValuedVariant('foo', 'True')
- mv = MultiValuedVariant('foo', 'True')
+ a = BoolValuedVariant("foo", "True")
+ sv = SingleValuedVariant("foo", "True")
+ mv = MultiValuedVariant("foo", "True")
for v in (sv, mv):
assert not a.constrain(v)
def test_yaml_entry(self):
- a = BoolValuedVariant('foo', 'True')
- expected = ('foo', True)
+ a = BoolValuedVariant("foo", "True")
+ expected = ("foo", True)
assert a.yaml_entry() == expected
- a = BoolValuedVariant('foo', 'False')
- expected = ('foo', False)
+ a = BoolValuedVariant("foo", "False")
+ expected = ("foo", False)
assert a.yaml_entry() == expected
def test_from_node_dict():
- a = MultiValuedVariant.from_node_dict('foo', ['bar'])
+ a = MultiValuedVariant.from_node_dict("foo", ["bar"])
assert type(a) == MultiValuedVariant
- a = MultiValuedVariant.from_node_dict('foo', 'bar')
+ a = MultiValuedVariant.from_node_dict("foo", "bar")
assert type(a) == SingleValuedVariant
- a = MultiValuedVariant.from_node_dict('foo', 'true')
+ a = MultiValuedVariant.from_node_dict("foo", "true")
assert type(a) == BoolValuedVariant
class TestVariant(object):
-
def test_validation(self):
a = Variant(
- 'foo',
- default='',
- description='',
- values=('bar', 'baz', 'foobar'),
- multi=False
+ "foo", default="", description="", values=("bar", "baz", "foobar"), multi=False
)
# Valid vspec, shouldn't raise
- vspec = a.make_variant('bar')
+ vspec = a.make_variant("bar")
a.validate_or_raise(vspec)
# Multiple values are not allowed
with pytest.raises(MultipleValuesInExclusiveVariantError):
- vspec.value = 'bar,baz'
+ vspec.value = "bar,baz"
# Inconsistent vspec
- vspec.name = 'FOO'
+ vspec.name = "FOO"
with pytest.raises(InconsistentValidationError):
a.validate_or_raise(vspec)
# Valid multi-value vspec
a.multi = True
- vspec = a.make_variant('bar,baz')
+ vspec = a.make_variant("bar,baz")
a.validate_or_raise(vspec)
# Add an invalid value
- vspec.value = 'bar,baz,barbaz'
+ vspec.value = "bar,baz,barbaz"
with pytest.raises(InvalidVariantValueError):
a.validate_or_raise(vspec)
def test_callable_validator(self):
-
def validator(x):
try:
return isinstance(int(x), numbers.Integral)
except ValueError:
return False
- a = Variant(
- 'foo',
- default=1024,
- description='',
- values=validator,
- multi=False
- )
+ a = Variant("foo", default=1024, description="", values=validator, multi=False)
vspec = a.make_default()
a.validate_or_raise(vspec)
vspec.value = 2056
a.validate_or_raise(vspec)
- vspec.value = 'foo'
+ vspec.value = "foo"
with pytest.raises(InvalidVariantValueError):
a.validate_or_raise(vspec)
def test_representation(self):
a = Variant(
- 'foo',
- default='',
- description='',
- values=('bar', 'baz', 'foobar'),
- multi=False
+ "foo", default="", description="", values=("bar", "baz", "foobar"), multi=False
)
- assert a.allowed_values == 'bar, baz, foobar'
+ assert a.allowed_values == "bar, baz, foobar"
class TestVariantMapTest(object):
-
def test_invalid_values(self):
# Value with invalid type
a = VariantMap(None)
with pytest.raises(TypeError):
- a['foo'] = 2
+ a["foo"] = 2
# Duplicate variant
- a['foo'] = MultiValuedVariant('foo', 'bar,baz')
+ a["foo"] = MultiValuedVariant("foo", "bar,baz")
with pytest.raises(DuplicateVariantError):
- a['foo'] = MultiValuedVariant('foo', 'bar')
+ a["foo"] = MultiValuedVariant("foo", "bar")
with pytest.raises(DuplicateVariantError):
- a['foo'] = SingleValuedVariant('foo', 'bar')
+ a["foo"] = SingleValuedVariant("foo", "bar")
with pytest.raises(DuplicateVariantError):
- a['foo'] = BoolValuedVariant('foo', True)
+ a["foo"] = BoolValuedVariant("foo", True)
# Non matching names between key and vspec.name
with pytest.raises(KeyError):
- a['bar'] = MultiValuedVariant('foo', 'bar')
+ a["bar"] = MultiValuedVariant("foo", "bar")
def test_set_item(self):
# Check that all three types of variants are accepted
a = VariantMap(None)
- a['foo'] = BoolValuedVariant('foo', True)
- a['bar'] = SingleValuedVariant('bar', 'baz')
- a['foobar'] = MultiValuedVariant('foobar', 'a, b, c, d, e')
+ a["foo"] = BoolValuedVariant("foo", True)
+ a["bar"] = SingleValuedVariant("bar", "baz")
+ a["foobar"] = MultiValuedVariant("foobar", "a, b, c, d, e")
def test_substitute(self):
# Check substitution of a key that exists
a = VariantMap(None)
- a['foo'] = BoolValuedVariant('foo', True)
- a.substitute(SingleValuedVariant('foo', 'bar'))
+ a["foo"] = BoolValuedVariant("foo", True)
+ a.substitute(SingleValuedVariant("foo", "bar"))
# Trying to substitute something that is not
# in the map will raise a KeyError
with pytest.raises(KeyError):
- a.substitute(BoolValuedVariant('bar', True))
+ a.substitute(BoolValuedVariant("bar", True))
def test_satisfies_and_constrain(self):
# foo=bar foobar=fee feebar=foo
a = VariantMap(None)
- a['foo'] = MultiValuedVariant('foo', 'bar')
- a['foobar'] = SingleValuedVariant('foobar', 'fee')
- a['feebar'] = SingleValuedVariant('feebar', 'foo')
+ a["foo"] = MultiValuedVariant("foo", "bar")
+ a["foobar"] = SingleValuedVariant("foobar", "fee")
+ a["feebar"] = SingleValuedVariant("feebar", "foo")
# foo=bar,baz foobar=fee shared=True
b = VariantMap(None)
- b['foo'] = MultiValuedVariant('foo', 'bar, baz')
- b['foobar'] = SingleValuedVariant('foobar', 'fee')
- b['shared'] = BoolValuedVariant('shared', True)
+ b["foo"] = MultiValuedVariant("foo", "bar, baz")
+ b["foobar"] = SingleValuedVariant("foobar", "fee")
+ b["shared"] = BoolValuedVariant("shared", True)
assert not a.satisfies(b)
assert b.satisfies(a)
@@ -677,83 +657,83 @@ class TestVariantMapTest(object):
# foo=bar,baz foobar=fee feebar=foo shared=True
c = VariantMap(None)
- c['foo'] = MultiValuedVariant('foo', 'bar, baz')
- c['foobar'] = SingleValuedVariant('foobar', 'fee')
- c['feebar'] = SingleValuedVariant('feebar', 'foo')
- c['shared'] = BoolValuedVariant('shared', True)
+ c["foo"] = MultiValuedVariant("foo", "bar, baz")
+ c["foobar"] = SingleValuedVariant("foobar", "fee")
+ c["feebar"] = SingleValuedVariant("feebar", "foo")
+ c["shared"] = BoolValuedVariant("shared", True)
assert a.constrain(b)
assert a == c
def test_copy(self):
a = VariantMap(None)
- a['foo'] = BoolValuedVariant('foo', True)
- a['bar'] = SingleValuedVariant('bar', 'baz')
- a['foobar'] = MultiValuedVariant('foobar', 'a, b, c, d, e')
+ a["foo"] = BoolValuedVariant("foo", True)
+ a["bar"] = SingleValuedVariant("bar", "baz")
+ a["foobar"] = MultiValuedVariant("foobar", "a, b, c, d, e")
c = a.copy()
assert a == c
def test_str(self):
c = VariantMap(None)
- c['foo'] = MultiValuedVariant('foo', 'bar, baz')
- c['foobar'] = SingleValuedVariant('foobar', 'fee')
- c['feebar'] = SingleValuedVariant('feebar', 'foo')
- c['shared'] = BoolValuedVariant('shared', True)
- assert str(c) == '+shared feebar=foo foo=bar,baz foobar=fee'
+ c["foo"] = MultiValuedVariant("foo", "bar, baz")
+ c["foobar"] = SingleValuedVariant("foobar", "fee")
+ c["feebar"] = SingleValuedVariant("feebar", "foo")
+ c["shared"] = BoolValuedVariant("shared", True)
+ assert str(c) == "+shared feebar=foo foo=bar,baz foobar=fee"
def test_disjoint_set_initialization_errors():
# Constructing from non-disjoint sets should raise an exception
with pytest.raises(spack.error.SpecError) as exc_info:
- disjoint_sets(('a', 'b'), ('b', 'c'))
- assert 'sets in input must be disjoint' in str(exc_info.value)
+ disjoint_sets(("a", "b"), ("b", "c"))
+ assert "sets in input must be disjoint" in str(exc_info.value)
# A set containing the reserved item 'none' along with other items
# should raise an exception
with pytest.raises(spack.error.SpecError) as exc_info:
- disjoint_sets(('a', 'b'), ('none', 'c'))
+ disjoint_sets(("a", "b"), ("none", "c"))
assert "The value 'none' represents the empty set," in str(exc_info.value)
def test_disjoint_set_initialization():
# Test that no error is thrown when the sets are disjoint
- d = disjoint_sets(('a',), ('b', 'c'), ('e', 'f'))
+ d = disjoint_sets(("a",), ("b", "c"), ("e", "f"))
- assert d.default == 'none'
+ assert d.default == "none"
assert d.multi is True
- assert set(x for x in d) == set(['none', 'a', 'b', 'c', 'e', 'f'])
+ assert set(x for x in d) == set(["none", "a", "b", "c", "e", "f"])
def test_disjoint_set_fluent_methods():
# Construct an object without the empty set
- d = disjoint_sets(('a',), ('b', 'c'), ('e', 'f')).prohibit_empty_set()
- assert set(('none',)) not in d.sets
+ d = disjoint_sets(("a",), ("b", "c"), ("e", "f")).prohibit_empty_set()
+ assert set(("none",)) not in d.sets
# Call this 2 times to check that no matter whether
# the empty set was allowed or not before, the state
# returned is consistent.
for _ in range(2):
d = d.allow_empty_set()
- assert set(('none',)) in d.sets
- assert 'none' in d
- assert 'none' in [x for x in d]
- assert 'none' in d.feature_values
+ assert set(("none",)) in d.sets
+ assert "none" in d
+ assert "none" in [x for x in d]
+ assert "none" in d.feature_values
# Marking a value as 'non-feature' removes it from the
# list of feature values, but not from the items returned
# when iterating over the object.
- d = d.with_non_feature_values('none')
- assert 'none' in d
- assert 'none' in [x for x in d]
- assert 'none' not in d.feature_values
+ d = d.with_non_feature_values("none")
+ assert "none" in d
+ assert "none" in [x for x in d]
+ assert "none" not in d.feature_values
# Call this 2 times to check that no matter whether
# the empty set was allowed or not before, the state
# returned is consistent.
for _ in range(2):
d = d.prohibit_empty_set()
- assert set(('none',)) not in d.sets
- assert 'none' not in d
- assert 'none' not in [x for x in d]
- assert 'none' not in d.feature_values
+ assert set(("none",)) not in d.sets
+ assert "none" not in d
+ assert "none" not in [x for x in d]
+ assert "none" not in d.feature_values
diff --git a/lib/spack/spack/test/verification.py b/lib/spack/spack/test/verification.py
index ad7373a439..be2fbb3a44 100644
--- a/lib/spack/spack/test/verification.py
+++ b/lib/spack/spack/test/verification.py
@@ -18,103 +18,102 @@ import spack.store
import spack.util.spack_json as sjson
import spack.verify
-pytestmark = pytest.mark.skipif(sys.platform == 'win32',
- reason='Tests fail on Win')
+pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Tests fail on Win")
def test_link_manifest_entry(tmpdir):
# Test that symlinks are properly checked against the manifest.
# Test that the appropriate errors are generated when the check fails.
- file = str(tmpdir.join('file'))
- open(file, 'a').close()
- link = str(tmpdir.join('link'))
+ file = str(tmpdir.join("file"))
+ open(file, "a").close()
+ link = str(tmpdir.join("link"))
os.symlink(file, link)
data = spack.verify.create_manifest_entry(link)
- assert data['type'] == 'link'
- assert data['dest'] == file
- assert all(x in data for x in ('mode', 'owner', 'group'))
+ assert data["type"] == "link"
+ assert data["dest"] == file
+ assert all(x in data for x in ("mode", "owner", "group"))
results = spack.verify.check_entry(link, data)
assert not results.has_errors()
- data['type'] = 'garbage'
+ data["type"] = "garbage"
results = spack.verify.check_entry(link, data)
assert results.has_errors()
assert link in results.errors
- assert results.errors[link] == ['type']
+ assert results.errors[link] == ["type"]
- data['type'] = 'link'
+ data["type"] = "link"
- file2 = str(tmpdir.join('file2'))
- open(file2, 'a').close()
+ file2 = str(tmpdir.join("file2"))
+ open(file2, "a").close()
os.remove(link)
os.symlink(file2, link)
results = spack.verify.check_entry(link, data)
assert results.has_errors()
assert link in results.errors
- assert results.errors[link] == ['link']
+ assert results.errors[link] == ["link"]
def test_dir_manifest_entry(tmpdir):
# Test that directories are properly checked against the manifest.
# Test that the appropriate errors are generated when the check fails.
- dirent = str(tmpdir.join('dir'))
+ dirent = str(tmpdir.join("dir"))
fs.mkdirp(dirent)
data = spack.verify.create_manifest_entry(dirent)
- assert data['type'] == 'dir'
- assert all(x in data for x in ('mode', 'owner', 'group'))
+ assert data["type"] == "dir"
+ assert all(x in data for x in ("mode", "owner", "group"))
results = spack.verify.check_entry(dirent, data)
assert not results.has_errors()
- data['type'] = 'garbage'
+ data["type"] = "garbage"
results = spack.verify.check_entry(dirent, data)
assert results.has_errors()
assert dirent in results.errors
- assert results.errors[dirent] == ['type']
+ assert results.errors[dirent] == ["type"]
def test_file_manifest_entry(tmpdir):
# Test that files are properly checked against the manifest.
# Test that the appropriate errors are generated when the check fails.
- orig_str = 'This is a file'
- new_str = 'The file has changed'
+ orig_str = "This is a file"
+ new_str = "The file has changed"
- file = str(tmpdir.join('dir'))
- with open(file, 'w') as f:
+ file = str(tmpdir.join("dir"))
+ with open(file, "w") as f:
f.write(orig_str)
data = spack.verify.create_manifest_entry(file)
- assert data['type'] == 'file'
- assert data['size'] == len(orig_str)
- assert all(x in data for x in ('mode', 'owner', 'group'))
+ assert data["type"] == "file"
+ assert data["size"] == len(orig_str)
+ assert all(x in data for x in ("mode", "owner", "group"))
results = spack.verify.check_entry(file, data)
assert not results.has_errors()
- data['type'] = 'garbage'
+ data["type"] = "garbage"
results = spack.verify.check_entry(file, data)
assert results.has_errors()
assert file in results.errors
- assert results.errors[file] == ['type']
+ assert results.errors[file] == ["type"]
- data['type'] = 'file'
+ data["type"] = "file"
- with open(file, 'w') as f:
+ with open(file, "w") as f:
f.write(new_str)
results = spack.verify.check_entry(file, data)
- expected = ['size', 'hash']
+ expected = ["size", "hash"]
mtime = os.stat(file).st_mtime
- if mtime != data['time']:
- expected.append('mtime')
+ if mtime != data["time"]:
+ expected.append("mtime")
assert results.has_errors()
assert file in results.errors
@@ -124,46 +123,46 @@ def test_file_manifest_entry(tmpdir):
def test_check_chmod_manifest_entry(tmpdir):
# Check that the verification properly identifies errors for files whose
# permissions have been modified.
- file = str(tmpdir.join('dir'))
- with open(file, 'w') as f:
- f.write('This is a file')
+ file = str(tmpdir.join("dir"))
+ with open(file, "w") as f:
+ f.write("This is a file")
data = spack.verify.create_manifest_entry(file)
- os.chmod(file, data['mode'] - 1)
+ os.chmod(file, data["mode"] - 1)
results = spack.verify.check_entry(file, data)
assert results.has_errors()
assert file in results.errors
- assert results.errors[file] == ['mode']
+ assert results.errors[file] == ["mode"]
def test_check_prefix_manifest(tmpdir):
# Test the verification of an entire prefix and its contents
- prefix_path = tmpdir.join('prefix')
+ prefix_path = tmpdir.join("prefix")
prefix = str(prefix_path)
- spec = spack.spec.Spec('libelf')
+ spec = spack.spec.Spec("libelf")
spec._mark_concrete()
spec.prefix = prefix
results = spack.verify.check_spec_manifest(spec)
assert results.has_errors()
assert prefix in results.errors
- assert results.errors[prefix] == ['manifest missing']
+ assert results.errors[prefix] == ["manifest missing"]
- metadata_dir = str(prefix_path.join('.spack'))
- bin_dir = str(prefix_path.join('bin'))
- other_dir = str(prefix_path.join('other'))
+ metadata_dir = str(prefix_path.join(".spack"))
+ bin_dir = str(prefix_path.join("bin"))
+ other_dir = str(prefix_path.join("other"))
for d in (metadata_dir, bin_dir, other_dir):
fs.mkdirp(d)
- file = os.path.join(other_dir, 'file')
- with open(file, 'w') as f:
+ file = os.path.join(other_dir, "file")
+ with open(file, "w") as f:
f.write("I'm a little file short and stout")
- link = os.path.join(bin_dir, 'run')
+ link = os.path.join(bin_dir, "run")
symlink(file, link)
spack.verify.write_manifest(spec)
@@ -171,8 +170,8 @@ def test_check_prefix_manifest(tmpdir):
assert not results.has_errors()
os.remove(link)
- malware = os.path.join(metadata_dir, 'hiddenmalware')
- with open(malware, 'w') as f:
+ malware = os.path.join(metadata_dir, "hiddenmalware")
+ with open(malware, "w") as f:
f.write("Foul evil deeds")
results = spack.verify.check_spec_manifest(spec)
@@ -180,54 +179,53 @@ def test_check_prefix_manifest(tmpdir):
assert all(x in results.errors for x in (malware, link))
assert len(results.errors) == 2
- assert results.errors[link] == ['deleted']
- assert results.errors[malware] == ['added']
+ assert results.errors[link] == ["deleted"]
+ assert results.errors[malware] == ["added"]
- manifest_file = os.path.join(spec.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
- with open(manifest_file, 'w') as f:
+ manifest_file = os.path.join(
+ spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
+ with open(manifest_file, "w") as f:
f.write("{This) string is not proper json")
results = spack.verify.check_spec_manifest(spec)
assert results.has_errors()
- assert results.errors[spec.prefix] == ['manifest corrupted']
+ assert results.errors[spec.prefix] == ["manifest corrupted"]
def test_single_file_verification(tmpdir):
# Test the API to verify a single file, including finding the package
# to which it belongs
- filedir = os.path.join(str(tmpdir), 'a', 'b', 'c', 'd')
- filepath = os.path.join(filedir, 'file')
+ filedir = os.path.join(str(tmpdir), "a", "b", "c", "d")
+ filepath = os.path.join(filedir, "file")
metadir = os.path.join(str(tmpdir), spack.store.layout.metadata_dir)
fs.mkdirp(filedir)
fs.mkdirp(metadir)
- with open(filepath, 'w') as f:
+ with open(filepath, "w") as f:
f.write("I'm a file")
data = spack.verify.create_manifest_entry(filepath)
- manifest_file = os.path.join(metadir,
- spack.store.layout.manifest_file_name)
+ manifest_file = os.path.join(metadir, spack.store.layout.manifest_file_name)
- with open(manifest_file, 'w') as f:
+ with open(manifest_file, "w") as f:
sjson.dump({filepath: data}, f)
results = spack.verify.check_file_manifest(filepath)
assert not results.has_errors()
os.utime(filepath, (0, 0))
- with open(filepath, 'w') as f:
+ with open(filepath, "w") as f:
f.write("I changed.")
results = spack.verify.check_file_manifest(filepath)
- expected = ['hash']
+ expected = ["hash"]
mtime = os.stat(filepath).st_mtime
- if mtime != data['time']:
- expected.append('mtime')
+ if mtime != data["time"]:
+ expected.append("mtime")
assert results.has_errors()
assert filepath in results.errors
@@ -236,4 +234,4 @@ def test_single_file_verification(tmpdir):
shutil.rmtree(metadir)
results = spack.verify.check_file_manifest(filepath)
assert results.has_errors()
- assert results.errors[filepath] == ['not owned by any package']
+ assert results.errors[filepath] == ["not owned by any package"]
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index bcfebcd264..d73a48cc1b 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -106,425 +106,428 @@ def check_union(expected, a, b):
def test_string_prefix():
- assert_ver_eq('xsdk-0.2.0', 'xsdk-0.2.0')
- assert_ver_lt('xsdk-0.2.0', 'xsdk-0.3')
- assert_ver_gt('xsdk-0.3', 'xsdk-0.2.0')
+ assert_ver_eq("xsdk-0.2.0", "xsdk-0.2.0")
+ assert_ver_lt("xsdk-0.2.0", "xsdk-0.3")
+ assert_ver_gt("xsdk-0.3", "xsdk-0.2.0")
def test_two_segments():
- assert_ver_eq('1.0', '1.0')
- assert_ver_lt('1.0', '2.0')
- assert_ver_gt('2.0', '1.0')
+ assert_ver_eq("1.0", "1.0")
+ assert_ver_lt("1.0", "2.0")
+ assert_ver_gt("2.0", "1.0")
def test_develop():
- assert_ver_eq('develop', 'develop')
- assert_ver_eq('develop.local', 'develop.local')
- assert_ver_lt('1.0', 'develop')
- assert_ver_gt('develop', '1.0')
- assert_ver_eq('1.develop', '1.develop')
- assert_ver_lt('1.1', '1.develop')
- assert_ver_gt('1.develop', '1.0')
- assert_ver_gt('0.5.develop', '0.5')
- assert_ver_lt('0.5', '0.5.develop')
- assert_ver_lt('1.develop', '2.1')
- assert_ver_gt('2.1', '1.develop')
- assert_ver_lt('1.develop.1', '1.develop.2')
- assert_ver_gt('1.develop.2', '1.develop.1')
- assert_ver_lt('develop.1', 'develop.2')
- assert_ver_gt('develop.2', 'develop.1')
+ assert_ver_eq("develop", "develop")
+ assert_ver_eq("develop.local", "develop.local")
+ assert_ver_lt("1.0", "develop")
+ assert_ver_gt("develop", "1.0")
+ assert_ver_eq("1.develop", "1.develop")
+ assert_ver_lt("1.1", "1.develop")
+ assert_ver_gt("1.develop", "1.0")
+ assert_ver_gt("0.5.develop", "0.5")
+ assert_ver_lt("0.5", "0.5.develop")
+ assert_ver_lt("1.develop", "2.1")
+ assert_ver_gt("2.1", "1.develop")
+ assert_ver_lt("1.develop.1", "1.develop.2")
+ assert_ver_gt("1.develop.2", "1.develop.1")
+ assert_ver_lt("develop.1", "develop.2")
+ assert_ver_gt("develop.2", "develop.1")
# other +infinity versions
- assert_ver_gt('master', '9.0')
- assert_ver_gt('head', '9.0')
- assert_ver_gt('trunk', '9.0')
- assert_ver_gt('develop', '9.0')
+ assert_ver_gt("master", "9.0")
+ assert_ver_gt("head", "9.0")
+ assert_ver_gt("trunk", "9.0")
+ assert_ver_gt("develop", "9.0")
# hierarchical develop-like versions
- assert_ver_gt('develop', 'master')
- assert_ver_gt('master', 'head')
- assert_ver_gt('head', 'trunk')
- assert_ver_gt('9.0', 'system')
+ assert_ver_gt("develop", "master")
+ assert_ver_gt("master", "head")
+ assert_ver_gt("head", "trunk")
+ assert_ver_gt("9.0", "system")
# not develop
- assert_ver_lt('mydevelopmentnightmare', '1.1')
- assert_ver_lt('1.mydevelopmentnightmare', '1.1')
- assert_ver_gt('1.1', '1.mydevelopmentnightmare')
+ assert_ver_lt("mydevelopmentnightmare", "1.1")
+ assert_ver_lt("1.mydevelopmentnightmare", "1.1")
+ assert_ver_gt("1.1", "1.mydevelopmentnightmare")
def test_isdevelop():
- assert ver('develop').isdevelop()
- assert ver('develop.1').isdevelop()
- assert ver('develop.local').isdevelop()
- assert ver('master').isdevelop()
- assert ver('head').isdevelop()
- assert ver('trunk').isdevelop()
- assert ver('1.develop').isdevelop()
- assert ver('1.develop.2').isdevelop()
- assert not ver('1.1').isdevelop()
- assert not ver('1.mydevelopmentnightmare.3').isdevelop()
- assert not ver('mydevelopmentnightmare.3').isdevelop()
+ assert ver("develop").isdevelop()
+ assert ver("develop.1").isdevelop()
+ assert ver("develop.local").isdevelop()
+ assert ver("master").isdevelop()
+ assert ver("head").isdevelop()
+ assert ver("trunk").isdevelop()
+ assert ver("1.develop").isdevelop()
+ assert ver("1.develop.2").isdevelop()
+ assert not ver("1.1").isdevelop()
+ assert not ver("1.mydevelopmentnightmare.3").isdevelop()
+ assert not ver("mydevelopmentnightmare.3").isdevelop()
def test_three_segments():
- assert_ver_eq('2.0.1', '2.0.1')
- assert_ver_lt('2.0', '2.0.1')
- assert_ver_gt('2.0.1', '2.0')
+ assert_ver_eq("2.0.1", "2.0.1")
+ assert_ver_lt("2.0", "2.0.1")
+ assert_ver_gt("2.0.1", "2.0")
def test_alpha():
# TODO: not sure whether I like this. 2.0.1a is *usually*
# TODO: less than 2.0.1, but special-casing it makes version
# TODO: comparison complicated. See version.py
- assert_ver_eq('2.0.1a', '2.0.1a')
- assert_ver_gt('2.0.1a', '2.0.1')
- assert_ver_lt('2.0.1', '2.0.1a')
+ assert_ver_eq("2.0.1a", "2.0.1a")
+ assert_ver_gt("2.0.1a", "2.0.1")
+ assert_ver_lt("2.0.1", "2.0.1a")
def test_patch():
- assert_ver_eq('5.5p1', '5.5p1')
- assert_ver_lt('5.5p1', '5.5p2')
- assert_ver_gt('5.5p2', '5.5p1')
- assert_ver_eq('5.5p10', '5.5p10')
- assert_ver_lt('5.5p1', '5.5p10')
- assert_ver_gt('5.5p10', '5.5p1')
+ assert_ver_eq("5.5p1", "5.5p1")
+ assert_ver_lt("5.5p1", "5.5p2")
+ assert_ver_gt("5.5p2", "5.5p1")
+ assert_ver_eq("5.5p10", "5.5p10")
+ assert_ver_lt("5.5p1", "5.5p10")
+ assert_ver_gt("5.5p10", "5.5p1")
def test_num_alpha_with_no_separator():
- assert_ver_lt('10xyz', '10.1xyz')
- assert_ver_gt('10.1xyz', '10xyz')
- assert_ver_eq('xyz10', 'xyz10')
- assert_ver_lt('xyz10', 'xyz10.1')
- assert_ver_gt('xyz10.1', 'xyz10')
+ assert_ver_lt("10xyz", "10.1xyz")
+ assert_ver_gt("10.1xyz", "10xyz")
+ assert_ver_eq("xyz10", "xyz10")
+ assert_ver_lt("xyz10", "xyz10.1")
+ assert_ver_gt("xyz10.1", "xyz10")
def test_alpha_with_dots():
- assert_ver_eq('xyz.4', 'xyz.4')
- assert_ver_lt('xyz.4', '8')
- assert_ver_gt('8', 'xyz.4')
- assert_ver_lt('xyz.4', '2')
- assert_ver_gt('2', 'xyz.4')
+ assert_ver_eq("xyz.4", "xyz.4")
+ assert_ver_lt("xyz.4", "8")
+ assert_ver_gt("8", "xyz.4")
+ assert_ver_lt("xyz.4", "2")
+ assert_ver_gt("2", "xyz.4")
def test_nums_and_patch():
- assert_ver_lt('5.5p2', '5.6p1')
- assert_ver_gt('5.6p1', '5.5p2')
- assert_ver_lt('5.6p1', '6.5p1')
- assert_ver_gt('6.5p1', '5.6p1')
+ assert_ver_lt("5.5p2", "5.6p1")
+ assert_ver_gt("5.6p1", "5.5p2")
+ assert_ver_lt("5.6p1", "6.5p1")
+ assert_ver_gt("6.5p1", "5.6p1")
def test_rc_versions():
- assert_ver_gt('6.0.rc1', '6.0')
- assert_ver_lt('6.0', '6.0.rc1')
+ assert_ver_gt("6.0.rc1", "6.0")
+ assert_ver_lt("6.0", "6.0.rc1")
def test_alpha_beta():
- assert_ver_gt('10b2', '10a1')
- assert_ver_lt('10a2', '10b2')
+ assert_ver_gt("10b2", "10a1")
+ assert_ver_lt("10a2", "10b2")
def test_double_alpha():
- assert_ver_eq('1.0aa', '1.0aa')
- assert_ver_lt('1.0a', '1.0aa')
- assert_ver_gt('1.0aa', '1.0a')
+ assert_ver_eq("1.0aa", "1.0aa")
+ assert_ver_lt("1.0a", "1.0aa")
+ assert_ver_gt("1.0aa", "1.0a")
def test_padded_numbers():
- assert_ver_eq('10.0001', '10.0001')
- assert_ver_eq('10.0001', '10.1')
- assert_ver_eq('10.1', '10.0001')
- assert_ver_lt('10.0001', '10.0039')
- assert_ver_gt('10.0039', '10.0001')
+ assert_ver_eq("10.0001", "10.0001")
+ assert_ver_eq("10.0001", "10.1")
+ assert_ver_eq("10.1", "10.0001")
+ assert_ver_lt("10.0001", "10.0039")
+ assert_ver_gt("10.0039", "10.0001")
def test_close_numbers():
- assert_ver_lt('4.999.9', '5.0')
- assert_ver_gt('5.0', '4.999.9')
+ assert_ver_lt("4.999.9", "5.0")
+ assert_ver_gt("5.0", "4.999.9")
def test_date_stamps():
- assert_ver_eq('20101121', '20101121')
- assert_ver_lt('20101121', '20101122')
- assert_ver_gt('20101122', '20101121')
+ assert_ver_eq("20101121", "20101121")
+ assert_ver_lt("20101121", "20101122")
+ assert_ver_gt("20101122", "20101121")
def test_underscores():
- assert_ver_eq('2_0', '2_0')
- assert_ver_eq('2.0', '2_0')
- assert_ver_eq('2_0', '2.0')
- assert_ver_eq('2-0', '2_0')
- assert_ver_eq('2_0', '2-0')
+ assert_ver_eq("2_0", "2_0")
+ assert_ver_eq("2.0", "2_0")
+ assert_ver_eq("2_0", "2.0")
+ assert_ver_eq("2-0", "2_0")
+ assert_ver_eq("2_0", "2-0")
def test_rpm_oddities():
- assert_ver_eq('1b.fc17', '1b.fc17')
- assert_ver_lt('1b.fc17', '1.fc17')
- assert_ver_gt('1.fc17', '1b.fc17')
- assert_ver_eq('1g.fc17', '1g.fc17')
- assert_ver_gt('1g.fc17', '1.fc17')
- assert_ver_lt('1.fc17', '1g.fc17')
+ assert_ver_eq("1b.fc17", "1b.fc17")
+ assert_ver_lt("1b.fc17", "1.fc17")
+ assert_ver_gt("1.fc17", "1b.fc17")
+ assert_ver_eq("1g.fc17", "1g.fc17")
+ assert_ver_gt("1g.fc17", "1.fc17")
+ assert_ver_lt("1.fc17", "1g.fc17")
# Stuff below here is not taken from RPM's tests and is
# unique to spack
def test_version_ranges():
- assert_ver_lt('1.2:1.4', '1.6')
- assert_ver_gt('1.6', '1.2:1.4')
- assert_ver_eq('1.2:1.4', '1.2:1.4')
- assert ver('1.2:1.4') != ver('1.2:1.6')
+ assert_ver_lt("1.2:1.4", "1.6")
+ assert_ver_gt("1.6", "1.2:1.4")
+ assert_ver_eq("1.2:1.4", "1.2:1.4")
+ assert ver("1.2:1.4") != ver("1.2:1.6")
- assert_ver_lt('1.2:1.4', '1.5:1.6')
- assert_ver_gt('1.5:1.6', '1.2:1.4')
+ assert_ver_lt("1.2:1.4", "1.5:1.6")
+ assert_ver_gt("1.5:1.6", "1.2:1.4")
def test_contains():
- assert_in('1.3', '1.2:1.4')
- assert_in('1.2.5', '1.2:1.4')
- assert_in('1.3.5', '1.2:1.4')
- assert_in('1.3.5-7', '1.2:1.4')
- assert_not_in('1.1', '1.2:1.4')
- assert_not_in('1.5', '1.2:1.4')
- assert_not_in('1.5', '1.5.1:1.6')
- assert_not_in('1.5', '1.5.1:')
+ assert_in("1.3", "1.2:1.4")
+ assert_in("1.2.5", "1.2:1.4")
+ assert_in("1.3.5", "1.2:1.4")
+ assert_in("1.3.5-7", "1.2:1.4")
+ assert_not_in("1.1", "1.2:1.4")
+ assert_not_in("1.5", "1.2:1.4")
+ assert_not_in("1.5", "1.5.1:1.6")
+ assert_not_in("1.5", "1.5.1:")
- assert_in('1.4.2', '1.2:1.4')
- assert_not_in('1.4.2', '1.2:1.4.0')
+ assert_in("1.4.2", "1.2:1.4")
+ assert_not_in("1.4.2", "1.2:1.4.0")
- assert_in('1.2.8', '1.2.7:1.4')
- assert_in('1.2.7:1.4', ':')
- assert_not_in('1.2.5', '1.2.7:1.4')
+ assert_in("1.2.8", "1.2.7:1.4")
+ assert_in("1.2.7:1.4", ":")
+ assert_not_in("1.2.5", "1.2.7:1.4")
- assert_in('1.4.1', '1.2.7:1.4')
- assert_not_in('1.4.1', '1.2.7:1.4.0')
+ assert_in("1.4.1", "1.2.7:1.4")
+ assert_not_in("1.4.1", "1.2.7:1.4.0")
def test_in_list():
- assert_in('1.2', ['1.5', '1.2', '1.3'])
- assert_in('1.2.5', ['1.5', '1.2:1.3'])
- assert_in('1.5', ['1.5', '1.2:1.3'])
- assert_not_in('1.4', ['1.5', '1.2:1.3'])
+ assert_in("1.2", ["1.5", "1.2", "1.3"])
+ assert_in("1.2.5", ["1.5", "1.2:1.3"])
+ assert_in("1.5", ["1.5", "1.2:1.3"])
+ assert_not_in("1.4", ["1.5", "1.2:1.3"])
- assert_in('1.2.5:1.2.7', [':'])
- assert_in('1.2.5:1.2.7', ['1.5', '1.2:1.3'])
- assert_not_in('1.2.5:1.5', ['1.5', '1.2:1.3'])
- assert_not_in('1.1:1.2.5', ['1.5', '1.2:1.3'])
+ assert_in("1.2.5:1.2.7", [":"])
+ assert_in("1.2.5:1.2.7", ["1.5", "1.2:1.3"])
+ assert_not_in("1.2.5:1.5", ["1.5", "1.2:1.3"])
+ assert_not_in("1.1:1.2.5", ["1.5", "1.2:1.3"])
def test_ranges_overlap():
- assert_overlaps('1.2', '1.2')
- assert_overlaps('1.2.1', '1.2.1')
- assert_overlaps('1.2.1b', '1.2.1b')
-
- assert_overlaps('1.2:1.7', '1.6:1.9')
- assert_overlaps(':1.7', '1.6:1.9')
- assert_overlaps(':1.7', ':1.9')
- assert_overlaps(':1.7', '1.6:')
- assert_overlaps('1.2:', '1.6:1.9')
- assert_overlaps('1.2:', ':1.9')
- assert_overlaps('1.2:', '1.6:')
- assert_overlaps(':', ':')
- assert_overlaps(':', '1.6:1.9')
- assert_overlaps('1.6:1.9', ':')
+ assert_overlaps("1.2", "1.2")
+ assert_overlaps("1.2.1", "1.2.1")
+ assert_overlaps("1.2.1b", "1.2.1b")
+
+ assert_overlaps("1.2:1.7", "1.6:1.9")
+ assert_overlaps(":1.7", "1.6:1.9")
+ assert_overlaps(":1.7", ":1.9")
+ assert_overlaps(":1.7", "1.6:")
+ assert_overlaps("1.2:", "1.6:1.9")
+ assert_overlaps("1.2:", ":1.9")
+ assert_overlaps("1.2:", "1.6:")
+ assert_overlaps(":", ":")
+ assert_overlaps(":", "1.6:1.9")
+ assert_overlaps("1.6:1.9", ":")
def test_overlap_with_containment():
- assert_in('1.6.5', '1.6')
- assert_in('1.6.5', ':1.6')
+ assert_in("1.6.5", "1.6")
+ assert_in("1.6.5", ":1.6")
- assert_overlaps('1.6.5', ':1.6')
- assert_overlaps(':1.6', '1.6.5')
+ assert_overlaps("1.6.5", ":1.6")
+ assert_overlaps(":1.6", "1.6.5")
- assert_not_in(':1.6', '1.6.5')
- assert_in('1.6.5', ':1.6')
+ assert_not_in(":1.6", "1.6.5")
+ assert_in("1.6.5", ":1.6")
def test_lists_overlap():
- assert_overlaps('1.2b:1.7,5', '1.6:1.9,1')
- assert_overlaps('1,2,3,4,5', '3,4,5,6,7')
- assert_overlaps('1,2,3,4,5', '5,6,7')
- assert_overlaps('1,2,3,4,5', '5:7')
- assert_overlaps('1,2,3,4,5', '3, 6:7')
- assert_overlaps('1, 2, 4, 6.5', '3, 6:7')
- assert_overlaps('1, 2, 4, 6.5', ':, 5, 8')
- assert_overlaps('1, 2, 4, 6.5', ':')
- assert_no_overlap('1, 2, 4', '3, 6:7')
- assert_no_overlap('1,2,3,4,5', '6,7')
- assert_no_overlap('1,2,3,4,5', '6:7')
+ assert_overlaps("1.2b:1.7,5", "1.6:1.9,1")
+ assert_overlaps("1,2,3,4,5", "3,4,5,6,7")
+ assert_overlaps("1,2,3,4,5", "5,6,7")
+ assert_overlaps("1,2,3,4,5", "5:7")
+ assert_overlaps("1,2,3,4,5", "3, 6:7")
+ assert_overlaps("1, 2, 4, 6.5", "3, 6:7")
+ assert_overlaps("1, 2, 4, 6.5", ":, 5, 8")
+ assert_overlaps("1, 2, 4, 6.5", ":")
+ assert_no_overlap("1, 2, 4", "3, 6:7")
+ assert_no_overlap("1,2,3,4,5", "6,7")
+ assert_no_overlap("1,2,3,4,5", "6:7")
def test_canonicalize_list():
- assert_canonical(['1.2', '1.3', '1.4'], ['1.2', '1.3', '1.3', '1.4'])
+ assert_canonical(["1.2", "1.3", "1.4"], ["1.2", "1.3", "1.3", "1.4"])
- assert_canonical(['1.2', '1.3:1.4'], ['1.2', '1.3', '1.3:1.4'])
+ assert_canonical(["1.2", "1.3:1.4"], ["1.2", "1.3", "1.3:1.4"])
- assert_canonical(['1.2', '1.3:1.4'], ['1.2', '1.3:1.4', '1.4'])
+ assert_canonical(["1.2", "1.3:1.4"], ["1.2", "1.3:1.4", "1.4"])
- assert_canonical(['1.3:1.4'], ['1.3:1.4', '1.3', '1.3.1', '1.3.9', '1.4'])
+ assert_canonical(["1.3:1.4"], ["1.3:1.4", "1.3", "1.3.1", "1.3.9", "1.4"])
- assert_canonical(['1.3:1.4'], ['1.3', '1.3.1', '1.3.9', '1.4', '1.3:1.4'])
+ assert_canonical(["1.3:1.4"], ["1.3", "1.3.1", "1.3.9", "1.4", "1.3:1.4"])
- assert_canonical(
- ['1.3:1.5'], ['1.3', '1.3.1', '1.3.9', '1.4:1.5', '1.3:1.4']
- )
+ assert_canonical(["1.3:1.5"], ["1.3", "1.3.1", "1.3.9", "1.4:1.5", "1.3:1.4"])
- assert_canonical(['1.3:1.5'], ['1.3, 1.3.1,1.3.9,1.4:1.5,1.3:1.4'])
+ assert_canonical(["1.3:1.5"], ["1.3, 1.3.1,1.3.9,1.4:1.5,1.3:1.4"])
- assert_canonical(['1.3:1.5'], ['1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
+ assert_canonical(["1.3:1.5"], ["1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4"])
- assert_canonical([':'], [':,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
+ assert_canonical([":"], [":,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4"])
def test_intersection():
- check_intersection('2.5', '1.0:2.5', '2.5:3.0')
- check_intersection('2.5:2.7', '1.0:2.7', '2.5:3.0')
- check_intersection('0:1', ':', '0:1')
+ check_intersection("2.5", "1.0:2.5", "2.5:3.0")
+ check_intersection("2.5:2.7", "1.0:2.7", "2.5:3.0")
+ check_intersection("0:1", ":", "0:1")
- check_intersection(['1.0', '2.5:2.7'], ['1.0:2.7'], ['2.5:3.0', '1.0'])
- check_intersection(['2.5:2.7'], ['1.1:2.7'], ['2.5:3.0', '1.0'])
- check_intersection(['0:1'], [':'], ['0:1'])
+ check_intersection(["1.0", "2.5:2.7"], ["1.0:2.7"], ["2.5:3.0", "1.0"])
+ check_intersection(["2.5:2.7"], ["1.1:2.7"], ["2.5:3.0", "1.0"])
+ check_intersection(["0:1"], [":"], ["0:1"])
def test_intersect_with_containment():
- check_intersection('1.6.5', '1.6.5', ':1.6')
- check_intersection('1.6.5', ':1.6', '1.6.5')
+ check_intersection("1.6.5", "1.6.5", ":1.6")
+ check_intersection("1.6.5", ":1.6", "1.6.5")
- check_intersection('1.6:1.6.5', ':1.6.5', '1.6')
- check_intersection('1.6:1.6.5', '1.6', ':1.6.5')
+ check_intersection("1.6:1.6.5", ":1.6.5", "1.6")
+ check_intersection("1.6:1.6.5", "1.6", ":1.6.5")
- check_intersection('11.2', '11', '11.2')
- check_intersection('11.2', '11.2', '11')
+ check_intersection("11.2", "11", "11.2")
+ check_intersection("11.2", "11.2", "11")
def test_union_with_containment():
- check_union(':1.6', '1.6.5', ':1.6')
- check_union(':1.6', ':1.6', '1.6.5')
+ check_union(":1.6", "1.6.5", ":1.6")
+ check_union(":1.6", ":1.6", "1.6.5")
- check_union(':1.6', ':1.6.5', '1.6')
- check_union(':1.6', '1.6', ':1.6.5')
+ check_union(":1.6", ":1.6.5", "1.6")
+ check_union(":1.6", "1.6", ":1.6.5")
- check_union(':', '1.0:', ':2.0')
+ check_union(":", "1.0:", ":2.0")
- check_union('1:4', '1:3', '2:4')
- check_union('1:4', '2:4', '1:3')
+ check_union("1:4", "1:3", "2:4")
+ check_union("1:4", "2:4", "1:3")
# Tests successor/predecessor case.
- check_union('1:4', '1:2', '3:4')
+ check_union("1:4", "1:2", "3:4")
def test_basic_version_satisfaction():
- assert_satisfies('4.7.3', '4.7.3')
+ assert_satisfies("4.7.3", "4.7.3")
- assert_satisfies('4.7.3', '4.7')
- assert_satisfies('4.7.3b2', '4.7')
- assert_satisfies('4.7b6', '4.7')
+ assert_satisfies("4.7.3", "4.7")
+ assert_satisfies("4.7.3b2", "4.7")
+ assert_satisfies("4.7b6", "4.7")
- assert_satisfies('4.7.3', '4')
- assert_satisfies('4.7.3b2', '4')
- assert_satisfies('4.7b6', '4')
+ assert_satisfies("4.7.3", "4")
+ assert_satisfies("4.7.3b2", "4")
+ assert_satisfies("4.7b6", "4")
- assert_does_not_satisfy('4.8.0', '4.9')
- assert_does_not_satisfy('4.8', '4.9')
- assert_does_not_satisfy('4', '4.9')
+ assert_does_not_satisfy("4.8.0", "4.9")
+ assert_does_not_satisfy("4.8", "4.9")
+ assert_does_not_satisfy("4", "4.9")
def test_basic_version_satisfaction_in_lists():
- assert_satisfies(['4.7.3'], ['4.7.3'])
+ assert_satisfies(["4.7.3"], ["4.7.3"])
- assert_satisfies(['4.7.3'], ['4.7'])
- assert_satisfies(['4.7.3b2'], ['4.7'])
- assert_satisfies(['4.7b6'], ['4.7'])
+ assert_satisfies(["4.7.3"], ["4.7"])
+ assert_satisfies(["4.7.3b2"], ["4.7"])
+ assert_satisfies(["4.7b6"], ["4.7"])
- assert_satisfies(['4.7.3'], ['4'])
- assert_satisfies(['4.7.3b2'], ['4'])
- assert_satisfies(['4.7b6'], ['4'])
+ assert_satisfies(["4.7.3"], ["4"])
+ assert_satisfies(["4.7.3b2"], ["4"])
+ assert_satisfies(["4.7b6"], ["4"])
- assert_does_not_satisfy(['4.8.0'], ['4.9'])
- assert_does_not_satisfy(['4.8'], ['4.9'])
- assert_does_not_satisfy(['4'], ['4.9'])
+ assert_does_not_satisfy(["4.8.0"], ["4.9"])
+ assert_does_not_satisfy(["4.8"], ["4.9"])
+ assert_does_not_satisfy(["4"], ["4.9"])
def test_version_range_satisfaction():
- assert_satisfies('4.7b6', '4.3:4.7')
- assert_satisfies('4.3.0', '4.3:4.7')
- assert_satisfies('4.3.2', '4.3:4.7')
+ assert_satisfies("4.7b6", "4.3:4.7")
+ assert_satisfies("4.3.0", "4.3:4.7")
+ assert_satisfies("4.3.2", "4.3:4.7")
- assert_does_not_satisfy('4.8.0', '4.3:4.7')
- assert_does_not_satisfy('4.3', '4.4:4.7')
+ assert_does_not_satisfy("4.8.0", "4.3:4.7")
+ assert_does_not_satisfy("4.3", "4.4:4.7")
- assert_satisfies('4.7b6', '4.3:4.7')
- assert_does_not_satisfy('4.8.0', '4.3:4.7')
+ assert_satisfies("4.7b6", "4.3:4.7")
+ assert_does_not_satisfy("4.8.0", "4.3:4.7")
def test_version_range_satisfaction_in_lists():
- assert_satisfies(['4.7b6'], ['4.3:4.7'])
- assert_satisfies(['4.3.0'], ['4.3:4.7'])
- assert_satisfies(['4.3.2'], ['4.3:4.7'])
+ assert_satisfies(["4.7b6"], ["4.3:4.7"])
+ assert_satisfies(["4.3.0"], ["4.3:4.7"])
+ assert_satisfies(["4.3.2"], ["4.3:4.7"])
- assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
- assert_does_not_satisfy(['4.3'], ['4.4:4.7'])
+ assert_does_not_satisfy(["4.8.0"], ["4.3:4.7"])
+ assert_does_not_satisfy(["4.3"], ["4.4:4.7"])
- assert_satisfies(['4.7b6'], ['4.3:4.7'])
- assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
+ assert_satisfies(["4.7b6"], ["4.3:4.7"])
+ assert_does_not_satisfy(["4.8.0"], ["4.3:4.7"])
def test_satisfaction_with_lists():
- assert_satisfies('4.7', '4.3, 4.6, 4.7')
- assert_satisfies('4.7.3', '4.3, 4.6, 4.7')
- assert_satisfies('4.6.5', '4.3, 4.6, 4.7')
- assert_satisfies('4.6.5.2', '4.3, 4.6, 4.7')
+ assert_satisfies("4.7", "4.3, 4.6, 4.7")
+ assert_satisfies("4.7.3", "4.3, 4.6, 4.7")
+ assert_satisfies("4.6.5", "4.3, 4.6, 4.7")
+ assert_satisfies("4.6.5.2", "4.3, 4.6, 4.7")
- assert_does_not_satisfy('4', '4.3, 4.6, 4.7')
- assert_does_not_satisfy('4.8.0', '4.2, 4.3:4.7')
+ assert_does_not_satisfy("4", "4.3, 4.6, 4.7")
+ assert_does_not_satisfy("4.8.0", "4.2, 4.3:4.7")
- assert_satisfies('4.8.0', '4.2, 4.3:4.8')
- assert_satisfies('4.8.2', '4.2, 4.3:4.8')
+ assert_satisfies("4.8.0", "4.2, 4.3:4.8")
+ assert_satisfies("4.8.2", "4.2, 4.3:4.8")
def test_formatted_strings():
versions = (
- '1.2.3b', '1_2_3b', '1-2-3b',
- '1.2-3b', '1.2_3b', '1-2.3b',
- '1-2_3b', '1_2.3b', '1_2-3b'
+ "1.2.3b",
+ "1_2_3b",
+ "1-2-3b",
+ "1.2-3b",
+ "1.2_3b",
+ "1-2.3b",
+ "1-2_3b",
+ "1_2.3b",
+ "1_2-3b",
)
for item in versions:
v = Version(item)
- assert v.dotted.string == '1.2.3b'
- assert v.dashed.string == '1-2-3b'
- assert v.underscored.string == '1_2_3b'
- assert v.joined.string == '123b'
+ assert v.dotted.string == "1.2.3b"
+ assert v.dashed.string == "1-2-3b"
+ assert v.underscored.string == "1_2_3b"
+ assert v.joined.string == "123b"
- assert v.dotted.dashed.string == '1-2-3b'
- assert v.dotted.underscored.string == '1_2_3b'
- assert v.dotted.dotted.string == '1.2.3b'
- assert v.dotted.joined.string == '123b'
+ assert v.dotted.dashed.string == "1-2-3b"
+ assert v.dotted.underscored.string == "1_2_3b"
+ assert v.dotted.dotted.string == "1.2.3b"
+ assert v.dotted.joined.string == "123b"
def test_up_to():
- v = Version('1.23-4_5b')
+ v = Version("1.23-4_5b")
- assert v.up_to(1).string == '1'
- assert v.up_to(2).string == '1.23'
- assert v.up_to(3).string == '1.23-4'
- assert v.up_to(4).string == '1.23-4_5'
- assert v.up_to(5).string == '1.23-4_5b'
+ assert v.up_to(1).string == "1"
+ assert v.up_to(2).string == "1.23"
+ assert v.up_to(3).string == "1.23-4"
+ assert v.up_to(4).string == "1.23-4_5"
+ assert v.up_to(5).string == "1.23-4_5b"
- assert v.up_to(-1).string == '1.23-4_5'
- assert v.up_to(-2).string == '1.23-4'
- assert v.up_to(-3).string == '1.23'
- assert v.up_to(-4).string == '1'
+ assert v.up_to(-1).string == "1.23-4_5"
+ assert v.up_to(-2).string == "1.23-4"
+ assert v.up_to(-3).string == "1.23"
+ assert v.up_to(-4).string == "1"
- assert v.up_to(2).dotted.string == '1.23'
- assert v.up_to(2).dashed.string == '1-23'
- assert v.up_to(2).underscored.string == '1_23'
- assert v.up_to(2).joined.string == '123'
+ assert v.up_to(2).dotted.string == "1.23"
+ assert v.up_to(2).dashed.string == "1-23"
+ assert v.up_to(2).underscored.string == "1_23"
+ assert v.up_to(2).joined.string == "123"
- assert v.dotted.up_to(2).string == '1.23' == v.up_to(2).dotted.string
- assert v.dashed.up_to(2).string == '1-23' == v.up_to(2).dashed.string
- assert v.underscored.up_to(2).string == '1_23'
- assert v.up_to(2).underscored.string == '1_23'
+ assert v.dotted.up_to(2).string == "1.23" == v.up_to(2).dotted.string
+ assert v.dashed.up_to(2).string == "1-23" == v.up_to(2).dashed.string
+ assert v.underscored.up_to(2).string == "1_23"
+ assert v.up_to(2).underscored.string == "1_23"
- assert v.up_to(2).up_to(1).string == '1'
+ assert v.up_to(2).up_to(1).string == "1"
def test_repr_and_str():
-
def check_repr_and_str(vrs):
a = Version(vrs)
assert repr(a) == "VersionBase('" + vrs + "')"
@@ -533,189 +536,183 @@ def test_repr_and_str():
assert str(a) == vrs
assert str(a) == str(b)
- check_repr_and_str('1.2.3')
- check_repr_and_str('R2016a')
- check_repr_and_str('R2016a.2-3_4')
+ check_repr_and_str("1.2.3")
+ check_repr_and_str("R2016a")
+ check_repr_and_str("R2016a.2-3_4")
def test_len():
- a = Version('1.2.3.4')
+ a = Version("1.2.3.4")
assert len(a) == len(a.version)
- assert(len(a) == 4)
- b = Version('2018.0')
- assert(len(b) == 2)
+ assert len(a) == 4
+ b = Version("2018.0")
+ assert len(b) == 2
def test_get_item():
- a = Version('0.1_2-3')
+ a = Version("0.1_2-3")
assert isinstance(a[1], int)
# Test slicing
b = a[0:2]
assert isinstance(b, VersionBase)
- assert b == Version('0.1')
+ assert b == Version("0.1")
assert repr(b) == "VersionBase('0.1')"
- assert str(b) == '0.1'
+ assert str(b) == "0.1"
b = a[0:3]
assert isinstance(b, VersionBase)
- assert b == Version('0.1_2')
+ assert b == Version("0.1_2")
assert repr(b) == "VersionBase('0.1_2')"
- assert str(b) == '0.1_2'
+ assert str(b) == "0.1_2"
b = a[1:]
assert isinstance(b, VersionBase)
- assert b == Version('1_2-3')
+ assert b == Version("1_2-3")
assert repr(b) == "VersionBase('1_2-3')"
- assert str(b) == '1_2-3'
+ assert str(b) == "1_2-3"
# Raise TypeError on tuples
with pytest.raises(TypeError):
b.__getitem__(1, 2)
def test_list_highest():
- vl = VersionList(['master', '1.2.3', 'develop', '3.4.5', 'foobar'])
- assert vl.highest() == Version('develop')
- assert vl.lowest() == Version('foobar')
- assert vl.highest_numeric() == Version('3.4.5')
+ vl = VersionList(["master", "1.2.3", "develop", "3.4.5", "foobar"])
+ assert vl.highest() == Version("develop")
+ assert vl.lowest() == Version("foobar")
+ assert vl.highest_numeric() == Version("3.4.5")
- vl2 = VersionList(['master', 'develop'])
+ vl2 = VersionList(["master", "develop"])
assert vl2.highest_numeric() is None
- assert vl2.preferred() == Version('develop')
- assert vl2.lowest() == Version('master')
+ assert vl2.preferred() == Version("develop")
+ assert vl2.lowest() == Version("master")
-@pytest.mark.parametrize('version_str', [
- "foo 1.2.0",
- "!",
- "1!2"
-])
+@pytest.mark.parametrize("version_str", ["foo 1.2.0", "!", "1!2"])
def test_invalid_versions(version_str):
"""Ensure invalid versions are rejected with a ValueError"""
with pytest.raises(ValueError):
Version(version_str)
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_versions_from_git(mock_git_version_info, monkeypatch, mock_packages):
repo_path, filename, commits = mock_git_version_info
- monkeypatch.setattr(spack.package_base.PackageBase, 'git', 'file://%s' % repo_path,
- raising=False)
+ monkeypatch.setattr(
+ spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False
+ )
for commit in commits:
- spec = spack.spec.Spec('git-test-commit@%s' % commit)
+ spec = spack.spec.Spec("git-test-commit@%s" % commit)
version = spec.version
- comparator = [str(v) if not isinstance(v, int) else v
- for v in version._cmp(version.ref_lookup)]
+ comparator = [
+ str(v) if not isinstance(v, int) else v for v in version._cmp(version.ref_lookup)
+ ]
with working_dir(repo_path):
- which('git')('checkout', commit)
- with open(os.path.join(repo_path, filename), 'r') as f:
+ which("git")("checkout", commit)
+ with open(os.path.join(repo_path, filename), "r") as f:
expected = f.read()
assert str(comparator) == expected
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-def test_git_hash_comparisons(
- mock_git_version_info, install_mockery, mock_packages, monkeypatch):
- """Check that hashes compare properly to versions
- """
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+def test_git_hash_comparisons(mock_git_version_info, install_mockery, mock_packages, monkeypatch):
+ """Check that hashes compare properly to versions"""
repo_path, filename, commits = mock_git_version_info
- monkeypatch.setattr(spack.package_base.PackageBase,
- 'git', 'file://%s' % repo_path,
- raising=False)
+ monkeypatch.setattr(
+ spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False
+ )
# Spec based on earliest commit
- spec0 = spack.spec.Spec('git-test-commit@%s' % commits[-1])
+ spec0 = spack.spec.Spec("git-test-commit@%s" % commits[-1])
spec0.concretize()
- assert spec0.satisfies('@:0')
- assert not spec0.satisfies('@1.0')
+ assert spec0.satisfies("@:0")
+ assert not spec0.satisfies("@1.0")
# Spec based on second commit (same as version 1.0)
- spec1 = spack.spec.Spec('git-test-commit@%s' % commits[-2])
+ spec1 = spack.spec.Spec("git-test-commit@%s" % commits[-2])
spec1.concretize()
- assert spec1.satisfies('@1.0')
- assert not spec1.satisfies('@1.1:')
+ assert spec1.satisfies("@1.0")
+ assert not spec1.satisfies("@1.1:")
# Spec based on 4th commit (in timestamp order)
- spec4 = spack.spec.Spec('git-test-commit@%s' % commits[-4])
+ spec4 = spack.spec.Spec("git-test-commit@%s" % commits[-4])
spec4.concretize()
- assert spec4.satisfies('@1.1')
- assert spec4.satisfies('@1.0:1.2')
+ assert spec4.satisfies("@1.1")
+ assert spec4.satisfies("@1.0:1.2")
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
-def test_git_ref_comparisons(
- mock_git_version_info, install_mockery, mock_packages, monkeypatch):
- """Check that hashes compare properly to versions
- """
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
+def test_git_ref_comparisons(mock_git_version_info, install_mockery, mock_packages, monkeypatch):
+ """Check that hashes compare properly to versions"""
repo_path, filename, commits = mock_git_version_info
- monkeypatch.setattr(spack.package_base.PackageBase,
- 'git', 'file://%s' % repo_path,
- raising=False)
+ monkeypatch.setattr(
+ spack.package_base.PackageBase, "git", "file://%s" % repo_path, raising=False
+ )
# Spec based on tag v1.0
- spec_tag = spack.spec.Spec('git-test-commit@git.v1.0')
+ spec_tag = spack.spec.Spec("git-test-commit@git.v1.0")
spec_tag.concretize()
- assert spec_tag.satisfies('@1.0')
- assert not spec_tag.satisfies('@1.1:')
- assert str(spec_tag.version) == 'git.v1.0'
+ assert spec_tag.satisfies("@1.0")
+ assert not spec_tag.satisfies("@1.1:")
+ assert str(spec_tag.version) == "git.v1.0"
# Spec based on branch 1.x
- spec_branch = spack.spec.Spec('git-test-commit@git.1.x')
+ spec_branch = spack.spec.Spec("git-test-commit@git.1.x")
spec_branch.concretize()
- assert spec_branch.satisfies('@1.2')
- assert spec_branch.satisfies('@1.1:1.3')
- assert str(spec_branch.version) == 'git.1.x'
-
-
-@pytest.mark.parametrize('string,git', [
- ('1.2.9', False),
- ('gitmain', False),
- ('git.foo', True),
- ('git.abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd', True),
- ('abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd', True),
-])
+ assert spec_branch.satisfies("@1.2")
+ assert spec_branch.satisfies("@1.1:1.3")
+ assert str(spec_branch.version) == "git.1.x"
+
+
+@pytest.mark.parametrize(
+ "string,git",
+ [
+ ("1.2.9", False),
+ ("gitmain", False),
+ ("git.foo", True),
+ ("git.abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd", True),
+ ("abcdabcdabcdabcdabcdabcdabcdabcdabcdabcd", True),
+ ],
+)
def test_version_git_vs_base(string, git):
assert isinstance(Version(string), GitVersion) == git
def test_version_range_nonempty():
- assert Version('1.2.9') in VersionRange('1.2.0', '1.2')
- assert Version('1.1.1') in ver('1.0:1')
+ assert Version("1.2.9") in VersionRange("1.2.0", "1.2")
+ assert Version("1.1.1") in ver("1.0:1")
def test_empty_version_range_raises():
with pytest.raises(ValueError):
- assert VersionRange('2', '1.0')
+ assert VersionRange("2", "1.0")
with pytest.raises(ValueError):
- assert ver('2:1.0')
+ assert ver("2:1.0")
def test_version_empty_slice():
"""Check an empty slice to confirm get "empty" version instead of
- an IndexError (#25953).
+ an IndexError (#25953).
"""
- assert Version('1.')[1:] == Version('')
+ assert Version("1.")[1:] == Version("")
def test_version_wrong_idx_type():
"""Ensure exception raised if attempt to use non-integer index."""
- v = Version('1.1')
+ v = Version("1.1")
with pytest.raises(TypeError):
- v['0:']
+ v["0:"]
-@pytest.mark.regression('29170')
+@pytest.mark.regression("29170")
def test_version_range_satisfies_means_nonempty_intersection():
- x = VersionRange('3.7.0', '3')
- y = VersionRange('3.6.0', '3.6.0')
+ x = VersionRange("3.7.0", "3")
+ y = VersionRange("3.6.0", "3.6.0")
assert not x.satisfies(y)
assert not y.satisfies(x)
-@pytest.mark.regression('26482')
+@pytest.mark.regression("26482")
def test_version_list_with_range_included_in_concrete_version_interpreted_as_range():
# Note: this test only tests whether we can construct a version list of a range
# and a version, where the range is contained in the version when it is interpreted
@@ -723,10 +720,10 @@ def test_version_list_with_range_included_in_concrete_version_interpreted_as_ran
    # Clearly it *shouldn't* be interpreted that way, but that is how Spack currently
# behaves, and this test only ensures that creating a VersionList of this type
# does not throw like reported in the linked Github issue.
- VersionList([Version('3.1'), VersionRange('3.1.1', '3.1.2')])
+ VersionList([Version("3.1"), VersionRange("3.1.1", "3.1.2")])
@pytest.mark.xfail
def test_version_list_with_range_and_concrete_version_is_not_concrete():
- v = VersionList([Version('3.1'), VersionRange('3.1.1', '3.1.2')])
+ v = VersionList([Version("3.1"), VersionRange("3.1.1", "3.1.2")])
assert v.concrete
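
The version tests above pin down three behaviours of spack.version: strings prefixed with "git." (or bare 40-character hex hashes) parse as GitVersion, ranges written "lo:hi" are inclusive on both ends, and reversed bounds are rejected. A minimal standalone sketch of those semantics, assuming a Spack checkout on sys.path:

    # Sketch only: mirrors the assertions in the tests above.
    from spack.version import GitVersion, Version, VersionRange, ver

    # "git."-prefixed strings and bare 40-char hex hashes become GitVersion.
    assert isinstance(Version("git.v1.0"), GitVersion)
    assert not isinstance(Version("1.2.9"), GitVersion)

    # Ranges are inclusive on both ends; ver("lo:hi") builds the same range.
    assert Version("1.2.9") in VersionRange("1.2.0", "1.2")
    assert Version("1.1.1") in ver("1.0:1")

    # Reversed bounds are rejected outright.
    try:
        VersionRange("2", "1.0")
    except ValueError:
        pass
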
diff --git a/lib/spack/spack/test/views.py b/lib/spack/spack/test/views.py
index c7e9bb5213..d2801f12ba 100644
--- a/lib/spack/spack/test/views.py
+++ b/lib/spack/spack/test/views.py
@@ -13,38 +13,34 @@ from spack.filesystem_view import YamlFilesystemView
from spack.spec import Spec
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_global_activation(install_mockery, mock_fetch):
"""This test ensures that views which are maintained inside of an extendee
- package's prefix are maintained as expected and are compatible with
- global activations prior to #7152.
+ package's prefix are maintained as expected and are compatible with
+ global activations prior to #7152.
"""
- spec = Spec('extension1').concretized()
+ spec = Spec("extension1").concretized()
pkg = spec.package
pkg.do_install()
pkg.do_activate()
- extendee_spec = spec['extendee']
- extendee_pkg = spec['extendee'].package
+ extendee_spec = spec["extendee"]
+ extendee_pkg = spec["extendee"].package
view = extendee_pkg.view()
assert pkg.is_activated(view)
- expected_path = os.path.join(
- extendee_spec.prefix, '.spack', 'extensions.yaml')
- assert (view.extensions_layout.extension_file_path(extendee_spec) ==
- expected_path)
+ expected_path = os.path.join(extendee_spec.prefix, ".spack", "extensions.yaml")
+ assert view.extensions_layout.extension_file_path(extendee_spec) == expected_path
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_remove_extensions_ordered(install_mockery, mock_fetch, tmpdir):
- view_dir = str(tmpdir.join('view'))
+ view_dir = str(tmpdir.join("view"))
layout = DirectoryLayout(view_dir)
view = YamlFilesystemView(view_dir, layout)
- e2 = Spec('extension2').concretized()
+ e2 = Spec("extension2").concretized()
e2.package.do_install()
view.add_specs(e2)
- e1 = e2['extension1']
+ e1 = e2["extension1"]
view.remove_specs(e1, e2)
diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py
index 72f5c88ad4..21c00e652c 100644
--- a/lib/spack/spack/test/web.py
+++ b/lib/spack/spack/test/web.py
@@ -19,63 +19,70 @@ from spack.version import ver
def _create_url(relative_url):
- web_data_path = posixpath.join(spack.paths.test_path, 'data', 'web')
- return 'file://' + posixpath.join(web_data_path, relative_url)
+ web_data_path = posixpath.join(spack.paths.test_path, "data", "web")
+ return "file://" + posixpath.join(web_data_path, relative_url)
-root = _create_url('index.html')
-root_tarball = _create_url('foo-0.0.0.tar.gz')
-page_1 = _create_url('1.html')
-page_2 = _create_url('2.html')
-page_3 = _create_url('3.html')
-page_4 = _create_url('4.html')
+root = _create_url("index.html")
+root_tarball = _create_url("foo-0.0.0.tar.gz")
+page_1 = _create_url("1.html")
+page_2 = _create_url("2.html")
+page_3 = _create_url("3.html")
+page_4 = _create_url("4.html")
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
@pytest.mark.parametrize(
- 'depth,expected_found,expected_not_found,expected_text', [
- (0,
- {'pages': [root], 'links': [page_1]},
- {'pages': [page_1, page_2, page_3, page_4],
- 'links': [root, page_2, page_3, page_4]},
- {root: "This is the root page."}),
- (1,
- {'pages': [root, page_1], 'links': [page_1, page_2]},
- {'pages': [page_2, page_3, page_4],
- 'links': [root, page_3, page_4]},
- {root: "This is the root page.",
- page_1: "This is page 1."}),
- (2,
- {'pages': [root, page_1, page_2],
- 'links': [page_1, page_2, page_3, page_4]},
- {'pages': [page_3, page_4], 'links': [root]},
- {root: "This is the root page.",
- page_1: "This is page 1.",
- page_2: "This is page 2."}),
- (3,
- {'pages': [root, page_1, page_2, page_3, page_4],
- 'links': [root, page_1, page_2, page_3, page_4]},
- {'pages': [], 'links': []},
- {root: "This is the root page.",
- page_1: "This is page 1.",
- page_2: "This is page 2.",
- page_3: "This is page 3.",
- page_4: "This is page 4."}),
- ])
+ "depth,expected_found,expected_not_found,expected_text",
+ [
+ (
+ 0,
+ {"pages": [root], "links": [page_1]},
+ {"pages": [page_1, page_2, page_3, page_4], "links": [root, page_2, page_3, page_4]},
+ {root: "This is the root page."},
+ ),
+ (
+ 1,
+ {"pages": [root, page_1], "links": [page_1, page_2]},
+ {"pages": [page_2, page_3, page_4], "links": [root, page_3, page_4]},
+ {root: "This is the root page.", page_1: "This is page 1."},
+ ),
+ (
+ 2,
+ {"pages": [root, page_1, page_2], "links": [page_1, page_2, page_3, page_4]},
+ {"pages": [page_3, page_4], "links": [root]},
+ {root: "This is the root page.", page_1: "This is page 1.", page_2: "This is page 2."},
+ ),
+ (
+ 3,
+ {
+ "pages": [root, page_1, page_2, page_3, page_4],
+ "links": [root, page_1, page_2, page_3, page_4],
+ },
+ {"pages": [], "links": []},
+ {
+ root: "This is the root page.",
+ page_1: "This is page 1.",
+ page_2: "This is page 2.",
+ page_3: "This is page 3.",
+ page_4: "This is page 4.",
+ },
+ ),
+ ],
+)
def test_spider(depth, expected_found, expected_not_found, expected_text):
pages, links = spack.util.web.spider(root, depth=depth)
- for page in expected_found['pages']:
+ for page in expected_found["pages"]:
assert page in pages
- for page in expected_not_found['pages']:
+ for page in expected_not_found["pages"]:
assert page not in pages
- for link in expected_found['links']:
+ for link in expected_found["links"]:
assert link in links
- for link in expected_not_found['links']:
+ for link in expected_not_found["links"]:
assert link not in links
for page, text in expected_text.items():
@@ -84,148 +91,127 @@ def test_spider(depth, expected_found, expected_not_found, expected_text):
def test_spider_no_response(monkeypatch):
# Mock the absence of a response
- monkeypatch.setattr(
- spack.util.web, 'read_from_url', lambda x, y: (None, None, None)
- )
+ monkeypatch.setattr(spack.util.web, "read_from_url", lambda x, y: (None, None, None))
pages, links = spack.util.web.spider(root, depth=0)
assert not pages and not links
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_0():
- versions = spack.util.web.find_versions_of_archive(
- root_tarball, root, list_depth=0)
- assert ver('0.0.0') in versions
+ versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=0)
+ assert ver("0.0.0") in versions
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_1():
- versions = spack.util.web.find_versions_of_archive(
- root_tarball, root, list_depth=1)
- assert ver('0.0.0') in versions
- assert ver('1.0.0') in versions
+ versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=1)
+ assert ver("0.0.0") in versions
+ assert ver("1.0.0") in versions
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_2():
- versions = spack.util.web.find_versions_of_archive(
- root_tarball, root, list_depth=2)
- assert ver('0.0.0') in versions
- assert ver('1.0.0') in versions
- assert ver('2.0.0') in versions
+ versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
+ assert ver("0.0.0") in versions
+ assert ver("1.0.0") in versions
+ assert ver("2.0.0") in versions
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_2():
- versions = spack.util.web.find_versions_of_archive(
- root_tarball, root, list_depth=2)
+ versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=2)
# up for grabs to make this better.
- assert ver('2.0.0b2') in versions
+ assert ver("2.0.0b2") in versions
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_versions_of_archive_3():
- versions = spack.util.web.find_versions_of_archive(
- root_tarball, root, list_depth=3)
- assert ver('0.0.0') in versions
- assert ver('1.0.0') in versions
- assert ver('2.0.0') in versions
- assert ver('3.0') in versions
- assert ver('4.5') in versions
+ versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
+ assert ver("0.0.0") in versions
+ assert ver("1.0.0") in versions
+ assert ver("2.0.0") in versions
+ assert ver("3.0") in versions
+ assert ver("4.5") in versions
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_find_exotic_versions_of_archive_3():
- versions = spack.util.web.find_versions_of_archive(
- root_tarball, root, list_depth=3)
- assert ver('2.0.0b2') in versions
- assert ver('3.0a1') in versions
- assert ver('4.5-rc5') in versions
+ versions = spack.util.web.find_versions_of_archive(root_tarball, root, list_depth=3)
+ assert ver("2.0.0b2") in versions
+ assert ver("3.0a1") in versions
+ assert ver("4.5-rc5") in versions
def test_get_header():
- headers = {
- 'Content-type': 'text/plain'
- }
+ headers = {"Content-type": "text/plain"}
# looking up headers should just work like a plain dict
# lookup when there is an entry with the right key
- assert(spack.util.web.get_header(headers, 'Content-type') == 'text/plain')
+ assert spack.util.web.get_header(headers, "Content-type") == "text/plain"
# looking up headers should still work if there is a fuzzy match
- assert(spack.util.web.get_header(headers, 'contentType') == 'text/plain')
+ assert spack.util.web.get_header(headers, "contentType") == "text/plain"
# ...unless there is an exact match for the "fuzzy" spelling.
- headers['contentType'] = 'text/html'
- assert(spack.util.web.get_header(headers, 'contentType') == 'text/html')
+ headers["contentType"] = "text/html"
+ assert spack.util.web.get_header(headers, "contentType") == "text/html"
# If lookup has to fallback to fuzzy matching and there are more than one
# fuzzy match, the result depends on the internal ordering of the given
# mapping
headers = collections.OrderedDict()
- headers['Content-type'] = 'text/plain'
- headers['contentType'] = 'text/html'
+ headers["Content-type"] = "text/plain"
+ headers["contentType"] = "text/html"
- assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/plain')
- del headers['Content-type']
- assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/html')
+ assert spack.util.web.get_header(headers, "CONTENT_TYPE") == "text/plain"
+ del headers["Content-type"]
+ assert spack.util.web.get_header(headers, "CONTENT_TYPE") == "text/html"
# Same as above, but different ordering
headers = collections.OrderedDict()
- headers['contentType'] = 'text/html'
- headers['Content-type'] = 'text/plain'
+ headers["contentType"] = "text/html"
+ headers["Content-type"] = "text/plain"
- assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/html')
- del headers['contentType']
- assert(spack.util.web.get_header(headers, 'CONTENT_TYPE') == 'text/plain')
+ assert spack.util.web.get_header(headers, "CONTENT_TYPE") == "text/html"
+ del headers["contentType"]
+ assert spack.util.web.get_header(headers, "CONTENT_TYPE") == "text/plain"
# If there isn't even a fuzzy match, raise KeyError
with pytest.raises(KeyError):
- spack.util.web.get_header(headers, 'ContentLength')
+ spack.util.web.get_header(headers, "ContentLength")
-@pytest.mark.skipif(sys.platform == 'win32',
- reason="Not supported on Windows (yet)")
+@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
def test_list_url(tmpdir):
testpath = str(tmpdir)
- os.mkdir(os.path.join(testpath, 'dir'))
+ os.mkdir(os.path.join(testpath, "dir"))
- with open(os.path.join(testpath, 'file-0.txt'), 'w'):
+ with open(os.path.join(testpath, "file-0.txt"), "w"):
pass
- with open(os.path.join(testpath, 'file-1.txt'), 'w'):
+ with open(os.path.join(testpath, "file-1.txt"), "w"):
pass
- with open(os.path.join(testpath, 'file-2.txt'), 'w'):
+ with open(os.path.join(testpath, "file-2.txt"), "w"):
pass
- with open(os.path.join(testpath, 'dir', 'another-file.txt'), 'w'):
+ with open(os.path.join(testpath, "dir", "another-file.txt"), "w"):
pass
- list_url = lambda recursive: list(sorted(
- spack.util.web.list_url(testpath, recursive=recursive)))
+ list_url = lambda recursive: list(
+ sorted(spack.util.web.list_url(testpath, recursive=recursive))
+ )
- assert list_url(False) == ['file-0.txt',
- 'file-1.txt',
- 'file-2.txt']
+ assert list_url(False) == ["file-0.txt", "file-1.txt", "file-2.txt"]
- assert list_url(True) == ['dir/another-file.txt',
- 'file-0.txt',
- 'file-1.txt',
- 'file-2.txt']
+ assert list_url(True) == ["dir/another-file.txt", "file-0.txt", "file-1.txt", "file-2.txt"]
class MockPages(object):
def search(self, *args, **kwargs):
return [
- {'Key': 'keyone'},
- {'Key': 'keytwo'},
- {'Key': 'keythree'},
+ {"Key": "keyone"},
+ {"Key": "keytwo"},
+ {"Key": "keythree"},
]
@@ -236,7 +222,7 @@ class MockPaginator(object):
class MockClientError(Exception):
def __init__(self):
- self.response = {'Error': {'Code': 'NoSuchKey'}}
+ self.response = {"Error": {"Code": "NoSuchKey"}}
class MockS3Client(object):
@@ -245,13 +231,8 @@ class MockS3Client(object):
def delete_objects(self, *args, **kwargs):
return {
- 'Errors': [
- {'Key': 'keyone', 'Message': 'Access Denied'}
- ],
- 'Deleted': [
- {'Key': 'keytwo'},
- {'Key': 'keythree'}
- ],
+ "Errors": [{"Key": "keyone", "Message": "Access Denied"}],
+ "Deleted": [{"Key": "keytwo"}, {"Key": "keythree"}],
}
def delete_object(self, *args, **kwargs):
@@ -259,16 +240,18 @@ class MockS3Client(object):
def get_object(self, Bucket=None, Key=None):
self.ClientError = MockClientError
- if Bucket == 'my-bucket' and Key == 'subdirectory/my-file':
+ if Bucket == "my-bucket" and Key == "subdirectory/my-file":
return True
raise self.ClientError
def test_gather_s3_information(monkeypatch, capfd):
- mock_connection_data = {"access_token": "AAAAAAA",
- "profile": "SPacKDeV",
- "access_pair": ("SPA", "CK"),
- "endpoint_url": "https://127.0.0.1:8888"}
+ mock_connection_data = {
+ "access_token": "AAAAAAA",
+ "profile": "SPacKDeV",
+ "access_pair": ("SPA", "CK"),
+ "endpoint_url": "https://127.0.0.1:8888",
+ }
session_args, client_args = spack.util.s3.get_mirror_s3_connection_info(mock_connection_data)
@@ -288,13 +271,12 @@ def test_gather_s3_information(monkeypatch, capfd):
def test_remove_s3_url(monkeypatch, capfd):
- fake_s3_url = 's3://my-bucket/subdirectory/mirror'
+ fake_s3_url = "s3://my-bucket/subdirectory/mirror"
def mock_create_s3_session(url, connection={}):
return MockS3Client()
- monkeypatch.setattr(
- spack.util.s3, 'create_s3_session', mock_create_s3_session)
+ monkeypatch.setattr(spack.util.s3, "create_s3_session", mock_create_s3_session)
current_debug_level = tty.debug_level()
tty.set_debug(1)
@@ -304,24 +286,24 @@ def test_remove_s3_url(monkeypatch, capfd):
tty.set_debug(current_debug_level)
- assert('Failed to delete keyone (Access Denied)' in err)
- assert('Deleted keythree' in err)
- assert('Deleted keytwo' in err)
+ assert "Failed to delete keyone (Access Denied)" in err
+ assert "Deleted keythree" in err
+ assert "Deleted keytwo" in err
def test_s3_url_exists(monkeypatch, capfd):
def mock_create_s3_session(url, connection={}):
return MockS3Client()
- monkeypatch.setattr(
- spack.util.s3, 'create_s3_session', mock_create_s3_session)
- fake_s3_url_exists = 's3://my-bucket/subdirectory/my-file'
- assert(spack.util.web.url_exists(fake_s3_url_exists))
+ monkeypatch.setattr(spack.util.s3, "create_s3_session", mock_create_s3_session)
+
+ fake_s3_url_exists = "s3://my-bucket/subdirectory/my-file"
+ assert spack.util.web.url_exists(fake_s3_url_exists)
- fake_s3_url_does_not_exist = 's3://my-bucket/subdirectory/my-notfound-file'
- assert(not spack.util.web.url_exists(fake_s3_url_does_not_exist))
+ fake_s3_url_does_not_exist = "s3://my-bucket/subdirectory/my-notfound-file"
+ assert not spack.util.web.url_exists(fake_s3_url_does_not_exist)
def test_s3_url_parsing():
- assert(spack.util.s3._parse_s3_endpoint_url("example.com") == 'https://example.com')
- assert(spack.util.s3._parse_s3_endpoint_url("http://example.com") == 'http://example.com')
+ assert spack.util.s3._parse_s3_endpoint_url("example.com") == "https://example.com"
+ assert spack.util.s3._parse_s3_endpoint_url("http://example.com") == "http://example.com"
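
Two behaviours in spack.util.web are worth restating from the tests above: find_versions_of_archive only discovers versions reachable within list_depth link hops from the listing page, and get_header falls back to a fuzzy, case- and punctuation-insensitive key match when the exact header name is absent. A small sketch of the get_header contract, assuming a Spack checkout on sys.path:

    # Sketch only: exact header names win; otherwise a fuzzy match is used.
    import spack.util.web as web

    headers = {"Content-type": "text/plain"}
    assert web.get_header(headers, "Content-type") == "text/plain"  # exact match
    assert web.get_header(headers, "contentType") == "text/plain"   # fuzzy match

    headers["contentType"] = "text/html"
    assert web.get_header(headers, "contentType") == "text/html"    # exact wins again

    # A key with no exact or fuzzy counterpart raises KeyError.
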
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index f12a0c88ce..00c7d68063 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -74,30 +74,26 @@ def find_list_urls(url):
url_types = [
# GitHub
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
- (r'(.*github\.com/[^/]+/[^/]+)',
- lambda m: m.group(1) + '/releases'),
-
+ (r"(.*github\.com/[^/]+/[^/]+)", lambda m: m.group(1) + "/releases"),
# GitLab API endpoint
# e.g. https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2
- (r'(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)',
- lambda m: m.group(1) + '/' + m.group(2) + '/' + m.group(3) + '/tags'),
-
+ (
+ r"(.*gitlab[^/]+)/api/v4/projects/([^/]+)%2F([^/]+)",
+ lambda m: m.group(1) + "/" + m.group(2) + "/" + m.group(3) + "/tags",
+ ),
# GitLab non-API endpoint
# e.g. https://gitlab.dkrz.de/k202009/libaec/uploads/631e85bcf877c2dcaca9b2e6d6526339/libaec-1.0.0.tar.gz
- (r'(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)',
- lambda m: m.group(1) + '/tags'),
-
+ (r"(.*gitlab[^/]+/(?!api/v4/projects)[^/]+/[^/]+)", lambda m: m.group(1) + "/tags"),
# BitBucket
# e.g. https://bitbucket.org/eigen/eigen/get/3.3.3.tar.bz2
- (r'(.*bitbucket.org/[^/]+/[^/]+)',
- lambda m: m.group(1) + '/downloads/?tab=tags'),
-
+ (r"(.*bitbucket.org/[^/]+/[^/]+)", lambda m: m.group(1) + "/downloads/?tab=tags"),
# CRAN
# e.g. https://cran.r-project.org/src/contrib/Rcpp_0.12.9.tar.gz
# e.g. https://cloud.r-project.org/src/contrib/rgl_0.98.1.tar.gz
- (r'(.*\.r-project\.org/src/contrib)/([^_]+)',
- lambda m: m.group(1) + '/Archive/' + m.group(2)),
-
+ (
+ r"(.*\.r-project\.org/src/contrib)/([^_]+)",
+ lambda m: m.group(1) + "/Archive/" + m.group(2),
+ ),
# PyPI
# e.g. https://pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://www.pypi.io/packages/source/n/numpy/numpy-1.19.4.zip
@@ -105,15 +101,22 @@ def find_list_urls(url):
# e.g. https://pypi.python.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://files.pythonhosted.org/packages/source/n/numpy/numpy-1.19.4.zip
# e.g. https://pypi.io/packages/py2.py3/o/opencensus-context/opencensus_context-0.1.1-py2.py3-none-any.whl
- (r'(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)',
- lambda m: 'https://pypi.org/simple/' + m.group(1) + '/'),
-
+ (
+ r"(?:pypi|pythonhosted)[^/]+/packages/[^/]+/./([^/]+)",
+ lambda m: "https://pypi.org/simple/" + m.group(1) + "/",
+ ),
# LuaRocks
# e.g. https://luarocks.org/manifests/gvvaughan/lpeg-1.0.2-1.src.rock
# e.g. https://luarocks.org/manifests/openresty/lua-cjson-2.1.0-1.src.rock
- (r'luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/'
- + r'(?P<name>.+?)-[0-9.-]*\.src\.rock',
- lambda m: 'https://luarocks.org/modules/' + m.group('org') + '/' + m.group('name') + '/'),
+ (
+ r"luarocks[^/]+/(?:modules|manifests)/(?P<org>[^/]+)/"
+ + r"(?P<name>.+?)-[0-9.-]*\.src\.rock",
+ lambda m: "https://luarocks.org/modules/"
+ + m.group("org")
+ + "/"
+ + m.group("name")
+ + "/",
+ ),
]
list_urls = set([os.path.dirname(url)])
@@ -132,17 +135,17 @@ def strip_query_and_fragment(path):
stripped = components[:3] + (None, None)
query, frag = components[3:5]
- suffix = ''
+ suffix = ""
if query:
- suffix += '?' + query
+ suffix += "?" + query
if frag:
- suffix += '#' + frag
+ suffix += "#" + frag
return (urlunsplit(stripped), suffix)
except ValueError:
tty.debug("Got error parsing path %s" % path)
- return (path, '') # Ignore URL parse errors here
+ return (path, "") # Ignore URL parse errors here
def strip_version_suffixes(path):
@@ -173,102 +176,95 @@ def strip_version_suffixes(path):
suffix_regexes = [
# Download type
- r'[Ii]nstall',
- r'all',
- r'code',
- r'[Ss]ources?',
- r'file',
- r'full',
- r'single',
- r'with[a-zA-Z_-]+',
- r'rock',
- r'src(_0)?',
- r'public',
- r'bin',
- r'binary',
- r'run',
- r'[Uu]niversal',
- r'jar',
- r'complete',
- r'dynamic',
- r'oss',
- r'gem',
- r'tar',
- r'sh',
-
+ r"[Ii]nstall",
+ r"all",
+ r"code",
+ r"[Ss]ources?",
+ r"file",
+ r"full",
+ r"single",
+ r"with[a-zA-Z_-]+",
+ r"rock",
+ r"src(_0)?",
+ r"public",
+ r"bin",
+ r"binary",
+ r"run",
+ r"[Uu]niversal",
+ r"jar",
+ r"complete",
+ r"dynamic",
+ r"oss",
+ r"gem",
+ r"tar",
+ r"sh",
# Download version
- r'release',
- r'bin',
- r'stable',
- r'[Ff]inal',
- r'rel',
- r'orig',
- r'dist',
- r'\+',
-
+ r"release",
+ r"bin",
+ r"stable",
+ r"[Ff]inal",
+ r"rel",
+ r"orig",
+ r"dist",
+ r"\+",
# License
- r'gpl',
-
+ r"gpl",
# Arch
# Needs to come before and after OS, appears in both orders
- r'ia32',
- r'intel',
- r'amd64',
- r'linux64',
- r'x64',
- r'64bit',
- r'x86[_-]64',
- r'i586_64',
- r'x86',
- r'i[36]86',
- r'ppc64(le)?',
- r'armv?(7l|6l|64)',
-
+ r"ia32",
+ r"intel",
+ r"amd64",
+ r"linux64",
+ r"x64",
+ r"64bit",
+ r"x86[_-]64",
+ r"i586_64",
+ r"x86",
+ r"i[36]86",
+ r"ppc64(le)?",
+ r"armv?(7l|6l|64)",
# Other
- r'cpp',
- r'gtk',
- r'incubating',
-
+ r"cpp",
+ r"gtk",
+ r"incubating",
# OS
- r'[Ll]inux(_64)?',
- r'LINUX',
- r'[Uu]ni?x',
- r'[Ss]un[Oo][Ss]',
- r'[Mm]ac[Oo][Ss][Xx]?',
- r'[Oo][Ss][Xx]',
- r'[Dd]arwin(64)?',
- r'[Aa]pple',
- r'[Ww]indows',
- r'[Ww]in(64|32)?',
- r'[Cc]ygwin(64|32)?',
- r'[Mm]ingw',
- r'centos',
-
+ r"[Ll]inux(_64)?",
+ r"LINUX",
+ r"[Uu]ni?x",
+ r"[Ss]un[Oo][Ss]",
+ r"[Mm]ac[Oo][Ss][Xx]?",
+ r"[Oo][Ss][Xx]",
+ r"[Dd]arwin(64)?",
+ r"[Aa]pple",
+ r"[Ww]indows",
+ r"[Ww]in(64|32)?",
+ r"[Cc]ygwin(64|32)?",
+ r"[Mm]ingw",
+ r"centos",
# Arch
# Needs to come before and after OS, appears in both orders
- r'ia32',
- r'intel',
- r'amd64',
- r'linux64',
- r'x64',
- r'64bit',
- r'x86[_-]64',
- r'i586_64',
- r'x86',
- r'i[36]86',
- r'ppc64(le)?',
- r'armv?(7l|6l|64)?',
-
+ r"ia32",
+ r"intel",
+ r"amd64",
+ r"linux64",
+ r"x64",
+ r"64bit",
+ r"x86[_-]64",
+ r"i586_64",
+ r"x86",
+ r"i[36]86",
+ r"ppc64(le)?",
+ r"armv?(7l|6l|64)?",
# PyPI
- r'[._-]py[23].*\.whl',
- r'[._-]cp[23].*\.whl',
- r'[._-]win.*\.exe',
+ r"[._-]py[23].*\.whl",
+ r"[._-]cp[23].*\.whl",
+ r"[._-]win.*\.exe",
]
for regex in suffix_regexes:
# Remove the suffix from the end of the path
# This may be done multiple times
- path = re.sub(r'[._-]?' + regex + '$', '', path)
+ path = re.sub(r"[._-]?" + regex + "$", "", path)
return path
@@ -304,51 +300,43 @@ def strip_name_suffixes(path, version):
suffix_regexes = [
# Strip off the version and anything after it
-
# name-ver
# name_ver
# name.ver
- r'[._-][rvV]?' + str(version) + '.*',
-
+ r"[._-][rvV]?" + str(version) + ".*",
# namever
- r'V?' + str(version) + '.*',
-
+ r"V?" + str(version) + ".*",
# Download type
- r'install',
- r'[Ss]rc',
- r'(open)?[Ss]ources?',
- r'[._-]open',
- r'[._-]archive',
- r'[._-]std',
- r'[._-]bin',
- r'Software',
-
+ r"install",
+ r"[Ss]rc",
+ r"(open)?[Ss]ources?",
+ r"[._-]open",
+ r"[._-]archive",
+ r"[._-]std",
+ r"[._-]bin",
+ r"Software",
# Download version
- r'release',
- r'snapshot',
- r'distrib',
- r'everywhere',
- r'latest',
-
+ r"release",
+ r"snapshot",
+ r"distrib",
+ r"everywhere",
+ r"latest",
# Arch
- r'Linux(64)?',
- r'x86_64',
-
+ r"Linux(64)?",
+ r"x86_64",
# VCS
- r'0\+bzr',
-
+ r"0\+bzr",
# License
- r'gpl',
-
+ r"gpl",
# Needs to come before and after gpl, appears in both orders
- r'[._-]x11',
- r'gpl',
+ r"[._-]x11",
+ r"gpl",
]
for regex in suffix_regexes:
# Remove the suffix from the end of the path
# This may be done multiple times
- path = re.sub('[._-]?' + regex + '$', '', path)
+ path = re.sub("[._-]?" + regex + "$", "", path)
return path
@@ -374,11 +362,11 @@ def split_url_extension(path):
2. ``('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin', '.tar.gz', None)``
3. ``('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')``
"""
- prefix, ext, suffix = path, '', ''
+ prefix, ext, suffix = path, "", ""
# Strip off sourceforge download suffix.
# e.g. https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download
- match = re.search(r'(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$', path)
+ match = re.search(r"(.*(?:sourceforge\.net|sf\.net)/.*)(/download)$", path)
if match:
prefix, suffix = match.groups()
@@ -392,27 +380,27 @@ def split_url_extension(path):
prefix = comp.strip_extension(prefix)
suffix = suf + suffix
if ext is None:
- ext = ''
+ ext = ""
return prefix, ext, suffix
def determine_url_file_extension(path):
"""This returns the type of archive a URL refers to. This is
- sometimes confusing because of URLs like:
+ sometimes confusing because of URLs like:
- (1) https://github.com/petdance/ack/tarball/1.93_02
+ (1) https://github.com/petdance/ack/tarball/1.93_02
- Where the URL doesn't actually contain the filename. We need
- to know what type it is so that we can appropriately name files
- in mirrors.
+ Where the URL doesn't actually contain the filename. We need
+ to know what type it is so that we can appropriately name files
+ in mirrors.
"""
- match = re.search(r'github.com/.+/(zip|tar)ball/', path)
+ match = re.search(r"github.com/.+/(zip|tar)ball/", path)
if match:
- if match.group(1) == 'zip':
- return 'zip'
- elif match.group(1) == 'tar':
- return 'tar.gz'
+ if match.group(1) == "zip":
+ return "zip"
+ elif match.group(1) == "tar":
+ return "tar.gz"
prefix, ext, suffix = split_url_extension(path)
return ext
@@ -474,124 +462,93 @@ def parse_version_offset(path):
# 1st Pass: Simplest case
# Assume name contains no digits and version contains no letters
# e.g. libpng-1.6.27
- (r'^[a-zA-Z+._-]+[._-]v?(\d[\d._-]*)$', stem),
-
+ (r"^[a-zA-Z+._-]+[._-]v?(\d[\d._-]*)$", stem),
# 2nd Pass: Version only
# Assume version contains no letters
-
# ver
# e.g. 3.2.7, 7.0.2-7, v3.3.0, v1_6_3
- (r'^v?(\d[\d._-]*)$', stem),
-
+ (r"^v?(\d[\d._-]*)$", stem),
# 3rd Pass: No separator characters are used
# Assume name contains no digits
-
# namever
# e.g. turbolinux702, nauty26r7
- (r'^[a-zA-Z+]*(\d[\da-zA-Z]*)$', stem),
-
+ (r"^[a-zA-Z+]*(\d[\da-zA-Z]*)$", stem),
# 4th Pass: A single separator character is used
# Assume name contains no digits
-
# name-name-ver-ver
# e.g. panda-2016-03-07, gts-snapshot-121130, cdd-061a
- (r'^[a-zA-Z+-]*(\d[\da-zA-Z-]*)$', stem),
-
+ (r"^[a-zA-Z+-]*(\d[\da-zA-Z-]*)$", stem),
# name_name_ver_ver
# e.g. tinyxml_2_6_2, boost_1_55_0, tbb2017_20161128
- (r'^[a-zA-Z+_]*(\d[\da-zA-Z_]*)$', stem),
-
+ (r"^[a-zA-Z+_]*(\d[\da-zA-Z_]*)$", stem),
# name.name.ver.ver
# e.g. prank.source.150803, jpegsrc.v9b, atlas3.11.34, geant4.10.01.p03
- (r'^[a-zA-Z+.]*(\d[\da-zA-Z.]*)$', stem),
-
+ (r"^[a-zA-Z+.]*(\d[\da-zA-Z.]*)$", stem),
# 5th Pass: Two separator characters are used
# Name may contain digits, version may contain letters
-
# name-name-ver.ver
# e.g. m4-1.4.17, gmp-6.0.0a, launchmon-v1.0.2
- (r'^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem),
-
+ (r"^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$", stem),
# name-name-ver_ver
# e.g. icu4c-57_1
- (r'^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z_]*)$', stem),
-
+ (r"^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z_]*)$", stem),
# name_name_ver.ver
# e.g. superlu_dist_4.1, pexsi_v0.9.0
- (r'^[a-zA-Z\d+_]+_v?(\d[\da-zA-Z.]*)$', stem),
-
+ (r"^[a-zA-Z\d+_]+_v?(\d[\da-zA-Z.]*)$", stem),
# name_name.ver.ver
# e.g. fer_source.v696
- (r'^[a-zA-Z\d+_]+\.v?(\d[\da-zA-Z.]*)$', stem),
-
+ (r"^[a-zA-Z\d+_]+\.v?(\d[\da-zA-Z.]*)$", stem),
# name_ver-ver
# e.g. Bridger_r2014-12-01
- (r'^[a-zA-Z\d+]+_r?(\d[\da-zA-Z-]*)$', stem),
-
+ (r"^[a-zA-Z\d+]+_r?(\d[\da-zA-Z-]*)$", stem),
# name-name-ver.ver-ver.ver
# e.g. sowing-1.1.23-p1, bib2xhtml-v3.0-15-gf506, 4.6.3-alpha04
- (r'^(?:[a-zA-Z\d+-]+-)?v?(\d[\da-zA-Z.-]*)$', stem),
-
+ (r"^(?:[a-zA-Z\d+-]+-)?v?(\d[\da-zA-Z.-]*)$", stem),
# namever.ver-ver.ver
# e.g. go1.4-bootstrap-20161024
- (r'^[a-zA-Z+]+v?(\d[\da-zA-Z.-]*)$', stem),
-
+ (r"^[a-zA-Z+]+v?(\d[\da-zA-Z.-]*)$", stem),
# 6th Pass: All three separator characters are used
# Name may contain digits, version may contain letters
-
# name_name-ver.ver
# e.g. the_silver_searcher-0.32.0, sphinx_rtd_theme-0.1.10a0
- (r'^[a-zA-Z\d+_]+-v?(\d[\da-zA-Z.]*)$', stem),
-
+ (r"^[a-zA-Z\d+_]+-v?(\d[\da-zA-Z.]*)$", stem),
# name.name_ver.ver-ver.ver
# e.g. TH.data_1.0-8, XML_3.98-1.4
- (r'^[a-zA-Z\d+.]+_v?(\d[\da-zA-Z.-]*)$', stem),
-
+ (r"^[a-zA-Z\d+.]+_v?(\d[\da-zA-Z.-]*)$", stem),
# name-name-ver.ver_ver.ver
# e.g. pypar-2.1.5_108
- (r'^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z._]*)$', stem),
-
+ (r"^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z._]*)$", stem),
# name.name_name-ver.ver
# e.g. tap.py-1.6, backports.ssl_match_hostname-3.5.0.1
- (r'^[a-zA-Z\d+._]+-v?(\d[\da-zA-Z.]*)$', stem),
-
+ (r"^[a-zA-Z\d+._]+-v?(\d[\da-zA-Z.]*)$", stem),
# name-namever.ver_ver.ver
# e.g. STAR-CCM+11.06.010_02
- (r'^[a-zA-Z+-]+(\d[\da-zA-Z._]*)$', stem),
-
+ (r"^[a-zA-Z+-]+(\d[\da-zA-Z._]*)$", stem),
# name-name_name-ver.ver
# e.g. PerlIO-utf8_strict-0.002
- (r'^[a-zA-Z\d+_-]+-v?(\d[\da-zA-Z.]*)$', stem),
-
+ (r"^[a-zA-Z\d+_-]+-v?(\d[\da-zA-Z.]*)$", stem),
# 7th Pass: Specific VCS
-
# bazaar
# e.g. libvterm-0+bzr681
- (r'bzr(\d[\da-zA-Z._-]*)$', stem),
-
+ (r"bzr(\d[\da-zA-Z._-]*)$", stem),
# 8th Pass: Query strings
-
# e.g. https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0
# e.g. https://gitlab.kitware.com/api/v4/projects/icet%2Ficet/repository/archive.tar.bz2?sha=IceT-2.1.1
# e.g. http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0
# e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
# e.g. https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef
- (r'[?&](?:sha|ref|version)=[a-zA-Z\d+-]*[_-]?v?(\d[\da-zA-Z._-]*)$', suffix),
-
+ (r"[?&](?:sha|ref|version)=[a-zA-Z\d+-]*[_-]?v?(\d[\da-zA-Z._-]*)$", suffix),
# e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz
# e.g. http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz
# e.g. https://evtgen.hepforge.org/downloads?f=EvtGen-01.07.00.tar.gz
# e.g. http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz
- (r'[?&](?:filename|f|get)=[a-zA-Z\d+-]+[_-]v?(\d[\da-zA-Z.]*)', stem),
-
+ (r"[?&](?:filename|f|get)=[a-zA-Z\d+-]+[_-]v?(\d[\da-zA-Z.]*)", stem),
# 9th Pass: Version in path
-
# github.com/repo/name/releases/download/vver/name
# e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow
- (r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path),
-
+ (r"github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/", path),
# e.g. ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz
- (r'(\d[\da-zA-Z._-]*)/[^/]+$', path),
+ (r"(\d[\da-zA-Z._-]*)/[^/]+$", path),
]
for i, version_regex in enumerate(version_regexes):
@@ -599,7 +556,7 @@ def parse_version_offset(path):
match = re.search(regex, match_string)
if match and match.group(1) is not None:
version = match.group(1)
- start = match.start(1)
+ start = match.start(1)
# If we matched from the stem or suffix, we need to add offset
offset = 0
@@ -660,7 +617,7 @@ def parse_name_offset(path, v=None):
except UndetectableVersionError:
# Not all URLs contain a version. We still want to be able
# to determine a name if possible.
- v = 'unknown'
+ v = "unknown"
# path: The prefix of the URL, everything before the ext and suffix
# ext: The file extension
@@ -687,59 +644,46 @@ def parse_name_offset(path, v=None):
# ones that only catch one or two URLs at the bottom.
name_regexes = [
# 1st Pass: Common repositories
-
# GitHub: github.com/repo/name/
# e.g. https://github.com/nco/nco/archive/4.6.2.tar.gz
- (r'github\.com/[^/]+/([^/]+)', path),
-
+ (r"github\.com/[^/]+/([^/]+)", path),
# GitLab API endpoint: gitlab.*/api/v4/projects/NAMESPACE%2Fname/
# e.g. https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0
- (r'gitlab[^/]+/api/v4/projects/[^/]+%2F([^/]+)', path),
-
+ (r"gitlab[^/]+/api/v4/projects/[^/]+%2F([^/]+)", path),
# GitLab non-API endpoint: gitlab.*/repo/name/
# e.g. http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0
- (r'gitlab[^/]+/(?!api/v4/projects)[^/]+/([^/]+)', path),
-
+ (r"gitlab[^/]+/(?!api/v4/projects)[^/]+/([^/]+)", path),
# Bitbucket: bitbucket.org/repo/name/
# e.g. https://bitbucket.org/glotzer/hoomd-blue/get/v1.3.3.tar.bz2
- (r'bitbucket\.org/[^/]+/([^/]+)', path),
-
+ (r"bitbucket\.org/[^/]+/([^/]+)", path),
# PyPI: pypi.(python.org|io)/packages/source/first-letter/name/
# e.g. https://pypi.python.org/packages/source/m/mpmath/mpmath-all-0.19.tar.gz
# e.g. https://pypi.io/packages/source/b/backports.ssl_match_hostname/backports.ssl_match_hostname-3.5.0.1.tar.gz
- (r'pypi\.(?:python\.org|io)/packages/source/[A-Za-z\d]/([^/]+)', path),
-
+ (r"pypi\.(?:python\.org|io)/packages/source/[A-Za-z\d]/([^/]+)", path),
# 2nd Pass: Query strings
-
# ?filename=name-ver.ver
# e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz
- (r'\?filename=([A-Za-z\d+-]+)$', stem),
-
+ (r"\?filename=([A-Za-z\d+-]+)$", stem),
# ?f=name-ver.ver
# e.g. https://evtgen.hepforge.org/downloads?f=EvtGen-01.07.00.tar.gz
- (r'\?f=([A-Za-z\d+-]+)$', stem),
-
+ (r"\?f=([A-Za-z\d+-]+)$", stem),
# ?package=name
# e.g. http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz
- (r'\?package=([A-Za-z\d+-]+)', stem),
-
+ (r"\?package=([A-Za-z\d+-]+)", stem),
# ?package=name-version
- (r'\?package=([A-Za-z\d]+)', suffix),
-
+ (r"\?package=([A-Za-z\d]+)", suffix),
# download.php
# e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
- (r'([^/]+)/download.php$', path),
-
+ (r"([^/]+)/download.php$", path),
# 3rd Pass: Name followed by version in archive
-
- (r'^([A-Za-z\d+\._-]+)$', stem),
+ (r"^([A-Za-z\d+\._-]+)$", stem),
]
for i, name_regex in enumerate(name_regexes):
regex, match_string = name_regex
match = re.search(regex, match_string)
if match:
- name = match.group(1)
+ name = match.group(1)
start = match.start(1)
# If we matched from the stem or suffix, we need to add offset
@@ -795,12 +739,14 @@ def parse_name_and_version(path):
def insensitize(string):
"""Change upper and lowercase letters to be case insensitive in
- the provided string. e.g., 'a' becomes '[Aa]', 'B' becomes
- '[bB]', etc. Use for building regexes."""
+ the provided string. e.g., 'a' becomes '[Aa]', 'B' becomes
+ '[bB]', etc. Use for building regexes."""
+
def to_ins(match):
char = match.group(1)
- return '[%s%s]' % (char.lower(), char.upper())
- return re.sub(r'([a-zA-Z])', to_ins, string)
+ return "[%s%s]" % (char.lower(), char.upper())
+
+ return re.sub(r"([a-zA-Z])", to_ins, string)
def cumsum(elts, init=0, fn=lambda x: x):
@@ -831,11 +777,11 @@ def find_all(substring, string):
def substitution_offsets(path):
"""This returns offsets for substituting versions and names in the
- provided path. It is a helper for :func:`substitute_version`.
+ provided path. It is a helper for :func:`substitute_version`.
"""
# Get name and version offsets
try:
- ver, vs, vl, vi, vregex = parse_version_offset(path)
+ ver, vs, vl, vi, vregex = parse_version_offset(path)
name, ns, nl, ni, nregex = parse_name_offset(path, ver)
except UndetectableNameError:
return (None, -1, -1, (), ver, vs, vl, (vs,))
@@ -848,15 +794,14 @@ def substitution_offsets(path):
# Find the index of every occurrence of name and ver in path
name_offsets = find_all(name, path)
- ver_offsets = find_all(ver, path)
+ ver_offsets = find_all(ver, path)
- return (name, ns, nl, name_offsets,
- ver, vs, vl, ver_offsets)
+ return (name, ns, nl, name_offsets, ver, vs, vl, ver_offsets)
def wildcard_version(path):
"""Find the version in the supplied path, and return a regular expression
- that will match this path with any version in its place.
+ that will match this path with any version in its place.
"""
# Get version so we can replace it with a wildcard
version = parse_version(path)
@@ -866,7 +811,7 @@ def wildcard_version(path):
# Replace each version with a generic capture group to find versions
# and escape everything else so it's not interpreted as a regex
- result = r'(\d.*)'.join(re.escape(vp) for vp in vparts)
+ result = r"(\d.*)".join(re.escape(vp) for vp in vparts)
return result
@@ -890,10 +835,9 @@ def substitute_version(path, new_version):
substitute_version('https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.12/src/hdf-4.2.12.tar.gz', '2.3')
>>> 'https://www.hdfgroup.org/ftp/HDF/releases/HDF2.3/src/hdf-2.3.tar.gz'
"""
- (name, ns, nl, noffs,
- ver, vs, vl, voffs) = substitution_offsets(path)
+ (name, ns, nl, noffs, ver, vs, vl, voffs) = substitution_offsets(path)
- new_path = ''
+ new_path = ""
last = 0
for vo in voffs:
new_path += path[last:vo]
@@ -922,11 +866,10 @@ def color_url(path, **kwargs):
# Allow URLs containing @ and }
path = cescape(path)
- errors = kwargs.get('errors', False)
- subs = kwargs.get('subs', False)
+ errors = kwargs.get("errors", False)
+ subs = kwargs.get("subs", False)
- (name, ns, nl, noffs,
- ver, vs, vl, voffs) = substitution_offsets(path)
+ (name, ns, nl, noffs, ver, vs, vl, voffs) = substitution_offsets(path)
nends = [no + nl - 1 for no in noffs]
vends = [vo + vl - 1 for vo in voffs]
@@ -935,28 +878,28 @@ def color_url(path, **kwargs):
out = StringIO()
for i in range(len(path)):
if i == vs:
- out.write('@c')
+ out.write("@c")
verr += 1
elif i == ns:
- out.write('@r')
+ out.write("@r")
nerr += 1
elif subs:
if i in voffs:
- out.write('@g')
+ out.write("@g")
elif i in noffs:
- out.write('@m')
+ out.write("@m")
out.write(path[i])
if i == vs + vl - 1:
- out.write('@.')
+ out.write("@.")
verr += 1
elif i == ns + nl - 1:
- out.write('@.')
+ out.write("@.")
nerr += 1
elif subs:
if i in vends or i in nends:
- out.write('@.')
+ out.write("@.")
if errors:
if nerr == 0:
@@ -983,8 +926,7 @@ class UndetectableVersionError(UrlParseError):
"""Raised when we can't parse a version from a string."""
def __init__(self, path):
- super(UndetectableVersionError, self).__init__(
- "Couldn't detect version in: " + path, path)
+ super(UndetectableVersionError, self).__init__("Couldn't detect version in: " + path, path)
class UndetectableNameError(UrlParseError):
@@ -992,4 +934,5 @@ class UndetectableNameError(UrlParseError):
def __init__(self, path):
super(UndetectableNameError, self).__init__(
- "Couldn't parse package name in: " + path, path)
+ "Couldn't parse package name in: " + path, path
+ )
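
The regex tables in url.py feed parse_version_offset and parse_name_offset, which in turn drive the public name/version helpers. A usage sketch built on the callpath example quoted in the comments above (assuming a Spack checkout on sys.path; the expected values hold for URLs of this shape):

    # Sketch only: extract a name and version from a download URL, then
    # rewrite the URL for a different version.
    import spack.url

    url = "https://github.com/llnl/callpath/archive/v1.0.1.tar.gz"
    name, version = spack.url.parse_name_and_version(url)
    # name -> "callpath", version -> Version("1.0.1")

    # substitute_version() replaces each detected occurrence of the version.
    newer = spack.url.substitute_version(url, "1.0.3")
    # newer -> "https://github.com/llnl/callpath/archive/v1.0.3.tar.gz"
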
diff --git a/lib/spack/spack/user_environment.py b/lib/spack/spack/user_environment.py
index db5a834308..721150aae9 100644
--- a/lib/spack/spack/user_environment.py
+++ b/lib/spack/spack/user_environment.py
@@ -11,7 +11,7 @@ import spack.util.environment as environment
import spack.util.prefix as prefix
#: Environment variable name Spack uses to track individually loaded packages
-spack_loaded_hashes_var = 'SPACK_LOADED_HASHES'
+spack_loaded_hashes_var = "SPACK_LOADED_HASHES"
def prefix_inspections(platform):
@@ -26,27 +26,27 @@ def prefix_inspections(platform):
A dictionary mapping subdirectory names to lists of environment
variables to modify with that directory if it exists.
"""
- inspections = spack.config.get('modules:prefix_inspections', {})
+ inspections = spack.config.get("modules:prefix_inspections", {})
if inspections:
return inspections
inspections = {
- 'bin': ['PATH'],
- 'lib': ['LD_LIBRARY_PATH', 'LIBRARY_PATH'],
- 'lib64': ['LD_LIBRARY_PATH', 'LIBRARY_PATH'],
- 'man': ['MANPATH'],
- 'share/man': ['MANPATH'],
- 'share/aclocal': ['ACLOCAL_PATH'],
- 'include': ['CPATH'],
- 'lib/pkgconfig': ['PKG_CONFIG_PATH'],
- 'lib64/pkgconfig': ['PKG_CONFIG_PATH'],
- 'share/pkgconfig': ['PKG_CONFIG_PATH'],
- '': ['CMAKE_PREFIX_PATH']
+ "bin": ["PATH"],
+ "lib": ["LD_LIBRARY_PATH", "LIBRARY_PATH"],
+ "lib64": ["LD_LIBRARY_PATH", "LIBRARY_PATH"],
+ "man": ["MANPATH"],
+ "share/man": ["MANPATH"],
+ "share/aclocal": ["ACLOCAL_PATH"],
+ "include": ["CPATH"],
+ "lib/pkgconfig": ["PKG_CONFIG_PATH"],
+ "lib64/pkgconfig": ["PKG_CONFIG_PATH"],
+ "share/pkgconfig": ["PKG_CONFIG_PATH"],
+ "": ["CMAKE_PREFIX_PATH"],
}
- if platform == 'darwin':
- for subdir in ('lib', 'lib64'):
- inspections[subdir].append('DYLD_FALLBACK_LIBRARY_PATH')
+ if platform == "darwin":
+ for subdir in ("lib", "lib64"):
+ inspections[subdir].append("DYLD_FALLBACK_LIBRARY_PATH")
return inspections
@@ -85,16 +85,14 @@ def environment_modifications_for_spec(spec, view=None, set_package_py_globals=T
# generic environment modifications determined by inspecting the spec
# prefix
env = environment.inspect_path(
- spec.prefix,
- prefix_inspections(spec.platform),
- exclude=environment.is_system_path
+ spec.prefix, prefix_inspections(spec.platform), exclude=environment.is_system_path
)
# Let the extendee/dependency modify their extensions/dependents
# before asking for package-specific modifications
env.extend(
spack.build_environment.modifications_from_dependencies(
- spec, context='run', set_package_py_globals=set_package_py_globals
+ spec, context="run", set_package_py_globals=set_package_py_globals
)
)
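
prefix_inspections() supplies the subdirectory-to-variable map that environment_modifications_for_spec() hands to inspect_path(), so an installed prefix containing bin/ or lib/ ends up prepending PATH or LD_LIBRARY_PATH. A rough sketch of that first step (assuming a Spack checkout on sys.path; the prefix path is hypothetical):

    # Sketch only: build environment modifications for one install prefix.
    import spack.user_environment as uenv
    import spack.util.environment as environment

    inspections = uenv.prefix_inspections("linux")  # e.g. {"bin": ["PATH"], ...}
    env_mods = environment.inspect_path(
        "/opt/spack/opt/example-1.0",               # hypothetical install prefix
        inspections,
        exclude=environment.is_system_path,
    )
    # env_mods holds a prepend-path modification for each inspected
    # subdirectory that actually exists under the prefix.
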
diff --git a/lib/spack/spack/util/classes.py b/lib/spack/spack/util/classes.py
index 42f25083da..cad2de2c48 100644
--- a/lib/spack/spack/util/classes.py
+++ b/lib/spack/spack/util/classes.py
@@ -13,9 +13,7 @@ from llnl.util.lang import list_modules, memoized
from spack.util.naming import mod_to_class
-__all__ = [
- 'list_classes'
-]
+__all__ = ["list_classes"]
@memoized
@@ -27,14 +25,14 @@ def list_classes(parent_module, mod_path):
classes = []
for name in list_modules(mod_path):
- mod_name = '%s.%s' % (parent_module, name)
+ mod_name = "%s.%s" % (parent_module, name)
class_name = mod_to_class(name)
mod = __import__(mod_name, fromlist=[class_name])
if not hasattr(mod, class_name):
- tty.die('No class %s defined in %s' % (class_name, mod_name))
+ tty.die("No class %s defined in %s" % (class_name, mod_name))
cls = getattr(mod, class_name)
if not inspect.isclass(cls):
- tty.die('%s.%s is not a class' % (mod_name, class_name))
+ tty.die("%s.%s is not a class" % (mod_name, class_name))
classes.append(cls)
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index a6433b4542..d9c1f5bd18 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -12,20 +12,22 @@ from itertools import product
from spack.util.executable import CommandNotFoundError, which
# Supported archive extensions.
-PRE_EXTS = ["tar", "TAR"]
-EXTS = ["gz", "bz2", "xz", "Z"]
+PRE_EXTS = ["tar", "TAR"]
+EXTS = ["gz", "bz2", "xz", "Z"]
NOTAR_EXTS = ["zip", "tgz", "tbz", "tbz2", "txz"]
# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
-ALLOWED_ARCHIVE_TYPES = [".".join(ext) for ext in product(
- PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
+ALLOWED_ARCHIVE_TYPES = (
+ [".".join(ext) for ext in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS + NOTAR_EXTS
+)
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
def bz2_support():
try:
- import bz2 # noqa: F401
+ import bz2 # noqa: F401
+
return True
except ImportError:
return False
@@ -33,7 +35,8 @@ def bz2_support():
def gzip_support():
try:
- import gzip # noqa: F401
+ import gzip # noqa: F401
+
return True
except ImportError:
return False
@@ -42,6 +45,7 @@ def gzip_support():
def lzma_support():
try:
import lzma # noqa: F401 # novm
+
return True
except ImportError:
return False
@@ -49,19 +53,19 @@ def lzma_support():
def tar_support():
try:
- import tarfile # noqa: F401
+ import tarfile # noqa: F401
+
return True
except ImportError:
return False
def allowed_archive(path):
- return False if not path else \
- any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
+ return False if not path else any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
def _untar(archive_file):
- """ Untar archive. Prefer native Python `tarfile`
+ """Untar archive. Prefer native Python `tarfile`
but fall back to system utility if there is a failure
to find the native Python module (tar on Unix).
Filters archives through native support gzip and xz
@@ -74,14 +78,14 @@ def _untar(archive_file):
_, ext = os.path.splitext(archive_file)
outfile = os.path.basename(archive_file.strip(ext))
- tar = which('tar', required=True)
- tar.add_default_arg('-oxf')
+ tar = which("tar", required=True)
+ tar.add_default_arg("-oxf")
tar(archive_file)
return outfile
def _bunzip2(archive_file):
- """ Use Python's bz2 module to decompress bz2 compressed archives
+ """Use Python's bz2 module to decompress bz2 compressed archives
Fall back to system utility failing to find Python module `bz2`
Args:
@@ -95,20 +99,21 @@ def _bunzip2(archive_file):
copy_path = os.path.join(working_dir, compressed_file_name)
if bz2_support():
import bz2
- f_bz = bz2.BZ2File(archive_file, mode='rb')
- with open(archive_out, 'wb') as ar:
+
+ f_bz = bz2.BZ2File(archive_file, mode="rb")
+ with open(archive_out, "wb") as ar:
shutil.copyfileobj(f_bz, ar)
f_bz.close()
else:
shutil.copy(archive_file, copy_path)
- bunzip2 = which('bunzip2', required=True)
- bunzip2.add_default_arg('-q')
+ bunzip2 = which("bunzip2", required=True)
+ bunzip2.add_default_arg("-q")
return bunzip2(copy_path)
return archive_out
def _gunzip(archive_file):
- """ Decompress `.gz` extensions. Prefer native Python `gzip` module.
+ """Decompress `.gz` extensions. Prefer native Python `gzip` module.
    Falling back to the system gunzip utility.
Like gunzip, but extracts in the current working directory
instead of in-place.
@@ -122,6 +127,7 @@ def _gunzip(archive_file):
destination_abspath = os.path.join(working_dir, decompressed_file)
if gzip_support():
import gzip
+
f_in = gzip.open(archive_file, "rb")
with open(destination_abspath, "wb") as f_out:
shutil.copyfileobj(f_in, f_out)
@@ -155,11 +161,11 @@ def _unzip(archive_file):
"""
destination_abspath = os.getcwd()
- exe = 'unzip'
- arg = '-q'
+ exe = "unzip"
+ arg = "-q"
if is_windows:
- exe = 'tar'
- arg = '-xf'
+ exe = "tar"
+ arg = "-xf"
unzip = which(exe, required=True)
unzip.add_default_arg(arg)
unzip(archive_file)
@@ -181,10 +187,11 @@ def _lzma_decomp(archive_file):
on Unix and 7z on Windows"""
if lzma_support():
import lzma # novermin
+
_, ext = os.path.splitext(archive_file)
decompressed_file = os.path.basename(archive_file.strip(ext))
archive_out = os.path.join(os.getcwd(), decompressed_file)
- with open(archive_out, 'wb') as ar:
+ with open(archive_out, "wb") as ar:
with lzma.open(archive_file) as lar:
shutil.copyfileobj(lar, ar)
else:
@@ -199,7 +206,7 @@ def _xz(archive_file):
tool. Available only on Unix
"""
if is_windows:
- raise RuntimeError('XZ tool unavailable on Windows')
+ raise RuntimeError("XZ tool unavailable on Windows")
_, ext = os.path.splitext(archive_file)
decompressed_file = os.path.basename(archive_file.strip(ext))
working_dir = os.getcwd()
@@ -207,8 +214,8 @@ def _xz(archive_file):
compressed_file = os.path.basename(archive_file)
copy_path = os.path.join(working_dir, compressed_file)
shutil.copy(archive_file, copy_path)
- xz = which('xz', required=True)
- xz.add_default_arg('-d')
+ xz = which("xz", required=True)
+ xz.add_default_arg("-d")
xz(copy_path)
return destination_abspath
@@ -229,11 +236,14 @@ def _7zip(archive_file):
"""
_, ext = os.path.splitext(archive_file)
outfile = os.path.basename(archive_file.strip(ext))
- _7z = which('7z')
+ _7z = which("7z")
if not _7z:
- raise CommandNotFoundError("7z unavailable,\
-unable to extract %s files. 7z can be installed via Spack" % ext)
- _7z.add_default_arg('e')
+ raise CommandNotFoundError(
+ "7z unavailable,\
+unable to extract %s files. 7z can be installed via Spack"
+ % ext
+ )
+ _7z.add_default_arg("e")
_7z(archive_file)
return outfile
@@ -247,32 +257,35 @@ def decompressor_for(path, ext):
ext (str): Extension of archive file
"""
if not allowed_archive(ext):
- raise CommandNotFoundError("Cannot extract archive, \
-unrecognized file extension: '%s'" % ext)
+ raise CommandNotFoundError(
+ "Cannot extract archive, \
+unrecognized file extension: '%s'"
+ % ext
+ )
- if re.match(r'\.?zip$', ext) or path.endswith('.zip'):
+ if re.match(r"\.?zip$", ext) or path.endswith(".zip"):
return _unzip
- if re.match(r'gz', ext):
+ if re.match(r"gz", ext):
return _gunzip
- if re.match(r'bz2', ext):
+ if re.match(r"bz2", ext):
return _bunzip2
# Python does not have native support
# of any kind for .Z files. In these cases,
# we rely on external tools such as tar,
# 7z, or uncompressZ
- if re.match(r'Z$', ext):
+ if re.match(r"Z$", ext):
return _unZ
# Python and platform may not have support for lzma
# compression. If no lzma support, use tools available on systems
# 7zip on Windows and the xz tool on Unix systems.
- if re.match(r'xz', ext):
+ if re.match(r"xz", ext):
return _lzma_decomp
- if ('xz' in ext or 'Z' in ext) and is_windows:
+ if ("xz" in ext or "Z" in ext) and is_windows:
return _7zip
return _untar
@@ -280,9 +293,9 @@ unrecognized file extension: '%s'" % ext)
def strip_extension(path):
"""Get the part of a path that does not include its compressed
- type extension."""
+ type extension."""
for type in ALLOWED_ARCHIVE_TYPES:
- suffix = r'\.%s$' % type
+ suffix = r"\.%s$" % type
if re.search(suffix, path):
return re.sub(suffix, "", path)
return path
@@ -294,11 +307,11 @@ def extension(path):
raise ValueError("Can't call extension() on None")
# Strip sourceforge suffix.
- if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):
+ if re.search(r"((?:sourceforge.net|sf.net)/.*)/download$", path):
path = os.path.dirname(path)
for t in ALLOWED_ARCHIVE_TYPES:
- suffix = r'\.%s$' % t
+ suffix = r"\.%s$" % t
if re.search(suffix, path):
return t
return None
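
decompressor_for() maps an archive extension to one of the private helpers above; each helper prefers a native Python module (tarfile, gzip, bz2, lzma) where one exists and falls back to a system tool otherwise. A small dispatch sketch (assuming a Spack checkout on sys.path; the archive path is hypothetical):

    # Sketch only: pick the right decompressor for an archive by extension.
    from spack.util.compression import decompressor_for, extension

    archive = "/tmp/example-1.2.3.tar.gz"       # hypothetical archive
    ext = extension(archive)                    # -> "tar.gz"
    unpack = decompressor_for(archive, ext)     # -> _untar for tar.* archives
    # Calling unpack(archive) extracts into the current working directory.
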
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index 8aa4b80e6f..5595d15cd3 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -10,14 +10,7 @@ from typing import Any, Callable, Dict # novm
import llnl.util.tty as tty
#: Set of hash algorithms that Spack can use, mapped to digest size in bytes
-hashes = {
- 'md5': 16,
- 'sha1': 20,
- 'sha224': 28,
- 'sha256': 32,
- 'sha384': 48,
- 'sha512': 64
-}
+hashes = {"md5": 16, "sha1": 20, "sha224": 28, "sha256": 32, "sha384": 48, "sha512": 64}
#: Size of hash digests in bytes, mapped to algorithm names
@@ -26,7 +19,7 @@ _size_to_hash = dict((v, k) for k, v in hashes.items())
#: List of deprecated hash functions. On some systems, these cannot be
#: used without special options to hashlib.
-_deprecated_hash_algorithms = ['md5']
+_deprecated_hash_algorithms = ["md5"]
#: cache of hash functions generated
@@ -41,12 +34,12 @@ class DeprecatedHash(object):
def __call__(self, disable_alert=False):
if not disable_alert:
- self.alert_fn("Deprecation warning: {0} checksums will not be"
- " supported in future Spack releases."
- .format(self.hash_alg))
+ self.alert_fn(
+ "Deprecation warning: {0} checksums will not be"
+ " supported in future Spack releases.".format(self.hash_alg)
+ )
if self.disable_security_check:
- return hashlib.new( # novermin
- self.hash_alg, usedforsecurity=False)
+ return hashlib.new(self.hash_alg, usedforsecurity=False) # novermin
else:
return hashlib.new(self.hash_alg)
@@ -57,16 +50,14 @@ def hash_fun_for_algo(algo):
if hash_gen is None:
if algo in _deprecated_hash_algorithms:
try:
- hash_gen = DeprecatedHash(
- algo, tty.debug, disable_security_check=False)
+ hash_gen = DeprecatedHash(algo, tty.debug, disable_security_check=False)
# call once to get a ValueError if usedforsecurity is needed
hash_gen(disable_alert=True)
except ValueError:
# Some systems may support the 'usedforsecurity' option
# so try with that (but display a warning when it is used)
- hash_gen = DeprecatedHash(
- algo, tty.warn, disable_security_check=True)
+ hash_gen = DeprecatedHash(algo, tty.warn, disable_security_check=True)
else:
hash_gen = getattr(hashlib, algo)
_hash_functions[algo] = hash_gen
@@ -78,8 +69,7 @@ def hash_algo_for_digest(hexdigest):
"""Gets name of the hash algorithm for a hex digest."""
bytes = len(hexdigest) / 2
if bytes not in _size_to_hash:
- raise ValueError(
- 'Spack knows no hash algorithm for this digest: %s' % hexdigest)
+ raise ValueError("Spack knows no hash algorithm for this digest: %s" % hexdigest)
return _size_to_hash[bytes]
@@ -90,11 +80,11 @@ def hash_fun_for_digest(hexdigest):
def checksum(hashlib_algo, filename, **kwargs):
"""Returns a hex digest of the filename generated using an
- algorithm from hashlib.
+ algorithm from hashlib.
"""
- block_size = kwargs.get('block_size', 2**20)
+ block_size = kwargs.get("block_size", 2 ** 20)
hasher = hashlib_algo()
- with open(filename, 'rb') as file:
+ with open(filename, "rb") as file:
while True:
data = file.read(block_size)
if not data:
@@ -105,28 +95,28 @@ def checksum(hashlib_algo, filename, **kwargs):
class Checker(object):
"""A checker checks files against one particular hex digest.
-    It will automatically determine what hashing algorithm
-    to use based on the length of the digest it's initialized
-    with. e.g., if the digest is 32 hex characters long this will
-    use md5.
+    It will automatically determine what hashing algorithm
+    to use based on the length of the digest it's initialized
+    with. e.g., if the digest is 32 hex characters long this will
+    use md5.
- Example: know your tarball should hash to 'abc123'. You want
- to check files against this. You would use this class like so::
+ Example: know your tarball should hash to 'abc123'. You want
+ to check files against this. You would use this class like so::
- hexdigest = 'abc123'
- checker = Checker(hexdigest)
- success = checker.check('downloaded.tar.gz')
+ hexdigest = 'abc123'
+ checker = Checker(hexdigest)
+ success = checker.check('downloaded.tar.gz')
- After the call to check, the actual checksum is available in
- checker.sum, in case it's needed for error output.
+ After the call to check, the actual checksum is available in
+ checker.sum, in case it's needed for error output.
- You can trade read performance and memory usage by
- adjusting the block_size optional arg. By default it's
- a 1MB (2**20 bytes) buffer.
+ You can trade read performance and memory usage by
+ adjusting the block_size optional arg. By default it's
+ a 1MB (2**20 bytes) buffer.
"""
def __init__(self, hexdigest, **kwargs):
- self.block_size = kwargs.get('block_size', 2**20)
+ self.block_size = kwargs.get("block_size", 2 ** 20)
self.hexdigest = hexdigest
self.sum = None
self.hash_fun = hash_fun_for_digest(hexdigest)
@@ -138,18 +128,17 @@ class Checker(object):
def check(self, filename):
"""Read the file with the specified name and check its checksum
- against self.hexdigest. Return True if they match, False
- otherwise. Actual checksum is stored in self.sum.
+ against self.hexdigest. Return True if they match, False
+ otherwise. Actual checksum is stored in self.sum.
"""
- self.sum = checksum(
- self.hash_fun, filename, block_size=self.block_size)
+ self.sum = checksum(self.hash_fun, filename, block_size=self.block_size)
return self.sum == self.hexdigest
def prefix_bits(byte_array, bits):
"""Return the first <bits> bits of a byte array as an integer."""
if sys.version_info < (3,):
- b2i = ord # In Python 2, indexing byte_array gives str
+ b2i = ord # In Python 2, indexing byte_array gives str
else:
b2i = lambda b: b # In Python 3, indexing byte_array gives int
@@ -161,12 +150,12 @@ def prefix_bits(byte_array, bits):
if n >= bits:
break
- result >>= (n - bits)
+ result >>= n - bits
return result
def bit_length(num):
"""Number of bits required to represent an integer in binary."""
s = bin(num)
- s = s.lstrip('-0b')
+ s = s.lstrip("-0b")
return len(s)
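A minimal usage sketch of the Checker/checksum API reformatted above, following the class docstring's own example; it assumes Spack's lib/spack directories are importable, the tarball path is hypothetical, and the 64-character digest is only an example of a sha256-length value:

    import spack.util.crypto as crypto

    # The digest length selects the algorithm: 64 hex characters -> sha256.
    expected = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    checker = crypto.Checker(expected, block_size=2 ** 20)
    if not checker.check("downloaded.tar.gz"):  # hypothetical file
        print("mismatch, file hashed to {0}".format(checker.sum))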
diff --git a/lib/spack/spack/util/debug.py b/lib/spack/spack/util/debug.py
index 64f7132a6f..eb3b59a4fc 100644
--- a/lib/spack/spack/util/debug.py
+++ b/lib/spack/spack/util/debug.py
@@ -20,13 +20,13 @@ import traceback
def debug_handler(sig, frame):
"""Interrupt running process, and provide a python prompt for
interactive debugging."""
- d = {'_frame': frame} # Allow access to frame object.
- d.update(frame.f_globals) # Unless shadowed by global
+ d = {"_frame": frame} # Allow access to frame object.
+ d.update(frame.f_globals) # Unless shadowed by global
d.update(frame.f_locals)
i = code.InteractiveConsole(d)
- message = "Signal received : entering python shell.\nTraceback:\n"
- message += ''.join(traceback.format_stack(frame))
+ message = "Signal received : entering python shell.\nTraceback:\n"
+ message += "".join(traceback.format_stack(frame))
i.interact(message)
os._exit(1) # Use os._exit to avoid test harness.
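For context, debug_handler is meant to be installed as a signal handler; a small sketch, assuming SIGUSR1 is the chosen signal (this diff does not show where Spack actually registers it):

    import signal

    from spack.util.debug import debug_handler

    # Assumption: bind to SIGUSR1 so `kill -USR1 <pid>` drops the running
    # process into the interactive console opened by debug_handler.
    signal.signal(signal.SIGUSR1, debug_handler)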
diff --git a/lib/spack/spack/util/editor.py b/lib/spack/spack/util/editor.py
index 58198907af..3b2359ab03 100644
--- a/lib/spack/spack/util/editor.py
+++ b/lib/spack/spack/util/editor.py
@@ -21,7 +21,7 @@ import spack.config
from spack.util.executable import which_string
#: editors to try if VISUAL and EDITOR are not set
-_default_editors = ['vim', 'vi', 'emacs', 'nano', 'notepad']
+_default_editors = ["vim", "vi", "emacs", "nano", "notepad"]
def _find_exe_from_env_var(var):
@@ -72,7 +72,7 @@ def editor(*args, **kwargs):
"""
# allow this to be customized for testing
- _exec_func = kwargs.get('_exec_func', os.execv)
+ _exec_func = kwargs.get("_exec_func", os.execv)
def try_exec(exe, args, var=None):
"""Try to execute an editor with execv, and warn if it fails.
@@ -86,13 +86,13 @@ def editor(*args, **kwargs):
return True
except OSError as e:
- if spack.config.get('config:debug'):
+ if spack.config.get("config:debug"):
raise
# Show variable we were trying to use, if it's from one
if var:
- exe = '$%s (%s)' % (var, exe)
- tty.warn('Could not execute %s due to error:' % exe, str(e))
+ exe = "$%s (%s)" % (var, exe)
+ tty.warn("Could not execute %s due to error:" % exe, str(e))
return False
def try_env_var(var):
@@ -106,16 +106,16 @@ def editor(*args, **kwargs):
exe, editor_args = _find_exe_from_env_var(var)
if not exe:
- tty.warn('$%s is not an executable:' % var, os.environ[var])
+ tty.warn("$%s is not an executable:" % var, os.environ[var])
return False
full_args = editor_args + list(args)
return try_exec(exe, full_args, var)
# try standard environment variables
- if try_env_var('VISUAL'):
+ if try_env_var("VISUAL"):
return
- if try_env_var('EDITOR'):
+ if try_env_var("EDITOR"):
return
# nothing worked -- try the first default we can find don't bother
@@ -127,5 +127,6 @@ def editor(*args, **kwargs):
# Fail if nothing could be found
raise EnvironmentError(
- 'No text editor found! Please set the VISUAL and/or EDITOR '
- 'environment variable(s) to your preferred text editor.')
+ "No text editor found! Please set the VISUAL and/or EDITOR "
+ "environment variable(s) to your preferred text editor."
+ )
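A hedged usage sketch of the editor() entry point above; the file name is hypothetical, and it assumes the _exec_func testing hook shown in the diff is invoked as _exec_func(exe, args), matching the os.execv default:

    from spack.util.editor import editor

    # Tries $VISUAL, then $EDITOR, then vim/vi/emacs/nano/notepad in turn;
    # raises EnvironmentError if no editor can be found.
    editor("package.py", _exec_func=lambda exe, args: print(exe, args))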
diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py
index c90d0ce808..5f3fb7a61a 100644
--- a/lib/spack/spack/util/environment.py
+++ b/lib/spack/spack/util/environment.py
@@ -28,30 +28,30 @@ import spack.util.executable as executable
import spack.util.spack_json as sjson
from spack.util.path import path_to_os_path, system_path_filter
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
-system_paths = ['/', '/usr', '/usr/local'] if \
- not is_windows else ['C:\\', 'C:\\Program Files',
- 'C:\\Program Files (x86)', 'C:\\Users',
- 'C:\\ProgramData']
-suffixes = ['bin', 'bin64', 'include', 'lib', 'lib64'] if not is_windows else []
-system_dirs = [os.path.join(p, s) for s in suffixes for p in system_paths] + \
- system_paths
+system_paths = (
+ ["/", "/usr", "/usr/local"]
+ if not is_windows
+ else ["C:\\", "C:\\Program Files", "C:\\Program Files (x86)", "C:\\Users", "C:\\ProgramData"]
+)
+suffixes = ["bin", "bin64", "include", "lib", "lib64"] if not is_windows else []
+system_dirs = [os.path.join(p, s) for s in suffixes for p in system_paths] + system_paths
_shell_set_strings = {
- 'sh': 'export {0}={1};\n',
- 'csh': 'setenv {0} {1};\n',
- 'fish': 'set -gx {0} {1};\n',
- 'bat': 'set "{0}={1}"\n'
+ "sh": "export {0}={1};\n",
+ "csh": "setenv {0} {1};\n",
+ "fish": "set -gx {0} {1};\n",
+ "bat": 'set "{0}={1}"\n',
}
_shell_unset_strings = {
- 'sh': 'unset {0};\n',
- 'csh': 'unsetenv {0};\n',
- 'fish': 'set -e {0};\n',
- 'bat': 'set "{0}="\n'
+ "sh": "unset {0};\n",
+ "csh": "unsetenv {0};\n",
+ "fish": "set -e {0};\n",
+ "bat": 'set "{0}="\n',
}
@@ -110,7 +110,7 @@ def path_set(var_name, directories):
def path_put_first(var_name, directories):
"""Puts the provided directories first in the path, adding them
- if they're not already there.
+ if they're not already there.
"""
path = os.environ.get(var_name, "").split(os.pathsep)
@@ -122,17 +122,16 @@ def path_put_first(var_name, directories):
path_set(var_name, new_path)
-bash_function_finder = re.compile(r'BASH_FUNC_(.*?)\(\)')
+bash_function_finder = re.compile(r"BASH_FUNC_(.*?)\(\)")
def env_var_to_source_line(var, val):
- if var.startswith('BASH_FUNC'):
- source_line = 'function {fname}{decl}; export -f {fname}'.\
- format(fname=bash_function_finder.sub(r'\1', var),
- decl=val)
+ if var.startswith("BASH_FUNC"):
+ source_line = "function {fname}{decl}; export -f {fname}".format(
+ fname=bash_function_finder.sub(r"\1", var), decl=val
+ )
else:
- source_line = '{var}={val}; export {var}'.format(var=var,
- val=cmd_quote(val))
+ source_line = "{var}={val}; export {var}".format(var=var, val=cmd_quote(val))
return source_line
@@ -140,21 +139,22 @@ def env_var_to_source_line(var, val):
def dump_environment(path, environment=None):
"""Dump an environment dictionary to a source-able file."""
use_env = environment or os.environ
- hidden_vars = set(['PS1', 'PWD', 'OLDPWD', 'TERM_SESSION_ID'])
+ hidden_vars = set(["PS1", "PWD", "OLDPWD", "TERM_SESSION_ID"])
fd = os.open(path, os.O_WRONLY | os.O_CREAT, 0o600)
- with os.fdopen(fd, 'w') as env_file:
+ with os.fdopen(fd, "w") as env_file:
for var, val in sorted(use_env.items()):
- env_file.write(''.join(['#' if var in hidden_vars else '',
- env_var_to_source_line(var, val),
- '\n']))
+ env_file.write(
+ "".join(
+ ["#" if var in hidden_vars else "", env_var_to_source_line(var, val), "\n"]
+ )
+ )
@system_path_filter(arg_slice=slice(1))
def pickle_environment(path, environment=None):
"""Pickle an environment dictionary to a file."""
- cPickle.dump(dict(environment if environment else os.environ),
- open(path, 'wb'), protocol=2)
+ cPickle.dump(dict(environment if environment else os.environ), open(path, "wb"), protocol=2)
def get_host_environment_metadata():
@@ -162,13 +162,16 @@ def get_host_environment_metadata():
the install directory, and add the spack version.
"""
import spack.main
+
environ = get_host_environment()
- return {"host_os": environ['os'],
- "platform": environ['platform'],
- "host_target": environ['target'],
- "hostname": environ['hostname'],
- "spack_version": spack.main.get_version(),
- "kernel_version": platform.version()}
+ return {
+ "host_os": environ["os"],
+ "platform": environ["platform"],
+ "host_target": environ["target"],
+ "hostname": environ["hostname"],
+ "spack_version": spack.main.get_version(),
+ "kernel_version": platform.version(),
+ }
def get_host_environment():
@@ -176,20 +179,18 @@ def get_host_environment():
os.environ).
"""
host_platform = spack.platforms.host()
- host_target = host_platform.target('default_target')
- host_os = host_platform.operating_system('default_os')
- arch_fmt = 'platform={0} os={1} target={2}'
- arch_spec = spack.spec.Spec(
- arch_fmt.format(host_platform, host_os, host_target)
- )
+ host_target = host_platform.target("default_target")
+ host_os = host_platform.operating_system("default_os")
+ arch_fmt = "platform={0} os={1} target={2}"
+ arch_spec = spack.spec.Spec(arch_fmt.format(host_platform, host_os, host_target))
return {
- 'target': str(host_target),
- 'os': str(host_os),
- 'platform': str(host_platform),
- 'arch': arch_spec,
- 'architecture': arch_spec,
- 'arch_str': str(arch_spec),
- 'hostname': socket.gethostname()
+ "target": str(host_target),
+ "os": str(host_os),
+ "platform": str(host_platform),
+ "arch": arch_spec,
+ "architecture": arch_spec,
+ "arch_str": str(arch_spec),
+ "hostname": socket.gethostname(),
}
@@ -221,11 +222,10 @@ def set_env(**kwargs):
class NameModifier(object):
-
def __init__(self, name, **kwargs):
self.name = name
- self.separator = kwargs.get('separator', os.pathsep)
- self.args = {'name': name, 'separator': self.separator}
+ self.separator = kwargs.get("separator", os.pathsep)
+ self.args = {"name": name, "separator": self.separator}
self.args.update(kwargs)
@@ -240,20 +240,21 @@ class NameModifier(object):
class NameValueModifier(object):
-
def __init__(self, name, value, **kwargs):
self.name = name
self.value = value
- self.separator = kwargs.get('separator', os.pathsep)
- self.args = {'name': name, 'value': value, 'separator': self.separator}
+ self.separator = kwargs.get("separator", os.pathsep)
+ self.args = {"name": name, "value": value, "separator": self.separator}
self.args.update(kwargs)
def __eq__(self, other):
if not isinstance(other, NameValueModifier):
return False
- return self.name == other.name and \
- self.value == other.value and \
- self.separator == other.separator
+ return (
+ self.name == other.name
+ and self.value == other.value
+ and self.separator == other.separator
+ )
def update_args(self, **kwargs):
self.__dict__.update(kwargs)
@@ -261,18 +262,14 @@ class NameValueModifier(object):
class SetEnv(NameValueModifier):
-
def execute(self, env):
- tty.debug("SetEnv: {0}={1}".format(self.name, str(self.value)),
- level=3)
+ tty.debug("SetEnv: {0}={1}".format(self.name, str(self.value)), level=3)
env[self.name] = str(self.value)
class AppendFlagsEnv(NameValueModifier):
-
def execute(self, env):
- tty.debug("AppendFlagsEnv: {0}={1}".format(self.name, str(self.value)),
- level=3)
+ tty.debug("AppendFlagsEnv: {0}={1}".format(self.name, str(self.value)), level=3)
if self.name in env and env[self.name]:
env[self.name] += self.separator + str(self.value)
else:
@@ -280,7 +277,6 @@ class AppendFlagsEnv(NameValueModifier):
class UnsetEnv(NameModifier):
-
def execute(self, env):
tty.debug("UnsetEnv: {0}".format(self.name), level=3)
# Avoid throwing if the variable was not set
@@ -288,19 +284,15 @@ class UnsetEnv(NameModifier):
class RemoveFlagsEnv(NameValueModifier):
-
def execute(self, env):
- tty.debug("RemoveFlagsEnv: {0}-{1}".format(self.name, str(self.value)),
- level=3)
- environment_value = env.get(self.name, '')
- flags = environment_value.split(
- self.separator) if environment_value else []
+ tty.debug("RemoveFlagsEnv: {0}-{1}".format(self.name, str(self.value)), level=3)
+ environment_value = env.get(self.name, "")
+ flags = environment_value.split(self.separator) if environment_value else []
flags = [f for f in flags if f != self.value]
env[self.name] = self.separator.join(flags)
class SetPath(NameValueModifier):
-
def execute(self, env):
string_path = concatenate_paths(self.value, separator=self.separator)
tty.debug("SetPath: {0}={1}".format(self.name, string_path), level=3)
@@ -308,66 +300,55 @@ class SetPath(NameValueModifier):
class AppendPath(NameValueModifier):
-
def execute(self, env):
- tty.debug("AppendPath: {0}+{1}".format(self.name, str(self.value)),
- level=3)
- environment_value = env.get(self.name, '')
- directories = environment_value.split(
- self.separator) if environment_value else []
+ tty.debug("AppendPath: {0}+{1}".format(self.name, str(self.value)), level=3)
+ environment_value = env.get(self.name, "")
+ directories = environment_value.split(self.separator) if environment_value else []
directories.append(path_to_os_path(os.path.normpath(self.value)).pop())
env[self.name] = self.separator.join(directories)
class PrependPath(NameValueModifier):
-
def execute(self, env):
- tty.debug("PrependPath: {0}+{1}".format(self.name, str(self.value)),
- level=3)
- environment_value = env.get(self.name, '')
- directories = environment_value.split(
- self.separator) if environment_value else []
- directories = [path_to_os_path(os.path.normpath(self.value)).pop()] \
- + directories
+ tty.debug("PrependPath: {0}+{1}".format(self.name, str(self.value)), level=3)
+ environment_value = env.get(self.name, "")
+ directories = environment_value.split(self.separator) if environment_value else []
+ directories = [path_to_os_path(os.path.normpath(self.value)).pop()] + directories
env[self.name] = self.separator.join(directories)
class RemovePath(NameValueModifier):
-
def execute(self, env):
- tty.debug("RemovePath: {0}-{1}".format(self.name, str(self.value)),
- level=3)
- environment_value = env.get(self.name, '')
- directories = environment_value.split(
- self.separator) if environment_value else []
- directories = [path_to_os_path(os.path.normpath(x)).pop()
- for x in directories
- if x != path_to_os_path(os.path.normpath(self.value)).pop()]
+ tty.debug("RemovePath: {0}-{1}".format(self.name, str(self.value)), level=3)
+ environment_value = env.get(self.name, "")
+ directories = environment_value.split(self.separator) if environment_value else []
+ directories = [
+ path_to_os_path(os.path.normpath(x)).pop()
+ for x in directories
+ if x != path_to_os_path(os.path.normpath(self.value)).pop()
+ ]
env[self.name] = self.separator.join(directories)
class DeprioritizeSystemPaths(NameModifier):
-
def execute(self, env):
tty.debug("DeprioritizeSystemPaths: {0}".format(self.name), level=3)
- environment_value = env.get(self.name, '')
- directories = environment_value.split(
- self.separator) if environment_value else []
+ environment_value = env.get(self.name, "")
+ directories = environment_value.split(self.separator) if environment_value else []
directories = deprioritize_system_paths(
- [path_to_os_path(os.path.normpath(x)).pop() for x in directories])
+ [path_to_os_path(os.path.normpath(x)).pop() for x in directories]
+ )
env[self.name] = self.separator.join(directories)
class PruneDuplicatePaths(NameModifier):
-
def execute(self, env):
- tty.debug("PruneDuplicatePaths: {0}".format(self.name),
- level=3)
- environment_value = env.get(self.name, '')
- directories = environment_value.split(
- self.separator) if environment_value else []
- directories = prune_duplicate_paths([path_to_os_path(os.path.normpath(x)).pop()
- for x in directories])
+ tty.debug("PruneDuplicatePaths: {0}".format(self.name), level=3)
+ environment_value = env.get(self.name, "")
+ directories = environment_value.split(self.separator) if environment_value else []
+ directories = prune_duplicate_paths(
+ [path_to_os_path(os.path.normpath(x)).pop() for x in directories]
+ )
env[self.name] = self.separator.join(directories)
@@ -410,8 +391,7 @@ class EnvironmentModifications(object):
@staticmethod
def _check_other(other):
if not isinstance(other, EnvironmentModifications):
- raise TypeError(
- 'other must be an instance of EnvironmentModifications')
+ raise TypeError("other must be an instance of EnvironmentModifications")
def _maybe_trace(self, kwargs):
"""Provide the modification with stack trace info so that we can track its
@@ -424,10 +404,10 @@ class EnvironmentModifications(object):
_, filename, lineno, _, context, index = stack[2]
context = context[index].strip()
except Exception:
- filename = 'unknown file'
- lineno = 'unknown line'
- context = 'unknown context'
- kwargs.update({'filename': filename, 'lineno': lineno, 'context': context})
+ filename = "unknown file"
+ lineno = "unknown line"
+ context = "unknown context"
+ kwargs.update({"filename": filename, "lineno": lineno, "context": context})
def set(self, name, value, **kwargs):
"""Stores a request to set an environment variable.
@@ -440,7 +420,7 @@ class EnvironmentModifications(object):
item = SetEnv(name, value, **kwargs)
self.env_modifications.append(item)
- def append_flags(self, name, value, sep=' ', **kwargs):
+ def append_flags(self, name, value, sep=" ", **kwargs):
"""
Stores in the current object a request to append to an env variable
@@ -450,7 +430,7 @@ class EnvironmentModifications(object):
Appends with spaces separating different additions to the variable
"""
self._maybe_trace(kwargs)
- kwargs.update({'separator': sep})
+ kwargs.update({"separator": sep})
item = AppendFlagsEnv(name, value, **kwargs)
self.env_modifications.append(item)
@@ -464,7 +444,7 @@ class EnvironmentModifications(object):
item = UnsetEnv(name, **kwargs)
self.env_modifications.append(item)
- def remove_flags(self, name, value, sep=' ', **kwargs):
+ def remove_flags(self, name, value, sep=" ", **kwargs):
"""
Stores in the current object a request to remove flags from an
env variable
@@ -475,7 +455,7 @@ class EnvironmentModifications(object):
sep: separator to assume for environment variable
"""
self._maybe_trace(kwargs)
- kwargs.update({'separator': sep})
+ kwargs.update({"separator": sep})
item = RemoveFlagsEnv(name, value, **kwargs)
self.env_modifications.append(item)
@@ -566,7 +546,7 @@ class EnvironmentModifications(object):
# The last modification must unset the variable for it to be considered
# unset
- return (type(var_updates[-1]) == UnsetEnv)
+ return type(var_updates[-1]) == UnsetEnv
def clear(self):
"""
@@ -588,23 +568,23 @@ class EnvironmentModifications(object):
for envmod in reversed(self.env_modifications):
if type(envmod) == SetEnv:
- tty.debug("Reversing `Set` environment operation may lose "
- "original value")
+ tty.debug("Reversing `Set` environment operation may lose " "original value")
rev.unset(envmod.name)
elif type(envmod) == AppendPath:
rev.remove_path(envmod.name, envmod.value)
elif type(envmod) == PrependPath:
rev.remove_path(envmod.name, envmod.value)
elif type(envmod) == SetPath:
- tty.debug("Reversing `SetPath` environment operation may lose "
- "original value")
+ tty.debug("Reversing `SetPath` environment operation may lose " "original value")
rev.unset(envmod.name)
elif type(envmod) == AppendFlagsEnv:
rev.remove_flags(envmod.name, envmod.value)
else:
# This is an un-reversable operation
- tty.warn("Skipping reversal of unreversable operation"
- "%s %s" % (type(envmod), envmod.name))
+ tty.warn(
+ "Skipping reversal of unreversable operation"
+ "%s %s" % (type(envmod), envmod.name)
+ )
return rev
@@ -621,7 +601,7 @@ class EnvironmentModifications(object):
for x in actions:
x.execute(env)
- def shell_modifications(self, shell='sh', explicit=False, env=None):
+ def shell_modifications(self, shell="sh", explicit=False, env=None):
"""Return shell code to apply the modifications and clears the list."""
modifications = self.group_by_name()
@@ -634,10 +614,10 @@ class EnvironmentModifications(object):
for x in actions:
x.execute(new_env)
- if 'MANPATH' in new_env and not new_env.get('MANPATH').endswith(':'):
- new_env['MANPATH'] += ':'
+ if "MANPATH" in new_env and not new_env.get("MANPATH").endswith(":"):
+ new_env["MANPATH"] += ":"
- cmds = ''
+ cmds = ""
for name in sorted(set(modifications)):
new = new_env.get(name, None)
@@ -647,11 +627,9 @@ class EnvironmentModifications(object):
cmds += _shell_unset_strings[shell].format(name)
else:
if sys.platform != "win32":
- cmd = _shell_set_strings[shell].format(
- name, cmd_quote(new_env[name]))
+ cmd = _shell_set_strings[shell].format(name, cmd_quote(new_env[name]))
else:
- cmd = _shell_set_strings[shell].format(
- name, new_env[name])
+ cmd = _shell_set_strings[shell].format(name, new_env[name])
cmds += cmd
return cmds
@@ -680,44 +658,56 @@ class EnvironmentModifications(object):
clean (bool): in addition to removing empty entries,
also remove duplicate entries (default: False).
"""
- tty.debug("EnvironmentModifications.from_sourcing_file: {0}"
- .format(filename))
+ tty.debug("EnvironmentModifications.from_sourcing_file: {0}".format(filename))
# Check if the file actually exists
if not os.path.isfile(filename):
- msg = 'Trying to source non-existing file: {0}'.format(filename)
+ msg = "Trying to source non-existing file: {0}".format(filename)
raise RuntimeError(msg)
# Prepare include and exclude lists of environment variable names
- exclude = kwargs.get('exclude', [])
- include = kwargs.get('include', [])
- clean = kwargs.get('clean', False)
+ exclude = kwargs.get("exclude", [])
+ include = kwargs.get("include", [])
+ clean = kwargs.get("clean", False)
# Other variables unrelated to sourcing a file
- exclude.extend([
- # Bash internals
- 'SHLVL', '_', 'PWD', 'OLDPWD', 'PS1', 'PS2', 'ENV',
- # Environment modules v4
- 'LOADEDMODULES', '_LMFILES_', 'BASH_FUNC_module()', 'MODULEPATH',
- 'MODULES_(.*)', r'(\w*)_mod(quar|share)',
- # Lmod configuration
- r'LMOD_(.*)', 'MODULERCFILE'
- ])
+ exclude.extend(
+ [
+ # Bash internals
+ "SHLVL",
+ "_",
+ "PWD",
+ "OLDPWD",
+ "PS1",
+ "PS2",
+ "ENV",
+ # Environment modules v4
+ "LOADEDMODULES",
+ "_LMFILES_",
+ "BASH_FUNC_module()",
+ "MODULEPATH",
+ "MODULES_(.*)",
+ r"(\w*)_mod(quar|share)",
+ # Lmod configuration
+ r"LMOD_(.*)",
+ "MODULERCFILE",
+ ]
+ )
# Compute the environments before and after sourcing
before = sanitize(
environment_after_sourcing_files(os.devnull, **kwargs),
- exclude=exclude, include=include
+ exclude=exclude,
+ include=include,
)
file_and_args = (filename,) + arguments
after = sanitize(
environment_after_sourcing_files(file_and_args, **kwargs),
- exclude=exclude, include=include
+ exclude=exclude,
+ include=include,
)
# Delegate to the other factory
- return EnvironmentModifications.from_environment_diff(
- before, after, clean
- )
+ return EnvironmentModifications.from_environment_diff(before, after, clean)
@staticmethod
def from_environment_diff(before, after, clean=False):
@@ -739,15 +729,14 @@ class EnvironmentModifications(object):
unset_variables = list(set(before) - set(after))
# Variables that have been modified
common_variables = set(before).intersection(set(after))
- modified_variables = [x for x in common_variables
- if before[x] != after[x]]
+ modified_variables = [x for x in common_variables if before[x] != after[x]]
# Consistent output order - looks nicer, easier comparison...
new_variables.sort()
unset_variables.sort()
modified_variables.sort()
def return_separator_if_any(*args):
- separators = ':', ';'
+ separators = ":", ";"
for separator in separators:
for arg in args:
if separator in arg:
@@ -761,7 +750,7 @@ class EnvironmentModifications(object):
sep = return_separator_if_any(after[x])
if sep:
env.prepend_path(x, after[x], separator=sep)
- elif 'PATH' in x:
+ elif "PATH" in x:
env.prepend_path(x, after[x])
else:
# We just need to set the variable to the new value
@@ -791,16 +780,14 @@ class EnvironmentModifications(object):
value_after = sep.join(after_list)
# Paths that have been removed
- remove_list = [
- ii for ii in before_list if ii not in after_list]
+ remove_list = [ii for ii in before_list if ii not in after_list]
# Check that nothing has been added in the middle of
# before_list
- remaining_list = [
- ii for ii in before_list if ii in after_list]
+ remaining_list = [ii for ii in before_list if ii in after_list]
try:
start = after_list.index(remaining_list[0])
end = after_list.index(remaining_list[-1])
- search = sep.join(after_list[start:end + 1])
+ search = sep.join(after_list[start : end + 1])
except IndexError:
env.prepend_path(x, value_after)
continue
@@ -815,7 +802,7 @@ class EnvironmentModifications(object):
except KeyError:
prepend_list = []
try:
- append_list = after_list[end + 1:]
+ append_list = after_list[end + 1 :]
except KeyError:
append_list = []
@@ -850,13 +837,14 @@ def set_or_unset_not_first(variable, changes, errstream):
"""Check if we are going to set or unset something after other
modifications have already been requested.
"""
- indexes = [ii for ii, item in enumerate(changes)
- if ii != 0 and
- not item.args.get('force', False) and
- type(item) in [SetEnv, UnsetEnv]]
+ indexes = [
+ ii
+ for ii, item in enumerate(changes)
+ if ii != 0 and not item.args.get("force", False) and type(item) in [SetEnv, UnsetEnv]
+ ]
if indexes:
- good = '\t \t{context} at {filename}:{lineno}'
- nogood = '\t--->\t{context} at {filename}:{lineno}'
+ good = "\t \t{context} at {filename}:{lineno}"
+ nogood = "\t--->\t{context} at {filename}:{lineno}"
message = "Suspicious requests to set or unset '{var}' found"
errstream(message.format(var=variable))
for ii, item in enumerate(changes):
@@ -958,7 +946,7 @@ def preserve_environment(*variables):
for var in variables:
value = cache[var]
- msg = '[PRESERVE_ENVIRONMENT]'
+ msg = "[PRESERVE_ENVIRONMENT]"
if value is not None:
# Print a debug statement if the value changed
if var not in os.environ:
@@ -995,51 +983,51 @@ def environment_after_sourcing_files(*files, **kwargs):
only when the previous command succeeds (default: ``&&``)
"""
# Set the shell executable that will be used to source files
- shell_cmd = kwargs.get('shell', '/bin/bash')
- shell_options = kwargs.get('shell_options', '-c')
- source_command = kwargs.get('source_command', 'source')
- suppress_output = kwargs.get('suppress_output', '&> /dev/null')
- concatenate_on_success = kwargs.get('concatenate_on_success', '&&')
+ shell_cmd = kwargs.get("shell", "/bin/bash")
+ shell_options = kwargs.get("shell_options", "-c")
+ source_command = kwargs.get("source_command", "source")
+ suppress_output = kwargs.get("suppress_output", "&> /dev/null")
+ concatenate_on_success = kwargs.get("concatenate_on_success", "&&")
- shell = executable.Executable(' '.join([shell_cmd, shell_options]))
+ shell = executable.Executable(" ".join([shell_cmd, shell_options]))
def _source_single_file(file_and_args, environment):
source_file = [source_command]
source_file.extend(x for x in file_and_args)
- source_file = ' '.join(source_file)
+ source_file = " ".join(source_file)
# If the environment contains 'python' use it, if not
# go with sys.executable. Below we just need a working
# Python interpreter, not necessarily sys.executable.
- python_cmd = executable.which('python3', 'python', 'python2')
+ python_cmd = executable.which("python3", "python", "python2")
python_cmd = python_cmd.path if python_cmd else sys.executable
- dump_cmd = 'import os, json; print(json.dumps(dict(os.environ)))'
+ dump_cmd = "import os, json; print(json.dumps(dict(os.environ)))"
dump_environment = python_cmd + ' -E -c "{0}"'.format(dump_cmd)
# Try to source the file
- source_file_arguments = ' '.join([
- source_file, suppress_output,
- concatenate_on_success, dump_environment,
- ])
- output = shell(
- source_file_arguments, output=str, env=environment, ignore_quotes=True
+ source_file_arguments = " ".join(
+ [
+ source_file,
+ suppress_output,
+ concatenate_on_success,
+ dump_environment,
+ ]
)
+ output = shell(source_file_arguments, output=str, env=environment, ignore_quotes=True)
environment = json.loads(output)
# If we're in python2, convert to str objects instead of unicode
# like json gives us. We can't put unicode in os.environ anyway.
return sjson.encode_json_dict(environment)
- current_environment = kwargs.get('env', dict(os.environ))
+ current_environment = kwargs.get("env", dict(os.environ))
for f in files:
# Normalize the input to the helper function
if isinstance(f, six.string_types):
f = [f]
- current_environment = _source_single_file(
- f, environment=current_environment
- )
+ current_environment = _source_single_file(f, environment=current_environment)
return current_environment
@@ -1057,7 +1045,7 @@ def sanitize(environment, exclude, include):
def set_intersection(fullset, *args):
# A set intersection using string literals and regexs
- meta = '[' + re.escape('[$()*?[]^{|}') + ']'
+ meta = "[" + re.escape("[$()*?[]^{|}") + "]"
subset = fullset & set(args) # As literal
for name in args:
if re.search(meta, name):
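To keep the intent of the EnvironmentModifications changes above concrete, a small usage sketch (the variable names and directory are hypothetical; set, prepend_path, unset, apply_modifications and shell_modifications all appear in this file):

    from spack.util.environment import EnvironmentModifications

    env = EnvironmentModifications()
    env.set("FOO", "bar")
    env.prepend_path("PATH", "/opt/tools/bin")  # hypothetical directory
    env.unset("PYTHONHOME")

    env.apply_modifications()           # apply the recorded requests to os.environ
    print(env.shell_modifications())    # or render them as `sh` export/unset code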
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index c424b9cdb5..a046e26eaa 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -16,7 +16,7 @@ import llnl.util.tty as tty
import spack.error
from spack.util.path import Path, format_os_path, path_to_os_path, system_path_filter
-__all__ = ['Executable', 'which', 'ProcessError']
+__all__ = ["Executable", "which", "ProcessError"]
class Executable(object):
@@ -30,6 +30,7 @@ class Executable(object):
self.exe = path_to_os_path(*self.exe)
self.default_env = {}
from spack.util.environment import EnvironmentModifications # no cycle
+
self.default_envmod = EnvironmentModifications()
self.returncode = None
@@ -62,7 +63,7 @@ class Executable(object):
Returns:
str: The executable and default arguments
"""
- return ' '.join(self.exe)
+ return " ".join(self.exe)
@property
def name(self):
@@ -124,7 +125,7 @@ class Executable(object):
"""
# Environment
- env_arg = kwargs.get('env', None)
+ env_arg = kwargs.get("env", None)
# Setup default environment
env = os.environ.copy() if env_arg is None else {}
@@ -140,30 +141,30 @@ class Executable(object):
env.update(env_arg)
# Apply extra env
- extra_env = kwargs.get('extra_env', {})
+ extra_env = kwargs.get("extra_env", {})
if isinstance(extra_env, EnvironmentModifications):
extra_env.apply_modifications(env)
else:
env.update(extra_env)
- if '_dump_env' in kwargs:
- kwargs['_dump_env'].clear()
- kwargs['_dump_env'].update(env)
+ if "_dump_env" in kwargs:
+ kwargs["_dump_env"].clear()
+ kwargs["_dump_env"].update(env)
- fail_on_error = kwargs.pop('fail_on_error', True)
- ignore_errors = kwargs.pop('ignore_errors', ())
- ignore_quotes = kwargs.pop('ignore_quotes', False)
+ fail_on_error = kwargs.pop("fail_on_error", True)
+ ignore_errors = kwargs.pop("ignore_errors", ())
+ ignore_quotes = kwargs.pop("ignore_quotes", False)
# If they just want to ignore one error code, make it a tuple.
if isinstance(ignore_errors, int):
- ignore_errors = (ignore_errors, )
+ ignore_errors = (ignore_errors,)
- input = kwargs.pop('input', None)
- output = kwargs.pop('output', None)
- error = kwargs.pop('error', None)
+ input = kwargs.pop("input", None)
+ output = kwargs.pop("output", None)
+ error = kwargs.pop("error", None)
if input is str:
- raise ValueError('Cannot use `str` as input stream.')
+ raise ValueError("Cannot use `str` as input stream.")
def streamify(arg, mode):
if isinstance(arg, string_types):
@@ -173,22 +174,21 @@ class Executable(object):
else:
return arg, False
- ostream, close_ostream = streamify(output, 'w')
- estream, close_estream = streamify(error, 'w')
- istream, close_istream = streamify(input, 'r')
+ ostream, close_ostream = streamify(output, "w")
+ estream, close_estream = streamify(error, "w")
+ istream, close_istream = streamify(input, "r")
if not ignore_quotes:
quoted_args = [arg for arg in args if re.search(r'^".*"$|^\'.*\'$', arg)]
if quoted_args:
tty.warn(
- "Quotes in command arguments can confuse scripts like"
- " configure.",
+ "Quotes in command arguments can confuse scripts like" " configure.",
"The following arguments may cause problems when executed:",
str("\n".join([" " + arg for arg in quoted_args])),
"Quotes aren't needed because spack doesn't use a shell. "
"Consider removing them.",
- "If multiple levels of quotation are required, use "
- "`ignore_quotes=True`.")
+ "If multiple levels of quotation are required, use " "`ignore_quotes=True`.",
+ )
cmd = self.exe + list(args)
@@ -203,25 +203,26 @@ class Executable(object):
stderr=estream,
stdout=ostream,
env=env,
- close_fds=False,)
+ close_fds=False,
+ )
out, err = proc.communicate()
result = None
if output in (str, str.split) or error in (str, str.split):
- result = ''
+ result = ""
if output in (str, str.split):
- if sys.platform == 'win32':
- outstr = text_type(out.decode('ISO-8859-1'))
+ if sys.platform == "win32":
+ outstr = text_type(out.decode("ISO-8859-1"))
else:
- outstr = text_type(out.decode('utf-8'))
+ outstr = text_type(out.decode("utf-8"))
result += outstr
if output is str.split:
sys.stdout.write(outstr)
if error in (str, str.split):
- if sys.platform == 'win32':
- errstr = text_type(err.decode('ISO-8859-1'))
+ if sys.platform == "win32":
+ errstr = text_type(err.decode("ISO-8859-1"))
else:
- errstr = text_type(err.decode('utf-8'))
+ errstr = text_type(err.decode("utf-8"))
result += errstr
if error is str.split:
sys.stderr.write(errstr)
@@ -234,22 +235,22 @@ class Executable(object):
# been stored either in the specified files (e.g. if
# 'output' specifies a file) or written to the parent's
# stdout/stderr (e.g. if 'output' is not specified)
- long_msg += '\n' + result
+ long_msg += "\n" + result
- raise ProcessError('Command exited with status %d:' %
- proc.returncode, long_msg)
+ raise ProcessError("Command exited with status %d:" % proc.returncode, long_msg)
return result
except OSError as e:
- raise ProcessError(
- '%s: %s' % (self.exe[0], e.strerror), 'Command: ' + cmd_line_string)
+ raise ProcessError("%s: %s" % (self.exe[0], e.strerror), "Command: " + cmd_line_string)
except subprocess.CalledProcessError as e:
if fail_on_error:
raise ProcessError(
- str(e), '\nExit status %d when invoking command: %s' %
- (proc.returncode, cmd_line_string))
+ str(e),
+ "\nExit status %d when invoking command: %s"
+ % (proc.returncode, cmd_line_string),
+ )
finally:
if close_ostream:
@@ -260,35 +261,34 @@ class Executable(object):
istream.close()
def __eq__(self, other):
- return hasattr(other, 'exe') and self.exe == other.exe
+ return hasattr(other, "exe") and self.exe == other.exe
def __neq__(self, other):
return not (self == other)
def __hash__(self):
- return hash((type(self), ) + tuple(self.exe))
+ return hash((type(self),) + tuple(self.exe))
def __repr__(self):
- return '<exe: %s>' % self.exe
+ return "<exe: %s>" % self.exe
def __str__(self):
- return ' '.join(self.exe)
+ return " ".join(self.exe)
@system_path_filter
def which_string(*args, **kwargs):
"""Like ``which()``, but return a string instead of an ``Executable``."""
- path = kwargs.get('path', os.environ.get('PATH', ''))
- required = kwargs.get('required', False)
+ path = kwargs.get("path", os.environ.get("PATH", ""))
+ required = kwargs.get("required", False)
if isinstance(path, string_types):
path = path.split(os.pathsep)
for name in args:
win_candidates = []
- if sys.platform == "win32" and (not name.endswith(".exe")
- and not name.endswith(".bat")):
- win_candidates = [name + ext for ext in ['.exe', '.bat']]
+ if sys.platform == "win32" and (not name.endswith(".exe") and not name.endswith(".bat")):
+ win_candidates = [name + ext for ext in [".exe", ".bat"]]
candidate_names = [name] if not win_candidates else win_candidates
for candidate_name in candidate_names:
@@ -304,8 +304,7 @@ def which_string(*args, **kwargs):
return exe
if required:
- raise CommandNotFoundError(
- "spack requires '%s'. Make sure it is in your path." % args[0])
+ raise CommandNotFoundError("spack requires '%s'. Make sure it is in your path." % args[0])
return None
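A short sketch of the which()/Executable API whose formatting changes above; the command name is only an example, and output=str (capture stdout as a string) is the convention used elsewhere in this diff:

    from spack.util.executable import which

    git = which("git")                           # an Executable, or None if not on PATH
    if git:
        version = git("--version", output=str)   # stdout captured as a string
        print(version.strip())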
diff --git a/lib/spack/spack/util/file_cache.py b/lib/spack/spack/util/file_cache.py
index eead22980d..759d150045 100644
--- a/lib/spack/spack/util/file_cache.py
+++ b/lib/spack/spack/util/file_cache.py
@@ -61,13 +61,12 @@ class FileCache(object):
keyfile = os.path.basename(key)
keydir = os.path.dirname(key)
- return os.path.join(self.root, keydir, '.' + keyfile + '.lock')
+ return os.path.join(self.root, keydir, "." + keyfile + ".lock")
def _get_lock(self, key):
"""Create a lock for a key, if necessary, and return a lock object."""
if key not in self._locks:
- self._locks[key] = Lock(self._lock_path(key),
- default_timeout=self.lock_timeout)
+ self._locks[key] = Lock(self._lock_path(key), default_timeout=self.lock_timeout)
return self._locks[key]
def init_entry(self, key):
@@ -107,9 +106,7 @@ class FileCache(object):
cache_file.read()
"""
- return ReadTransaction(
- self._get_lock(key), acquire=lambda: open(self.cache_path(key))
- )
+ return ReadTransaction(self._get_lock(key), acquire=lambda: open(self.cache_path(key)))
def write_transaction(self, key):
"""Get a write transaction on a file cache item.
@@ -122,23 +119,22 @@ class FileCache(object):
filename = self.cache_path(key)
if os.path.exists(filename) and not os.access(filename, os.W_OK):
raise CacheError(
- "Insufficient permissions to write to file cache at {0}"
- .format(filename))
+ "Insufficient permissions to write to file cache at {0}".format(filename)
+ )
# TODO: this nested context manager adds a lot of complexity and
# TODO: is pretty hard to reason about in llnl.util.lock. At some
# TODO: point we should just replace it with functions and simplify
# TODO: the locking code.
class WriteContextManager(object):
-
def __enter__(cm):
cm.orig_filename = self.cache_path(key)
cm.orig_file = None
if os.path.exists(cm.orig_filename):
- cm.orig_file = open(cm.orig_filename, 'r')
+ cm.orig_file = open(cm.orig_filename, "r")
- cm.tmp_filename = self.cache_path(key) + '.tmp'
- cm.tmp_file = open(cm.tmp_filename, 'w')
+ cm.tmp_filename = self.cache_path(key) + ".tmp"
+ cm.tmp_file = open(cm.tmp_filename, "w")
return cm.orig_file, cm.tmp_file
@@ -154,8 +150,7 @@ class FileCache(object):
else:
rename(cm.tmp_filename, cm.orig_filename)
- return WriteTransaction(
- self._get_lock(key), acquire=WriteContextManager)
+ return WriteTransaction(self._get_lock(key), acquire=WriteContextManager)
def mtime(self, key):
"""Return modification time of cache file, or 0 if it does not exist.
diff --git a/lib/spack/spack/util/file_permissions.py b/lib/spack/spack/util/file_permissions.py
index 3ed1032392..141af3a9fb 100644
--- a/lib/spack/spack/util/file_permissions.py
+++ b/lib/spack/spack/util/file_permissions.py
@@ -30,16 +30,13 @@ def set_permissions(path, perms, group=None):
# Do not let users create world/group writable suid binaries
if perms & st.S_ISUID:
if perms & st.S_IWOTH:
- raise InvalidPermissionsError(
- "Attempting to set suid with world writable")
+ raise InvalidPermissionsError("Attempting to set suid with world writable")
if perms & st.S_IWGRP:
- raise InvalidPermissionsError(
- "Attempting to set suid with group writable")
+ raise InvalidPermissionsError("Attempting to set suid with group writable")
# Or world writable sgid binaries
if perms & st.S_ISGID:
if perms & st.S_IWOTH:
- raise InvalidPermissionsError(
- "Attempting to set sgid with world writable")
+ raise InvalidPermissionsError("Attempting to set sgid with world writable")
fs.chmod_x(path, perms)
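A brief sketch of set_permissions() as touched above; the path is hypothetical, perms is a standard stat mode, and modes mixing suid/sgid with group- or world-writable bits are rejected by the checks shown:

    import stat

    from spack.util.file_permissions import set_permissions

    # rwx for the owner, r-x for the group, nothing for others
    set_permissions("/opt/spack/bin/tool", stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP)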
diff --git a/lib/spack/spack/util/gcs.py b/lib/spack/spack/util/gcs.py
index b09e85da77..93efd7be1a 100644
--- a/lib/spack/spack/util/gcs.py
+++ b/lib/spack/spack/util/gcs.py
@@ -23,13 +23,14 @@ def gcs_client():
import google.auth
from google.cloud import storage
except ImportError as ex:
- tty.error('{0}, google-cloud-storage python module is missing.'.format(ex) +
- ' Please install to use the gs:// backend.')
+ tty.error(
+ "{0}, google-cloud-storage python module is missing.".format(ex)
+ + " Please install to use the gs:// backend."
+ )
sys.exit(1)
storage_credentials, storage_project = google.auth.default()
- storage_client = storage.Client(storage_project,
- storage_credentials)
+ storage_client = storage.Client(storage_project, storage_credentials)
return storage_client
@@ -38,20 +39,24 @@ class GCSBucket(object):
Create a wrapper object for a GCS Bucket. Provides methods to wrap spack
related tasks, such as destroy.
"""
+
def __init__(self, url, client=None):
"""Constructor for GCSBucket objects
- Args:
- url (str): The url pointing to the GCS bucket to build an object out of
- client (google.cloud.storage.client.Client): A pre-defined storage
- client that will be used to access the GCS bucket.
+ Args:
+ url (str): The url pointing to the GCS bucket to build an object out of
+ client (google.cloud.storage.client.Client): A pre-defined storage
+ client that will be used to access the GCS bucket.
"""
- if url.scheme != 'gs':
- raise ValueError('Can not create GCS bucket connection with scheme {SCHEME}'
- .format(SCHEME=url.scheme))
+ if url.scheme != "gs":
+ raise ValueError(
+ "Can not create GCS bucket connection with scheme {SCHEME}".format(
+ SCHEME=url.scheme
+ )
+ )
self.url = url
self.name = self.url.netloc
- if self.url.path[0] == '/':
+ if self.url.path[0] == "/":
self.prefix = self.url.path[1:]
else:
self.prefix = self.url.path
@@ -59,12 +64,13 @@ class GCSBucket(object):
self.client = client or gcs_client()
self.bucket = None
- tty.debug('New GCS bucket:')
+ tty.debug("New GCS bucket:")
tty.debug(" name: {0}".format(self.name))
tty.debug(" prefix: {0}".format(self.prefix))
def exists(self):
from google.cloud.exceptions import NotFound
+
if not self.bucket:
try:
self.bucket = self.client.bucket(self.name)
@@ -97,8 +103,7 @@ class GCSBucket(object):
If false, print absolute blob paths (useful for
destruction of bucket)
"""
- tty.debug('Getting GCS blobs... Recurse {0} -- Rel: {1}'.format(
- recursive, relative))
+ tty.debug("Getting GCS blobs... Recurse {0} -- Rel: {1}".format(recursive, relative))
converter = str
if relative:
@@ -108,11 +113,11 @@ class GCSBucket(object):
all_blobs = self.bucket.list_blobs(prefix=self.prefix)
blob_list = []
- base_dirs = len(self.prefix.split('/')) + 1
+ base_dirs = len(self.prefix.split("/")) + 1
for blob in all_blobs:
if not recursive:
- num_dirs = len(blob.name.split('/'))
+ num_dirs = len(blob.name.split("/"))
if num_dirs <= base_dirs:
blob_list.append(converter(blob.name))
else:
@@ -131,6 +136,7 @@ class GCSBucket(object):
Uses GCS Batch operations to bundle several delete operations together.
"""
from google.cloud.exceptions import NotFound
+
tty.debug("Bucket.destroy(recursive={0})".format(recursive))
try:
bucket_blobs = self.get_all_blobs(recursive=recursive, relative=False)
@@ -143,8 +149,7 @@ class GCSBucket(object):
blob = self.blob(bucket_blobs[j])
blob.delete()
except NotFound as ex:
- tty.error("{0}, Could not delete a blob in bucket {1}.".format(
- ex, self.name))
+ tty.error("{0}, Could not delete a blob in bucket {1}.".format(ex, self.name))
sys.exit(1)
@@ -153,25 +158,28 @@ class GCSBlob(object):
Wraps some blob methods for spack functionality
"""
+
def __init__(self, url, client=None):
self.url = url
- if url.scheme != 'gs':
- raise ValueError('Can not create GCS blob connection with scheme: {SCHEME}'
- .format(SCHEME=url.scheme))
+ if url.scheme != "gs":
+ raise ValueError(
+ "Can not create GCS blob connection with scheme: {SCHEME}".format(
+ SCHEME=url.scheme
+ )
+ )
self.client = client or gcs_client()
self.bucket = GCSBucket(url)
- self.blob_path = self.url.path.lstrip('/')
+ self.blob_path = self.url.path.lstrip("/")
tty.debug("New GCSBlob")
tty.debug(" blob_path = {0}".format(self.blob_path))
if not self.bucket.exists():
- tty.warn("The bucket {0} does not exist, it will be created"
- .format(self.bucket.name))
+ tty.warn("The bucket {0} does not exist, it will be created".format(self.bucket.name))
self.bucket.create()
def get(self):
@@ -179,6 +187,7 @@ class GCSBlob(object):
def exists(self):
from google.cloud.exceptions import NotFound
+
try:
blob = self.bucket.blob(self.blob_path)
exists = blob.exists()
@@ -189,6 +198,7 @@ class GCSBlob(object):
def delete_blob(self):
from google.cloud.exceptions import NotFound
+
try:
blob = self.bucket.blob(self.blob_path)
blob.delete()
@@ -200,16 +210,16 @@ class GCSBlob(object):
blob.upload_from_filename(local_file_path)
def get_blob_byte_stream(self):
- return self.bucket.get_blob(self.blob_path).open(mode='rb')
+ return self.bucket.get_blob(self.blob_path).open(mode="rb")
def get_blob_headers(self):
blob = self.bucket.get_blob(self.blob_path)
headers = {
- 'Content-type': blob.content_type,
- 'Content-encoding': blob.content_encoding,
- 'Content-language': blob.content_language,
- 'MD5Hash': blob.md5_hash
+ "Content-type": blob.content_type,
+ "Content-encoding": blob.content_encoding,
+ "Content-language": blob.content_language,
+ "MD5Hash": blob.md5_hash,
}
return headers
diff --git a/lib/spack/spack/util/gpg.py b/lib/spack/spack/util/gpg.py
index 9496e6b063..3f0d74f4b7 100644
--- a/lib/spack/spack/util/gpg.py
+++ b/lib/spack/spack/util/gpg.py
@@ -55,18 +55,16 @@ def init(gnupghome=None, force=False):
return
# Set the value of GNUPGHOME to be used in this module
- GNUPGHOME = (gnupghome or
- os.getenv('SPACK_GNUPGHOME') or
- spack.paths.gpg_path)
+ GNUPGHOME = gnupghome or os.getenv("SPACK_GNUPGHOME") or spack.paths.gpg_path
# Set the executable objects for "gpg" and "gpgconf"
with spack.bootstrap.ensure_bootstrap_configuration():
spack.bootstrap.ensure_gpg_in_path_or_raise()
GPG, GPGCONF = _gpg(), _gpgconf()
- GPG.add_default_env('GNUPGHOME', GNUPGHOME)
+ GPG.add_default_env("GNUPGHOME", GNUPGHOME)
if GPGCONF:
- GPGCONF.add_default_env('GNUPGHOME', GNUPGHOME)
+ GPGCONF.add_default_env("GNUPGHOME", GNUPGHOME)
# Set the socket dir if not using GnuPG defaults
SOCKET_DIR = _socket_dir(GPGCONF)
@@ -80,7 +78,7 @@ def init(gnupghome=None, force=False):
raise SpackGPGError(msg)
if SOCKET_DIR is not None:
- GPGCONF('--create-socketdir')
+ GPGCONF("--create-socketdir")
def _autoinit(func):
@@ -90,10 +88,12 @@ def _autoinit(func):
Args:
func (callable): decorated function
"""
+
@functools.wraps(func)
def _wrapped(*args, **kwargs):
init()
return func(*args, **kwargs)
+
return _wrapped
@@ -124,14 +124,14 @@ def gnupghome_override(dir):
def _parse_secret_keys_output(output):
keys = []
found_sec = False
- for line in output.split('\n'):
+ for line in output.split("\n"):
if found_sec:
- if line.startswith('fpr'):
- keys.append(line.split(':')[9])
+ if line.startswith("fpr"):
+ keys.append(line.split(":")[9])
found_sec = False
- elif line.startswith('ssb'):
+ elif line.startswith("ssb"):
found_sec = False
- elif line.startswith('sec'):
+ elif line.startswith("sec"):
found_sec = True
return keys
@@ -142,25 +142,25 @@ def _parse_public_keys_output(output):
"""
keys = []
found_pub = False
- current_pub_key = ''
- for line in output.split('\n'):
+ current_pub_key = ""
+ for line in output.split("\n"):
if found_pub:
- if line.startswith('fpr'):
- keys.append((current_pub_key, line.split(':')[9]))
+ if line.startswith("fpr"):
+ keys.append((current_pub_key, line.split(":")[9]))
found_pub = False
- elif line.startswith('ssb'):
+ elif line.startswith("ssb"):
found_pub = False
- elif line.startswith('pub'):
- current_pub_key = line.split(':')[4]
+ elif line.startswith("pub"):
+ current_pub_key = line.split(":")[4]
found_pub = True
return keys
def _get_unimported_public_keys(output):
keys = []
- for line in output.split('\n'):
- if line.startswith('pub'):
- keys.append(line.split(':')[4])
+ for line in output.split("\n"):
+ if line.startswith("pub"):
+ keys.append(line.split(":")[4])
return keys
@@ -172,9 +172,10 @@ class SpackGPGError(spack.error.SpackError):
def create(**kwargs):
"""Create a new key pair."""
r, w = os.pipe()
- with contextlib.closing(os.fdopen(r, 'r')) as r:
- with contextlib.closing(os.fdopen(w, 'w')) as w:
- w.write('''
+ with contextlib.closing(os.fdopen(r, "r")) as r:
+ with contextlib.closing(os.fdopen(w, "w")) as w:
+ w.write(
+ """
Key-Type: rsa
Key-Length: 4096
Key-Usage: sign
@@ -184,27 +185,23 @@ Name-Comment: %(comment)s
Expire-Date: %(expires)s
%%no-protection
%%commit
-''' % kwargs)
- GPG('--gen-key', '--batch', input=r)
+"""
+ % kwargs
+ )
+ GPG("--gen-key", "--batch", input=r)
@_autoinit
def signing_keys(*args):
"""Return the keys that can be used to sign binaries."""
- output = GPG(
- '--list-secret-keys', '--with-colons', '--fingerprint',
- *args, output=str
- )
+ output = GPG("--list-secret-keys", "--with-colons", "--fingerprint", *args, output=str)
return _parse_secret_keys_output(output)
@_autoinit
def public_keys_to_fingerprint(*args):
"""Return the keys that can be used to verify binaries."""
- output = GPG(
- '--list-public-keys', '--with-colons', '--fingerprint',
- *args, output=str
- )
+ output = GPG("--list-public-keys", "--with-colons", "--fingerprint", *args, output=str)
return _parse_public_keys_output(output)
@@ -238,11 +235,11 @@ def trust(keyfile):
keyfile (str): file with the public key
"""
# Get the public keys we are about to import
- output = GPG('--with-colons', keyfile, output=str, error=str)
+ output = GPG("--with-colons", keyfile, output=str, error=str)
keys = _get_unimported_public_keys(output)
# Import them
- GPG('--import', keyfile)
+ GPG("--import", keyfile)
# Set trust to ultimate
key_to_fpr = dict(public_keys_to_fingerprint())
@@ -253,10 +250,10 @@ def trust(keyfile):
fpr = key_to_fpr[key]
r, w = os.pipe()
- with contextlib.closing(os.fdopen(r, 'r')) as r:
- with contextlib.closing(os.fdopen(w, 'w')) as w:
+ with contextlib.closing(os.fdopen(r, "r")) as r:
+ with contextlib.closing(os.fdopen(w, "w")) as w:
w.write("{0}:6:\n".format(fpr))
- GPG('--import-ownertrust', input=r)
+ GPG("--import-ownertrust", input=r)
@_autoinit
@@ -269,10 +266,10 @@ def untrust(signing, *keys):
"""
if signing:
skeys = signing_keys(*keys)
- GPG('--batch', '--yes', '--delete-secret-keys', *skeys)
+ GPG("--batch", "--yes", "--delete-secret-keys", *skeys)
pkeys = public_keys(*keys)
- GPG('--batch', '--yes', '--delete-keys', *pkeys)
+ GPG("--batch", "--yes", "--delete-keys", *pkeys)
@_autoinit
@@ -287,8 +284,8 @@ def sign(key, file, output, clearsign=False):
clearsign (bool): if True wraps the document in an ASCII-armored
signature, if False creates a detached signature
"""
- signopt = '--clearsign' if clearsign else '--detach-sign'
- GPG(signopt, '--armor', '--default-key', key, '--output', output, file)
+ signopt = "--clearsign" if clearsign else "--detach-sign"
+ GPG(signopt, "--armor", "--default-key", key, "--output", output, file)
@_autoinit
@@ -305,8 +302,8 @@ def verify(signature, file=None, suppress_warnings=False):
args = [signature]
if file:
args.append(file)
- kwargs = {'error': str} if suppress_warnings else {}
- GPG('--verify', *args, **kwargs)
+ kwargs = {"error": str} if suppress_warnings else {}
+ GPG("--verify", *args, **kwargs)
@_autoinit
@@ -318,41 +315,39 @@ def list(trusted, signing):
signing (bool): if True list private keys
"""
if trusted:
- GPG('--list-public-keys')
+ GPG("--list-public-keys")
if signing:
- GPG('--list-secret-keys')
+ GPG("--list-secret-keys")
def _verify_exe_or_raise(exe):
msg = (
- 'Spack requires gpgconf version >= 2\n'
- ' To install a suitable version using Spack, run\n'
- ' spack install gnupg@2:\n'
- ' and load it by running\n'
- ' spack load gnupg@2:'
+ "Spack requires gpgconf version >= 2\n"
+ " To install a suitable version using Spack, run\n"
+ " spack install gnupg@2:\n"
+ " and load it by running\n"
+ " spack load gnupg@2:"
)
if not exe:
raise SpackGPGError(msg)
- output = exe('--version', output=str)
+ output = exe("--version", output=str)
match = re.search(r"^gpg(conf)? \(GnuPG\) (.*)$", output, re.M)
if not match:
- raise SpackGPGError(
- 'Could not determine "{0}" version'.format(exe.name)
- )
+ raise SpackGPGError('Could not determine "{0}" version'.format(exe.name))
- if spack.version.Version(match.group(2)) < spack.version.Version('2'):
+ if spack.version.Version(match.group(2)) < spack.version.Version("2"):
raise SpackGPGError(msg)
def _gpgconf():
- exe = spack.util.executable.which('gpgconf', 'gpg2conf', 'gpgconf2')
+ exe = spack.util.executable.which("gpgconf", "gpg2conf", "gpgconf2")
_verify_exe_or_raise(exe)
# ensure that the gpgconf we found can run "gpgconf --create-socketdir"
try:
- exe('--dry-run', '--create-socketdir', output=os.devnull, error=os.devnull)
+ exe("--dry-run", "--create-socketdir", output=os.devnull, error=os.devnull)
except spack.util.executable.ProcessError:
# no dice
exe = None
@@ -361,7 +356,7 @@ def _gpgconf():
def _gpg():
- exe = spack.util.executable.which('gpg2', 'gpg')
+ exe = spack.util.executable.which("gpg2", "gpg")
_verify_exe_or_raise(exe)
return exe
@@ -379,11 +374,11 @@ def _socket_dir(gpgconf):
return None
result = None
- for var_run in ('/run', '/var/run'):
+ for var_run in ("/run", "/var/run"):
if not os.path.exists(var_run):
continue
- var_run_user = os.path.join(var_run, 'user')
+ var_run_user = os.path.join(var_run, "user")
try:
if not os.path.exists(var_run_user):
os.mkdir(var_run_user)
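A short sketch tying together the gpg helpers reformatted above; the file names are hypothetical, sign() writes a detached ASCII-armored signature by default, and verify() checks it against the original file, per the docstrings in this module:

    import spack.util.gpg as gpg

    keys = gpg.signing_keys()   # fingerprints of keys usable for signing
    if keys:
        gpg.sign(keys[0], "pkg.tar.gz", "pkg.tar.gz.asc")  # hypothetical files
        gpg.verify("pkg.tar.gz.asc", "pkg.tar.gz")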
diff --git a/lib/spack/spack/util/hash.py b/lib/spack/spack/util/hash.py
index 0bd4c13230..929c97977c 100644
--- a/lib/spack/spack/util/hash.py
+++ b/lib/spack/spack/util/hash.py
@@ -12,11 +12,11 @@ import spack.util.crypto
def b32_hash(content):
"""Return the b32 encoded sha1 hash of the input string as a string."""
- sha = hashlib.sha1(content.encode('utf-8'))
+ sha = hashlib.sha1(content.encode("utf-8"))
b32_hash = base64.b32encode(sha.digest()).lower()
if sys.version_info[0] >= 3:
- b32_hash = b32_hash.decode('utf-8')
+ b32_hash = b32_hash.decode("utf-8")
return b32_hash
@@ -24,8 +24,7 @@ def b32_hash(content):
def base32_prefix_bits(hash_string, bits):
"""Return the first <bits> bits of a base32 string as an integer."""
if bits > len(hash_string) * 5:
- raise ValueError("Too many bits! Requested %d bit prefix of '%s'."
- % (bits, hash_string))
+ raise ValueError("Too many bits! Requested %d bit prefix of '%s'." % (bits, hash_string))
hash_bytes = base64.b32decode(hash_string, casefold=True)
return spack.util.crypto.prefix_bits(hash_bytes, bits)
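A tiny sketch of the hashing helpers above; b32_hash returns the lowercase base32 encoding of the sha1 of its input (32 characters for the 20-byte digest), and base32_prefix_bits extracts a leading-bit prefix from such a string (the input text here is arbitrary):

    from spack.util.hash import b32_hash, base32_prefix_bits

    h = b32_hash("zlib@1.2.12")         # any string; here a spec-like example
    print(h, len(h))                    # 32-character base32 string
    print(base32_prefix_bits(h, 20))    # integer built from the first 20 bits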
diff --git a/lib/spack/spack/util/lock.py b/lib/spack/spack/util/lock.py
index a0ac0f0d64..f89b301b76 100644
--- a/lib/spack/spack/util/lock.py
+++ b/lib/spack/spack/util/lock.py
@@ -21,7 +21,7 @@ import spack.config
import spack.error
import spack.paths
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
class Lock(llnl.util.lock.Lock):
@@ -31,9 +31,10 @@ class Lock(llnl.util.lock.Lock):
``llnl.util.lock`` so that all the lock API calls will succeed, but
the actual locking mechanism can be disabled via ``_enable_locks``.
"""
+
def __init__(self, *args, **kwargs):
super(Lock, self).__init__(*args, **kwargs)
- self._enable = spack.config.get('config:locks', not is_windows)
+ self._enable = spack.config.get("config:locks", not is_windows)
def _lock(self, op, timeout=0):
if self._enable:
@@ -73,15 +74,15 @@ def check_lock_safety(path):
writable = None
if (mode & stat.S_IWGRP) and (uid != gid):
# spack is group-writeable and the group is not the owner
- writable = 'group'
- elif (mode & stat.S_IWOTH):
+ writable = "group"
+ elif mode & stat.S_IWOTH:
# spack is world-writeable
- writable = 'world'
+ writable = "world"
if writable:
- msg = "Refusing to disable locks: spack is {0}-writable.".format(
- writable)
+ msg = "Refusing to disable locks: spack is {0}-writable.".format(writable)
long_msg = (
"Running a shared spack without locks is unsafe. You must "
- "restrict permissions on {0} or enable locks.").format(path)
+ "restrict permissions on {0} or enable locks."
+ ).format(path)
raise spack.error.SpackError(msg, long_msg)
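The permission test being rewrapped above reduces to two stat-mode checks. A simplified standalone version (not the Spack function; uid and gid are taken straight from os.stat):

import os
import stat

def unsafe_writability(path):
    st = os.stat(path)
    if (st.st_mode & stat.S_IWGRP) and (st.st_uid != st.st_gid):
        return "group"  # group-writable and the group is not the owner's private group
    if st.st_mode & stat.S_IWOTH:
        return "world"  # world-writable
    return None

kind = unsafe_writability(".")
if kind:
    print("refusing to disable locks: path is %s-writable" % kind)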
diff --git a/lib/spack/spack/util/log_parse.py b/lib/spack/spack/util/log_parse.py
index b344de43af..e498a41bdf 100644
--- a/lib/spack/spack/util/log_parse.py
+++ b/lib/spack/spack/util/log_parse.py
@@ -13,7 +13,7 @@ from six import StringIO
import llnl.util.tty as tty
from llnl.util.tty.color import cescape, colorize
-__all__ = ['parse_log_events', 'make_log_context']
+__all__ = ["parse_log_events", "make_log_context"]
def parse_log_events(stream, context=6, jobs=None, profile=False):
@@ -51,7 +51,7 @@ def _wrap(text, width):
lines = []
pos = 0
while pos < len(text):
- lines.append(text[pos:pos + width])
+ lines.append(text[pos : pos + width])
pos += width
return lines
@@ -77,8 +77,8 @@ def make_log_context(log_events, width=None):
log_events = sorted(log_events, key=lambda e: e.line_no)
num_width = len(str(max(error_lines or [0]))) + 4
- line_fmt = '%%-%dd%%s' % num_width
- indent = ' ' * (5 + num_width)
+ line_fmt = "%%-%dd%%s" % num_width
+ indent = " " * (5 + num_width)
if width is None:
_, width = tty.terminal_size()
@@ -92,14 +92,14 @@ def make_log_context(log_events, width=None):
start = event.start
if isinstance(event, BuildError):
- color = 'R'
+ color = "R"
elif isinstance(event, BuildWarning):
- color = 'Y'
+ color = "Y"
else:
- color = 'W'
+ color = "W"
if next_line != 1 and start > next_line:
- out.write('\n ...\n\n')
+ out.write("\n ...\n\n")
if start < next_line:
start = next_line
@@ -108,13 +108,12 @@ def make_log_context(log_events, width=None):
# wrap to width
lines = _wrap(event[i], wrap_width)
lines[1:] = [indent + ln for ln in lines[1:]]
- wrapped_line = line_fmt % (i, '\n'.join(lines))
+ wrapped_line = line_fmt % (i, "\n".join(lines))
if i in error_lines:
- out.write(colorize(
- ' @%s{>> %s}\n' % (color, cescape(wrapped_line))))
+ out.write(colorize(" @%s{>> %s}\n" % (color, cescape(wrapped_line))))
else:
- out.write(' %s\n' % wrapped_line)
+ out.write(" %s\n" % wrapped_line)
next_line = event.end
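For context, the slice-spacing changes above sit inside a fixed-width wrapper plus a numbered-line format string. An illustrative standalone sketch with an invented line number and compile line:

def wrap(text, width):
    # split text into width-sized chunks, mirroring _wrap above
    return [text[pos : pos + width] for pos in range(0, len(text), width)]

num_width = len("4257") + 4
line_fmt = "%%-%dd%%s" % num_width   # e.g. "%-8d%s"
indent = " " * (5 + num_width)

lines = wrap("c++ -O3 -fopenmp -c a_very_long_translation_unit.cpp", 30)
lines[1:] = [indent + ln for ln in lines[1:]]
print(line_fmt % (4257, "\n".join(lines)))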
diff --git a/lib/spack/spack/util/mock_package.py b/lib/spack/spack/util/mock_package.py
index 312e3f8f10..356aefbd7a 100644
--- a/lib/spack/spack/util/mock_package.py
+++ b/lib/spack/spack/util/mock_package.py
@@ -20,10 +20,10 @@ class MockPackageBase(object):
Use ``MockPackageMultiRepo.add_package()`` to create new instances.
"""
+
virtual = False
- def __init__(self, dependencies, dependency_types,
- conditions=None, versions=None):
+ def __init__(self, dependencies, dependency_types, conditions=None, versions=None):
"""Instantiate a new MockPackageBase.
This is not for general use; it needs to be constructed by a
@@ -44,8 +44,7 @@ class MockPackageBase(object):
return [v.name for v, c in self.provided]
@classmethod
- def possible_dependencies(
- cls, transitive=True, deptype='all', visited=None, virtuals=None):
+ def possible_dependencies(cls, transitive=True, deptype="all", visited=None, virtuals=None):
visited = {} if visited is None else visited
for name, conditions in cls.dependencies.items():
@@ -65,8 +64,7 @@ class MockPackageBase(object):
if not transitive:
continue
- cls._repo.get(dep_name).possible_dependencies(
- transitive, deptype, visited, virtuals)
+ cls._repo.get(dep_name).possible_dependencies(transitive, deptype, visited, virtuals)
return visited
@@ -81,8 +79,8 @@ class MockPackageMultiRepo(object):
def __init__(self):
self.spec_to_pkg = {}
- self.namespace = 'mock' # repo namespace
- self.full_namespace = 'spack.pkg.mock' # python import namespace
+ self.namespace = "mock" # repo namespace
+ self.full_namespace = "spack.pkg.mock" # python import namespace
def get(self, spec):
if not isinstance(spec, spack.spec.Spec):
@@ -94,8 +92,7 @@ class MockPackageMultiRepo(object):
def get_pkg_class(self, name):
namespace, _, name = name.rpartition(".")
if namespace and namespace != self.namespace:
- raise spack.repo.InvalidNamespaceError(
- "bad namespace: %s" % self.namespace)
+ raise spack.repo.InvalidNamespaceError("bad namespace: %s" % self.namespace)
return self.spec_to_pkg[name]
def exists(self, name):
@@ -105,14 +102,13 @@ class MockPackageMultiRepo(object):
return False
def repo_for_pkg(self, name):
- Repo = collections.namedtuple('Repo', ['namespace'])
- return Repo('mockrepo')
+ Repo = collections.namedtuple("Repo", ["namespace"])
+ return Repo("mockrepo")
def __contains__(self, item):
return item in self.spec_to_pkg
- def add_package(self, name, dependencies=None, dependency_types=None,
- conditions=None):
+ def add_package(self, name, dependencies=None, dependency_types=None, conditions=None):
"""Factory method for creating mock packages.
This creates a new subclass of ``MockPackageBase``, ensures that its
@@ -135,14 +131,14 @@ class MockPackageMultiRepo(object):
dependencies = []
if not dependency_types:
- dependency_types = [
- spack.dependency.default_deptype] * len(dependencies)
+ dependency_types = [spack.dependency.default_deptype] * len(dependencies)
assert len(dependencies) == len(dependency_types)
# new class for the mock package
class MockPackage(MockPackageBase):
pass
+
MockPackage.__name__ = spack.util.naming.mod_to_class(name)
MockPackage.name = name
MockPackage._repo = self
@@ -157,22 +153,20 @@ class MockPackageMultiRepo(object):
dep_conditions = conditions[dep.name]
dep_conditions = dict(
(Spec(x), Dependency(MockPackage, Spec(y), type=dtype))
- for x, y in dep_conditions.items())
+ for x, y in dep_conditions.items()
+ )
MockPackage.dependencies[dep.name] = dep_conditions
# each package has some fake versions
versions = list(Version(x) for x in [1, 2, 3])
- MockPackage.versions = dict(
- (x, {'preferred': False}) for x in versions
- )
+ MockPackage.versions = dict((x, {"preferred": False}) for x in versions)
MockPackage.variants = {}
MockPackage.provided = {}
MockPackage.conflicts = {}
MockPackage.patches = {}
- mock_package = MockPackage(
- dependencies, dependency_types, conditions, versions)
+ mock_package = MockPackage(dependencies, dependency_types, conditions, versions)
self.spec_to_pkg[name] = mock_package
self.spec_to_pkg["mockrepo." + name] = mock_package
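add_package above relies on creating classes at runtime. A hedged sketch of just that dynamic-class trick; make_mock_class and its attributes are invented for illustration:

import collections

def make_mock_class(name):
    # CapWords class name derived from a dashed package name, built as a new type
    cls_name = "".join(part.capitalize() for part in name.split("-"))
    return type(cls_name, (object,), {"name": name, "versions": {1: {}, 2: {}, 3: {}}})

Repo = collections.namedtuple("Repo", ["namespace"])
pkg_cls = make_mock_class("mock-dep")
print(pkg_cls.__name__, pkg_cls.name, Repo("mockrepo").namespace)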
diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py
index 82ce6c07a4..007861085a 100644
--- a/lib/spack/spack/util/module_cmd.py
+++ b/lib/spack/spack/util/module_cmd.py
@@ -16,34 +16,34 @@ import llnl.util.tty as tty
# This list is not exhaustive. Currently we only use load and unload
# If we need another option that changes the environment, add it here.
-module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse']
+module_change_commands = ["load", "swap", "unload", "purge", "use", "unuse"]
# This awk script is a posix alternative to `env -0`
-awk_cmd = (r"""awk 'BEGIN{for(name in ENVIRON)"""
- r"""printf("%s=%s%c", name, ENVIRON[name], 0)}'""")
+awk_cmd = r"""awk 'BEGIN{for(name in ENVIRON)""" r"""printf("%s=%s%c", name, ENVIRON[name], 0)}'"""
def module(*args, **kwargs):
- module_cmd = kwargs.get('module_template', 'module ' + ' '.join(args))
+ module_cmd = kwargs.get("module_template", "module " + " ".join(args))
if args[0] in module_change_commands:
# Suppress module output
- module_cmd += r' >/dev/null 2>&1; ' + awk_cmd
+ module_cmd += r" >/dev/null 2>&1; " + awk_cmd
module_p = subprocess.Popen(
module_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
- executable="/bin/bash")
+ executable="/bin/bash",
+ )
# In Python 3, keys and values of `environ` are byte strings.
environ = {}
output = module_p.communicate()[0]
# Loop over each environment variable key=value byte string
- for entry in output.strip(b'\0').split(b'\0'):
+ for entry in output.strip(b"\0").split(b"\0"):
# Split variable name and value
- parts = entry.split(b'=', 1)
+ parts = entry.split(b"=", 1)
if len(parts) != 2:
continue
environ[parts[0]] = parts[1]
@@ -57,11 +57,13 @@ def module(*args, **kwargs):
else:
# Simply execute commands that don't change state and return output
- module_p = subprocess.Popen(module_cmd,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT,
- shell=True,
- executable="/bin/bash")
+ module_p = subprocess.Popen(
+ module_cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=True,
+ executable="/bin/bash",
+ )
# Decode and str to return a string object in both python 2 and 3
return str(module_p.communicate()[0].decode())
@@ -76,20 +78,20 @@ def load_module(mod):
# We do this without checking that they are already installed
# for ease of programming because unloading a module that is not
# loaded does nothing.
- text = module('show', mod).split()
+ text = module("show", mod).split()
for i, word in enumerate(text):
- if word == 'conflict':
- module('unload', text[i + 1])
+ if word == "conflict":
+ module("unload", text[i + 1])
# Load the module now that there are no conflicts
# Some module systems use stdout and some use stderr
- module('load', mod)
+ module("load", mod)
def get_path_args_from_module_line(line):
- if '(' in line and ')' in line:
+ if "(" in line and ")" in line:
# Determine which lua quote symbol is being used for the argument
- comma_index = line.index(',')
+ comma_index = line.index(",")
cline = line[comma_index:]
try:
quote_index = min(cline.find(q) for q in ['"', "'"] if q in cline)
@@ -108,7 +110,7 @@ def get_path_args_from_module_line(line):
else:
return []
- paths = path_arg.split(':')
+ paths = path_arg.split(":")
return paths
@@ -127,12 +129,11 @@ def path_from_modules(modules):
best_choice = None
for module_name in modules:
# Read the current module and return a candidate path
- text = module('show', module_name).split('\n')
+ text = module("show", module_name).split("\n")
candidate_path = get_path_from_module_contents(text, module_name)
if candidate_path and not os.path.exists(candidate_path):
- msg = ("Extracted path from module does not exist "
- "[module={0}, path={1}]")
+ msg = "Extracted path from module does not exist " "[module={0}, path={1}]"
tty.warn(msg.format(module_name, candidate_path))
# If anything is found, then it's the best choice. This means
@@ -144,8 +145,8 @@ def path_from_modules(modules):
def get_path_from_module_contents(text, module_name):
tty.debug("Module name: " + module_name)
- pkg_var_prefix = module_name.replace('-', '_').upper()
- components = pkg_var_prefix.split('/')
+ pkg_var_prefix = module_name.replace("-", "_").upper()
+ components = pkg_var_prefix.split("/")
# For modules with multiple components like foo/1.0.1, retrieve the package
# name "foo" from the module name
if len(components) > 1:
@@ -157,9 +158,9 @@ def get_path_from_module_contents(text, module_name):
def strip_path(path, endings):
for ending in endings:
if path.endswith(ending):
- return path[:-len(ending)]
- if path.endswith(ending + '/'):
- return path[:-(len(ending) + 1)]
+ return path[: -len(ending)]
+ if path.endswith(ending + "/"):
+ return path[: -(len(ending) + 1)]
return path
def match_pattern_and_strip(line, pattern, strip=[]):
@@ -174,46 +175,46 @@ def get_path_from_module_contents(text, module_name):
if flag_idx >= 0:
# Search for the first occurrence of any separator marking the end of
# the path.
- separators = (' ', '"', "'")
+ separators = (" ", '"', "'")
occurrences = [line.find(s, flag_idx) for s in separators]
indices = [idx for idx in occurrences if idx >= 0]
if indices:
- path = line[flag_idx + len(flag):min(indices)]
+ path = line[flag_idx + len(flag) : min(indices)]
else:
- path = line[flag_idx + len(flag):]
+ path = line[flag_idx + len(flag) :]
path = strip_path(path, strip)
path_occurrences[path] = path_occurrences.get(path, 0) + 1
- lib_endings = ['/lib64', '/lib']
- bin_endings = ['/bin']
- man_endings = ['/share/man', '/man']
+ lib_endings = ["/lib64", "/lib"]
+ bin_endings = ["/bin"]
+ man_endings = ["/share/man", "/man"]
for line in text:
# Check entries of LD_LIBRARY_PATH and CRAY_LD_LIBRARY_PATH
- pattern = r'\W(CRAY_)?LD_LIBRARY_PATH'
+ pattern = r"\W(CRAY_)?LD_LIBRARY_PATH"
match_pattern_and_strip(line, pattern, lib_endings)
# Check {name}_DIR entries
- pattern = r'\W{0}_DIR'.format(pkg_var_prefix)
+ pattern = r"\W{0}_DIR".format(pkg_var_prefix)
match_pattern_and_strip(line, pattern)
# Check {name}_ROOT entries
- pattern = r'\W{0}_ROOT'.format(pkg_var_prefix)
+ pattern = r"\W{0}_ROOT".format(pkg_var_prefix)
match_pattern_and_strip(line, pattern)
# Check entries that update the PATH variable
- pattern = r'\WPATH'
+ pattern = r"\WPATH"
match_pattern_and_strip(line, pattern, bin_endings)
# Check entries that update the MANPATH variable
- pattern = r'MANPATH'
+ pattern = r"MANPATH"
match_pattern_and_strip(line, pattern, man_endings)
# Check entries that add a `-rpath` flag to a variable
- match_flag_and_strip(line, '-rpath', lib_endings)
+ match_flag_and_strip(line, "-rpath", lib_endings)
# Check entries that add a `-L` flag to a variable
- match_flag_and_strip(line, '-L', lib_endings)
+ match_flag_and_strip(line, "-L", lib_endings)
# Whichever path appeared most in the module, we assume is the correct path
if len(path_occurrences) > 0:
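Most of the reformatting above touches strip_path, which is easy to exercise in isolation. A runnable copy with illustrative module paths:

def strip_path(path, endings):
    # remove a known suffix such as /lib64 or /bin from a path found in a modulefile
    for ending in endings:
        if path.endswith(ending):
            return path[: -len(ending)]
        if path.endswith(ending + "/"):
            return path[: -(len(ending) + 1)]
    return path

print(strip_path("/opt/cray/pe/mpich/8.1.0/lib64", ["/lib64", "/lib"]))  # /opt/cray/pe/mpich/8.1.0
print(strip_path("/usr/local/bin/", ["/bin"]))                           # /usr/local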
diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py
index 624bd6ed77..46a2f3528c 100644
--- a/lib/spack/spack/util/naming.py
+++ b/lib/spack/spack/util/naming.py
@@ -15,51 +15,52 @@ from six import StringIO
import spack.error
__all__ = [
- 'mod_to_class',
- 'spack_module_to_python_module',
- 'valid_module_name',
- 'valid_fully_qualified_module_name',
- 'validate_fully_qualified_module_name',
- 'validate_module_name',
- 'possible_spack_module_names',
- 'simplify_name',
- 'NamespaceTrie']
+ "mod_to_class",
+ "spack_module_to_python_module",
+ "valid_module_name",
+ "valid_fully_qualified_module_name",
+ "validate_fully_qualified_module_name",
+ "validate_module_name",
+ "possible_spack_module_names",
+ "simplify_name",
+ "NamespaceTrie",
+]
# Valid module names can contain '-' but can't start with it.
-_valid_module_re = r'^\w[\w-]*$'
+_valid_module_re = r"^\w[\w-]*$"
# Valid module names can contain '-' but can't start with it.
-_valid_fully_qualified_module_re = r'^(\w[\w-]*)(\.\w[\w-]*)*$'
+_valid_fully_qualified_module_re = r"^(\w[\w-]*)(\.\w[\w-]*)*$"
def mod_to_class(mod_name):
"""Convert a name from module style to class name style. Spack mostly
- follows `PEP-8 <http://legacy.python.org/dev/peps/pep-0008/>`_:
+ follows `PEP-8 <http://legacy.python.org/dev/peps/pep-0008/>`_:
- * Module and package names use lowercase_with_underscores.
- * Class names use the CapWords convention.
+ * Module and package names use lowercase_with_underscores.
+ * Class names use the CapWords convention.
- Regular source code follows these conventions. Spack is a bit
- more liberal with its Package names and Compiler names:
+ Regular source code follows these conventions. Spack is a bit
+ more liberal with its Package names and Compiler names:
- * They can contain '-' as well as '_', but cannot start with '-'.
- * They can start with numbers, e.g. "3proxy".
+ * They can contain '-' as well as '_', but cannot start with '-'.
+ * They can start with numbers, e.g. "3proxy".
- This function converts from the module convention to the class
- convention by removing _ and - and converting surrounding
- lowercase text to CapWords. If mod_name starts with a number,
- the class name returned will be prepended with '_' to make a
- valid Python identifier.
+ This function converts from the module convention to the class
+ convention by removing _ and - and converting surrounding
+ lowercase text to CapWords. If mod_name starts with a number,
+ the class name returned will be prepended with '_' to make a
+ valid Python identifier.
"""
validate_module_name(mod_name)
- class_name = re.sub(r'[-_]+', '-', mod_name)
- class_name = string.capwords(class_name, '-')
- class_name = class_name.replace('-', '')
+ class_name = re.sub(r"[-_]+", "-", mod_name)
+ class_name = string.capwords(class_name, "-")
+ class_name = class_name.replace("-", "")
# If a class starts with a number, prefix it with Number_ to make it
# a valid Python class name.
- if re.match(r'^[0-9]', class_name):
+ if re.match(r"^[0-9]", class_name):
class_name = "_%s" % class_name
return class_name
@@ -67,27 +68,27 @@ def mod_to_class(mod_name):
def spack_module_to_python_module(mod_name):
"""Given a Spack module name, returns the name by which it can be
- imported in Python.
+ imported in Python.
"""
- if re.match(r'[0-9]', mod_name):
- mod_name = 'num' + mod_name
+ if re.match(r"[0-9]", mod_name):
+ mod_name = "num" + mod_name
- return mod_name.replace('-', '_')
+ return mod_name.replace("-", "_")
def possible_spack_module_names(python_mod_name):
"""Given a Python module name, return a list of all possible spack module
- names that could correspond to it."""
- mod_name = re.sub(r'^num(\d)', r'\1', python_mod_name)
+ names that could correspond to it."""
+ mod_name = re.sub(r"^num(\d)", r"\1", python_mod_name)
- parts = re.split(r'(_)', mod_name)
- options = [['_', '-']] * mod_name.count('_')
+ parts = re.split(r"(_)", mod_name)
+ options = [["_", "-"]] * mod_name.count("_")
results = []
for subs in itertools.product(*options):
s = list(parts)
s[1::2] = subs
- results.append(''.join(s))
+ results.append("".join(s))
return results
@@ -112,7 +113,7 @@ def simplify_name(name):
# Rename Intel downloads
# e.g. l_daal, l_ipp, l_mkl -> daal, ipp, mkl
- if name.startswith('l_'):
+ if name.startswith("l_"):
name = name[2:]
# Convert UPPERCASE to lowercase
@@ -121,21 +122,21 @@ def simplify_name(name):
# Replace '_' and '.' with '-'
# e.g. backports.ssl_match_hostname -> backports-ssl-match-hostname
- name = name.replace('_', '-')
- name = name.replace('.', '-')
+ name = name.replace("_", "-")
+ name = name.replace(".", "-")
# Replace "++" with "pp" and "+" with "-plus"
# e.g. gtk+ -> gtk-plus
# e.g. voro++ -> voropp
- name = name.replace('++', 'pp')
- name = name.replace('+', '-plus')
+ name = name.replace("++", "pp")
+ name = name.replace("+", "-plus")
# Simplify Lua package names
# We don't want "lua" to occur multiple times in the name
- name = re.sub('^(lua)([^-])', r'\1-\2', name)
+ name = re.sub("^(lua)([^-])", r"\1-\2", name)
# Simplify Bio++ package names
- name = re.sub('^(bpp)([^-])', r'\1-\2', name)
+ name = re.sub("^(bpp)([^-])", r"\1-\2", name)
return name
@@ -166,8 +167,7 @@ class InvalidModuleNameError(spack.error.SpackError):
"""Raised when we encounter a bad module name."""
def __init__(self, name):
- super(InvalidModuleNameError, self).__init__(
- "Invalid module name: " + name)
+ super(InvalidModuleNameError, self).__init__("Invalid module name: " + name)
self.name = name
@@ -176,18 +176,17 @@ class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
def __init__(self, name):
super(InvalidFullyQualifiedModuleNameError, self).__init__(
- "Invalid fully qualified package name: " + name)
+ "Invalid fully qualified package name: " + name
+ )
self.name = name
class NamespaceTrie(object):
-
class Element(object):
-
def __init__(self, value):
self.value = value
- def __init__(self, separator='.'):
+ def __init__(self, separator="."):
self._subspaces = {}
self._value = None
self._sep = separator
@@ -220,7 +219,7 @@ class NamespaceTrie(object):
def is_prefix(self, namespace):
"""True if the namespace has a value, or if it's the prefix of one that
- does."""
+ does."""
first, sep, rest = namespace.partition(self._sep)
if not first:
return True
@@ -254,11 +253,11 @@ class NamespaceTrie(object):
return self.has_value(namespace)
def _str_helper(self, stream, level=0):
- indent = (level * ' ')
+ indent = level * " "
for name in sorted(self._subspaces):
- stream.write(indent + name + '\n')
+ stream.write(indent + name + "\n")
if self._value:
- stream.write(indent + ' ' + repr(self._value.value))
+ stream.write(indent + " " + repr(self._value.value))
stream.write(self._subspaces[name]._str_helper(stream, level + 1))
def __str__(self):
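The docstring above spells out the module-name-to-class-name rules, and they are short enough to run end to end. A standalone sketch with the validation step omitted:

import re
import string

def mod_to_class(mod_name):
    # collapse runs of '-' and '_', CapWords each piece, then drop the separators
    class_name = re.sub(r"[-_]+", "-", mod_name)
    class_name = string.capwords(class_name, "-")
    class_name = class_name.replace("-", "")
    # a leading digit is not a valid identifier, so prefix with '_'
    if re.match(r"^[0-9]", class_name):
        class_name = "_%s" % class_name
    return class_name

print(mod_to_class("py-numpy"))  # PyNumpy
print(mod_to_class("3proxy"))    # _3proxy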
diff --git a/lib/spack/spack/util/package_hash.py b/lib/spack/spack/util/package_hash.py
index 7b97649eb5..4877748338 100644
--- a/lib/spack/spack/util/package_hash.py
+++ b/lib/spack/spack/util/package_hash.py
@@ -23,6 +23,7 @@ class RemoveDocstrings(ast.NodeTransformer):
the declaration.
"""
+
def remove_docstring(self, node):
def unused_string(node):
"""Criteria for unassigned body strings."""
@@ -75,29 +76,39 @@ class RemoveDirectives(ast.NodeTransformer):
# Note that changes to directives (e.g., a preferred version change or a hash
# change on an archive) are already represented in the spec *outside* the
# package hash.
- return None if (
- node.value and isinstance(node.value, ast.Call) and
- isinstance(node.value.func, ast.Name) and
- node.value.func.id in spack.directives.directive_names
- ) else node
+ return (
+ None
+ if (
+ node.value
+ and isinstance(node.value, ast.Call)
+ and isinstance(node.value.func, ast.Name)
+ and node.value.func.id in spack.directives.directive_names
+ )
+ else node
+ )
def visit_Assign(self, node):
# Remove assignments to metadata attributes, b/c they don't affect the build.
- return None if (
- node.targets and isinstance(node.targets[0], ast.Name) and
- node.targets[0].id in self.metadata_attrs
- ) else node
+ return (
+ None
+ if (
+ node.targets
+ and isinstance(node.targets[0], ast.Name)
+ and node.targets[0].id in self.metadata_attrs
+ )
+ else node
+ )
def visit_With(self, node):
- self.generic_visit(node) # visit children
+ self.generic_visit(node) # visit children
return node if node.body else None # remove with statement if it has no body
def visit_For(self, node):
- self.generic_visit(node) # visit children
+ self.generic_visit(node) # visit children
return node if node.body else None # remove loop if it has no body
def visit_While(self, node):
- self.generic_visit(node) # visit children
+ self.generic_visit(node) # visit children
return node if node.body else None # remove loop if it has no body
def visit_If(self, node):
@@ -139,6 +150,7 @@ class RemoveDirectives(ast.NodeTransformer):
class TagMultiMethods(ast.NodeVisitor):
"""Tag @when-decorated methods in a package AST."""
+
def __init__(self, spec):
self.spec = spec
# map from function name to (implementation, condition_list) tuples
@@ -147,7 +159,7 @@ class TagMultiMethods(ast.NodeVisitor):
def visit_FunctionDef(self, func):
conditions = []
for dec in func.decorator_list:
- if isinstance(dec, ast.Call) and dec.func.id == 'when':
+ if isinstance(dec, ast.Call) and dec.func.id == "when":
try:
# evaluate spec condition for any when's
cond = dec.args[0].s
@@ -231,6 +243,7 @@ class ResolveMultiMethods(ast.NodeTransformer):
package hash, because either could be chosen.
"""
+
def __init__(self, methods):
self.methods = methods
@@ -280,8 +293,9 @@ class ResolveMultiMethods(ast.NodeTransformer):
# strip the when decorators (preserve the rest)
func.decorator_list = [
- dec for dec in func.decorator_list
- if not (isinstance(dec, ast.Call) and dec.func.id == 'when')
+ dec
+ for dec in func.decorator_list
+ if not (isinstance(dec, ast.Call) and dec.func.id == "when")
]
return func
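All of the transformers above lean on the same ast.NodeTransformer convention: a visit method that returns None drops the node. A small self-contained illustration of that convention, removing print statements instead of Spack directives:

import ast

class DropPrints(ast.NodeTransformer):
    def visit_Expr(self, node):
        call = node.value
        if (isinstance(call, ast.Call) and isinstance(call.func, ast.Name)
                and call.func.id == "print"):
            return None  # returning None removes the statement from the tree
        return node

tree = DropPrints().visit(ast.parse("x = 1\nprint(x)\ny = x + 1\n"))
print(ast.dump(tree))  # only the two assignments remain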
diff --git a/lib/spack/spack/util/parallel.py b/lib/spack/spack/util/parallel.py
index a931bba0c6..55caa6a76b 100644
--- a/lib/spack/spack/util/parallel.py
+++ b/lib/spack/spack/util/parallel.py
@@ -15,6 +15,7 @@ from .cpus import cpus_available
class ErrorFromWorker(object):
"""Wrapper class to report an error from a worker process"""
+
def __init__(self, exc_cls, exc, tb):
"""Create an error object from an exception raised from
the worker process.
@@ -27,7 +28,7 @@ class ErrorFromWorker(object):
"""
self.pid = os.getpid()
self.error_message = str(exc)
- self.stacktrace_message = ''.join(traceback.format_exception(exc_cls, exc, tb))
+ self.stacktrace_message = "".join(traceback.format_exception(exc_cls, exc, tb))
@property
def stacktrace(self):
@@ -45,6 +46,7 @@ class Task(object):
We are using a wrapper class instead of a decorator since the class
is pickleable, while a decorator with an inner closure is not.
"""
+
def __init__(self, func):
self.func = func
@@ -67,18 +69,16 @@ def raise_if_errors(*results, **kwargs):
Raise:
RuntimeError: if ErrorFromWorker objects are in the results
"""
- debug = kwargs.get('debug', False) # This can be a keyword only arg in Python 3
+ debug = kwargs.get("debug", False) # This can be a keyword only arg in Python 3
errors = [x for x in results if isinstance(x, ErrorFromWorker)]
if not errors:
return
- msg = '\n'.join([
- error.stacktrace if debug else str(error) for error in errors
- ])
+ msg = "\n".join([error.stacktrace if debug else str(error) for error in errors])
- error_fmt = '{0}'
+ error_fmt = "{0}"
if len(errors) > 1 and not debug:
- error_fmt = 'errors occurred during concretization of the environment:\n{0}'
+ error_fmt = "errors occurred during concretization of the environment:\n{0}"
raise RuntimeError(error_fmt.format(msg))
@@ -127,7 +127,7 @@ def parallel_map(func, arguments, max_processes=None, debug=False):
RuntimeError: if any error occurred in the worker processes
"""
task_wrapper = Task(func)
- if sys.platform != 'darwin' and sys.platform != 'win32':
+ if sys.platform != "darwin" and sys.platform != "win32":
with pool(processes=num_processes(max_processes=max_processes)) as p:
results = p.map(task_wrapper, arguments)
else:
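The pattern above returns worker exceptions as data and re-raises them in the parent. A hedged sketch with a plain multiprocessing pool; WorkerError and task are simplified stand-ins, not Spack's ErrorFromWorker and Task classes:

import multiprocessing
import traceback

class WorkerError:
    """Picklable stand-in for a worker failure: carries message and traceback."""
    def __init__(self, exc):
        self.message = str(exc)
        self.stacktrace = traceback.format_exc()

def task(x):
    try:
        return 100 // x
    except Exception as exc:
        return WorkerError(exc)

if __name__ == "__main__":
    with multiprocessing.Pool(2) as pool:
        results = pool.map(task, [1, 2, 0, 5])
    for err in (r for r in results if isinstance(r, WorkerError)):
        print("worker failed:", err.message)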
diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py
index f2b52e57c3..4557ed5daf 100644
--- a/lib/spack/spack/util/path.py
+++ b/lib/spack/spack/util/path.py
@@ -22,23 +22,22 @@ from llnl.util.lang import memoized
import spack.util.spack_yaml as syaml
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
-__all__ = [
- 'substitute_config_variables',
- 'substitute_path_variables',
- 'canonicalize_path']
+__all__ = ["substitute_config_variables", "substitute_path_variables", "canonicalize_path"]
# Substitutions to perform
def replacements():
# break circular import from spack.util.executable
import spack.paths
+
return {
- 'spack': spack.paths.prefix,
- 'user': getpass.getuser(),
- 'tempdir': tempfile.gettempdir(),
- 'user_cache_path': spack.paths.user_cache_path}
+ "spack": spack.paths.prefix,
+ "user": getpass.getuser(),
+ "tempdir": tempfile.gettempdir(),
+ "user_cache_path": spack.paths.user_cache_path,
+ }
# This is intended to be longer than the part of the install path
@@ -58,18 +57,18 @@ SPACK_MAX_INSTALL_PATH_LENGTH = 300
#: Padded paths comprise directories with this name (or some prefix of it). :
#: It starts with two underscores to make it unlikely that prefix matches would
#: include some other component of the installation path.
-SPACK_PATH_PADDING_CHARS = '__spack_path_placeholder__'
+SPACK_PATH_PADDING_CHARS = "__spack_path_placeholder__"
def is_path_url(path):
- if '\\' in path:
+ if "\\" in path:
return False
url_tuple = urlparse(path)
return bool(url_tuple.scheme) and len(url_tuple.scheme) > 1
def win_exe_ext():
- return '.exe'
+ return ".exe"
def path_to_os_path(*pths):
@@ -80,8 +79,7 @@ def path_to_os_path(*pths):
"""
ret_pths = []
for pth in pths:
- if type(pth) is str and\
- not is_path_url(pth):
+ if type(pth) is str and not is_path_url(pth):
pth = convert_to_platform_path(pth)
ret_pths.append(pth)
return ret_pths
@@ -107,18 +105,18 @@ def sanitize_file_path(pth):
pth_cmpnts = pth.split(os.path.sep)
if is_windows:
- drive_match = r'[a-zA-Z]:'
+ drive_match = r"[a-zA-Z]:"
is_abs = bool(re.match(drive_match, pth_cmpnts[0]))
- drive = pth_cmpnts[0] + os.path.sep if is_abs else ''
+ drive = pth_cmpnts[0] + os.path.sep if is_abs else ""
pth_cmpnts = pth_cmpnts[1:] if drive else pth_cmpnts
illegal_chars = r'[<>?:"|*\\]'
else:
- drive = '/' if not pth_cmpnts[0] else ''
- illegal_chars = r'[/]'
+ drive = "/" if not pth_cmpnts[0] else ""
+ illegal_chars = r"[/]"
pth = []
for cmp in pth_cmpnts:
- san_cmp = re.sub(illegal_chars, '', cmp)
+ san_cmp = re.sub(illegal_chars, "", cmp)
pth.append(san_cmp)
return drive + os.path.join(*pth)
@@ -151,7 +149,9 @@ def system_path_filter(_func=None, arg_slice=None):
else:
args = path_to_os_path(*args)
return func(*args, **kwargs)
+
return path_filter_caller
+
if _func:
return holder_func(_func)
return holder_func
@@ -165,14 +165,15 @@ def get_system_path_max():
sys_max_path_length = 260
else:
try:
- path_max_proc = subprocess.Popen(['getconf', 'PATH_MAX', '/'],
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
+ path_max_proc = subprocess.Popen(
+ ["getconf", "PATH_MAX", "/"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+ )
proc_output = str(path_max_proc.communicate()[0].decode())
sys_max_path_length = int(proc_output)
except (ValueError, subprocess.CalledProcessError, OSError):
- tty.msg('Unable to find system max path length, using: {0}'.format(
- sys_max_path_length))
+ tty.msg(
+ "Unable to find system max path length, using: {0}".format(sys_max_path_length)
+ )
return sys_max_path_length
@@ -185,10 +186,10 @@ class Path:
exposing the path type of
the current platform.
"""
+
unix = 0
windows = 1
- platform_path = windows if is_windows\
- else unix
+ platform_path = windows if is_windows else unix
def format_os_path(path, mode=Path.unix):
@@ -207,9 +208,9 @@ def format_os_path(path, mode=Path.unix):
if not path:
return path
if mode == Path.windows:
- path = path.replace('/', '\\')
+ path = path.replace("/", "\\")
else:
- path = path.replace('\\', '/')
+ path = path.replace("\\", "/")
return path
@@ -242,21 +243,22 @@ def substitute_config_variables(path):
environment yaml files.
"""
import spack.environment as ev # break circular
+
_replacements = replacements()
env = ev.active_environment()
if env:
- _replacements.update({'env': env.path})
+ _replacements.update({"env": env.path})
else:
# If a previous invocation added env, remove it
- _replacements.pop('env', None)
+ _replacements.pop("env", None)
# Look up replacements
def repl(match):
- m = match.group(0).strip('${}')
+ m = match.group(0).strip("${}")
return _replacements.get(m.lower(), match.group(0))
# Replace $var or ${var}.
- return re.sub(r'(\$\w+\b|\$\{\w+\})', repl, path)
+ return re.sub(r"(\$\w+\b|\$\{\w+\})", repl, path)
def substitute_path_variables(path):
@@ -342,7 +344,7 @@ def longest_prefix_re(string, capture=True):
return "(%s%s%s?)" % (
"" if capture else "?:",
string[0],
- longest_prefix_re(string[1:], capture=False)
+ longest_prefix_re(string[1:], capture=False),
)
@@ -377,7 +379,7 @@ def padding_filter(string):
longest_prefix = longest_prefix_re(pad)
regex = (
r"((?:/[^/\s]*)*?)" # zero or more leading non-whitespace path components
- r"(/{pad})+" # the padding string repeated one or more times
+ r"(/{pad})+" # the padding string repeated one or more times
r"(/{longest_prefix})?(?=/)" # trailing prefix of padding as path component
)
regex = regex.replace("/", os.sep)
@@ -385,11 +387,8 @@ def padding_filter(string):
_filter_re = re.compile(regex)
def replacer(match):
- return "%s%s[padded-to-%d-chars]" % (
- match.group(1),
- os.sep,
- len(match.group(0))
- )
+ return "%s%s[padded-to-%d-chars]" % (match.group(1), os.sep, len(match.group(0)))
+
return _filter_re.sub(replacer, string)
@@ -401,6 +400,7 @@ def filter_padding():
long padded installation path.
"""
import spack.config
+
padding = spack.config.get("config:install_tree:padded_length", None)
if padding:
# filter out all padding from the install command output
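The repl/re.sub pair above is the heart of the variable substitution. A simplified standalone version with an invented replacement table:

import re

replacements = {"spack": "/opt/spack", "user": "alice", "tempdir": "/tmp"}  # illustrative values

def substitute(path):
    def repl(match):
        key = match.group(0).strip("${}")
        return replacements.get(key.lower(), match.group(0))
    # replace $var or ${var}, leaving unknown variables untouched
    return re.sub(r"(\$\w+\b|\$\{\w+\})", repl, path)

print(substitute("$tempdir/$user/stage"))      # /tmp/alice/stage
print(substitute("${spack}/var/spack/stage"))  # /opt/spack/var/spack/stage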
diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py
index 220e18b4a3..6732fbf945 100644
--- a/lib/spack/spack/util/pattern.py
+++ b/lib/spack/spack/util/pattern.py
@@ -15,8 +15,7 @@ class Delegate(object):
self.container = container
def __call__(self, *args, **kwargs):
- return [getattr(item, self.name)(*args, **kwargs)
- for item in self.container]
+ return [getattr(item, self.name)(*args, **kwargs) for item in self.container]
class Composite(list):
@@ -24,7 +23,7 @@ class Composite(list):
self.fns_to_delegate = fns_to_delegate
def __getattr__(self, name):
- if name != 'fns_to_delegate' and name in self.fns_to_delegate:
+ if name != "fns_to_delegate" and name in self.fns_to_delegate:
return Delegate(name, self)
else:
return self.__getattribute__(name)
@@ -60,14 +59,14 @@ def composite(interface=None, method_list=None, container=list):
# are defined
if interface is None and method_list is None:
raise TypeError(
- "Either 'interface' or 'method_list' must be defined on a call "
- "to composite")
+ "Either 'interface' or 'method_list' must be defined on a call " "to composite"
+ )
def cls_decorator(cls):
# Retrieve the base class of the composite. Inspect its methods and
# decide which ones will be overridden
def no_special_no_private(x):
- return callable(x) and not x.__name__.startswith('_')
+ return callable(x) and not x.__name__.startswith("_")
# Patch the behavior of each of the methods in the previous list.
# This is done associating an instance of the descriptor below to
@@ -88,6 +87,7 @@ def composite(interface=None, method_list=None, container=list):
def getter(*args, **kwargs):
for item in instance:
getattr(item, self.name)(*args, **kwargs)
+
# If we are using this descriptor to wrap a method from an
# interface, then we must conditionally use the
# `functools.wraps` decorator to set the appropriate fields
@@ -99,26 +99,24 @@ def composite(interface=None, method_list=None, container=list):
# Construct a dictionary with the methods explicitly passed as name
if method_list is not None:
- dictionary_for_type_call.update(
- (name, IterateOver(name)) for name in method_list)
+ dictionary_for_type_call.update((name, IterateOver(name)) for name in method_list)
# Construct a dictionary with the methods inspected from the interface
if interface is not None:
dictionary_for_type_call.update(
(name, IterateOver(name, method))
- for name, method in inspect.getmembers(
- interface, predicate=no_special_no_private))
+ for name, method in inspect.getmembers(interface, predicate=no_special_no_private)
+ )
# Get the methods that are defined in the scope of the composite
# class and override any previous definition
dictionary_for_type_call.update(
- (name, method) for name, method in inspect.getmembers(
- cls, predicate=inspect.ismethod))
+ (name, method) for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)
+ )
# Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes?
- wrapper_class = type(cls.__name__, (cls, container),
- dictionary_for_type_call)
+ wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
return wrapper_class
return cls_decorator
@@ -133,5 +131,6 @@ class Bunch(object):
class Args(Bunch):
"""Subclass of Bunch to write argparse args more naturally."""
+
def __init__(self, *flags, **kwargs):
super(Args, self).__init__(flags=tuple(flags), kwargs=kwargs)
diff --git a/lib/spack/spack/util/prefix.py b/lib/spack/spack/util/prefix.py
index 7b1d15877c..a4eb30b7c0 100644
--- a/lib/spack/spack/util/prefix.py
+++ b/lib/spack/spack/util/prefix.py
@@ -36,6 +36,7 @@ class Prefix(str):
This prints ``foobar /usr``. All of this is meant to make custom
installs easy.
"""
+
def __getattr__(self, attr):
return Prefix(os.path.join(self, attr))
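The class touched above is a str subclass whose attribute access joins path components. A minimal copy to show the effect:

import os

class Prefix(str):
    def __getattr__(self, attr):
        # unknown attributes become joined sub-paths, still of type Prefix
        return Prefix(os.path.join(self, attr))

prefix = Prefix("/usr")
print(prefix.lib)             # /usr/lib
print(prefix.share.man.man1)  # /usr/share/man/man1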
diff --git a/lib/spack/spack/util/s3.py b/lib/spack/spack/util/s3.py
index 492cb15585..9945ae5e5f 100644
--- a/lib/spack/spack/util/s3.py
+++ b/lib/spack/spack/util/s3.py
@@ -30,8 +30,8 @@ def get_mirror_connection(url, url_type="push"):
def _parse_s3_endpoint_url(endpoint_url):
- if not urllib_parse.urlparse(endpoint_url, scheme='').scheme:
- endpoint_url = '://'.join(('https', endpoint_url))
+ if not urllib_parse.urlparse(endpoint_url, scheme="").scheme:
+ endpoint_url = "://".join(("https", endpoint_url))
return endpoint_url
@@ -49,11 +49,11 @@ def get_mirror_s3_connection_info(connection):
if connection.get("profile"):
s3_connection["profile_name"] = connection["profile"]
- s3_client_args = {"use_ssl": spack.config.get('config:verify_ssl')}
+ s3_client_args = {"use_ssl": spack.config.get("config:verify_ssl")}
- endpoint_url = os.environ.get('S3_ENDPOINT_URL')
+ endpoint_url = os.environ.get("S3_ENDPOINT_URL")
if endpoint_url:
- s3_client_args['endpoint_url'] = _parse_s3_endpoint_url(endpoint_url)
+ s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(endpoint_url)
elif s3_connection_is_dict and connection.get("endpoint_url"):
s3_client_args["endpoint_url"] = _parse_s3_endpoint_url(connection["endpoint_url"])
@@ -62,10 +62,10 @@ def get_mirror_s3_connection_info(connection):
def create_s3_session(url, connection={}):
url = url_util.parse(url)
- if url.scheme != 's3':
+ if url.scheme != "s3":
raise ValueError(
- 'Can not create S3 session from URL with scheme: {SCHEME}'.format(
- SCHEME=url.scheme))
+ "Can not create S3 session from URL with scheme: {SCHEME}".format(SCHEME=url.scheme)
+ )
# NOTE(opadron): import boto and friends as late as possible. We don't
# want to require boto as a dependency unless the user actually wants to
@@ -83,6 +83,6 @@ def create_s3_session(url, connection={}):
s3_client_args["config"] = Config(signature_version=UNSIGNED)
- client = session.client('s3', **s3_client_args)
+ client = session.client("s3", **s3_client_args)
client.ClientError = ClientError
return client
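The endpoint normalization above just supplies an https scheme when none is present. A standalone sketch with illustrative endpoints:

from urllib.parse import urlparse

def normalize_endpoint(endpoint_url):
    # bare hostnames get an https:// scheme before being handed to the S3 client
    if not urlparse(endpoint_url, scheme="").scheme:
        endpoint_url = "://".join(("https", endpoint_url))
    return endpoint_url

print(normalize_endpoint("minio.example.com"))       # https://minio.example.com
print(normalize_endpoint("http://localhost:9000"))   # unchanged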
diff --git a/lib/spack/spack/util/spack_json.py b/lib/spack/spack/util/spack_json.py
index b9fcd1f53f..305af820aa 100644
--- a/lib/spack/spack/util/spack_json.py
+++ b/lib/spack/spack/util/spack_json.py
@@ -12,21 +12,18 @@ from six import PY3, iteritems, string_types
import spack.error
-__all__ = ['load', 'dump', 'SpackJSONError', 'encode_json_dict', 'decode_json_dict']
+__all__ = ["load", "dump", "SpackJSONError", "encode_json_dict", "decode_json_dict"]
-_json_dump_args = {
- 'indent': 2,
- 'separators': (',', ': ')
-}
+_json_dump_args = {"indent": 2, "separators": (",", ": ")}
def load(stream):
# type: (Any) -> Dict
"""Spack JSON needs to be ordered to support specs."""
if isinstance(stream, string_types):
- load = json.loads # type: ignore[assignment]
+ load = json.loads # type: ignore[assignment]
else:
- load = json.load # type: ignore[assignment]
+ load = json.load # type: ignore[assignment]
return _strify(load(stream, object_hook=_strify), ignore_dicts=True)
@@ -42,8 +39,8 @@ def dump(data, stream=None):
"""Dump JSON with a reasonable amount of indentation and separation."""
data = _strify(data)
if stream is None:
- return json.dumps(data, **_json_dump_args) # type: ignore[arg-type]
- json.dump(data, stream, **_json_dump_args) # type: ignore[arg-type]
+ return json.dumps(data, **_json_dump_args) # type: ignore[arg-type]
+ json.dump(data, stream, **_json_dump_args) # type: ignore[arg-type]
return None
@@ -64,7 +61,7 @@ def _strify(data, ignore_dicts=False):
# if this is a unicode string in python 2, return its string representation
if isinstance(data, string_types):
- return data.encode('utf-8')
+ return data.encode("utf-8")
# if this is a list of values, return list of byteified values
if isinstance(data, list):
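The dump helper above keeps one fixed set of json arguments and switches between returning a string and writing to a stream. A trimmed standalone sketch with made-up spec data:

import json
import sys

_json_dump_args = {"indent": 2, "separators": (",", ": ")}

def dump(data, stream=None):
    # return a string when no stream is given, otherwise write to the stream
    if stream is None:
        return json.dumps(data, **_json_dump_args)
    json.dump(data, stream, **_json_dump_args)

print(dump({"name": "zlib", "version": "1.2.12"}))
dump({"ok": True}, sys.stdout)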
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py
index 227b9d7449..a6c2a5660c 100644
--- a/lib/spack/spack/util/spack_yaml.py
+++ b/lib/spack/spack/util/spack_yaml.py
@@ -27,15 +27,15 @@ from llnl.util.tty.color import cextra, clen, colorize
import spack.error
# Only export load and dump
-__all__ = ['load', 'dump', 'SpackYAMLError']
+__all__ = ["load", "dump", "SpackYAMLError"]
# Make new classes so we can add custom attributes.
# Also, use OrderedDict instead of just dict.
class syaml_dict(collections.OrderedDict):
def __repr__(self):
- mappings = ('%r: %r' % (k, v) for k, v in self.items())
- return '{%s}' % ', '.join(mappings)
+ mappings = ("%r: %r" % (k, v) for k, v in self.items())
+ return "{%s}" % ", ".join(mappings)
class syaml_list(list):
@@ -59,9 +59,7 @@ syaml_types = {
}
-markable_types = set(syaml_types) | set([
- yaml.comments.CommentedSeq,
- yaml.comments.CommentedMap])
+markable_types = set(syaml_types) | set([yaml.comments.CommentedSeq, yaml.comments.CommentedMap])
def syaml_type(obj):
@@ -83,34 +81,39 @@ def markable(obj):
def mark(obj, node):
"""Add start and end markers to an object."""
- if hasattr(node, 'start_mark'):
+ if hasattr(node, "start_mark"):
obj._start_mark = node.start_mark
- elif hasattr(node, '_start_mark'):
+ elif hasattr(node, "_start_mark"):
obj._start_mark = node._start_mark
- if hasattr(node, 'end_mark'):
+ if hasattr(node, "end_mark"):
obj._end_mark = node.end_mark
- elif hasattr(node, '_end_mark'):
+ elif hasattr(node, "_end_mark"):
obj._end_mark = node._end_mark
def marked(obj):
"""Whether an object has been marked by spack_yaml."""
- return (hasattr(obj, '_start_mark') and obj._start_mark or
- hasattr(obj, '_end_mark') and obj._end_mark)
+ return (
+ hasattr(obj, "_start_mark")
+ and obj._start_mark
+ or hasattr(obj, "_end_mark")
+ and obj._end_mark
+ )
class OrderedLineLoader(RoundTripLoader):
"""YAML loader specifically intended for reading Spack configuration
- files. It preserves order and line numbers. It also has special-purpose
- logic for handling dictionary keys that indicate a Spack config
- override: namely any key that contains an "extra" ':' character.
+ files. It preserves order and line numbers. It also has special-purpose
+ logic for handling dictionary keys that indicate a Spack config
+ override: namely any key that contains an "extra" ':' character.
- Mappings read in by this loader behave like an ordered dict.
- Sequences, mappings, and strings also have new attributes,
- ``_start_mark`` and ``_end_mark``, that preserve YAML line
- information in the output data.
+ Mappings read in by this loader behave like an ordered dict.
+ Sequences, mappings, and strings also have new attributes,
+ ``_start_mark`` and ``_end_mark``, that preserve YAML line
+ information in the output data.
"""
+
#
# Override construct_yaml_* so that we can apply _start_mark/_end_mark to
# them. The superclass returns CommentedMap/CommentedSeq objects that we
@@ -127,7 +130,7 @@ class OrderedLineLoader(RoundTripLoader):
# so this assumes we are talking about a Spack config override key if
# it ends with a ':' and does not contain a '@' (which can appear
# in config values that refer to Spack specs)
- if value and value.endswith(':') and '@' not in value:
+ if value and value.endswith(":") and "@" not in value:
value = syaml_str(value[:-1])
value.override = True
else:
@@ -155,21 +158,18 @@ class OrderedLineLoader(RoundTripLoader):
# register above new constructors
-OrderedLineLoader.add_constructor(
- 'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
-OrderedLineLoader.add_constructor(
- 'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
-OrderedLineLoader.add_constructor(
- 'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
+OrderedLineLoader.add_constructor("tag:yaml.org,2002:map", OrderedLineLoader.construct_yaml_map)
+OrderedLineLoader.add_constructor("tag:yaml.org,2002:seq", OrderedLineLoader.construct_yaml_seq)
+OrderedLineLoader.add_constructor("tag:yaml.org,2002:str", OrderedLineLoader.construct_yaml_str)
class OrderedLineDumper(RoundTripDumper):
"""Dumper that preserves ordering and formats ``syaml_*`` objects.
- This dumper preserves insertion ordering ``syaml_dict`` objects
- when they're written out. It also has some custom formatters
- for ``syaml_*`` objects so that they are formatted like their
- regular Python equivalents, instead of ugly YAML pyobjects.
+ This dumper preserves insertion ordering ``syaml_dict`` objects
+ when they're written out. It also has some custom formatters
+ for ``syaml_*`` objects so that they are formatted like their
+ regular Python equivalents, instead of ugly YAML pyobjects.
"""
@@ -184,13 +184,12 @@ class OrderedLineDumper(RoundTripDumper):
return result
def represent_str(self, data):
- if hasattr(data, 'override') and data.override:
- data = data + ':'
+ if hasattr(data, "override") and data.override:
+ data = data + ":"
return super(OrderedLineDumper, self).represent_str(data)
class SafeDumper(RoundTripDumper):
-
def ignore_aliases(self, _data):
"""Make the dumper NEVER print YAML aliases."""
return True
@@ -209,15 +208,16 @@ maxint = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1
def dump(obj, default_flow_style=False, stream=None):
- return yaml.dump(obj, default_flow_style=default_flow_style, width=maxint,
- Dumper=SafeDumper, stream=stream)
+ return yaml.dump(
+ obj, default_flow_style=default_flow_style, width=maxint, Dumper=SafeDumper, stream=stream
+ )
def file_line(mark):
"""Format a mark as <file>:<line> information."""
result = mark.name
if mark.line:
- result += ':' + str(mark.line)
+ result += ":" + str(mark.line)
return result
@@ -244,12 +244,13 @@ class LineAnnotationDumper(OrderedLineDumper):
writes to a ``StringIO`` then joins the lines from that with
annotations.
"""
+
saved = None
def __init__(self, *args, **kwargs):
super(LineAnnotationDumper, self).__init__(*args, **kwargs)
del _annotations[:]
- self.colors = 'KgrbmcyGRBMCY'
+ self.colors = "KgrbmcyGRBMCY"
self.filename_colors = {}
def process_scalar(self):
@@ -271,7 +272,7 @@ class LineAnnotationDumper(OrderedLineDumper):
def write_line_break(self):
super(LineAnnotationDumper, self).write_line_break()
if self.saved is None:
- _annotations.append(colorize('@K{---}'))
+ _annotations.append(colorize("@K{---}"))
return
# append annotations at the end of each line
@@ -284,19 +285,19 @@ class LineAnnotationDumper(OrderedLineDumper):
color = self.colors[len(self.filename_colors) % ncolors]
self.filename_colors[mark.name] = color
- fmt = '@%s{%%s}' % color
+ fmt = "@%s{%%s}" % color
ann = fmt % mark.name
if mark.line is not None:
- ann += ':@c{%s}' % (mark.line + 1)
+ ann += ":@c{%s}" % (mark.line + 1)
_annotations.append(colorize(ann))
else:
- _annotations.append('')
+ _annotations.append("")
def load_config(*args, **kwargs):
"""Load but modify the loader instance so that it will add __line__
- attributes to the returned object."""
- kwargs['Loader'] = OrderedLineLoader
+ attributes to the returned object."""
+ kwargs["Loader"] = OrderedLineLoader
return yaml.load(*args, **kwargs)
@@ -305,17 +306,17 @@ def load(*args, **kwargs):
def dump_config(*args, **kwargs):
- blame = kwargs.pop('blame', False)
+ blame = kwargs.pop("blame", False)
if blame:
return dump_annotated(*args, **kwargs)
else:
- kwargs['Dumper'] = OrderedLineDumper
+ kwargs["Dumper"] = OrderedLineDumper
return yaml.dump(*args, **kwargs)
def dump_annotated(data, stream=None, *args, **kwargs):
- kwargs['Dumper'] = LineAnnotationDumper
+ kwargs["Dumper"] = LineAnnotationDumper
sio = StringIO()
yaml.dump(data, sio, *args, **kwargs)
@@ -331,7 +332,7 @@ def dump_annotated(data, stream=None, *args, **kwargs):
# write out annotations and lines, accounting for color
width = max(clen(a) for a in _annotations)
- formats = ['%%-%ds %%s\n' % (width + cextra(a)) for a in _annotations]
+ formats = ["%%-%ds %%s\n" % (width + cextra(a)) for a in _annotations]
for f, a, l in zip(formats, _annotations, lines):
stream.write(f % (a, l))
@@ -358,5 +359,6 @@ def sorted_dict(dict_like):
class SpackYAMLError(spack.error.SpackError):
"""Raised when there are issues with YAML parsing."""
+
def __init__(self, msg, yaml_error):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py
index e900ccc7c3..a34cd26cf1 100644
--- a/lib/spack/spack/util/string.py
+++ b/lib/spack/spack/util/string.py
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-def comma_list(sequence, article=''):
+def comma_list(sequence, article=""):
if type(sequence) != list:
sequence = list(sequence)
@@ -13,26 +13,26 @@ def comma_list(sequence, article=''):
elif len(sequence) == 1:
return sequence[0]
else:
- out = ', '.join(str(s) for s in sequence[:-1])
+ out = ", ".join(str(s) for s in sequence[:-1])
if len(sequence) != 2:
- out += ',' # oxford comma
- out += ' '
+ out += "," # oxford comma
+ out += " "
if article:
- out += article + ' '
+ out += article + " "
out += str(sequence[-1])
return out
def comma_or(sequence):
- return comma_list(sequence, 'or')
+ return comma_list(sequence, "or")
def comma_and(sequence):
- return comma_list(sequence, 'and')
+ return comma_list(sequence, "and")
def quote(sequence, q="'"):
- return ['%s%s%s' % (q, e, q) for e in sequence]
+ return ["%s%s%s" % (q, e, q) for e in sequence]
def plural(n, singular, plural=None, show_n=True):
@@ -48,7 +48,7 @@ def plural(n, singular, plural=None, show_n=True):
Returns:
(str): "1 thing" if n == 1 or "n things" if n != 1
"""
- number = '%s ' % n if show_n else ''
+ number = "%s " % n if show_n else ""
if n == 1:
return "%s%s" % (number, singular)
elif plural is not None:
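comma_list above implements the Oxford-comma join used by comma_or and comma_and. A slightly simplified standalone copy:

def comma_list(sequence, article=""):
    sequence = [str(s) for s in sequence]
    if not sequence:
        return ""
    if len(sequence) == 1:
        return sequence[0]
    out = ", ".join(sequence[:-1])
    if len(sequence) != 2:
        out += ","  # oxford comma
    out += " "
    if article:
        out += article + " "
    return out + sequence[-1]

print(comma_list(["gcc", "clang", "intel"], "or"))  # gcc, clang, or intel
print(comma_list(["foo", "bar"], "and"))            # foo and bar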
diff --git a/lib/spack/spack/util/timer.py b/lib/spack/spack/util/timer.py
index f77b050a3a..f7a9bc8e9d 100644
--- a/lib/spack/spack/util/timer.py
+++ b/lib/spack/spack/util/timer.py
@@ -19,6 +19,7 @@ class Timer(object):
"""
Simple timer for timing phases of a solve or install
"""
+
def __init__(self):
self.start = time.time()
self.last = self.start
@@ -33,8 +34,7 @@ class Timer(object):
@property
def total(self):
- """Return the total time
- """
+ """Return the total time"""
if self.end:
return self.end - self.start
return time.time() - self.start
diff --git a/lib/spack/spack/util/unparse/__init__.py b/lib/spack/spack/util/unparse/__init__.py
index da75271fbf..3469443105 100644
--- a/lib/spack/spack/util/unparse/__init__.py
+++ b/lib/spack/spack/util/unparse/__init__.py
@@ -9,7 +9,7 @@ from six.moves import cStringIO
from .unparser import Unparser
-__version__ = '1.6.3'
+__version__ = "1.6.3"
def unparse(tree, py_ver_consistent=False):
diff --git a/lib/spack/spack/util/unparse/unparser.py b/lib/spack/spack/util/unparse/unparser.py
index 8dbd9083b9..a46d19fa76 100644
--- a/lib/spack/spack/util/unparse/unparser.py
+++ b/lib/spack/spack/util/unparse/unparser.py
@@ -28,22 +28,22 @@ class _Precedence:
"""Precedence table that originated from python grammar."""
TUPLE = 0
- YIELD = 1 # 'yield', 'yield from'
- TEST = 2 # 'if'-'else', 'lambda'
- OR = 3 # 'or'
- AND = 4 # 'and'
- NOT = 5 # 'not'
- CMP = 6 # '<', '>', '==', '>=', '<=', '!=', 'in', 'not in', 'is', 'is not'
+ YIELD = 1 # 'yield', 'yield from'
+ TEST = 2 # 'if'-'else', 'lambda'
+ OR = 3 # 'or'
+ AND = 4 # 'and'
+ NOT = 5 # 'not'
+ CMP = 6 # '<', '>', '==', '>=', '<=', '!=', 'in', 'not in', 'is', 'is not'
EXPR = 7
- BOR = EXPR # '|'
- BXOR = 8 # '^'
- BAND = 9 # '&'
- SHIFT = 10 # '<<', '>>'
- ARITH = 11 # '+', '-'
- TERM = 12 # '*', '@', '/', '%', '//'
- FACTOR = 13 # unary '+', '-', '~'
- POWER = 14 # '**'
- AWAIT = 15 # 'await'
+ BOR = EXPR # '|'
+ BXOR = 8 # '^'
+ BAND = 9 # '&'
+ SHIFT = 10 # '<<', '>>'
+ ARITH = 11 # '+', '-'
+ TERM = 12 # '*', '@', '/', '%', '//'
+ FACTOR = 13 # unary '+', '-', '~'
+ POWER = 14 # '**'
+ AWAIT = 15 # 'await'
ATOM = 16
@@ -52,8 +52,7 @@ def pnext(precedence):
def interleave(inter, f, seq):
- """Call f on each item in seq, calling inter() in between.
- """
+ """Call f on each item in seq, calling inter() in between."""
seq = iter(seq)
try:
f(next(seq))
@@ -79,9 +78,8 @@ def is_simple_tuple(slice_value):
and slice_value.elts
and (
# Python 2 doesn't allow starred elements in tuples like Python 3
- six.PY2 or not any(
- isinstance(elt, ast.Starred) for elt in slice_value.elts
- )
+ six.PY2
+ or not any(isinstance(elt, ast.Starred) for elt in slice_value.elts)
)
)
@@ -89,7 +87,7 @@ def is_simple_tuple(slice_value):
class Unparser:
"""Methods in this class recursively traverse an AST and
output source code for the abstract syntax; original formatting
- is disregarded. """
+ is disregarded."""
def __init__(self, py_ver_consistent=False, _avoid_backslashes=False):
"""Traverse an AST and generate its source.
@@ -153,6 +151,7 @@ class Unparser:
"""A context manager for preparing the source for blocks. It adds
the character ':', increases the indentation on enter and decreases
the indentation on exit."""
+
def __init__(self, unparser):
self.unparser = unparser
@@ -169,7 +168,7 @@ class Unparser:
@contextmanager
def delimit(self, start, end):
"""A context manager for preparing the source for expressions. It adds
- *start* to the buffer and enters, after exit it adds *end*."""
+ *start* to the buffer and enters, after exit it adds *end*."""
self.write(start)
yield
@@ -238,7 +237,7 @@ class Unparser:
def visit_ImportFrom(self, node):
# A from __future__ import may affect unparsing, so record it.
- if node.module and node.module == '__future__':
+ if node.module and node.module == "__future__":
self.future_imports.extend(n.name for n in node.names)
self.fill("from ")
@@ -263,8 +262,7 @@ class Unparser:
def visit_AnnAssign(self, node):
self.fill()
- with self.delimit_if(
- "(", ")", not node.simple and isinstance(node.target, ast.Name)):
+ with self.delimit_if("(", ")", not node.simple and isinstance(node.target, ast.Name)):
self.dispatch(node.target)
self.write(": ")
self.dispatch(node.annotation)
@@ -561,8 +559,7 @@ class Unparser:
with self.block():
self.dispatch(node.body)
# collapse nested ifs into equivalent elifs.
- while (node.orelse and len(node.orelse) == 1 and
- isinstance(node.orelse[0], ast.If)):
+ while node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], ast.If):
node = node.orelse[0]
self.fill("elif ")
self.dispatch(node.test)
@@ -586,7 +583,7 @@ class Unparser:
def _generic_With(self, node, async_=False):
self.fill("async with " if async_ else "with ")
- if hasattr(node, 'items'):
+ if hasattr(node, "items"):
interleave(lambda: self.write(", "), self.dispatch, node.items)
else:
self.dispatch(node.context_expr)
@@ -603,11 +600,12 @@ class Unparser:
self._generic_With(node, async_=True)
def _str_literal_helper(
- self, string, quote_types=_ALL_QUOTES, escape_special_whitespace=False
+ self, string, quote_types=_ALL_QUOTES, escape_special_whitespace=False
):
"""Helper for writing string literals, minimizing escapes.
Returns the tuple (string literal to write, possible quote types).
"""
+
def escape_char(c):
# \n and \t are non-printable, but we only escape them if
# escape_special_whitespace is True
@@ -644,10 +642,12 @@ class Unparser:
"""Write string literal value w/a best effort attempt to avoid backslashes."""
string, quote_types = self._str_literal_helper(string, quote_types=quote_types)
quote_type = quote_types[0]
- self.write("{quote_type}{string}{quote_type}".format(
- quote_type=quote_type,
- string=string,
- ))
+ self.write(
+ "{quote_type}{string}{quote_type}".format(
+ quote_type=quote_type,
+ string=string,
+ )
+ )
# expr
def visit_Bytes(self, node):
@@ -702,16 +702,17 @@ class Unparser:
for value, is_constant in buffer:
# Repeatedly narrow down the list of possible quote_types
value, quote_types = self._str_literal_helper(
- value, quote_types=quote_types,
- escape_special_whitespace=is_constant
+ value, quote_types=quote_types, escape_special_whitespace=is_constant
)
new_buffer.append(value)
value = "".join(new_buffer)
quote_type = quote_types[0]
- self.write("{quote_type}{value}{quote_type}".format(
- quote_type=quote_type,
- value=value,
- ))
+ self.write(
+ "{quote_type}{value}{quote_type}".format(
+ quote_type=quote_type,
+ value=value,
+ )
+ )
def visit_FormattedValue(self, node):
# FormattedValue(expr value, int? conversion, expr? format_spec)
@@ -781,7 +782,7 @@ class Unparser:
elif isinstance(value, str) and self._py_ver_consistent:
# emulate a python 2 repr with raw unicode escapes
# see _Str for python 2 counterpart
- raw = repr(value.encode("raw_unicode_escape")).lstrip('b')
+ raw = repr(value.encode("raw_unicode_escape")).lstrip("b")
if raw.startswith(r"'\\u"):
raw = "'\\" + raw[3:]
self.write(raw)
@@ -845,7 +846,7 @@ class Unparser:
self.dispatch(gen)
def visit_comprehension(self, node):
- if getattr(node, 'is_async', False):
+ if getattr(node, "is_async", False):
self.write(" async for ")
else:
self.write(" for ")
@@ -869,7 +870,7 @@ class Unparser:
self.dispatch(node.orelse)
def visit_Set(self, node):
- assert(node.elts) # should be at least one element
+ assert node.elts # should be at least one element
with self.delimit("{", "}"):
interleave(lambda: self.write(", "), self.dispatch, node.elts)
@@ -891,22 +892,13 @@ class Unparser:
write_key_value_pair(k, v)
with self.delimit("{", "}"):
- interleave(
- lambda: self.write(", "),
- write_item,
- zip(node.keys, node.values)
- )
+ interleave(lambda: self.write(", "), write_item, zip(node.keys, node.values))
def visit_Tuple(self, node):
with self.delimit("(", ")"):
self.items_view(self.dispatch, node.elts)
- unop = {
- "Invert": "~",
- "Not": "not",
- "UAdd": "+",
- "USub": "-"
- }
+ unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
unop_precedence = {
"~": _Precedence.FACTOR,
@@ -926,8 +918,7 @@ class Unparser:
self.write(" ")
self.set_precedence(operator_precedence, node.operand)
- if (six.PY2 and
- isinstance(node.op, ast.USub) and isinstance(node.operand, ast.Num)):
+ if six.PY2 and isinstance(node.op, ast.USub) and isinstance(node.operand, ast.Num):
# If we're applying unary minus to a number, parenthesize the number.
# This is necessary: -2147483648 is different from -(2147483648) on
# a 32-bit machine (the first is an int, the second a long), and
@@ -951,7 +942,7 @@ class Unparser:
"BitXor": "^",
"BitAnd": "&",
"FloorDiv": "//",
- "Pow": "**",
+ "Pow": "**",
}
binop_precedence = {
@@ -1041,7 +1032,7 @@ class Unparser:
# Special case: 3.__abs__() is a syntax error, so if node.value
# is an integer literal then we need to either parenthesize
# it or add an extra space to get 3 .__abs__().
- num_type = getattr(ast, 'Constant', getattr(ast, 'Num', None))
+ num_type = getattr(ast, "Constant", getattr(ast, "Num", None))
if isinstance(node.value, num_type) and isinstance(node.value.n, int):
self.write(" ")
self.write(".")
@@ -1149,7 +1140,7 @@ class Unparser:
self.dispatch(node.step)
def visit_ExtSlice(self, node):
- interleave(lambda: self.write(', '), self.dispatch, node.dims)
+ interleave(lambda: self.write(", "), self.dispatch, node.dims)
# argument
def visit_arg(self, node):
@@ -1162,7 +1153,7 @@ class Unparser:
def visit_arguments(self, node):
first = True
# normal arguments
- all_args = getattr(node, 'posonlyargs', []) + node.args
+ all_args = getattr(node, "posonlyargs", []) + node.args
defaults = [None] * (len(all_args) - len(node.defaults)) + node.defaults
for index, elements in enumerate(zip(all_args, defaults), 1):
a, d = elements
@@ -1174,7 +1165,7 @@ class Unparser:
if d:
self.write("=")
self.dispatch(d)
- if index == len(getattr(node, 'posonlyargs', ())):
+ if index == len(getattr(node, "posonlyargs", ())):
self.write(", /")
# varargs, or bare '*' if no varargs but keyword-only arguments present
@@ -1185,14 +1176,14 @@ class Unparser:
self.write(", ")
self.write("*")
if node.vararg:
- if hasattr(node.vararg, 'arg'):
+ if hasattr(node.vararg, "arg"):
self.write(node.vararg.arg)
if node.vararg.annotation:
self.write(": ")
self.dispatch(node.vararg.annotation)
else:
self.write(node.vararg)
- if getattr(node, 'varargannotation', None):
+ if getattr(node, "varargannotation", None):
self.write(": ")
self.dispatch(node.varargannotation)
@@ -1214,14 +1205,14 @@ class Unparser:
first = False
else:
self.write(", ")
- if hasattr(node.kwarg, 'arg'):
+ if hasattr(node.kwarg, "arg"):
self.write("**" + node.kwarg.arg)
if node.kwarg.annotation:
self.write(": ")
self.dispatch(node.kwarg.annotation)
else:
self.write("**" + node.kwarg)
- if getattr(node, 'kwargannotation', None):
+ if getattr(node, "kwargannotation", None):
self.write(": ")
self.dispatch(node.kwargannotation)
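
# Illustrative sketch (stdlib only, not part of the Spack diff): the "3 .__abs__()"
# special case noted in the hunk above. Attribute access on an int literal needs a
# space or parentheses, because "3." would otherwise be tokenized as a float literal.
import ast

ast.parse("3 .bit_length()")      # parses: attribute access on the literal 3
try:
    ast.parse("3.bit_length()")   # "3." is read as a float, so this is invalid
except SyntaxError as err:
    print("SyntaxError:", err.msg)
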
diff --git a/lib/spack/spack/util/url.py b/lib/spack/spack/util/url.py
index 824b5fed57..5ff0a53fa7 100644
--- a/lib/spack/spack/util/url.py
+++ b/lib/spack/spack/util/url.py
@@ -21,7 +21,7 @@ from spack.util.path import (
convert_to_posix_path,
)
-is_windows = sys.platform == 'win32'
+is_windows = sys.platform == "win32"
def _split_all(path):
@@ -39,7 +39,7 @@ def _split_all(path):
(old_a, (a, b)) = a, posixpath.split(a)
if a or b:
- result.insert(0, b or '/')
+ result.insert(0, b or "/")
return result
@@ -53,18 +53,18 @@ def local_file_path(url):
if isinstance(url, string_types):
url = parse(url)
- if url.scheme == 'file':
+ if url.scheme == "file":
if is_windows:
pth = convert_to_platform_path(url.netloc + url.path)
- if re.search(r'^\\[A-Za-z]:', pth):
- pth = pth.lstrip('\\')
+ if re.search(r"^\\[A-Za-z]:", pth):
+ pth = pth.lstrip("\\")
return pth
return url.path
return None
-def parse(url, scheme='file'):
+def parse(url, scheme="file"):
"""Parse a url.
For file:// URLs, the netloc and path components are concatenated and
@@ -80,38 +80,37 @@ def parse(url, scheme='file'):
url = escape_file_url(url)
url_obj = (
urllib_parse.urlparse(url, scheme=scheme, allow_fragments=False)
- if isinstance(url, string_types) else url)
+ if isinstance(url, string_types)
+ else url
+ )
(scheme, netloc, path, params, query, _) = url_obj
- scheme = (scheme or 'file').lower()
+ scheme = (scheme or "file").lower()
- if scheme == 'file':
+ if scheme == "file":
# (The user explicitly provides the file:// scheme.)
# examples:
# file://C:\\a\\b\\c
# file://X:/a/b/c
path = canonicalize_path(netloc + path)
- path = re.sub(r'^/+', '/', path)
- netloc = ''
+ path = re.sub(r"^/+", "/", path)
+ netloc = ""
- drive_ltr_lst = re.findall(r'[A-Za-z]:\\', path)
+ drive_ltr_lst = re.findall(r"[A-Za-z]:\\", path)
is_win_path = bool(drive_ltr_lst)
if is_windows and is_win_path:
- drive_ltr = drive_ltr_lst[0].strip('\\')
- path = re.sub(r'[\\]*' + drive_ltr, '', path)
- netloc = '/' + drive_ltr.strip('\\')
+ drive_ltr = drive_ltr_lst[0].strip("\\")
+ path = re.sub(r"[\\]*" + drive_ltr, "", path)
+ netloc = "/" + drive_ltr.strip("\\")
if sys.platform == "win32":
path = convert_to_posix_path(path)
- return urllib_parse.ParseResult(scheme=scheme,
- netloc=netloc,
- path=path,
- params=params,
- query=query,
- fragment=None)
+ return urllib_parse.ParseResult(
+ scheme=scheme, netloc=netloc, path=path, params=params, query=query, fragment=None
+ )
def format(parsed_url):
@@ -180,36 +179,33 @@ def join(base_url, path, *extra, **kwargs):
'file:///opt/spack'
"""
paths = [
- (x) if isinstance(x, string_types)
- else x.geturl()
- for x in itertools.chain((base_url, path), extra)]
+ (x) if isinstance(x, string_types) else x.geturl()
+ for x in itertools.chain((base_url, path), extra)
+ ]
paths = [convert_to_posix_path(x) for x in paths]
n = len(paths)
last_abs_component = None
- scheme = ''
+ scheme = ""
for i in range(n - 1, -1, -1):
- obj = urllib_parse.urlparse(
- paths[i], scheme='', allow_fragments=False)
+ obj = urllib_parse.urlparse(paths[i], scheme="", allow_fragments=False)
scheme = obj.scheme
# in either case the component is absolute
- if scheme or obj.path.startswith('/'):
+ if scheme or obj.path.startswith("/"):
if not scheme:
# Without a scheme, we have to go back looking for the
# next-last component that specifies a scheme.
for j in range(i - 1, -1, -1):
- obj = urllib_parse.urlparse(
- paths[j], scheme='', allow_fragments=False)
+ obj = urllib_parse.urlparse(paths[j], scheme="", allow_fragments=False)
if obj.scheme:
- paths[i] = '{SM}://{NL}{PATH}'.format(
+ paths[i] = "{SM}://{NL}{PATH}".format(
SM=obj.scheme,
- NL=(
- (obj.netloc + '/')
- if obj.scheme != 's3' else ''),
- PATH=paths[i][1:])
+ NL=((obj.netloc + "/") if obj.scheme != "s3" else ""),
+ PATH=paths[i][1:],
+ )
break
last_abs_component = i
@@ -218,20 +214,20 @@ def join(base_url, path, *extra, **kwargs):
if last_abs_component is not None:
paths = paths[last_abs_component:]
if len(paths) == 1:
- result = urllib_parse.urlparse(
- paths[0], scheme='file', allow_fragments=False)
+ result = urllib_parse.urlparse(paths[0], scheme="file", allow_fragments=False)
# another subtlety: If the last argument to join() is an absolute
# file:// URL component with a relative path, the relative path
# needs to be resolved.
- if result.scheme == 'file' and result.netloc:
+ if result.scheme == "file" and result.netloc:
result = urllib_parse.ParseResult(
scheme=result.scheme,
- netloc='',
+ netloc="",
path=posixpath.abspath(result.netloc + result.path),
params=result.params,
query=result.query,
- fragment=None)
+ fragment=None,
+ )
return result.geturl()
@@ -240,24 +236,26 @@ def join(base_url, path, *extra, **kwargs):
def _join(base_url, path, *extra, **kwargs):
base_url = parse(base_url)
- resolve_href = kwargs.get('resolve_href', False)
+ resolve_href = kwargs.get("resolve_href", False)
(scheme, netloc, base_path, params, query, _) = base_url
scheme = scheme.lower()
path_tokens = [
- part for part in itertools.chain(
+ part
+ for part in itertools.chain(
_split_all(path),
- itertools.chain.from_iterable(
- _split_all(extra_path) for extra_path in extra))
- if part and part != '/']
+ itertools.chain.from_iterable(_split_all(extra_path) for extra_path in extra),
+ )
+ if part and part != "/"
+ ]
- base_path_args = ['/fake-root']
- if scheme == 's3':
+ base_path_args = ["/fake-root"]
+ if scheme == "s3":
if netloc:
base_path_args.append(netloc)
- if base_path.startswith('/'):
+ if base_path.startswith("/"):
base_path = base_path[1:]
base_path_args.append(base_path)
@@ -267,34 +265,31 @@ def _join(base_url, path, *extra, **kwargs):
base_path_args = [new_base_path]
base_path_args.extend(path_tokens)
- base_path = posixpath.relpath(posixpath.join(*base_path_args), '/fake-root')
+ base_path = posixpath.relpath(posixpath.join(*base_path_args), "/fake-root")
- if scheme == 's3':
- path_tokens = [
- part for part in _split_all(base_path)
- if part and part != '/']
+ if scheme == "s3":
+ path_tokens = [part for part in _split_all(base_path) if part and part != "/"]
if path_tokens:
netloc = path_tokens.pop(0)
- base_path = posixpath.join('', *path_tokens)
+ base_path = posixpath.join("", *path_tokens)
if sys.platform == "win32":
base_path = convert_to_posix_path(base_path)
- return format(urllib_parse.ParseResult(scheme=scheme,
- netloc=netloc,
- path=base_path,
- params=params,
- query=query,
- fragment=None))
+ return format(
+ urllib_parse.ParseResult(
+ scheme=scheme, netloc=netloc, path=base_path, params=params, query=query, fragment=None
+ )
+ )
git_re = (
- r"^(?:([a-z]+)://)?" # 1. optional scheme
- r"(?:([^@]+)@)?" # 2. optional user
- r"([^:/~]+)?" # 3. optional hostname
- r"(?(1)(?::([^:/]+))?|:)" # 4. :<optional port> if scheme else :
- r"(.*[^/])/?$" # 5. path
+ r"^(?:([a-z]+)://)?" # 1. optional scheme
+ r"(?:([^@]+)@)?" # 2. optional user
+ r"([^:/~]+)?" # 3. optional hostname
+ r"(?(1)(?::([^:/]+))?|:)" # 4. :<optional port> if scheme else :
+ r"(.*[^/])/?$" # 5. path
)
@@ -343,14 +338,14 @@ def parse_git_url(url):
def require_url_format(url):
- ut = re.search(r'^(file://|http://|https://|ftp://|s3://|gs://|ssh://|git://|/)', url)
+ ut = re.search(r"^(file://|http://|https://|ftp://|s3://|gs://|ssh://|git://|/)", url)
if not ut:
- raise ValueError('Invalid url format from url: %s' % url)
+ raise ValueError("Invalid url format from url: %s" % url)
def escape_file_url(url):
- drive_ltr = re.findall(r'[A-Za-z]:\\', url)
+ drive_ltr = re.findall(r"[A-Za-z]:\\", url)
if is_windows and drive_ltr:
- url = url.replace(drive_ltr[0], '/' + drive_ltr[0])
+ url = url.replace(drive_ltr[0], "/" + drive_ltr[0])
return url
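
# Illustrative sketch (not part of the diff): how the five capture groups of the
# git_re pattern above decompose a git URL. The pattern is copied verbatim from
# the hunk above; the sample URL is an arbitrary example.
import re

git_re = (
    r"^(?:([a-z]+)://)?"  # 1. optional scheme
    r"(?:([^@]+)@)?"  # 2. optional user
    r"([^:/~]+)?"  # 3. optional hostname
    r"(?(1)(?::([^:/]+))?|:)"  # 4. :<optional port> if scheme else :
    r"(.*[^/])/?$"  # 5. path
)

m = re.search(git_re, "git@github.com:spack/spack.git")
print(m.groups())  # (None, 'git', 'github.com', None, 'spack/spack.git')
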
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 73f2c8aca0..a5c96e23da 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -52,53 +52,51 @@ else:
class LinkParser(HTMLParser):
"""This parser just takes an HTML page and strips out the hrefs on the
- links. Good enough for a really simple spider. """
+ links. Good enough for a really simple spider."""
def __init__(self):
HTMLParser.__init__(self)
self.links = []
def handle_starttag(self, tag, attrs):
- if tag == 'a':
+ if tag == "a":
for attr, val in attrs:
- if attr == 'href':
+ if attr == "href":
self.links.append(val)
def uses_ssl(parsed_url):
- if parsed_url.scheme == 'https':
+ if parsed_url.scheme == "https":
return True
- if parsed_url.scheme == 's3':
- endpoint_url = os.environ.get('S3_ENDPOINT_URL')
+ if parsed_url.scheme == "s3":
+ endpoint_url = os.environ.get("S3_ENDPOINT_URL")
if not endpoint_url:
return True
- if url_util.parse(endpoint_url, scheme='https').scheme == 'https':
+ if url_util.parse(endpoint_url, scheme="https").scheme == "https":
return True
- elif parsed_url.scheme == 'gs':
+ elif parsed_url.scheme == "gs":
tty.debug("(uses_ssl) GCS Blob is https")
return True
return False
-__UNABLE_TO_VERIFY_SSL = (
- lambda pyver: (
- (pyver < (2, 7, 9)) or
- ((3,) < pyver < (3, 4, 3))
- ))(sys.version_info)
+__UNABLE_TO_VERIFY_SSL = (lambda pyver: ((pyver < (2, 7, 9)) or ((3,) < pyver < (3, 4, 3))))(
+ sys.version_info
+)
def read_from_url(url, accept_content_type=None):
url = url_util.parse(url)
context = None
- verify_ssl = spack.config.get('config:verify_ssl')
+ verify_ssl = spack.config.get("config:verify_ssl")
# Timeout in seconds for web requests
- timeout = spack.config.get('config:connect_timeout', 10)
+ timeout = spack.config.get("config:connect_timeout", 10)
# Don't even bother with a context unless the URL scheme is one that uses
# SSL certs.
@@ -120,10 +118,10 @@ def read_from_url(url, accept_content_type=None):
url = url_util.format(url)
if sys.platform == "win32" and url_scheme == "file":
url = convert_to_posix_path(url)
- req = Request(url, headers={'User-Agent': SPACK_USER_AGENT})
+ req = Request(url, headers={"User-Agent": SPACK_USER_AGENT})
content_type = None
- is_web_url = url_scheme in ('http', 'https')
+ is_web_url = url_scheme in ("http", "https")
if accept_content_type and is_web_url:
# Make a HEAD request first to check the content type. This lets
# us ignore tarballs and gigantic files.
@@ -133,7 +131,7 @@ def read_from_url(url, accept_content_type=None):
req.get_method = lambda: "HEAD"
resp = _urlopen(req, timeout=timeout, context=context)
- content_type = get_header(resp.headers, 'Content-type')
+ content_type = get_header(resp.headers, "Content-type")
# Do the real GET request when we know it's just HTML.
req.get_method = lambda: "GET"
@@ -141,22 +139,21 @@ def read_from_url(url, accept_content_type=None):
try:
response = _urlopen(req, timeout=timeout, context=context)
except URLError as err:
- raise SpackWebError('Download failed: {ERROR}'.format(
- ERROR=str(err)))
+ raise SpackWebError("Download failed: {ERROR}".format(ERROR=str(err)))
if accept_content_type and not is_web_url:
- content_type = get_header(response.headers, 'Content-type')
+ content_type = get_header(response.headers, "Content-type")
- reject_content_type = (
- accept_content_type and (
- content_type is None or
- not content_type.startswith(accept_content_type)))
+ reject_content_type = accept_content_type and (
+ content_type is None or not content_type.startswith(accept_content_type)
+ )
if reject_content_type:
- tty.debug("ignoring page {0}{1}{2}".format(
- url,
- " with content type " if content_type is not None else "",
- content_type or ""))
+ tty.debug(
+ "ignoring page {0}{1}{2}".format(
+ url, " with content type " if content_type is not None else "", content_type or ""
+ )
+ )
return None, None, None
@@ -164,17 +161,18 @@ def read_from_url(url, accept_content_type=None):
def warn_no_ssl_cert_checking():
- tty.warn("Spack will not check SSL certificates. You need to update "
- "your Python to enable certificate verification.")
+ tty.warn(
+ "Spack will not check SSL certificates. You need to update "
+ "your Python to enable certificate verification."
+ )
-def push_to_url(
- local_file_path, remote_path, keep_original=True, extra_args=None):
+def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=None):
if sys.platform == "win32":
- if remote_path[1] == ':':
+ if remote_path[1] == ":":
remote_path = "file://" + remote_path
remote_url = url_util.parse(remote_path)
- verify_ssl = spack.config.get('config:verify_ssl')
+ verify_ssl = spack.config.get("config:verify_ssl")
if __UNABLE_TO_VERIFY_SSL and verify_ssl and uses_ssl(remote_url):
warn_no_ssl_cert_checking()
@@ -198,23 +196,23 @@ def push_to_url(
else:
raise
- elif remote_url.scheme == 's3':
+ elif remote_url.scheme == "s3":
if extra_args is None:
extra_args = {}
remote_path = remote_url.path
- while remote_path.startswith('/'):
+ while remote_path.startswith("/"):
remote_path = remote_path[1:]
- s3 = s3_util.create_s3_session(remote_url,
- connection=s3_util.get_mirror_connection(remote_url))
- s3.upload_file(local_file_path, remote_url.netloc,
- remote_path, ExtraArgs=extra_args)
+ s3 = s3_util.create_s3_session(
+ remote_url, connection=s3_util.get_mirror_connection(remote_url)
+ )
+ s3.upload_file(local_file_path, remote_url.netloc, remote_path, ExtraArgs=extra_args)
if not keep_original:
os.remove(local_file_path)
- elif remote_url.scheme == 'gs':
+ elif remote_url.scheme == "gs":
gcs = gcs_util.GCSBlob(remote_url)
gcs.upload_to_blob(local_file_path)
if not keep_original:
@@ -222,8 +220,8 @@ def push_to_url(
else:
raise NotImplementedError(
- 'Unrecognized URL scheme: {SCHEME}'.format(
- SCHEME=remote_url.scheme))
+ "Unrecognized URL scheme: {SCHEME}".format(SCHEME=remote_url.scheme)
+ )
def url_exists(url):
@@ -232,19 +230,19 @@ def url_exists(url):
if local_path:
return os.path.exists(local_path)
- if url.scheme == 's3':
+ if url.scheme == "s3":
# Check for URL specific connection information
s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url))
try:
- s3.get_object(Bucket=url.netloc, Key=url.path.lstrip('/'))
+ s3.get_object(Bucket=url.netloc, Key=url.path.lstrip("/"))
return True
except s3.ClientError as err:
- if err.response['Error']['Code'] == 'NoSuchKey':
+ if err.response["Error"]["Code"] == "NoSuchKey":
return False
raise err
- elif url.scheme == 'gs':
+ elif url.scheme == "gs":
gcs = gcs_util.GCSBlob(url)
return gcs.exists()
@@ -258,13 +256,12 @@ def url_exists(url):
def _debug_print_delete_results(result):
- if 'Deleted' in result:
- for d in result['Deleted']:
- tty.debug('Deleted {0}'.format(d['Key']))
- if 'Errors' in result:
- for e in result['Errors']:
- tty.debug('Failed to delete {0} ({1})'.format(
- e['Key'], e['Message']))
+ if "Deleted" in result:
+ for d in result["Deleted"]:
+ tty.debug("Deleted {0}".format(d["Key"]))
+ if "Errors" in result:
+ for e in result["Errors"]:
+ tty.debug("Failed to delete {0} ({1})".format(e["Key"], e["Message"]))
def remove_url(url, recursive=False):
@@ -278,40 +275,40 @@ def remove_url(url, recursive=False):
os.remove(local_path)
return
- if url.scheme == 's3':
+ if url.scheme == "s3":
# Try to find a mirror for potential connection information
s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url))
bucket = url.netloc
if recursive:
# Because list_objects_v2 can only return up to 1000 items
# at a time, we have to paginate to make sure we get it all
- prefix = url.path.strip('/')
- paginator = s3.get_paginator('list_objects_v2')
+ prefix = url.path.strip("/")
+ paginator = s3.get_paginator("list_objects_v2")
pages = paginator.paginate(Bucket=bucket, Prefix=prefix)
- delete_request = {'Objects': []}
- for item in pages.search('Contents'):
+ delete_request = {"Objects": []}
+ for item in pages.search("Contents"):
if not item:
continue
- delete_request['Objects'].append({'Key': item['Key']})
+ delete_request["Objects"].append({"Key": item["Key"]})
# Make sure we do not try to hit S3 with a list of more
# than 1000 items
- if len(delete_request['Objects']) >= 1000:
+ if len(delete_request["Objects"]) >= 1000:
r = s3.delete_objects(Bucket=bucket, Delete=delete_request)
_debug_print_delete_results(r)
- delete_request = {'Objects': []}
+ delete_request = {"Objects": []}
# Delete any items that remain
- if len(delete_request['Objects']):
+ if len(delete_request["Objects"]):
r = s3.delete_objects(Bucket=bucket, Delete=delete_request)
_debug_print_delete_results(r)
else:
- s3.delete_object(Bucket=bucket, Key=url.path.lstrip('/'))
+ s3.delete_object(Bucket=bucket, Key=url.path.lstrip("/"))
return
- elif url.scheme == 'gs':
+ elif url.scheme == "gs":
if recursive:
bucket = gcs_util.GCSBucket(url)
bucket.destroy(recursive=recursive)
@@ -325,35 +322,32 @@ def remove_url(url, recursive=False):
def _iter_s3_contents(contents, prefix):
for entry in contents:
- key = entry['Key']
+ key = entry["Key"]
- if not key.startswith('/'):
- key = '/' + key
+ if not key.startswith("/"):
+ key = "/" + key
key = os.path.relpath(key, prefix)
- if key == '.':
+ if key == ".":
continue
yield key
def _list_s3_objects(client, bucket, prefix, num_entries, start_after=None):
- list_args = dict(
- Bucket=bucket,
- Prefix=prefix[1:],
- MaxKeys=num_entries)
+ list_args = dict(Bucket=bucket, Prefix=prefix[1:], MaxKeys=num_entries)
if start_after is not None:
- list_args['StartAfter'] = start_after
+ list_args["StartAfter"] = start_after
result = client.list_objects_v2(**list_args)
last_key = None
- if result['IsTruncated']:
- last_key = result['Contents'][-1]['Key']
+ if result["IsTruncated"]:
+ last_key = result["Contents"][-1]["Key"]
- iter = _iter_s3_contents(result['Contents'], prefix)
+ iter = _iter_s3_contents(result["Contents"], prefix)
return iter, last_key
@@ -361,11 +355,10 @@ def _list_s3_objects(client, bucket, prefix, num_entries, start_after=None):
def _iter_s3_prefix(client, url, num_entries=1024):
key = None
bucket = url.netloc
- prefix = re.sub(r'^/*', '/', url.path)
+ prefix = re.sub(r"^/*", "/", url.path)
while True:
- contents, key = _list_s3_objects(
- client, bucket, prefix, num_entries, start_after=key)
+ contents, key = _list_s3_objects(client, bucket, prefix, num_entries, start_after=key)
for x in contents:
yield x
@@ -387,19 +380,20 @@ def list_url(url, recursive=False):
if local_path:
if recursive:
return list(_iter_local_prefix(local_path))
- return [subpath for subpath in os.listdir(local_path)
- if os.path.isfile(os.path.join(local_path, subpath))]
+ return [
+ subpath
+ for subpath in os.listdir(local_path)
+ if os.path.isfile(os.path.join(local_path, subpath))
+ ]
- if url.scheme == 's3':
+ if url.scheme == "s3":
s3 = s3_util.create_s3_session(url, connection=s3_util.get_mirror_connection(url))
if recursive:
return list(_iter_s3_prefix(s3, url))
- return list(set(
- key.split('/', 1)[0]
- for key in _iter_s3_prefix(s3, url)))
+ return list(set(key.split("/", 1)[0] for key in _iter_s3_prefix(s3, url)))
- elif url.scheme == 'gs':
+ elif url.scheme == "gs":
gcs = gcs_util.GCSBucket(url)
return gcs.get_all_blobs(recursive=recursive)
@@ -445,11 +439,11 @@ def spider(root_urls, depth=0, concurrency=32):
subcalls = []
try:
- response_url, _, response = read_from_url(url, 'text/html')
+ response_url, _, response = read_from_url(url, "text/html")
if not response_url or not response:
return pages, links, subcalls
- page = codecs.getreader('utf-8')(response).read()
+ page = codecs.getreader("utf-8")(response).read()
pages[response_url] = page
# Parse out the links in the page
@@ -458,10 +452,7 @@ def spider(root_urls, depth=0, concurrency=32):
while link_parser.links:
raw_link = link_parser.links.pop()
- abs_link = url_util.join(
- response_url,
- raw_link.strip(),
- resolve_href=True)
+ abs_link = url_util.join(response_url, raw_link.strip(), resolve_href=True)
links.add(abs_link)
# Skip stuff that looks like an archive
@@ -480,11 +471,13 @@ def spider(root_urls, depth=0, concurrency=32):
except URLError as e:
tty.debug(str(e))
- if hasattr(e, 'reason') and isinstance(e.reason, ssl.SSLError):
- tty.warn("Spack was unable to fetch url list due to a "
- "certificate verification problem. You can try "
- "running spack -k, which will not check SSL "
- "certificates. Use this at your own risk.")
+ if hasattr(e, "reason") and isinstance(e.reason, ssl.SSLError):
+ tty.warn(
+ "Spack was unable to fetch url list due to a "
+ "certificate verification problem. You can try "
+ "running spack -k, which will not check SSL "
+ "certificates. Use this at your own risk."
+ )
except HTMLParseError as e:
# This error indicates that Python's HTML parser sucks.
@@ -499,8 +492,7 @@ def spider(root_urls, depth=0, concurrency=32):
except Exception as e:
# Other types of errors are completely ignored,
# except in debug mode
- tty.debug("Error in _spider: %s:%s" % (type(e), str(e)),
- traceback.format_exc())
+ tty.debug("Error in _spider: %s:%s" % (type(e), str(e)), traceback.format_exc())
finally:
tty.debug("SPIDER: [url={0}]".format(url))
@@ -524,8 +516,10 @@ def spider(root_urls, depth=0, concurrency=32):
tp = multiprocessing.pool.ThreadPool(processes=concurrency)
try:
while current_depth <= depth:
- tty.debug("SPIDER: [depth={0}, max_depth={1}, urls={2}]".format(
- current_depth, depth, len(spider_args))
+ tty.debug(
+ "SPIDER: [depth={0}, max_depth={1}, urls={2}]".format(
+ current_depth, depth, len(spider_args)
+ )
)
results = tp.map(llnl.util.lang.star(_spider), spider_args)
spider_args = []
@@ -555,22 +549,24 @@ def _urlopen(req, *args, **kwargs):
# Note: 'context' parameter was only introduced starting
# with versions 2.7.9 and 3.4.3 of Python.
if __UNABLE_TO_VERIFY_SSL:
- del kwargs['context']
+ del kwargs["context"]
opener = urlopen
- if url_util.parse(url).scheme == 's3':
+ if url_util.parse(url).scheme == "s3":
import spack.s3_handler
+
opener = spack.s3_handler.open
- elif url_util.parse(url).scheme == 'gs':
+ elif url_util.parse(url).scheme == "gs":
import spack.gcs_handler
+
opener = spack.gcs_handler.gcs_open
try:
return opener(req, *args, **kwargs)
except TypeError as err:
# If the above fails because of 'context', call without 'context'.
- if 'context' in kwargs and 'context' in str(err):
- del kwargs['context']
+ if "context" in kwargs and "context" in str(err):
+ del kwargs["context"]
return opener(req, *args, **kwargs)
@@ -613,8 +609,8 @@ def find_versions_of_archive(
# Add '/' to the end of the URL. Some web servers require this.
additional_list_urls = set()
for lurl in list_urls:
- if not lurl.endswith('/'):
- additional_list_urls.add(lurl + '/')
+ if not lurl.endswith("/"):
+ additional_list_urls.add(lurl + "/")
list_urls |= additional_list_urls
# Grab some web pages to scrape.
@@ -639,7 +635,7 @@ def find_versions_of_archive(
# https://cran.r-project.org/src/contrib/enpls_5.7.tar.gz
# https://cran.r-project.org/src/contrib/autopls_1.3.tar.gz
# https://cran.r-project.org/src/contrib/matrixpls_1.0.4.tar.gz
- url_regex = '/' + url_regex
+ url_regex = "/" + url_regex
# We need to add a $ anchor to the end of the regex to prevent
# Spack from picking up signature files like:
@@ -648,10 +644,10 @@ def find_versions_of_archive(
# .sha256
# .sig
# However, SourceForge downloads still need to end in '/download'.
- url_regex += r'(\/download)?'
+ url_regex += r"(\/download)?"
# PyPI adds #sha256=... to the end of the URL
- url_regex += '(#sha256=.*)?'
- url_regex += '$'
+ url_regex += "(#sha256=.*)?"
+ url_regex += "$"
regexes.append(url_regex)
@@ -708,7 +704,7 @@ def get_header(headers, header_name):
"""
def unfuzz(header):
- return re.sub(r'[ _-]', '', header).lower()
+ return re.sub(r"[ _-]", "", header).lower()
try:
return headers[header_name]
@@ -726,8 +722,9 @@ class SpackWebError(spack.error.SpackError):
class NoNetworkConnectionError(SpackWebError):
"""Raised when an operation can't get an internet connection."""
+
def __init__(self, message, url):
super(NoNetworkConnectionError, self).__init__(
- "No network connection: " + str(message),
- "URL was: " + str(url))
+ "No network connection: " + str(message), "URL was: " + str(url)
+ )
self.url = url
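
# Illustrative usage sketch for the LinkParser shown above: it only collects the
# href values of <a> tags, which is all the really simple spider needs. The class
# body is repeated verbatim so the example is standalone; the sample HTML is arbitrary.
from html.parser import HTMLParser

class LinkParser(HTMLParser):
    def __init__(self):
        HTMLParser.__init__(self)
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == "a":
            for attr, val in attrs:
                if attr == "href":
                    self.links.append(val)

parser = LinkParser()
parser.feed('<a href="foo-1.0.tar.gz">1.0</a> <a href="foo-1.1.tar.gz">1.1</a>')
print(parser.links)  # ['foo-1.0.tar.gz', 'foo-1.1.tar.gz']
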
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index f350bc2666..1f21aad89f 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -23,7 +23,7 @@ import spack.directives
import spack.error as error
from spack.util.string import comma_or
-special_variant_values = [None, 'none', '*']
+special_variant_values = [None, "none", "*"]
class Variant(object):
@@ -32,14 +32,14 @@ class Variant(object):
"""
def __init__(
- self,
- name,
- default,
- description,
- values=(True, False),
- multi=False,
- validator=None,
- sticky=False
+ self,
+ name,
+ default,
+ description,
+ values=(True, False),
+ multi=False,
+ validator=None,
+ sticky=False,
):
"""Initialize a package variant.
@@ -62,7 +62,7 @@ class Variant(object):
self.description = str(description)
self.values = None
- if values == '*':
+ if values == "*":
# wildcard is a special case to make it easy to say any value is ok
self.single_value_validator = lambda x: True
@@ -74,6 +74,7 @@ class Variant(object):
return True
except ValueError:
return False
+
self.single_value_validator = isa_type
if callable(values):
@@ -122,14 +123,13 @@ class Variant(object):
# Check and record the values that are not allowed
not_allowed_values = [
- x for x in value
- if x != '*' and self.single_value_validator(x) is False
+ x for x in value if x != "*" and self.single_value_validator(x) is False
]
if not_allowed_values:
raise InvalidVariantValueError(self, not_allowed_values, pkg_cls)
# Validate the group of values if needed
- if self.group_validator is not None and value != ('*',):
+ if self.group_validator is not None and value != ("*",):
self.group_validator(pkg_cls.name, self.name, value)
@property
@@ -143,11 +143,11 @@ class Variant(object):
# Join an explicit set of allowed values
if self.values is not None:
v = tuple(str(x) for x in self.values)
- return ', '.join(v)
+ return ", ".join(v)
# In case we were given a single-value validator
# print the docstring
docstring = inspect.getdoc(self.single_value_validator)
- v = docstring if docstring else ''
+ v = docstring if docstring else ""
return v
def make_default(self):
@@ -182,12 +182,14 @@ class Variant(object):
return SingleValuedVariant
def __eq__(self, other):
- return (self.name == other.name and
- self.default == other.default and
- self.values == other.values and
- self.multi == other.multi and
- self.single_value_validator == other.single_value_validator and
- self.group_validator == other.group_validator)
+ return (
+ self.name == other.name
+ and self.default == other.default
+ and self.values == other.values
+ and self.multi == other.multi
+ and self.single_value_validator == other.single_value_validator
+ and self.group_validator == other.group_validator
+ )
def __ne__(self, other):
return not self == other
@@ -201,6 +203,7 @@ def implicit_variant_conversion(method):
Returns: decorated method
"""
+
@functools.wraps(method)
def convert(self, other):
# We don't care if types are different as long as I can convert
@@ -210,6 +213,7 @@ def implicit_variant_conversion(method):
except (error.SpecError, ValueError):
return False
return method(self, other)
+
return convert
@@ -262,7 +266,7 @@ class AbstractVariant(object):
mvar._original_value = mvar._value
return mvar
- elif str(value).upper() == 'TRUE' or str(value).upper() == 'FALSE':
+ elif str(value).upper() == "TRUE" or str(value).upper() == "FALSE":
return BoolValuedVariant(name, value)
return SingleValuedVariant(name, value)
@@ -297,7 +301,7 @@ class AbstractVariant(object):
# Store a tuple of CSV string representations
# Tuple is necessary here instead of list because the
# values need to be hashed
- value = re.split(r'\s*,\s*', str(value))
+ value = re.split(r"\s*,\s*", str(value))
for val in special_variant_values:
if val in value and len(value) > 1:
@@ -377,16 +381,16 @@ class AbstractVariant(object):
bool: True or False
"""
if self.name != other.name:
- raise ValueError('variants must have the same name')
+ raise ValueError("variants must have the same name")
old_value = self.value
values = list(sorted(set(self.value + other.value)))
        # If we constrain a wildcard by another value, just take that value
- if '*' in values and len(values) > 1:
- values.remove('*')
+ if "*" in values and len(values) > 1:
+ values.remove("*")
- self.value = ','.join(values)
+ self.value = ",".join(values)
return old_value != self.value
def __contains__(self, item):
@@ -394,18 +398,15 @@ class AbstractVariant(object):
def __repr__(self):
cls = type(self)
- return '{0.__name__}({1}, {2})'.format(
- cls, repr(self.name), repr(self._original_value)
- )
+ return "{0.__name__}({1}, {2})".format(cls, repr(self.name), repr(self._original_value))
def __str__(self):
- return '{0}={1}'.format(
- self.name, ','.join(str(x) for x in self.value)
- )
+ return "{0}={1}".format(self.name, ",".join(str(x) for x in self.value))
class MultiValuedVariant(AbstractVariant):
"""A variant that can hold multiple values at once."""
+
@implicit_variant_conversion
def satisfies(self, other):
"""Returns true if ``other.name == self.name`` and ``other.value`` is
@@ -422,11 +423,11 @@ class MultiValuedVariant(AbstractVariant):
if not super_sat:
return False
- if '*' in other or '*' in self:
+ if "*" in other or "*" in self:
return True
# allow prefix find on patches
- if self.name == 'patches':
+ if self.name == "patches":
return all(any(w.startswith(v) for w in self.value) for v in other.value)
# Otherwise we want all the values in `other` to be also in `self`
@@ -439,11 +440,11 @@ class MultiValuedVariant(AbstractVariant):
def __str__(self):
# Special-case patches to not print the full 64 character hashes
- if self.name == 'patches':
- values_str = ','.join(x[:7] for x in self.value)
+ if self.name == "patches":
+ values_str = ",".join(x[:7] for x in self.value)
else:
- values_str = ','.join(str(x) for x in self.value)
- return '{0}={1}'.format(self.name, values_str)
+ values_str = ",".join(str(x) for x in self.value)
+ return "{0}={1}".format(self.name, values_str)
class SingleValuedVariant(AbstractVariant):
@@ -459,14 +460,15 @@ class SingleValuedVariant(AbstractVariant):
self._value = str(self._value[0])
def __str__(self):
- return '{0}={1}'.format(self.name, self.value)
+ return "{0}={1}".format(self.name, self.value)
@implicit_variant_conversion
def satisfies(self, other):
abstract_sat = super(SingleValuedVariant, self).satisfies(other)
- return abstract_sat and (self.value == other.value or
- other.value == '*' or self.value == '*')
+ return abstract_sat and (
+ self.value == other.value or other.value == "*" or self.value == "*"
+ )
def compatible(self, other):
return self.satisfies(other)
@@ -474,12 +476,12 @@ class SingleValuedVariant(AbstractVariant):
@implicit_variant_conversion
def constrain(self, other):
if self.name != other.name:
- raise ValueError('variants must have the same name')
+ raise ValueError("variants must have the same name")
- if other.value == '*':
+ if other.value == "*":
return False
- if self.value == '*':
+ if self.value == "*":
self.value = other.value
return True
@@ -503,25 +505,25 @@ class BoolValuedVariant(SingleValuedVariant):
def _value_setter(self, value):
# Check the string representation of the value and turn
# it to a boolean
- if str(value).upper() == 'TRUE':
+ if str(value).upper() == "TRUE":
self._original_value = value
self._value = True
- elif str(value).upper() == 'FALSE':
+ elif str(value).upper() == "FALSE":
self._original_value = value
self._value = False
- elif str(value) == '*':
+ elif str(value) == "*":
self._original_value = value
- self._value = '*'
+ self._value = "*"
else:
msg = 'cannot construct a BoolValuedVariant for "{0}" from '
- msg += 'a value that does not represent a bool'
+ msg += "a value that does not represent a bool"
raise ValueError(msg.format(self.name))
def __contains__(self, item):
return item is self.value
def __str__(self):
- return '{0}{1}'.format('+' if self.value else '~', self.name)
+ return "{0}{1}".format("+" if self.value else "~", self.name)
class VariantMap(lang.HashableMap):
@@ -536,8 +538,8 @@ class VariantMap(lang.HashableMap):
def __setitem__(self, name, vspec):
# Raise a TypeError if vspec is not of the right type
if not isinstance(vspec, AbstractVariant):
- msg = 'VariantMap accepts only values of variant types'
- msg += ' [got {0} instead]'.format(type(vspec).__name__)
+ msg = "VariantMap accepts only values of variant types"
+ msg += " [got {0} instead]".format(type(vspec).__name__)
raise TypeError(msg)
# Raise an error if the variant was already in this map
@@ -560,7 +562,7 @@ class VariantMap(lang.HashableMap):
vspec: variant spec to be substituted
"""
if vspec.name not in self:
- msg = 'cannot substitute a key that does not exist [{0}]'
+ msg = "cannot substitute a key that does not exist [{0}]"
raise KeyError(msg.format(vspec.name))
# Set the item
@@ -588,8 +590,7 @@ class VariantMap(lang.HashableMap):
if not strict_or_concrete:
to_be_checked = filter(lambda x: x in self, to_be_checked)
- return all(k in self and self[k].satisfies(other[k])
- for k in to_be_checked)
+ return all(k in self and self[k].satisfies(other[k]) for k in to_be_checked)
def constrain(self, other):
"""Add all variants in other that aren't in self to self. Also
@@ -605,7 +606,7 @@ class VariantMap(lang.HashableMap):
if other.spec is not None and other.spec._concrete:
for k in self:
if k not in other:
- raise UnsatisfiableVariantSpecError(self[k], '<absent>')
+ raise UnsatisfiableVariantSpecError(self[k], "<absent>")
changed = False
for k in other:
@@ -629,9 +630,7 @@ class VariantMap(lang.HashableMap):
Returns:
bool: True or False
"""
- return self.spec._concrete or all(
- v in self for v in self.spec.package_class.variants
- )
+ return self.spec._concrete or all(v in self for v in self.spec.package_class.variants)
def copy(self):
"""Return an instance of VariantMap equivalent to self.
@@ -654,8 +653,7 @@ class VariantMap(lang.HashableMap):
bool_keys = []
kv_keys = []
for key in sorted_keys:
- bool_keys.append(key) if isinstance(self[key].value, bool) \
- else kv_keys.append(key)
+ bool_keys.append(key) if isinstance(self[key].value, bool) else kv_keys.append(key)
# add spaces before and after key/value variants.
string = StringIO()
@@ -664,7 +662,7 @@ class VariantMap(lang.HashableMap):
string.write(str(self[key]))
for key in kv_keys:
- string.write(' ')
+ string.write(" ")
string.write(str(self[key]))
return string.getvalue()
@@ -685,7 +683,7 @@ def substitute_abstract_variants(spec):
failed = []
for name, v in spec.variants.items():
if name in spack.directives.reserved_names:
- if name == 'dev_path':
+ if name == "dev_path":
new_variant = SingleValuedVariant(name, v._original_value)
spec.variants.substitute(new_variant)
continue
@@ -714,21 +712,23 @@ class DisjointSetsOfValues(Sequence):
*sets (list): mutually exclusive sets of values
"""
- _empty_set = set(('none',))
+ _empty_set = set(("none",))
def __init__(self, *sets):
self.sets = [set(_flatten(x)) for x in sets]
# 'none' is a special value and can appear only in a set of
# a single element
- if any('none' in s and s != set(('none',)) for s in self.sets):
- raise error.SpecError("The value 'none' represents the empty set,"
- " and must appear alone in a set. Use the "
- "method 'allow_empty_set' to add it.")
+ if any("none" in s and s != set(("none",)) for s in self.sets):
+ raise error.SpecError(
+ "The value 'none' represents the empty set,"
+ " and must appear alone in a set. Use the "
+ "method 'allow_empty_set' to add it."
+ )
# Sets should not intersect with each other
if any(s1 & s2 for s1, s2 in itertools.combinations(self.sets, 2)):
- raise error.SpecError('sets in input must be disjoint')
+ raise error.SpecError("sets in input must be disjoint")
#: Attribute used to track values which correspond to
#: features which can be enabled or disabled as understood by the
@@ -736,9 +736,11 @@ class DisjointSetsOfValues(Sequence):
self.feature_values = tuple(itertools.chain.from_iterable(self.sets))
self.default = None
self.multi = True
- self.error_fmt = "this variant accepts combinations of values from " \
- "exactly one of the following sets '{values}' " \
- "@*r{{[{package}, variant '{variant}']}}"
+ self.error_fmt = (
+ "this variant accepts combinations of values from "
+ "exactly one of the following sets '{values}' "
+ "@*r{{[{package}, variant '{variant}']}}"
+ )
def with_default(self, default):
"""Sets the default value and returns self."""
@@ -752,9 +754,7 @@ class DisjointSetsOfValues(Sequence):
def with_non_feature_values(self, *values):
"""Marks a few values as not being tied to a feature."""
- self.feature_values = tuple(
- x for x in self.feature_values if x not in values
- )
+ self.feature_values = tuple(x for x in self.feature_values if x not in values)
return self
def allow_empty_set(self):
@@ -763,9 +763,9 @@ class DisjointSetsOfValues(Sequence):
return self
# Create a new object to be returned
- object_with_empty_set = type(self)(('none',), *self.sets)
+ object_with_empty_set = type(self)(("none",), *self.sets)
object_with_empty_set.error_fmt = self.error_fmt
- object_with_empty_set.feature_values = self.feature_values + ('none', )
+ object_with_empty_set.feature_values = self.feature_values + ("none",)
return object_with_empty_set
def prohibit_empty_set(self):
@@ -778,7 +778,7 @@ class DisjointSetsOfValues(Sequence):
object_without_empty_set = type(self)(*sets)
object_without_empty_set.error_fmt = self.error_fmt
object_without_empty_set.feature_values = tuple(
- x for x in self.feature_values if x != 'none'
+ x for x in self.feature_values if x != "none"
)
return object_without_empty_set
@@ -795,23 +795,22 @@ class DisjointSetsOfValues(Sequence):
if any(all(x in s for x in values) for s in self.sets):
return
- format_args = {
- 'variant': variant_name, 'package': pkg_name, 'values': values
- }
- msg = self.error_fmt + \
- " @*r{{[{package}, variant '{variant}']}}"
+ format_args = {"variant": variant_name, "package": pkg_name, "values": values}
+ msg = self.error_fmt + " @*r{{[{package}, variant '{variant}']}}"
msg = llnl.util.tty.color.colorize(msg.format(**format_args))
raise error.SpecError(msg)
+
return _disjoint_set_validator
def _a_single_value_or_a_combination(single_value, *values):
- error = "the value '" + single_value + \
- "' is mutually exclusive with any of the other values"
- return DisjointSetsOfValues(
- (single_value,), values
- ).with_default(single_value).with_error(error).\
- with_non_feature_values(single_value)
+ error = "the value '" + single_value + "' is mutually exclusive with any of the other values"
+ return (
+ DisjointSetsOfValues((single_value,), values)
+ .with_default(single_value)
+ .with_error(error)
+ .with_non_feature_values(single_value)
+ )
# TODO: The factories below are used by package writers to set values of
@@ -833,7 +832,7 @@ def any_combination_of(*values):
Returns:
a properly initialized instance of DisjointSetsOfValues
"""
- return _a_single_value_or_a_combination('none', *values)
+ return _a_single_value_or_a_combination("none", *values)
def auto_or_any_combination_of(*values):
@@ -846,7 +845,7 @@ def auto_or_any_combination_of(*values):
Returns:
a properly initialized instance of DisjointSetsOfValues
"""
- return _a_single_value_or_a_combination('auto', *values)
+ return _a_single_value_or_a_combination("auto", *values)
#: Multi-valued variant that allows any combination picking
@@ -865,18 +864,19 @@ def disjoint_sets(*sets):
Returns:
a properly initialized instance of DisjointSetsOfValues
"""
- return DisjointSetsOfValues(*sets).allow_empty_set().with_default('none')
+ return DisjointSetsOfValues(*sets).allow_empty_set().with_default("none")
@functools.total_ordering
class Value(object):
"""Conditional value that might be used in variants."""
+
def __init__(self, value, when):
self.value = value
self.when = when
def __repr__(self):
- return 'Value({0.value}, when={0.when})'.format(self)
+ return "Value({0.value}, when={0.when})".format(self)
def __str__(self):
return str(self.value)
@@ -902,9 +902,9 @@ class _ConditionalVariantValues(lang.TypedMutableSequence):
def conditional(*values, **kwargs):
"""Conditional values that can be used in variant declarations."""
- if len(kwargs) != 1 and 'when' not in kwargs:
+ if len(kwargs) != 1 and "when" not in kwargs:
raise ValueError('conditional statement expects a "when=" parameter only')
- when = kwargs['when']
+ when = kwargs["when"]
return _ConditionalVariantValues([Value(x, when=when) for x in values])
@@ -914,37 +914,37 @@ class DuplicateVariantError(error.SpecError):
class UnknownVariantError(error.SpecError):
"""Raised when an unknown variant occurs in a spec."""
+
def __init__(self, spec, variants):
self.unknown_variants = variants
- variant_str = 'variant' if len(variants) == 1 else 'variants'
- msg = ('trying to set {0} "{1}" in package "{2}", but the package'
- ' has no such {0} [happened during concretization of {3}]')
+ variant_str = "variant" if len(variants) == 1 else "variants"
+ msg = (
+ 'trying to set {0} "{1}" in package "{2}", but the package'
+ " has no such {0} [happened during concretization of {3}]"
+ )
msg = msg.format(variant_str, comma_or(variants), spec.name, spec.root)
super(UnknownVariantError, self).__init__(msg)
class InconsistentValidationError(error.SpecError):
"""Raised if the wrong validator is used to validate a variant."""
+
def __init__(self, vspec, variant):
- msg = ('trying to validate variant "{0.name}" '
- 'with the validator of "{1.name}"')
- super(InconsistentValidationError, self).__init__(
- msg.format(vspec, variant)
- )
+ msg = 'trying to validate variant "{0.name}" ' 'with the validator of "{1.name}"'
+ super(InconsistentValidationError, self).__init__(msg.format(vspec, variant))
class MultipleValuesInExclusiveVariantError(error.SpecError, ValueError):
"""Raised when multiple values are present in a variant that wants
only one.
"""
+
def __init__(self, variant, pkg):
msg = 'multiple values are not allowed for variant "{0.name}"{1}'
- pkg_info = ''
+ pkg_info = ""
if pkg is not None:
pkg_info = ' in package "{0}"'.format(pkg.name)
- super(MultipleValuesInExclusiveVariantError, self).__init__(
- msg.format(variant, pkg_info)
- )
+ super(MultipleValuesInExclusiveVariantError, self).__init__(msg.format(variant, pkg_info))
class InvalidVariantValueCombinationError(error.SpecError):
@@ -956,7 +956,7 @@ class InvalidVariantValueError(error.SpecError):
def __init__(self, variant, invalid_values, pkg):
msg = 'invalid values for variant "{0.name}"{2}: {1}\n'
- pkg_info = ''
+ pkg_info = ""
if pkg is not None:
pkg_info = ' in package "{0}"'.format(pkg.name)
super(InvalidVariantValueError, self).__init__(
@@ -966,17 +966,15 @@ class InvalidVariantValueError(error.SpecError):
class InvalidVariantForSpecError(error.SpecError):
"""Raised when an invalid conditional variant is specified."""
+
def __init__(self, variant, when, spec):
msg = "Invalid variant {0} for spec {1}.\n"
msg += "{0} is only available for {1.name} when satisfying one of {2}."
- super(InvalidVariantForSpecError, self).__init__(
- msg.format(variant, spec, when)
- )
+ super(InvalidVariantForSpecError, self).__init__(msg.format(variant, spec, when))
class UnsatisfiableVariantSpecError(error.UnsatisfiableSpecError):
"""Raised when a spec variant conflicts with package constraints."""
def __init__(self, provided, required):
- super(UnsatisfiableVariantSpecError, self).__init__(
- provided, required, "variant")
+ super(UnsatisfiableVariantSpecError, self).__init__(provided, required, "variant")
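
# Illustrative sketch (toy standalone code, not Spack's API): the acceptance rule
# that the DisjointSetsOfValues validator above implements: a combination of
# variant values is valid only if every value comes from the same one of the
# mutually exclusive sets.
def check_disjoint_sets(values, *sets):
    if any(all(x in s for x in values) for s in sets):
        return True
    raise ValueError("values {0} do not all come from a single set".format(values))

check_disjoint_sets(("shared", "static"), {"none"}, {"shared", "static"})   # ok
# check_disjoint_sets(("none", "shared"), {"none"}, {"shared", "static"})   # raises
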
diff --git a/lib/spack/spack/verify.py b/lib/spack/spack/verify.py
index dca908f345..9d910bc43d 100644
--- a/lib/spack/spack/verify.py
+++ b/lib/spack/spack/verify.py
@@ -15,7 +15,7 @@ import spack.util.spack_json as sjson
def compute_hash(path):
- with open(path, 'rb') as f:
+ with open(path, "rb") as f:
sha1 = hashlib.sha1(f.read()).digest()
return compat.b32encode(sha1)
@@ -26,30 +26,30 @@ def create_manifest_entry(path):
if os.path.exists(path):
stat = os.stat(path)
- data['mode'] = stat.st_mode
- data['owner'] = stat.st_uid
- data['group'] = stat.st_gid
+ data["mode"] = stat.st_mode
+ data["owner"] = stat.st_uid
+ data["group"] = stat.st_gid
if os.path.islink(path):
- data['type'] = 'link'
- data['dest'] = os.readlink(path)
+ data["type"] = "link"
+ data["dest"] = os.readlink(path)
elif os.path.isdir(path):
- data['type'] = 'dir'
+ data["type"] = "dir"
else:
- data['type'] = 'file'
- data['hash'] = compute_hash(path)
- data['time'] = stat.st_mtime
- data['size'] = stat.st_size
+ data["type"] = "file"
+ data["hash"] = compute_hash(path)
+ data["time"] = stat.st_mtime
+ data["size"] = stat.st_size
return data
def write_manifest(spec):
- manifest_file = os.path.join(spec.prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ manifest_file = os.path.join(
+ spec.prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
if not os.path.exists(manifest_file):
tty.debug("Writing manifest file: No manifest from binary")
@@ -61,7 +61,7 @@ def write_manifest(spec):
manifest[path] = create_manifest_entry(path)
manifest[spec.prefix] = create_manifest_entry(spec.prefix)
- with open(manifest_file, 'w') as f:
+ with open(manifest_file, "w") as f:
sjson.dump(manifest, f)
fp.set_permissions_by_spec(manifest_file, spec)
@@ -71,42 +71,42 @@ def check_entry(path, data):
res = VerificationResults()
if not data:
- res.add_error(path, 'added')
+ res.add_error(path, "added")
return res
stat = os.stat(path)
# Check for all entries
- if stat.st_mode != data['mode']:
- res.add_error(path, 'mode')
- if stat.st_uid != data['owner']:
- res.add_error(path, 'owner')
- if stat.st_gid != data['group']:
- res.add_error(path, 'group')
+ if stat.st_mode != data["mode"]:
+ res.add_error(path, "mode")
+ if stat.st_uid != data["owner"]:
+ res.add_error(path, "owner")
+ if stat.st_gid != data["group"]:
+ res.add_error(path, "group")
# Check for symlink targets and listed as symlink
if os.path.islink(path):
- if data['type'] != 'link':
- res.add_error(path, 'type')
- if os.readlink(path) != data.get('dest', ''):
- res.add_error(path, 'link')
+ if data["type"] != "link":
+ res.add_error(path, "type")
+ if os.readlink(path) != data.get("dest", ""):
+ res.add_error(path, "link")
# Check directories are listed as directory
elif os.path.isdir(path):
- if data['type'] != 'dir':
- res.add_error(path, 'type')
+ if data["type"] != "dir":
+ res.add_error(path, "type")
else:
# Check file contents against hash and listed as file
# Check mtime and size as well
- if stat.st_size != data['size']:
- res.add_error(path, 'size')
- if stat.st_mtime != data['time']:
- res.add_error(path, 'mtime')
- if data['type'] != 'file':
- res.add_error(path, 'type')
- if compute_hash(path) != data.get('hash', ''):
- res.add_error(path, 'hash')
+ if stat.st_size != data["size"]:
+ res.add_error(path, "size")
+ if stat.st_mtime != data["time"]:
+ res.add_error(path, "mtime")
+ if data["type"] != "file":
+ res.add_error(path, "type")
+ if compute_hash(path) != data.get("hash", ""):
+ res.add_error(path, "hash")
return res
@@ -117,20 +117,20 @@ def check_file_manifest(filename):
results = VerificationResults()
while spack.store.layout.metadata_dir not in os.listdir(dirname):
if dirname == os.path.sep:
- results.add_error(filename, 'not owned by any package')
+ results.add_error(filename, "not owned by any package")
return results
dirname = os.path.dirname(dirname)
- manifest_file = os.path.join(dirname,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ manifest_file = os.path.join(
+ dirname, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
if not os.path.exists(manifest_file):
results.add_error(filename, "manifest missing")
return results
try:
- with open(manifest_file, 'r') as f:
+ with open(manifest_file, "r") as f:
manifest = sjson.load(f)
except Exception:
results.add_error(filename, "manifest corrupted")
@@ -139,7 +139,7 @@ def check_file_manifest(filename):
if filename in manifest:
results += check_entry(filename, manifest[filename])
else:
- results.add_error(filename, 'not owned by any package')
+ results.add_error(filename, "not owned by any package")
return results
@@ -147,26 +147,25 @@ def check_spec_manifest(spec):
prefix = spec.prefix
results = VerificationResults()
- manifest_file = os.path.join(prefix,
- spack.store.layout.metadata_dir,
- spack.store.layout.manifest_file_name)
+ manifest_file = os.path.join(
+ prefix, spack.store.layout.metadata_dir, spack.store.layout.manifest_file_name
+ )
if not os.path.exists(manifest_file):
results.add_error(prefix, "manifest missing")
return results
try:
- with open(manifest_file, 'r') as f:
+ with open(manifest_file, "r") as f:
manifest = sjson.load(f)
except Exception:
results.add_error(prefix, "manifest corrupted")
return results
# Get extensions active in spec
- view = spack.filesystem_view.YamlFilesystemView(prefix,
- spack.store.layout)
+ view = spack.filesystem_view.YamlFilesystemView(prefix, spack.store.layout)
active_exts = view.extensions_layout.extension_map(spec).values()
- ext_file = ''
+ ext_file = ""
if active_exts:
# No point checking contents of this file as it is the only source of
# truth for that information.
@@ -178,8 +177,7 @@ def check_spec_manifest(spec):
# This file is linked in by an extension. Belongs to extension
return True
elif os.path.isdir(p) and p not in manifest:
- if all(is_extension_artifact(os.path.join(p, f))
- for f in os.listdir(p)):
+ if all(is_extension_artifact(os.path.join(p, f)) for f in os.listdir(p)):
return True
return False
@@ -205,7 +203,7 @@ def check_spec_manifest(spec):
results += check_entry(prefix, manifest.pop(prefix, {}))
for path in manifest:
- results.add_error(path, 'deleted')
+ results.add_error(path, "deleted")
return results
@@ -229,12 +227,12 @@ class VerificationResults(object):
return sjson.dump(self.errors)
def __str__(self):
- res = ''
+ res = ""
for path, fields in self.errors.items():
- res += '%s verification failed with error(s):\n' % path
+ res += "%s verification failed with error(s):\n" % path
for error in fields:
- res += ' %s\n' % error
+ res += " %s\n" % error
if not res:
- res += 'No Errors'
+ res += "No Errors"
return res
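
# Illustrative sketch (file case only, stdlib stand-ins): the shape of the entry
# that create_manifest_entry() above records for a regular file. base64.b32encode
# here stands in for the compat.b32encode helper used by compute_hash in the diff.
import base64
import hashlib
import os

def manifest_entry_sketch(path):
    st = os.stat(path)
    with open(path, "rb") as f:
        digest = base64.b32encode(hashlib.sha1(f.read()).digest()).decode("ascii")
    return {
        "mode": st.st_mode,
        "owner": st.st_uid,
        "group": st.st_gid,
        "type": "file",
        "hash": digest,
        "time": st.st_mtime,
        "size": st.st_size,
    }
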
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 4d59394480..efcee74afe 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -42,24 +42,26 @@ import spack.util.executable
import spack.util.spack_json as sjson
from spack.util.spack_yaml import syaml_dict
-__all__ = ['Version', 'VersionRange', 'VersionList', 'ver']
+__all__ = ["Version", "VersionRange", "VersionList", "ver"]
# Valid version characters
-VALID_VERSION = re.compile(r'^[A-Za-z0-9_.-]+$')
+VALID_VERSION = re.compile(r"^[A-Za-z0-9_.-]+$")
# regex for a commit version
-COMMIT_VERSION = re.compile(r'^[a-f0-9]{40}$')
+COMMIT_VERSION = re.compile(r"^[a-f0-9]{40}$")
# regex for version segments
-SEGMENT_REGEX = re.compile(r'(?:(?P<num>[0-9]+)|(?P<str>[a-zA-Z]+))(?P<sep>[_.-]*)')
+SEGMENT_REGEX = re.compile(r"(?:(?P<num>[0-9]+)|(?P<str>[a-zA-Z]+))(?P<sep>[_.-]*)")
# regular expression for semantic versioning
-SEMVER_REGEX = re.compile(".+(?P<semver>([0-9]+)[.]([0-9]+)[.]([0-9]+)"
- "(?:-([0-9A-Za-z-]+(?:[.][0-9A-Za-z-]+)*))?"
- "(?:[+][0-9A-Za-z-]+)?)")
+SEMVER_REGEX = re.compile(
+ ".+(?P<semver>([0-9]+)[.]([0-9]+)[.]([0-9]+)"
+ "(?:-([0-9A-Za-z-]+(?:[.][0-9A-Za-z-]+)*))?"
+ "(?:[+][0-9A-Za-z-]+)?)"
+)
# Infinity-like versions. The order in the list implies the comparison rules
-infinity_versions = ['develop', 'main', 'master', 'head', 'trunk', 'stable']
+infinity_versions = ["develop", "main", "master", "head", "trunk", "stable"]
iv_min_len = min(len(s) for s in infinity_versions)
@@ -77,6 +79,7 @@ def coerce_versions(a, b):
def check_type(t):
if t not in order:
raise TypeError("coerce_versions cannot be called on %s" % t)
+
check_type(ta)
check_type(tb)
@@ -100,6 +103,7 @@ def coerce_versions(a, b):
def coerced(method):
"""Decorator that ensures that argument types of a method are coerced."""
+
@wraps(method)
def coercing_method(a, b, *args, **kwargs):
if type(a) == type(b) or a is None or b is None:
@@ -107,13 +111,14 @@ def coerced(method):
else:
ca, cb = coerce_versions(a, b)
return getattr(ca, method.__name__)(cb, *args, **kwargs)
+
return coercing_method
class VersionStrComponent(object):
# NOTE: this is intentionally not a UserString, the abc instanceof
# check is slow enough to eliminate all gains
- __slots__ = ['inf_ver', 'data']
+ __slots__ = ["inf_ver", "data"]
def __init__(self, string):
self.inf_ver = None
@@ -162,15 +167,14 @@ class VersionStrComponent(object):
return self < VersionStrComponent(other)
# If we get here, it's an unsupported comparison
- raise ValueError("VersionStrComponent can only be compared with itself, "
- "int and str")
+ raise ValueError("VersionStrComponent can only be compared with itself, " "int and str")
def __gt__(self, other):
return not self.__lt__(other)
def is_git_version(string):
- if string.startswith('git.'):
+ if string.startswith("git."):
return True
elif len(string) == 40 and COMMIT_VERSION.match(string):
return True
@@ -188,6 +192,7 @@ def Version(string): # capitalized for backwards compatibility
class VersionBase(object):
"""Class to represent versions"""
+
__slots__ = [
"version",
"separators",
@@ -207,9 +212,7 @@ class VersionBase(object):
raise ValueError("Bad characters in version string: %s" % string)
segments = SEGMENT_REGEX.findall(string)
- self.version = tuple(
- int(m[0]) if m[0] else VersionStrComponent(m[1]) for m in segments
- )
+ self.version = tuple(int(m[0]) if m[0] else VersionStrComponent(m[1]) for m in segments)
self.separators = tuple(m[2] for m in segments)
@property
@@ -224,7 +227,7 @@ class VersionBase(object):
Returns:
Version: The version with separator characters replaced by dots
"""
- return type(self)(self.string.replace('-', '.').replace('_', '.'))
+ return type(self)(self.string.replace("-", ".").replace("_", "."))
@property
def underscored(self):
@@ -239,7 +242,7 @@ class VersionBase(object):
Version: The version with separator characters replaced by
underscores
"""
- return type(self)(self.string.replace('.', '_').replace('-', '_'))
+ return type(self)(self.string.replace(".", "_").replace("-", "_"))
@property
def dashed(self):
@@ -253,7 +256,7 @@ class VersionBase(object):
Returns:
Version: The version with separator characters replaced by dashes
"""
- return type(self)(self.string.replace('.', '-').replace('_', '-'))
+ return type(self)(self.string.replace(".", "-").replace("_", "-"))
@property
def joined(self):
@@ -267,8 +270,7 @@ class VersionBase(object):
Returns:
Version: The version with separator characters removed
"""
- return type(self)(
- self.string.replace('.', '').replace('-', '').replace('_', ''))
+ return type(self)(self.string.replace(".", "").replace("-", "").replace("_", ""))
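For context (illustrative, not asserted by the diff), the separator-normalizing properties above each return a new Version:

    from spack.version import Version

    v = Version("1_2-3")
    print(v.dotted)       # 1.2.3
    print(v.underscored)  # 1_2_3
    print(v.dashed)       # 1-2-3
    print(v.joined)       # 123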
def up_to(self, index):
"""The version up to the specified component.
@@ -343,16 +345,16 @@ class VersionBase(object):
if string_arg:
string_arg.pop() # We don't need the last separator
- string_arg = ''.join(string_arg)
+ string_arg = "".join(string_arg)
return cls(string_arg)
else:
- return VersionBase('')
+ return VersionBase("")
- message = '{cls.__name__} indices must be integers'
+ message = "{cls.__name__} indices must be integers"
raise TypeError(message.format(cls=cls))
def __repr__(self):
- return 'VersionBase(' + repr(self.string) + ')'
+ return "VersionBase(" + repr(self.string) + ")"
def __str__(self):
return self.string
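The indexing behavior reformatted above can be sketched like this (outputs inferred from the code rather than stated in the diff):

    from spack.version import Version

    v = Version("1.23.4-35")
    print(v.up_to(2))   # 1.23
    print(v[0])         # 1        (a single numeric component)
    print(v[1:])        # 23.4-35  (slices rebuild a Version using the original separators)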
@@ -367,9 +369,9 @@ class VersionBase(object):
@coerced
def __lt__(self, other):
"""Version comparison is designed for consistency with the way RPM
- does things. If you need more complicated versions in installed
- packages, you should override your package's version string to
- express it more sensibly.
+ does things. If you need more complicated versions in installed
+ packages, you should override your package's version string to
+ express it more sensibly.
"""
if other is None:
return False
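The RPM-style ordering the docstring refers to plays out roughly like this (illustrative only):

    from spack.version import Version

    print(Version("1.0") < Version("1.1"))      # True
    print(Version("1.0") < Version("1.0.1"))    # True: a longer, more specific version sorts later
    print(Version("999") < Version("develop"))  # True: infinity-like names outrank any number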
@@ -409,13 +411,13 @@ class VersionBase(object):
if other is None:
return False
- return other.version[:len(self.version)] == self.version
+ return other.version[: len(self.version)] == self.version
@coerced
def is_predecessor(self, other):
"""True if the other version is the immediate predecessor of this one.
- That is, NO non-git versions v exist such that:
- (self < v < other and v not in self).
+ That is, NO non-git versions v exist such that:
+ (self < v < other and v not in self).
"""
if self.version[:-1] != other.version[:-1]:
return False
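A short sketch of the prefix and predecessor checks above (results inferred from the code, not guaranteed by this diff):

    from spack.version import Version

    print(Version("1.2.3") in Version("1.2"))              # True: 1.2.3 starts with 1.2
    print(Version("2.0") in Version("1.2"))                # False
    print(Version("1.2").is_predecessor(Version("1.3")))   # True: no release fits between them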
@@ -457,11 +459,12 @@ class GitVersion(VersionBase):
Non-git versions may be coerced to GitVersion for comparison, but no Spec will ever
have a GitVersion that is not actually referencing a version from git."""
+
def __init__(self, string):
if not isinstance(string, str):
string = str(string) # In case we got a VersionBase or GitVersion object
- git_prefix = string.startswith('git.')
+ git_prefix = string.startswith("git.")
self.ref = string[4:] if git_prefix else string
self.is_commit = len(self.ref) == 40 and COMMIT_VERSION.match(self.ref)
@@ -495,7 +498,7 @@ class GitVersion(VersionBase):
# Extend previous version by empty component and distance
# If commit is exactly a known version, no distance suffix
prev_tuple = VersionBase(prev_version).version if prev_version else ()
- dist_suffix = (VersionStrComponent(''), distance) if distance else ()
+ dist_suffix = (VersionStrComponent(""), distance) if distance else ()
self.ref_version = prev_tuple + dist_suffix
return self.ref_version
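Illustrative GitVersion behavior, inferred from the constructor and ref_version code above (the 40-character ref below is hypothetical):

    from spack.version import GitVersion

    print(GitVersion("git.develop").ref)          # develop  (the 'git.' prefix is stripped)
    print(bool(GitVersion("a" * 40).is_commit))   # True: 40 hex characters are treated as a commit
    # Once looked up, the ref compares as the previous release's tuple plus an empty
    # string component and the commit distance, so it sorts just after that release.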
@@ -517,14 +520,14 @@ class GitVersion(VersionBase):
return nother <= nself and self_cmp[:nother] == other_cmp
def __repr__(self):
- return 'GitVersion(' + repr(self.string) + ')'
+ return "GitVersion(" + repr(self.string) + ")"
@coerced
def __lt__(self, other):
"""Version comparison is designed for consistency with the way RPM
- does things. If you need more complicated versions in installed
- packages, you should override your package's version string to
- express it more sensibly.
+ does things. If you need more complicated versions in installed
+ packages, you should override your package's version string to
+ express it more sensibly.
"""
if other is None:
return False
@@ -554,13 +557,13 @@ class GitVersion(VersionBase):
return False
self_cmp = self._cmp(other.ref_lookup)
- return other._cmp(self.ref_lookup)[:len(self_cmp)] == self_cmp
+ return other._cmp(self.ref_lookup)[: len(self_cmp)] == self_cmp
@coerced
def is_predecessor(self, other):
"""True if the other version is the immediate predecessor of this one.
- That is, NO non-commit versions v exist such that:
- (self < v < other and v not in self).
+ That is, NO non-commit versions v exist such that:
+ (self < v < other and v not in self).
"""
self_cmp = self._cmp(self.ref_lookup)
other_cmp = other._cmp(other.ref_lookup)
@@ -605,7 +608,6 @@ class GitVersion(VersionBase):
class VersionRange(object):
-
def __init__(self, start, end):
if isinstance(start, string_types):
start = Version(start)
@@ -635,25 +637,26 @@ class VersionRange(object):
@coerced
def __lt__(self, other):
"""Sort VersionRanges lexicographically so that they are ordered first
- by start and then by end. None denotes an open range, so None in
- the start position is less than everything except None, and None in
- the end position is greater than everything but None.
+ by start and then by end. None denotes an open range, so None in
+ the start position is less than everything except None, and None in
+ the end position is greater than everything but None.
"""
if other is None:
return False
s, o = self, other
if s.start != o.start:
- return s.start is None or (
- o.start is not None and s.start < o.start)
- return (s.end != o.end and
- o.end is None or (s.end is not None and s.end < o.end))
+ return s.start is None or (o.start is not None and s.start < o.start)
+ return s.end != o.end and o.end is None or (s.end is not None and s.end < o.end)
@coerced
def __eq__(self, other):
- return (other is not None and
- type(other) == VersionRange and
- self.start == other.start and self.end == other.end)
+ return (
+ other is not None
+ and type(other) == VersionRange
+ and self.start == other.start
+ and self.end == other.end
+ )
@coerced
def __ne__(self, other):
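The open-ended ordering described in the docstring above can be sketched as (illustrative only):

    from spack.version import ver

    print(ver(":2") < ver("1:2"))    # True: an open start sorts before any concrete start
    print(ver("1:2") < ver("1:3"))   # True: equal starts fall back to comparing ends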
@@ -680,19 +683,21 @@ class VersionRange(object):
if other is None:
return False
- in_lower = (self.start == other.start or
- self.start is None or
- (other.start is not None and (
- self.start < other.start or
- other.start in self.start)))
+ in_lower = (
+ self.start == other.start
+ or self.start is None
+ or (
+ other.start is not None and (self.start < other.start or other.start in self.start)
+ )
+ )
if not in_lower:
return False
- in_upper = (self.end == other.end or
- self.end is None or
- (other.end is not None and (
- self.end > other.end or
- other.end in self.end)))
+ in_upper = (
+ self.end == other.end
+ or self.end is None
+ or (other.end is not None and (self.end > other.end or other.end in self.end))
+ )
return in_upper
@coerced
@@ -717,22 +722,35 @@ class VersionRange(object):
@coerced
def overlaps(self, other):
- return ((self.start is None or other.end is None or
- self.start <= other.end or
- other.end in self.start or self.start in other.end) and
- (other.start is None or self.end is None or
- other.start <= self.end or
- other.start in self.end or self.end in other.start))
+ return (
+ self.start is None
+ or other.end is None
+ or self.start <= other.end
+ or other.end in self.start
+ or self.start in other.end
+ ) and (
+ other.start is None
+ or self.end is None
+ or other.start <= self.end
+ or other.start in self.end
+ or self.end in other.start
+ )
@coerced
def union(self, other):
if not self.overlaps(other):
- if (self.end is not None and other.start is not None and
- self.end.is_predecessor(other.start)):
+ if (
+ self.end is not None
+ and other.start is not None
+ and self.end.is_predecessor(other.start)
+ ):
return VersionRange(self.start, other.end)
- if (other.end is not None and self.start is not None and
- other.end.is_predecessor(self.start)):
+ if (
+ other.end is not None
+ and self.start is not None
+ and other.end.is_predecessor(self.start)
+ ):
return VersionRange(other.start, self.end)
return VersionList([self, other])
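Illustrative overlap and union behavior, inferred from the two methods above:

    from spack.version import ver

    print(ver("1:3").overlaps(ver("2:4")))        # True
    print(ver("1.0:1.2").union(ver("1.3:2.0")))   # 1.0:2.0  (1.2 immediately precedes 1.3, so the ranges merge)
    u = ver("1.0:1.2").union(ver("1.5:2.0"))
    print(type(u).__name__)                       # VersionList: a gap remains, so both ranges are kept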
@@ -871,9 +889,7 @@ class VersionList(object):
def highest_numeric(self):
"""Get the highest numeric version in the list."""
- numeric_versions = list(filter(
- lambda v: str(v) not in infinity_versions,
- self.versions))
+ numeric_versions = list(filter(lambda v: str(v) not in infinity_versions, self.versions))
if not any(numeric_versions):
return None
else:
@@ -904,34 +920,30 @@ class VersionList(object):
def to_dict(self):
"""Generate human-readable dict for YAML."""
if self.concrete:
- return syaml_dict([
- ('version', str(self[0]))
- ])
+ return syaml_dict([("version", str(self[0]))])
else:
- return syaml_dict([
- ('versions', [str(v) for v in self])
- ])
+ return syaml_dict([("versions", [str(v) for v in self])])
@staticmethod
def from_dict(dictionary):
"""Parse dict from to_dict."""
- if 'versions' in dictionary:
- return VersionList(dictionary['versions'])
- elif 'version' in dictionary:
- return VersionList([dictionary['version']])
+ if "versions" in dictionary:
+ return VersionList(dictionary["versions"])
+ elif "version" in dictionary:
+ return VersionList([dictionary["version"]])
else:
raise ValueError("Dict must have 'version' or 'versions' in it.")
@coerced
def satisfies(self, other, strict=False):
"""A VersionList satisfies another if some version in the list
- would satisfy some version in the other list. This uses
- essentially the same algorithm as overlaps() does for
- VersionList, but it calls satisfies() on member Versions
- and VersionRanges.
+ would satisfy some version in the other list. This uses
+ essentially the same algorithm as overlaps() does for
+ VersionList, but it calls satisfies() on member Versions
+ and VersionRanges.
- If strict is specified, this version list must lie entirely
- *within* the other in order to satisfy it.
+ If strict is specified, this version list must lie entirely
+ *within* the other in order to satisfy it.
"""
if not other or not self:
return False
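A sketch of the dict round-trip and satisfies semantics above (outputs inferred, not guaranteed by this diff):

    from spack.version import VersionList, ver

    vl = VersionList(["1.2"])
    print(vl.to_dict()["version"])                                    # 1.2  (a concrete list collapses to one entry)
    print(len(VersionList.from_dict({"versions": ["1.2", "1.4"]})))   # 2
    print(VersionList(["1.5"]).satisfies(ver("1:2")))                 # True: 1.5 falls within 1:2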
@@ -976,7 +988,7 @@ class VersionList(object):
Return True if the spec changed as a result; False otherwise
"""
isection = self.intersection(other)
- changed = (isection.versions != self.versions)
+ changed = isection.versions != self.versions
self.versions = isection.versions
return changed
@@ -990,7 +1002,7 @@ class VersionList(object):
if i == 0:
if version not in self[0]:
return False
- elif all(version not in v for v in self[i - 1:]):
+ elif all(version not in v for v in self[i - 1 :]):
return False
return True
@@ -1046,15 +1058,15 @@ class VersionList(object):
def _string_to_version(string):
"""Converts a string to a Version, VersionList, or VersionRange.
- This is private. Client code should use ver().
+ This is private. Client code should use ver().
"""
- string = string.replace(' ', '')
+ string = string.replace(" ", "")
- if ',' in string:
- return VersionList(string.split(','))
+ if "," in string:
+ return VersionList(string.split(","))
- elif ':' in string:
- s, e = string.split(':')
+ elif ":" in string:
+ s, e = string.split(":")
start = Version(s) if s else None
end = Version(e) if e else None
return VersionRange(start, end)
@@ -1065,7 +1077,7 @@ def _string_to_version(string):
def ver(obj):
"""Parses a Version, VersionRange, or VersionList from a string
- or list of strings.
+ or list of strings.
"""
if isinstance(obj, (list, tuple)):
return VersionList(obj)
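The dispatch in ver() and _string_to_version can be summarized as (illustrative):

    from spack.version import VersionBase, VersionList, VersionRange, ver

    print(isinstance(ver("1.2.3"), VersionBase))        # True: a plain string is a single version
    print(isinstance(ver("1.2:1.4"), VersionRange))     # True: ':' builds a range
    print(isinstance(ver("1.2, 1.6"), VersionList))     # True: ',' builds a list (spaces are stripped)
    print(isinstance(ver(["1.2", "1.6"]), VersionList)) # True: lists and tuples also build a list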
@@ -1099,6 +1111,7 @@ class CommitLookup(object):
Version.is_ref returns True to allow for comparisons between git refs
and versions as represented by tags in the git repository.
"""
+
def __init__(self, pkg_name):
self.pkg_name = pkg_name
@@ -1116,9 +1129,9 @@ class CommitLookup(object):
@property
def cache_key(self):
if not self._cache_key:
- key_base = 'git_metadata'
- if not self.repository_uri.startswith('/'):
- key_base += '/'
+ key_base = "git_metadata"
+ if not self.repository_uri.startswith("/"):
+ key_base += "/"
self._cache_key = key_base + self.repository_uri
# Cache data in misc_cache
@@ -1129,8 +1142,7 @@ class CommitLookup(object):
@property
def cache_path(self):
if not self._cache_path:
- self._cache_path = spack.caches.misc_cache.cache_path(
- self.cache_key)
+ self._cache_path = spack.caches.misc_cache.cache_path(self.cache_key)
return self._cache_path
@property
@@ -1144,6 +1156,7 @@ class CommitLookup(object):
if not self._fetcher:
# We require the full git repository history
import spack.fetch_strategy # break cycle
+
fetcher = spack.fetch_strategy.GitFetchStrategy(git=self.pkg.git)
fetcher.get_full_repo = True
self._fetcher = fetcher
@@ -1156,9 +1169,9 @@ class CommitLookup(object):
"""
try:
- components = [str(c).lstrip('/')
- for c in spack.util.url.parse_git_url(self.pkg.git)
- if c]
+ components = [
+ str(c).lstrip("/") for c in spack.util.url.parse_git_url(self.pkg.git) if c
+ ]
return os.path.join(*components)
except ValueError:
# If it's not a git url, it's a local path
@@ -1198,7 +1211,7 @@ class CommitLookup(object):
to the commit in the git repo. Those values are used to compare Version objects.
"""
dest = os.path.join(spack.paths.user_repos_cache_path, self.repository_uri)
- if dest.endswith('.git'):
+ if dest.endswith(".git"):
dest = dest[:-4]
# prepare a cache for the repository
@@ -1216,24 +1229,26 @@ class CommitLookup(object):
# remote instance, simply adding '-f' may not be sufficient
# (if commits are deleted on the remote, this command alone
# won't properly update the local rev-list)
- self.fetcher.git("fetch", '--tags', output=os.devnull, error=os.devnull)
+ self.fetcher.git("fetch", "--tags", output=os.devnull, error=os.devnull)
# Ensure ref is a commit object known to git
# Note the brackets are literals, the ref replaces the format string
try:
self.fetcher.git(
- 'cat-file', '-e', '%s^{commit}' % ref,
- output=os.devnull, error=os.devnull
+ "cat-file", "-e", "%s^{commit}" % ref, output=os.devnull, error=os.devnull
)
except spack.util.executable.ProcessError:
- raise VersionLookupError(
- "%s is not a valid git ref for %s" % (ref, self.pkg_name)
- )
+ raise VersionLookupError("%s is not a valid git ref for %s" % (ref, self.pkg_name))
# List tags (refs) by date, so last reference of a tag is newest
tag_info = self.fetcher.git(
- "for-each-ref", "--sort=creatordate", "--format",
- "%(objectname) %(refname)", "refs/tags", output=str).split('\n')
+ "for-each-ref",
+ "--sort=creatordate",
+ "--format",
+ "%(objectname) %(refname)",
+ "refs/tags",
+ output=str,
+ ).split("\n")
# Lookup of commits to spack versions
commit_to_version = {}
@@ -1242,29 +1257,27 @@ class CommitLookup(object):
if not entry:
continue
tag_commit, tag = entry.split()
- tag = tag.replace('refs/tags/', '', 1)
+ tag = tag.replace("refs/tags/", "", 1)
# For each tag, try to match to a version
for v in [v.string for v in self.pkg.versions]:
- if v == tag or 'v' + v == tag:
+ if v == tag or "v" + v == tag:
commit_to_version[tag_commit] = v
break
else:
# try to parse tag to compare versions spack does not know
match = SEMVER_REGEX.match(tag)
if match:
- semver = match.groupdict()['semver']
+ semver = match.groupdict()["semver"]
commit_to_version[tag_commit] = semver
ancestor_commits = []
for tag_commit in commit_to_version:
- self.fetcher.git(
- 'merge-base', '--is-ancestor', tag_commit, ref,
- ignore_errors=[1])
+ self.fetcher.git("merge-base", "--is-ancestor", tag_commit, ref, ignore_errors=[1])
if self.fetcher.git.returncode == 0:
distance = self.fetcher.git(
- 'rev-list', '%s..%s' % (tag_commit, ref), '--count',
- output=str, error=str).strip()
+ "rev-list", "%s..%s" % (tag_commit, ref), "--count", output=str, error=str
+ ).strip()
ancestor_commits.append((tag_commit, int(distance)))
# Get nearest ancestor that is a known version
@@ -1275,15 +1288,15 @@ class CommitLookup(object):
else:
# Get list of all commits, this is in reverse order
# We use this to get the first commit below
- ref_info = self.fetcher.git("log", "--all", "--pretty=format:%H",
- output=str)
- commits = [c for c in ref_info.split('\n') if c]
+ ref_info = self.fetcher.git("log", "--all", "--pretty=format:%H", output=str)
+ commits = [c for c in ref_info.split("\n") if c]
# No previous version and distance from first commit
prev_version = None
- distance = int(self.fetcher.git(
- 'rev-list', '%s..%s' % (commits[-1], ref), '--count',
- output=str, error=str
- ).strip())
+ distance = int(
+ self.fetcher.git(
+ "rev-list", "%s..%s" % (commits[-1], ref), "--count", output=str, error=str
+ ).strip()
+ )
return prev_version, distance
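A standalone sketch of the ancestor-selection step above, with hypothetical data (this is not spack API, just the idea of picking the known-version ancestor with the smallest commit distance):

    # Hypothetical tag commits and distances; names are placeholders.
    ancestor_commits = [
        ("commit-of-v2.1.0", 14),   # 14 commits behind the requested ref
        ("commit-of-v2.0.0", 57),
    ]
    commit_to_version = {"commit-of-v2.1.0": "2.1.0", "commit-of-v2.0.0": "2.0.0"}

    if ancestor_commits:
        prev_commit, distance = min(ancestor_commits, key=lambda pair: pair[1])
        prev_version = commit_to_version[prev_commit]
    else:
        # Mirrors the fallback above: no tagged ancestor, count from the first commit.
        prev_version, distance = None, 0
    print(prev_version, distance)   # 2.1.0 14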