author     Axel Huebl <axel.huebl@plasma.ninja>    2022-08-20 10:12:23 -0700
committer  GitHub <noreply@github.com>             2022-08-20 10:12:23 -0700
commit     39beafc99a8b19803918983f1e1b4c06879c880a (patch)
tree       0a6f5eaaf0c2dfaa9d93848aa60bd99b7f3489d8
parent     fff929d5ab08fc21a3fbe795c10e255f993aa39e (diff)
Ascent, Conduit & VTK-h: don't assume I have Fortran (#32285)
* VTK-h: don't assume I have Fortran

  Don't assume I have a working Fortran compiler in my toolchain :)

* Conduit: Do not Assume Fortran
* Ascent: Do not Assume Fortran
* fix style
-rw-r--r--  var/spack/repos/builtin/packages/ascent/package.py    5
-rw-r--r--  var/spack/repos/builtin/packages/conduit/package.py   2
-rw-r--r--  var/spack/repos/builtin/packages/vtk-h/package.py     6
3 files changed, 8 insertions(+), 5 deletions(-)
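For context, all three packages generate a CMake host-config file at install time, and the hunks below only emit the MPI Fortran wrapper when Fortran support is actually requested. A minimal sketch of that pattern, assuming a cmake_cache_entry helper roughly like the one these packages define (names, signatures, and formatting here are illustrative, not the exact upstream code):

# Illustrative sketch only -- not the exact code from these package.py files.
def cmake_cache_entry(name, value, vtype="STRING"):
    # Emit one CMake cache assignment line for the generated host-config file.
    return 'set({0} "{1}" CACHE {2} "")\n'.format(name, value, vtype)


def write_mpi_entries(cfg, spec):
    # Hypothetical helper mirroring the guarded pattern used in the hunks below.
    cfg.write(cmake_cache_entry("MPI_C_COMPILER", spec["mpi"].mpicc, "PATH"))
    cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", spec["mpi"].mpicxx, "PATH"))
    # Only look up and write the Fortran MPI wrapper when the build enables
    # Fortran, so a toolchain without a Fortran compiler still configures.
    if "+fortran" in spec:
        cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", spec["mpi"].mpifc, "PATH"))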
diff --git a/var/spack/repos/builtin/packages/ascent/package.py b/var/spack/repos/builtin/packages/ascent/package.py
index df8b9bdcc6..7181b5a819 100644
--- a/var/spack/repos/builtin/packages/ascent/package.py
+++ b/var/spack/repos/builtin/packages/ascent/package.py
@@ -446,7 +446,7 @@ class Ascent(CMakePackage, CudaPackage):
if "+mpi" in spec:
mpicc_path = spec["mpi"].mpicc
mpicxx_path = spec["mpi"].mpicxx
- mpifc_path = spec["mpi"].mpifc
+ mpifc_path = spec["mpi"].mpifc if "+fortran" in spec else None
# if we are using compiler wrappers on cray systems
# use those for mpi wrappers, b/c spec['mpi'].mpicxx
# etc make return the spack compiler wrappers
@@ -458,7 +458,8 @@ class Ascent(CMakePackage, CudaPackage):
cfg.write(cmake_cache_entry("ENABLE_MPI", "ON"))
cfg.write(cmake_cache_entry("MPI_C_COMPILER", mpicc_path))
cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", mpicxx_path))
- cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", mpifc_path))
+ if "+fortran" in spec:
+ cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", mpifc_path))
mpiexe_bin = join_path(spec["mpi"].prefix.bin, "mpiexec")
if os.path.isfile(mpiexe_bin):
# starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
diff --git a/var/spack/repos/builtin/packages/conduit/package.py b/var/spack/repos/builtin/packages/conduit/package.py
index d9980d341d..600f2a23ff 100644
--- a/var/spack/repos/builtin/packages/conduit/package.py
+++ b/var/spack/repos/builtin/packages/conduit/package.py
@@ -452,7 +452,7 @@ class Conduit(CMakePackage):
if "+mpi" in spec:
mpicc_path = spec["mpi"].mpicc
mpicxx_path = spec["mpi"].mpicxx
- mpifc_path = spec["mpi"].mpifc
+ mpifc_path = spec["mpi"].mpifc if "+fortran" in spec else None
# if we are using compiler wrappers on cray systems
# use those for mpi wrappers, b/c spec['mpi'].mpicxx
# etc make return the spack compiler wrappers
diff --git a/var/spack/repos/builtin/packages/vtk-h/package.py b/var/spack/repos/builtin/packages/vtk-h/package.py
index 5d1c0daa36..c1c0b3ace3 100644
--- a/var/spack/repos/builtin/packages/vtk-h/package.py
+++ b/var/spack/repos/builtin/packages/vtk-h/package.py
@@ -197,7 +197,8 @@ class VtkH(CMakePackage, CudaPackage):
if "+mpi" in spec:
mpicc_path = spec["mpi"].mpicc
mpicxx_path = spec["mpi"].mpicxx
- mpifc_path = spec["mpi"].mpifc
+ has_mpifc = hasattr(spec["mpi"], "mpifc")
+ mpifc_path = spec["mpi"].mpifc if has_mpifc else None
# if we are using compiler wrappers on cray systems
# use those for mpi wrappers, b/c spec['mpi'].mpicxx
# etc make return the spack compiler wrappers
@@ -209,7 +210,8 @@ class VtkH(CMakePackage, CudaPackage):
cfg.write(cmake_cache_entry("ENABLE_MPI", "ON"))
cfg.write(cmake_cache_entry("MPI_C_COMPILER", mpicc_path))
cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", mpicxx_path))
- cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", mpifc_path))
+ if has_mpifc:
+ cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", mpifc_path))
mpiexe_bin = join_path(spec["mpi"].prefix.bin, "mpiexec")
if os.path.isfile(mpiexe_bin):
# starting with cmake 3.10, FindMPI expects MPIEXEC_EXECUTABLE
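
Note that VTK-h guards on the MPI provider itself rather than on a package variant: it checks whether the MPI spec exposes an mpifc wrapper at all. A minimal sketch of that duck-typed guard, with the same hypothetical cmake_cache_entry helper as above:

# Illustrative sketch of the attribute-based guard used in the VTK-h hunks above.
mpi = spec["mpi"]
has_mpifc = hasattr(mpi, "mpifc")  # some MPI providers expose no Fortran wrapper
mpifc_path = mpi.mpifc if has_mpifc else None

cfg.write(cmake_cache_entry("MPI_C_COMPILER", mpi.mpicc, "PATH"))
cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", mpi.mpicxx, "PATH"))
if has_mpifc:
    # Skip MPI_Fortran_COMPILER entirely when no Fortran wrapper exists.
    cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER", mpifc_path, "PATH"))

Either way, the effect is the same: the generated host-config simply omits MPI_Fortran_COMPILER instead of pointing CMake at a wrapper that does not exist.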