# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import sys


class Hdf(AutotoolsPackage):
    """HDF4 (also known as HDF) is a library and multi-object
    file format for storing and managing data between machines."""

    homepage = "https://portal.hdfgroup.org"
    url      = "https://support.hdfgroup.org/ftp/HDF/releases/HDF4.2.14/src/hdf-4.2.14.tar.gz"
    list_url = "https://support.hdfgroup.org/ftp/HDF/releases/"
    list_depth = 2
    maintainers = ['lrknox']

    version('4.2.15', sha256='dbeeef525af7c2d01539906c28953f0fdab7dba603d1bc1ec4a5af60d002c459')
    version('4.2.14', sha256='2d383e87c8a0ca6a5352adbd1d5546e6cc43dc21ff7d90f93efa644d85c0b14a')
    version('4.2.13', sha256='be9813c1dc3712c2df977d4960e1f13f20f447dfa8c3ce53331d610c1f470483')
    version('4.2.12', sha256='dd419c55e85d1a0e13f3ea5ed35d00710033ccb16c85df088eb7925d486e040c')
    version('4.2.11', sha256='c3f7753b2fb9b27d09eced4d2164605f111f270c9a60b37a578f7de02de86d24')

    variant('szip', default=False, description="Enable szip support")
    variant('external-xdr', default=sys.platform != 'darwin',
            description="Use an external XDR backend")
    variant('netcdf', default=False,
            description='Build NetCDF API (version 2.3.2)')
    variant('fortran', default=False,
            description='Enable Fortran interface')
    variant('java', default=False,
            description='Enable Java JNI interface')
    variant('shared', default=False, description='Enable shared library')
    variant('pic', default=True,
            description='Produce position-independent code')

    depends_on('zlib@1.1.4:')
    depends_on('jpeg')
    depends_on('szip', when='+szip')
    depends_on('rpc', when='+external-xdr')

    depends_on('bison', type='build')
    depends_on('flex',  type='build')
    depends_on('java@7:', when='+java', type=('build', 'run'))

    # https://forum.hdfgroup.org/t/cant-build-hdf-4-2-14-with-jdk-11-and-enable-java/5702
    patch('disable_doclint.patch', when='@:4.2.14^java@9:')

    conflicts('^libjpeg@:6a')

    # configure: error: Cannot build shared fortran libraries.
    # Please configure with --disable-fortran flag.
    conflicts('+fortran', when='+shared')

    # configure: error: Java requires shared libraries to be built
    conflicts('+java', when='~shared')

    # configure: WARNING: unrecognized options: --enable-java
    conflicts('+java', when='@:4.2.11')

    # The Java interface library uses netcdf-related macro definitions even
    # when netcdf is disabled and the macros are not defined, e.g.:
    # hdfsdsImp.c:158:30: error: 'MAX_NC_NAME' undeclared
    conflicts('+java', when='@4.2.12:4.2.13~netcdf')

    # TODO: '@:4.2.14 ~external-xdr' should be declared in conflict with
    #  64-bit target architectures.
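
    # For illustration only: a request such as
    #     spack install hdf@4.2.15 +shared +java +netcdf +szip
    # satisfies the constraints above (Java needs shared libraries and, on
    # 4.2.12/4.2.13, the netcdf API), whereas '+fortran +shared' or
    # '+java ~shared' would be rejected by the conflicts declared here.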

    @property
    def libs(self):
        """HDF can be queried for the following parameters:

        - "shared": shared libraries (default if '+shared')
        - "static": static libraries (default if '~shared')
        - "transitive": append transitive dependencies to the list of static
            libraries (the argument is ignored if shared libraries are
            requested)

        :return: list of matching libraries
        """
        libraries = ['libmfhdf', 'libdf']

        query_parameters = self.spec.last_query.extra_parameters

        if 'shared' in query_parameters:
            shared = True
        elif 'static' in query_parameters:
            shared = False
        else:
            shared = '+shared' in self.spec

        libs = find_libraries(
            libraries, root=self.prefix, shared=shared, recursive=True
        )

        if not libs:
            msg = 'Unable to recursively locate {0} {1} libraries in {2}'
            raise spack.error.NoLibrariesError(
                msg.format('shared' if shared else 'static',
                           self.spec.name,
                           self.spec.prefix))

        if not shared and 'transitive' in query_parameters:
            libs += self.spec['jpeg:transitive'].libs
            libs += self.spec['zlib:transitive'].libs
            if '+szip' in self.spec:
                libs += self.spec['szip:transitive'].libs
            if ('+external-xdr' in self.spec and
                    self.spec['rpc'].name != 'libc'):
                libs += self.spec['rpc:transitive'].libs

        return libs
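
    # A minimal usage sketch (hypothetical, not part of this package): a
    # dependent package could request the static libraries together with
    # their transitive dependencies through Spack's spec query syntax, e.g.:
    #
    #     hdf_libs = self.spec['hdf:static,transitive'].libs
    #     config_args.append('LIBS=%s' % hdf_libs.link_flags)
    #
    # where 'config_args' stands in for whatever argument list the caller
    # assembles.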

    def flag_handler(self, name, flags):
        if '+pic' in self.spec:
            if name == 'cflags':
                flags.append(self.compiler.cc_pic_flag)
            elif name == 'fflags':
                flags.append(self.compiler.f77_pic_flag)

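        # Returning the flags as the first element of the (injected, env,
        # build system) triple hands them to Spack's compiler wrappers;
        # nothing is added to the build environment or the configure line.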
        return flags, None, None

    def configure_args(self):
        config_args = ['--enable-production',
                       '--enable-static',
                       '--with-zlib=%s' % self.spec['zlib'].prefix,
                       '--with-jpeg=%s' % self.spec['jpeg'].prefix]

        config_args += self.enable_or_disable('shared')
        config_args += self.enable_or_disable('netcdf')
        config_args += self.enable_or_disable('fortran')
        config_args += self.enable_or_disable('java')

        if '+szip' in self.spec:
            config_args.append('--with-szlib=%s' % self.spec['szip'].prefix)
        else:
            config_args.append('--without-szlib')

        if '~external-xdr' in self.spec:
            config_args.append('--enable-hdf4-xdr')
        elif self.spec['rpc'].name != 'libc':
            # We should not specify '--disable-hdf4-xdr' due to a bug in the
            # configure script.
            config_args.append('LIBS=%s' % self.spec['rpc'].libs.link_flags)

        # https://github.com/Parallel-NetCDF/PnetCDF/issues/61
        if self.spec.satisfies('%gcc@10:'):
            config_args.extend([
                'FFLAGS=-fallow-argument-mismatch',
                'FCFLAGS=-fallow-argument-mismatch']
            )

        return config_args
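
    # For illustration only (exact arguments depend on the concretized spec
    # and on dependency prefixes): a spec such as
    # 'hdf@4.2.15 +szip ~shared ~netcdf ~fortran ~java ~external-xdr'
    # roughly translates to
    #
    #   configure --enable-production --enable-static
    #             --with-zlib=<zlib prefix> --with-jpeg=<jpeg prefix>
    #             --disable-shared --disable-netcdf --disable-fortran
    #             --disable-java --with-szlib=<szip prefix> --enable-hdf4-xdr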

    # The test suite must be run serially (parallel=False below); otherwise,
    # we randomly get:
    # SDgetfilename:
    #   incorrect file being opened - expected <file755>, retrieved <file754>
    def check(self):
        with working_dir(self.build_directory):
            make('check', parallel=False)

    extra_install_tests = 'hdf/util/testfiles'

    @property
    def cached_tests_work_dir(self):
        """The working directory for cached test sources."""
        return join_path(self.test_suite.current_test_cache_dir,
                         self.extra_install_tests)

    @run_after('install')
    def setup_build_tests(self):
        """Copy the build test files after the package is installed to an
        install test subdirectory for use during `spack test run`."""
        self.cache_extra_test_sources(self.extra_install_tests)

    def _test_check_versions(self):
        """Perform version checks on selected installed package binaries."""
        spec_vers_str = 'Version {0}'.format(self.spec.version.up_to(2))

        exes = ['hdfimport', 'hrepack', 'ncdump', 'ncgen']
        for exe in exes:
            reason = 'test: ensuring version of {0} is {1}' \
                .format(exe, spec_vers_str)
            self.run_test(exe, ['-V'], spec_vers_str, installed=True,
                          purpose=reason, skip_missing=True)

    def _test_gif_converters(self):
        """This test performs an image conversion sequence and diff."""
        work_dir = '.'
        storm_fn = os.path.join(self.cached_tests_work_dir, 'storm110.hdf')

        gif_fn = 'storm110.gif'
        new_hdf_fn = 'storm110gif.hdf'

        # Convert a test HDF file to a gif
        self.run_test('hdf2gif', [storm_fn, gif_fn], '', installed=True,
                      purpose="test: hdf-to-gif", work_dir=work_dir)

        # Convert the gif to an HDF file
        self.run_test('gif2hdf', [gif_fn, new_hdf_fn], '', installed=True,
                      purpose="test: gif-to-hdf", work_dir=work_dir)

        # Compare the original and new HDF files
        self.run_test('hdiff', [new_hdf_fn, storm_fn], '', installed=True,
                      purpose="test: compare orig to new hdf",
                      work_dir=work_dir)

    def _test_list(self):
        """This test compares low-level HDF file information to expected."""
        storm_fn = os.path.join(self.cached_tests_work_dir,
                                'storm110.hdf')
        test_data_dir = self.test_suite.current_test_data_dir
        work_dir = '.'

        reason = 'test: checking hdfls output'
        details_file = os.path.join(test_data_dir, 'storm110.out')
        expected = get_escaped_text_output(details_file)
        self.run_test('hdfls', [storm_fn], expected, installed=True,
                      purpose=reason, skip_missing=True, work_dir=work_dir)

    def test(self):
        """Perform smoke tests on the installed package."""
        # Simple version checks on the subset of installed binaries that
        # respond to '-V'
        self._test_check_versions()

        # Run gif converter sequence test
        self._test_gif_converters()

        # Compare hdfls output against the expected listing
        self._test_list()
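
    # Note: these standalone smoke tests are run against an installed spec
    # with 'spack test run hdf'; the inputs come from the
    # 'hdf/util/testfiles' sources cached by setup_build_tests and from this
    # package's test data directory.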