diff options
-rw-r--r--  lib/spack/spack/package.py  |  6 +++++-
-rw-r--r--  lib/spack/spack/util/web.py | 21 ++++++++++++++++++++-
2 files changed, 25 insertions(+), 2 deletions(-)
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index c10caf89fc..c30747edeb 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -2564,7 +2564,11 @@ class PackageBase(six.with_metaclass(PackageMeta, PackageViewMixin, object)):
         try:
             return spack.util.web.find_versions_of_archive(
-                self.all_urls, self.list_url, self.list_depth, concurrency
+                self.all_urls,
+                self.list_url,
+                self.list_depth,
+                concurrency,
+                reference_package=self,
             )
         except spack.util.web.NoNetworkConnectionError as e:
             tty.die("Package.fetch_versions couldn't connect to:", e.url,
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index a47bd39ccb..0f148a88f5 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -562,7 +562,7 @@ def _urlopen(req, *args, **kwargs):
 
 
 def find_versions_of_archive(
-    archive_urls, list_url=None, list_depth=0, concurrency=32
+    archive_urls, list_url=None, list_depth=0, concurrency=32, reference_package=None
 ):
     """Scrape web pages for new versions of a tarball.
 
@@ -577,6 +577,10 @@ def find_versions_of_archive(
         list_depth (int): max depth to follow links on list_url pages.
             Defaults to 0.
         concurrency (int): maximum number of concurrent requests
+        reference_package (spack.package.Package or None): a spack package
+            used as a reference for url detection. Uses the url_for_version
+            method on the package to produce reference urls which, if found,
+            are preferred.
     """
     if not isinstance(archive_urls, (list, tuple)):
         archive_urls = [archive_urls]
@@ -638,11 +642,26 @@ def find_versions_of_archive(
     # Walk through archive_url links first.
     # Any conflicting versions will be overwritten by the list_url links.
     versions = {}
+    matched = set()
     for url in archive_urls + sorted(links):
         if any(re.search(r, url) for r in regexes):
             try:
                 ver = spack.url.parse_version(url)
+                if ver in matched:
+                    continue
                 versions[ver] = url
+                # prevent this version from getting overwritten
+                if url in archive_urls:
+                    matched.add(ver)
+                elif reference_package is not None:
+                    if url == reference_package.url_for_version(ver):
+                        matched.add(ver)
+                    else:
+                        extrapolated_urls = [
+                            spack.url.substitute_version(u, ver) for u in archive_urls
+                        ]
+                        if url in extrapolated_urls:
+                            matched.add(ver)
             except spack.url.UndetectableVersionError:
                 continue