summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorJustin S <3630356+codeandkey@users.noreply.github.com>2019-04-19 19:39:13 -0500
committerPeter Scheibel <scheibel1@llnl.gov>2019-04-19 17:39:13 -0700
commit6f1fe3904cdda29cb6a373be7d1b3aac2adbc753 (patch)
treebd55c9a648d79976953407554af1230b96915b56 /lib
parent3b34931f6886a44709c35eb0a41535f88b3e2002 (diff)
downloadspack-6f1fe3904cdda29cb6a373be7d1b3aac2adbc753.tar.gz
spack-6f1fe3904cdda29cb6a373be7d1b3aac2adbc753.tar.bz2
spack-6f1fe3904cdda29cb6a373be7d1b3aac2adbc753.tar.xz
spack-6f1fe3904cdda29cb6a373be7d1b3aac2adbc753.zip
Fix outdated R packages failing to fetch (#11039)
PR #10758 made a slight change to find_versions_of_archive() which included archive_url in the search process. While this fixed `spack create` and `spack checksum` missing command-line arguments, it caused `spack install` to prefer those URLs over those it found in the scrape process. As a result, the package url was treated as a list_url causing all R packages to stop fetching once the package was updated on CRAN. This patch is more selective about including the archive_url in the remote versions, explicitly overriding it with matching versions found by the scraper.
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/spack/util/web.py7
1 file changed, 4 insertions, 3 deletions
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 959d03781e..99078b203a 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -304,9 +304,8 @@ def find_versions_of_archive(archive_urls, list_url=None, list_depth=0):
list_urls.update(additional_list_urls)
# Grab some web pages to scrape.
- # Start with any links already given.
pages = {}
- links = set(archive_urls)
+ links = set()
for lurl in list_urls:
pg, lnk = spider(lurl, depth=list_depth)
pages.update(pg)
@@ -345,8 +344,10 @@ def find_versions_of_archive(archive_urls, list_url=None, list_depth=0):
regexes.append(url_regex)
# Build a dict version -> URL from any links that match the wildcards.
+ # Walk through archive_url links first.
+ # Any conflicting versions will be overwritten by the list_url links.
versions = {}
- for url in sorted(links):
+ for url in archive_urls + sorted(links):
if any(re.search(r, url) for r in regexes):
try:
ver = spack.url.parse_version(url)