author     Harmen Stoppels <harmenstoppels@gmail.com>  2023-08-24 08:26:56 +0200
committer  GitHub <noreply@github.com>                 2023-08-24 08:26:56 +0200
commit     8914d2686718ef3e2ad55ca0f96fc28082aa8f38 (patch)
tree       d6529c74e1f8e3896ec61645c17102284efadc0d /lib
parent     fdea5e7624a37849bc35aae6fbd6df7b07868b74 (diff)
rebuild-index: fix race condition by avoiding parallelism (#39594)
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/binary_distribution.py  35
1 file changed, 10 insertions(+), 25 deletions(-)
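Note: the diff below swaps a ThreadPool-based fetch of spec files for a plain serial loop. A minimal sketch of that pattern change in isolation (fetch_one and the URL list are hypothetical stand-ins, not Spack code):

# Sketch only: fetch_one() and urls are made-up stand-ins for the mirror fetch.
import multiprocessing.pool


def fetch_one(url):
    return "contents of " + url


urls = ["a.spec.json", "b.spec.json.sig"]

# Before: results were gathered through a thread pool.
pool = multiprocessing.pool.ThreadPool(processes=4)
try:
    contents = pool.map(fetch_one, urls)
finally:
    pool.terminate()
    pool.join()

# After: the same files are read one at a time, so nothing runs concurrently.
contents = [fetch_one(url) for url in urls]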
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 7ba8b5ff07..abca0531df 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -9,7 +9,6 @@ import hashlib
 import io
 import itertools
 import json
-import multiprocessing.pool
 import os
 import re
 import shutil
@@ -876,32 +875,18 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di
         db: A spack database used for adding specs and then writing the index.
         temp_dir (str): Location to write index.json and hash for pushing
         concurrency (int): Number of parallel processes to use when fetching
-
-    Return:
-        None
     """
+    for file in file_list:
+        contents = read_method(file)
+        # Need full spec.json name or this gets confused with index.json.
+        if file.endswith(".json.sig"):
+            specfile_json = Spec.extract_json_from_clearsig(contents)
+            fetched_spec = Spec.from_dict(specfile_json)
+        elif file.endswith(".json"):
+            fetched_spec = Spec.from_json(contents)
+        else:
+            continue
 
-    def _fetch_spec_from_mirror(spec_url):
-        spec_file_contents = read_method(spec_url)
-
-        if spec_file_contents:
-            # Need full spec.json name or this gets confused with index.json.
-            if spec_url.endswith(".json.sig"):
-                specfile_json = Spec.extract_json_from_clearsig(spec_file_contents)
-                return Spec.from_dict(specfile_json)
-            if spec_url.endswith(".json"):
-                return Spec.from_json(spec_file_contents)
-
-    tp = multiprocessing.pool.ThreadPool(processes=concurrency)
-    try:
-        fetched_specs = tp.map(
-            llnl.util.lang.star(_fetch_spec_from_mirror), [(f,) for f in file_list]
-        )
-    finally:
-        tp.terminate()
-        tp.join()
-
-    for fetched_spec in fetched_specs:
         db.add(fetched_spec, None)
         db.mark(fetched_spec, "in_buildcache", True)
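For reference, a self-contained sketch of the new loop's control flow; FakeDB, read_method, and the file names are hypothetical stand-ins so the suffix dispatch can be run without Spack:

# Sketch only: FakeDB and read_method stand in for the Spack database and the
# mirror fetcher used by the real code.
import json


def read_method(file):
    # Pretend every file on the mirror holds a tiny JSON document.
    return json.dumps({"name": file.split(".")[0]})


class FakeDB:
    def __init__(self):
        self.specs = []
        self.flags = {}

    def add(self, spec, directory_layout):
        self.specs.append(spec)

    def mark(self, spec, key, value):
        self.flags[(spec["name"], key)] = value


db = FakeDB()
file_list = ["zlib.spec.json", "cmake.spec.json.sig", "index.json.hash"]

for file in file_list:
    contents = read_method(file)
    # Dispatch on the file suffix, mirroring the loop added above; anything
    # that is neither .json.sig nor .json (e.g. the hash file) is skipped.
    if file.endswith(".json.sig"):
        spec = json.loads(contents)  # real code: extract_json_from_clearsig + Spec.from_dict
    elif file.endswith(".json"):
        spec = json.loads(contents)  # real code: Spec.from_json
    else:
        continue

    db.add(spec, None)
    db.mark(spec, "in_buildcache", True)

print([s["name"] for s in db.specs])  # ['zlib', 'cmake']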