/acts/CI/dependencies/select_lockfile.py
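This CI helper selects and downloads a Spack lockfile from a GitHub release of acts-project/ci-dependencies, matching a requested architecture and (optionally) a compiler, with a small size-limited on-disk cache for the API responses.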

#!/usr/bin/env python3

import os
import argparse
import json
import urllib.request
import urllib.error
import re
import subprocess
import hashlib
import tempfile
from pathlib import Path
from typing import Tuple, Dict, Optional
import contextlib

# Default cache size limit in bytes (1 MB)
DEFAULT_CACHE_SIZE_LIMIT = 1 * 1024 * 1024


def compute_cache_key(url: str) -> str:
    """Compute a cache key for a URL"""
    return hashlib.sha256(url.encode()).hexdigest()


def compute_cache_digest(cache_dir: Path) -> str:
    """Compute a digest of all cache files except digest.txt"""
    files = sorted(
        f
        for f in os.listdir(cache_dir)
        if (cache_dir / f).is_file() and f != "digest.txt"
    )

    digest = hashlib.sha256()
    for fname in files:
        fpath = cache_dir / fname
        digest.update(fname.encode())
        digest.update(str(fpath.stat().st_size).encode())
        digest.update(fpath.read_bytes())
    return digest.hexdigest()


def update_cache_digest(cache_dir: Path):
    """Update the cache digest file"""
    digest = compute_cache_digest(cache_dir)
    (cache_dir / "digest.txt").write_text(digest)


def prune_cache(cache_dir: Optional[Path], size_limit: int):
    """Prune the cache to keep it under the size limit"""
    if cache_dir is None or not cache_dir.exists():
        return

    # Get all cache files with their modification times
    cache_files = [
        (cache_dir / f, (cache_dir / f).stat().st_mtime)
        for f in os.listdir(cache_dir)
        if (cache_dir / f).is_file()
        and f != "digest.txt"  # Exclude digest from pruning
    ]
    total_size = sum(f.stat().st_size for f, _ in cache_files)

    if total_size <= size_limit:
        return

    # Sort by modification time (oldest first)
    cache_files.sort(key=lambda x: x[1])

    # Remove files until we're under the limit
    for file_path, _ in cache_files:
        if total_size <= size_limit:
            break
        total_size -= file_path.stat().st_size
        file_path.unlink()

    # Update digest after pruning
    update_cache_digest(cache_dir)


def fetch_github(base_url: str, cache_dir: Optional[Path], cache_limit: int) -> bytes:
    """Fetch a URL with on-disk caching, authenticating via GITHUB_TOKEN if set"""
    headers = {}
    token = os.environ.get("GITHUB_TOKEN")
    if token is not None and token != "":
        headers["Authorization"] = f"token {token}"

    with contextlib.ExitStack() as stack:
        if cache_dir is not None:
            cache_dir.mkdir(parents=True, exist_ok=True)
        else:
            # No persistent cache configured: use a throwaway directory
            # that only lives for the duration of this call
            cache_dir = Path(stack.enter_context(tempfile.TemporaryDirectory()))

        # Check cache first
        cache_key = compute_cache_key(base_url)
        cache_file = cache_dir / cache_key

        if cache_file.exists():
            print("Cache hit on", base_url)
            return cache_file.read_bytes()
        else:
            print("Cache miss on", base_url)

        try:
            req = urllib.request.Request(base_url, headers=headers)
            with urllib.request.urlopen(req) as response:
                content = response.read()

                # Write to cache
                cache_file.write_bytes(content)

                # Update digest after adding new file
                update_cache_digest(cache_dir)

                # Prune cache if necessary (this will update digest again if pruning occurs)
                prune_cache(cache_dir, cache_limit)

                return content
        except urllib.error.URLError as e:
            print(f"Failed to fetch from {base_url}: {e}")
            exit(1)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--tag", type=str, required=True, help="Tag to use")
    parser.add_argument("--arch", type=str, required=True, help="Architecture to use")
    parser.add_argument(
        "--compiler-binary",
        type=str,
        default=os.environ.get("CXX"),
        help="Compiler binary to probe for the compiler name and version "
        "(defaults to the CXX environment variable if set)",
    )
    parser.add_argument(
        "--compiler",
        type=str,
        default=None,
        help="Compiler as name@version; only used if no compiler binary is given",
    )
    parser.add_argument(
        "--output",
        type=str,
        default=None,
        help="Output file to write lockfile to",
    )
    parser.add_argument(
        "--cache-dir",
        type=lambda x: Path(x).expanduser() if x else None,
        default=os.environ.get("LOCKFILE_CACHE_DIR"),
        help="Directory to use for caching (defaults to LOCKFILE_CACHE_DIR env var)",
    )
    parser.add_argument(
        "--cache-limit",
        type=int,
        default=int(os.environ.get("LOCKFILE_CACHE_LIMIT", DEFAULT_CACHE_SIZE_LIMIT)),
        help="Cache size limit in bytes (defaults to LOCKFILE_CACHE_LIMIT env var, "
        "or 1 MB if unset)",
    )
    args = parser.parse_args()

    print("Fetching lockfiles for tag:", args.tag)
    print("Architecture:", args.arch)

    base_url = f"https://api.github.com/repos/acts-project/ci-dependencies/releases/tags/{args.tag}"

    data = json.loads(fetch_github(base_url, args.cache_dir, args.cache_limit))

    lockfiles = parse_assets(data)

    print("Available lockfiles:")
    for arch, compilers in lockfiles.items():
        print(f"> {arch}:")
        for c, (n, _) in compilers.items():
            print(f"  - {c}: {n}")

    if args.arch not in lockfiles:
        print(f"No lockfile found for architecture {args.arch}")
        exit(1)

    if args.compiler_binary is not None:
        compiler = determine_compiler_version(args.compiler_binary)
        print("Compiler:", args.compiler_binary, compiler)
    elif args.compiler is not None:
        if not re.match(r"^([\w-]+)@(\d+\.\d+\.\d+)$", args.compiler):
            print(f"Invalid compiler format: {args.compiler}")
            exit(1)
        compiler = args.compiler
        print("Compiler:", compiler)
    else:
        compiler = None

    lockfile = select_lockfile(lockfiles, args.arch, compiler)

    print("Selected lockfile:", lockfile)

    if args.output:
        with open(args.output, "wb") as f:
            f.write(fetch_github(lockfile, args.cache_dir, args.cache_limit))


def parse_assets(data: Dict) -> Dict[str, Dict[str, Tuple[str, str]]]:
    """Collect lockfile assets from the release metadata.

    Returns a mapping of architecture -> compiler -> (asset name, download URL).
    Assets named spack_<arch>.lock (no compiler suffix) are stored under the
    "default" compiler key.
    """
    lockfiles: Dict[str, Dict[str, Tuple[str, str]]] = {}

    for asset in data["assets"]:
        url = asset["browser_download_url"]

        name = asset["name"]
        if not name.endswith(".lock") or not name.startswith("spack_"):
            continue

        m = re.match(r"spack_(.*(?:aarch64|x86_64))(?:_(.*))?\.lock", name)
        if m is None:
            continue

        arch, compiler = m.groups()
        compiler = compiler if compiler else "default"
        lockfiles.setdefault(arch, {})[compiler] = (name, url)

    return lockfiles


def select_lockfile(
    lockfiles: Dict[str, Dict[str, Tuple[str, str]]], arch: str, compiler: Optional[str]
) -> str:
    """Pick a lockfile URL: an exact compiler match if available, otherwise the
    highest version of the same compiler family, otherwise the default."""
    # Default to the default lockfile
    _, lockfile = lockfiles[arch]["default"]

    if compiler is None:
        return lockfile

    # Extract compiler family and version
    compiler_family = compiler.split("@")[0]

    # Find all lockfiles built with the same compiler family
    matching_compilers = {
        comp: info
        for comp, info in lockfiles[arch].items()
        if comp != "default" and comp.split("@")[0] == compiler_family
    }

    if matching_compilers:
        if compiler in matching_compilers:
            # Exact match found
            _, lockfile = matching_compilers[compiler]
        else:
            # Find highest version of same compiler family
            highest_version = max(
                matching_compilers.keys(),
                key=lambda x: [int(v) for v in x.split("@")[1].split(".")],
            )
            _, lockfile = matching_compilers[highest_version]

    return lockfile


def determine_compiler_version(binary: str) -> str:
    """Run `<binary> --version` and derive a name@version compiler string"""
    try:
        result = subprocess.run(
            [binary, "--version"], capture_output=True, text=True, check=True
        )

        line = result.stdout.split("\n", 1)[0]
        print(line)
        if "clang" in line:
            compiler = "clang"
            if "Apple" in line:
                compiler = "apple-clang"
        elif "gcc" in line or "GCC" in line or "g++" in line:
            compiler = "gcc"
        else:
            print(f"Unknown compiler: {binary}")
            exit(1)

        m = re.search(r"(\d+\.\d+\.\d+)", line)
        if m is None:
            print(f"Failed to determine version for compiler: {binary}")
            exit(1)
        (version,) = m.groups()
        return f"{compiler}@{version}"

    except (subprocess.SubprocessError, FileNotFoundError):
        print(f"Failed to determine version for compiler: {binary}")
        exit(1)


if __name__ == "__main__":
    main()
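
As a rough usage sketch (the tag and architecture values are placeholders, and gcc@13.2.0 is an illustrative compiler spec; the script prints the lockfiles actually available for a given release tag):

    ./select_lockfile.py --tag <tag> --arch <arch> --compiler gcc@13.2.0 --output spack.lock

Setting GITHUB_TOKEN in the environment authenticates the GitHub API requests, and LOCKFILE_CACHE_DIR / LOCKFILE_CACHE_LIMIT control the persistent response cache.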