download_hash.py: generalized and data-driven

The script is currently limited to one hardcoded URL for kubernetes
related binaries, and a fixed set of architectures.

The solution is three-fold:
1. Use a URL template dictionary for each download -> this allows easily
   adding support for new downloads.
2. Source the architectures to search from the existing data
3. Enumerate the existing versions in the data and start searching from
   the last one until no newer version is found (newer in the version
   order sense, irrespective of actual age)
pull/11513/head
Max Gautier 2024-02-02 16:01:14 +01:00
parent 4b324cb0f0
commit a7616231a4
No known key found for this signature in database
1 changed files with 31 additions and 19 deletions

View File

@ -6,7 +6,7 @@
import sys import sys
from itertools import count from itertools import count, groupby
from collections import defaultdict from collections import defaultdict
import requests import requests
from ruamel.yaml import YAML from ruamel.yaml import YAML
@ -25,36 +25,48 @@ def open_checksums_yaml():
return data, yaml return data, yaml
def version_compare(version):
return Version(version.removeprefix("v"))
def download_hash(minors): def download_hash(minors):
architectures = ["arm", "arm64", "amd64", "ppc64le"] downloads = {
downloads = ["kubelet", "kubectl", "kubeadm"] "containerd_archive": "https://github.com/containerd/containerd/releases/download/v{version}/containerd-{version}-{os}-{arch}.tar.gz.sha256sum",
"kubeadm": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubeadm.sha256",
"kubectl": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubectl.sha256",
"kubelet": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubelet.sha256",
"runc": "https://github.com/opencontainers/runc/releases/download/{version}/runc.{arch}.sha256sum",
}
data, yaml = open_checksums_yaml() data, yaml = open_checksums_yaml()
if not minors:
minors = {'.'.join(minor.split('.')[:-1]) for minor in data["kubelet_checksums"]["amd64"].keys()}
for download in downloads: for download, url in downloads.items():
checksum_name = f"{download}_checksums" checksum_name = f"{download}_checksums"
data[checksum_name] = defaultdict(dict, data[checksum_name]) for arch, versions in data[checksum_name].items():
for arch in architectures: for minor, patches in groupby(versions.copy().keys(), lambda v : '.'.join(v.split('.')[:-1])):
for minor in minors: for version in (f"{minor}.{patch}" for patch in
if not minor.startswith("v"): count(start=int(max(patches, key=version_compare).split('.')[-1]),
minor = f"v{minor}" step=1)):
for release in (f"{minor}.{patch}" for patch in count(start=0, step=1)): # Those barbaric generators do the following:
if release in data[checksum_name][arch]: # Group all patches versions by minor number, take the newest and start from that
# to find new versions
if version in versions and versions[version] != 0:
continue continue
hash_file = requests.get(f"https://dl.k8s.io/release/{release}/bin/linux/{arch}/{download}.sha256", allow_redirects=True) hash_file = requests.get(downloads[download].format(
version = version,
os = "linux",
arch = arch
),
allow_redirects=True)
if hash_file.status_code == 404: if hash_file.status_code == 404:
print(f"Unable to find {download} hash file for release {release} (arch: {arch})") print(f"Unable to find {download} hash file for version {version} (arch: {arch}) at {hash_file.url}")
break break
hash_file.raise_for_status() hash_file.raise_for_status()
sha256sum = hash_file.content.decode().strip() sha256sum = hash_file.content.decode().split(' ')[0]
if len(sha256sum) != 64: if len(sha256sum) != 64:
raise Exception(f"Checksum has an unexpected length: {len(sha256sum)} (binary: {download}, arch: {arch}, release: 1.{minor}.{patch})") raise Exception(f"Checksum has an unexpected length: {len(sha256sum)} (binary: {download}, arch: {arch}, release: {version}, checksum: '{sha256sum}')")
data[checksum_name][arch][release] = sha256sum data[checksum_name][arch][version] = sha256sum
data[checksum_name] = {arch : {r : releases[r] for r in sorted(releases.keys(), data[checksum_name] = {arch : {r : releases[r] for r in sorted(releases.keys(),
key=lambda v : Version(v[1:]), key=version_compare,
reverse=True)} reverse=True)}
for arch, releases in data[checksum_name].items()} for arch, releases in data[checksum_name].items()}