2020-05-27 21:52:40 +08:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
|
|
|
# After a new version of Kubernetes has been released,
|
2023-12-12 20:47:27 +08:00
|
|
|
# run this script to update roles/kubespray-defaults/defaults/main/download.yml
|
2020-05-27 21:52:40 +08:00
|
|
|
# with new hashes.
|
|
|
|
|
|
|
|
import sys
|
|
|
|
|
2024-02-02 23:01:14 +08:00
|
|
|
from itertools import count, groupby
|
2024-01-30 10:06:10 +08:00
|
|
|
from collections import defaultdict
|
2024-09-05 21:58:36 +08:00
|
|
|
import argparse
|
2020-05-27 21:52:40 +08:00
|
|
|
import requests
|
|
|
|
from ruamel.yaml import YAML
|
2024-01-30 10:06:10 +08:00
|
|
|
from packaging.version import Version
|
2020-05-27 21:52:40 +08:00
|
|
|
|
2024-01-23 23:41:20 +08:00
|
|
|
CHECKSUMS_YML = "../roles/kubespray-defaults/defaults/main/checksums.yml"
|
2020-05-27 21:52:40 +08:00
|
|
|
|
2024-01-23 23:41:20 +08:00
|
|
|
def open_checksums_yaml():
    """Load the checksums file with a round-trip-safe ruamel parser.

    Returns a (data, yaml) pair: the parsed mapping plus the configured
    YAML instance, so the caller can dump the data back with identical
    style (explicit document start, preserved quotes, no line wrapping).
    """
    parser = YAML()
    parser.explicit_start = True
    parser.preserve_quotes = True
    # Very wide limit so long checksum lines are never wrapped on dump.
    parser.width = 4096

    with open(CHECKSUMS_YML, "r") as checksums_file:
        loaded = parser.load(checksums_file)

    return loaded, parser
|
|
|
|
|
2024-02-02 23:01:14 +08:00
|
|
|
def version_compare(version):
    """Sort key: parse a version string (optionally 'v'-prefixed) into a Version."""
    bare = version[1:] if version.startswith("v") else version
    return Version(bare)
|
2020-05-27 21:52:40 +08:00
|
|
|
|
2024-09-05 21:58:36 +08:00
|
|
|
# Map of component name -> URL template of its upstream sha256 file.
# Templates may use the placeholders {version}, {os} and {arch}; the
# version keys come from checksums.yml (note containerd prepends 'v'
# in the URL path while the others embed the key as-is).
downloads = {
    "containerd_archive": "https://github.com/containerd/containerd/releases/download/v{version}/containerd-{version}-{os}-{arch}.tar.gz.sha256sum",
    "kubeadm": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubeadm.sha256",
    "kubectl": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubectl.sha256",
    "kubelet": "https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubelet.sha256",
    # runc publishes one sha256sum file covering all architectures.
    "runc": "https://github.com/opencontainers/runc/releases/download/{version}/runc.sha256sum",
}
|
|
|
|
|
|
|
|
def download_hash(only_downloads: list[str]) -> None:
    """Fetch sha256 checksums for new patch releases and update checksums.yml.

    Args:
        only_downloads: subset of `downloads` keys to process; an empty
            list means process every component.

    Raises:
        Exception: if a fetched checksum does not look like a sha256 hex digest.
        requests.HTTPError: on non-404 HTTP failures.
    """
    # Some upstreams publish a single file with hashes for every
    # architecture; these lambdas turn that file's text into
    # {arch: hash-line}.
    # NOTE(review): the runc extractor assumes a fixed layout (lines 3-8
    # of the sha256sum file, filenames like "runc.<arch>") — fragile if
    # upstream reformats the file.
    download_hash_extract = {
        "runc": lambda hashes: {
            parts[1].split('.')[1]: parts[0]
            for parts in (line.split()
                          for line in hashes.split('\n')[3:9])
        },
    }

    data, yaml = open_checksums_yaml()

    for download, url in (downloads if only_downloads == []
                          else {k: downloads[k] for k in downloads.keys() & only_downloads}).items():
        checksum_name = f"{download}_checksums"
        # Propagate new minor releases (vX.Y.0) to every architecture with a
        # placeholder hash of 0 ("NONE" entries stay "NONE").  Only the .0
        # patch is seeded; this keeps the script idempotent, since the probe
        # loop below discovers the remaining patch versions on its own.
        for arch in data[checksum_name].values():
            for arch2 in data[checksum_name].values():
                arch.update({
                    v: ("NONE" if arch2[v] == "NONE" else 0)
                    for v in (set(arch2.keys()) - set(arch.keys()))
                    if v.split('.')[2] == '0'})

        for arch, versions in data[checksum_name].items():
            # Group the known versions by minor release; for each group,
            # start at the newest known patch number and probe upward until
            # upstream answers 404.
            for minor, patches in groupby(versions.copy().keys(), lambda v: '.'.join(v.split('.')[:-1])):
                for version in (f"{minor}.{patch}" for patch in
                                count(start=int(max(patches, key=version_compare).split('.')[-1]),
                                      step=1)):
                    # Skip versions whose hash is already recorded.
                    if version in versions and versions[version] != 0:
                        continue
                    hash_file = requests.get(url.format(
                        version=version,
                        os="linux",
                        arch=arch
                        ),
                        allow_redirects=True)
                    if hash_file.status_code == 404:
                        # No such release (yet): stop probing this minor.
                        print(f"Unable to find {download} hash file for version {version} (arch: {arch}) at {hash_file.url}")
                        break
                    hash_file.raise_for_status()
                    sha256sum = hash_file.content.decode()
                    if download in download_hash_extract:
                        sha256sum = download_hash_extract[download](sha256sum).get(arch)
                        if sha256sum is None:
                            # This arch is absent from the combined file.
                            break
                        sha256sum = sha256sum.split()[0]

                    if len(sha256sum) != 64:
                        raise Exception(f"Checksum has an unexpected length: {len(sha256sum)} (binary: {download}, arch: {arch}, release: {version}, checksum: '{sha256sum}')")
                    data[checksum_name][arch][version] = sha256sum
        # Re-sort each arch's versions newest-first.
        data[checksum_name] = {arch: {r: releases[r] for r in sorted(releases.keys(),
                                                                     key=version_compare,
                                                                     reverse=True)}
                               for arch, releases in data[checksum_name].items()}

    with open(CHECKSUMS_YML, "w") as checksums_yml:
        yaml.dump(data, checksums_yml)
        print(f"\n\nUpdated {CHECKSUMS_YML}\n")
|
2020-05-27 21:52:40 +08:00
|
|
|
|
2024-09-05 21:58:36 +08:00
|
|
|
# Guard the CLI entry point so importing this module (e.g. for testing)
# does not parse sys.argv or trigger network/file I/O.
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description=f"Add new patch versions hashes in {CHECKSUMS_YML}")
    # Zero or more component names; an empty selection means "all".
    parser.add_argument('binaries', nargs='*', choices=downloads.keys())

    args = parser.parse_args()
    download_hash(args.binaries)
|