refactor

parent ec1a3a2b41
commit 7b13bacd09
@@ -2,7 +2,7 @@ name: Dependency check version
 
 on:
   schedule:
-    - cron: '0 0 * * 0' # Every Sunday at 00:00 UTC
+    - cron: '0 3 * * 1' # Every Monday at 03:00 AM UTC
   workflow_dispatch:
 
 jobs:
@@ -42,14 +42,14 @@ jobs:
 
             component=$(_jq '.key')
             current_version=$(_jq '.value.current_version')
-            latest_version=$(_jq '.value.latest_version')
+            processed_latest_version=$(_jq '.value.processed_latest_version')
 
-            echo "Triggering update for $component from $current_version to $latest_version"
+            echo "Triggering update for $component from $current_version to $processed_latest_version"
 
             gh workflow run dependency-pull-request.yml \
              -f component=$component \
              -f current_version=$current_version \
-             -f latest_version=$latest_version \
+             -f latest_version=$processed_latest_version \
 
             count=$((count + 1))
 
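The `_jq` loop in this workflow reads `version_diff.json`, which `dependency_updater.py --ci-check` writes (see the script diff further below). A minimal sketch of the shape this step consumes — the field names mirror the `_jq` lookups, while the component and version values are made up for illustration:

```python
import json

# Hypothetical version_diff.json contents; only the fields the workflow
# reads are shown, and the versions are invented for the example.
version_diff = {
    "containerd": {
        "current_version": "1.7.21",
        "latest_version": "v1.7.22",            # raw tag, used for the PR name
        "processed_latest_version": "1.7.22",   # stripped tag, passed to the bump workflow
    }
}

with open("version_diff.json", "w") as f:
    json.dump(version_diff, f, indent=2)

# Python equivalent of the workflow's per-entry loop.
for component, value in version_diff.items():
    print(f"Triggering update for {component} "
          f"from {value['current_version']} to {value['processed_latest_version']}")
```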
@@ -1,4 +1,5 @@
 name: Dependency create bump PR
+run-name: Create bump PR for ${{ inputs.component }} from ${{ inputs.current_version }} to ${{ inputs.latest_version }}
 
 on:
   workflow_dispatch:
@@ -38,7 +39,7 @@ jobs:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: python scripts/dependency_updater.py --ci-check --component ${{ github.event.inputs.component }}
 
-      - name: Update component version
+      - name: Update component versions and checksums
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: python scripts/dependency_updater.py --component ${{ github.event.inputs.component }}
@@ -55,10 +56,13 @@ jobs:
           echo "$pr_body" >> $GITHUB_OUTPUT
           echo "EOF" >> $GITHUB_OUTPUT
 
+      - name: Cleanup cache and version_diff.json
+        run: rm -r cache/ version_diff.json
+
       - name: Create Pull Request
         uses: peter-evans/create-pull-request@v5
         with:
-          branch: "dependency-${{ github.event.inputs.component }}-${{ github.event.inputs.latest_version }}"
+          branch: "update-dependency-${{ github.event.inputs.component }}"
           commit-message: "Bump ${{ github.event.inputs.component }} from ${{ github.event.inputs.current_version }} to ${{ github.event.inputs.latest_version }}"
           title: "Bump ${{ github.event.inputs.component }} from ${{ github.event.inputs.current_version }} to ${{ github.event.inputs.latest_version }}"
           body: ${{ steps.generate_pr_body.outputs.pr_body }}
@@ -1,222 +1,221 @@
 # Arhitectures and OSes
-architectures = ['arm', 'arm64', 'amd64', 'ppc64le']
-oses = ['darwin', 'linux', 'windows']
+ARCHITECTURES = ['arm', 'arm64', 'amd64', 'ppc64le']
+OSES = ['darwin', 'linux', 'windows']
+README_COMPONENTS = ['etcd', 'containerd', 'crio', 'calicoctl', 'krew', 'helm']
+
+SHA256REGEX = r'(\b[a-f0-9]{64})\b'
 
 # Paths
-path_download = 'roles/kubespray-defaults/defaults/main/download.yml'
-path_checksum = 'roles/kubespray-defaults/defaults/main/checksums.yml'
-path_main = 'roles/kubespray-defaults/defaults/main/main.yml'
-path_readme = 'README.md'
-path_version_diff = 'version_diff.json'
+PATH_DOWNLOAD = 'roles/kubespray-defaults/defaults/main/download.yml'
+PATH_CHECKSUM = 'roles/kubespray-defaults/defaults/main/checksums.yml'
+PATH_MAIN = 'roles/kubespray-defaults/defaults/main/main.yml'
+PATH_README = 'README.md'
+PATH_VERSION_DIFF = 'version_diff.json'
 
-component_info = {
+COMPONENT_INFO = {
     'calico_crds': {
         'owner': 'projectcalico',
         'repo': 'calico',
-        'url_download': 'https://github.com/projectcalico/calico/archive/VERSION.tar.gz',
+        'url_download': 'https://github.com/projectcalico/calico/archive/{version}.tar.gz',
         'placeholder_version': ['calico_version'],
         'placeholder_checksum' : 'calico_crds_archive_checksums',
         'checksum_structure' : 'simple',
-        'release_type' : 'release',
+        'sha_regex' : r'', # binary
     },
     'calicoctl': {
         'owner': 'projectcalico',
         'repo': 'calico',
-        'url_download': 'https://github.com/projectcalico/calico/releases/download/VERSION/calicoctl-linux-ARCH',
+        'url_download': 'https://github.com/projectcalico/calico/releases/download/{version}/SHA256SUMS',
         'placeholder_version': ['calico_version'],
         'placeholder_checksum' : 'calicoctl_binary_checksums',
         'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'linux-{arch}\b',
     },
     'ciliumcli': {
         'owner': 'cilium',
         'repo': 'cilium-cli',
-        'url_download': 'https://github.com/cilium/cilium-cli/releases/download/VERSION/cilium-linux-ARCH.tar.gz.sha256sum',
+        'url_download': 'https://github.com/cilium/cilium-cli/releases/download/{version}/cilium-linux-{arch}.tar.gz.sha256sum',
        'placeholder_version': ['cilium_cli_version'],
        'placeholder_checksum' : 'ciliumcli_binary_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'{arch}',
    },
    'cni': {
        'owner': 'containernetworking',
        'repo': 'plugins',
-        'url_download': 'https://github.com/containernetworking/plugins/releases/download/VERSION/cni-plugins-linux-ARCH-VERSION.tgz.sha256',
+        'url_download': 'https://github.com/containernetworking/plugins/releases/download/{version}/cni-plugins-linux-{arch}-{version}.tgz.sha256',
        'placeholder_version': ['cni_version'],
        'placeholder_checksum' : 'cni_binary_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'{arch}',
    },
    'containerd': {
        'owner': 'containerd',
        'repo': 'containerd',
-        'url_download': 'https://github.com/containerd/containerd/releases/download/vVERSION/containerd-VERSION-linux-ARCH.tar.gz.sha256sum',
+        'url_download': 'https://github.com/containerd/containerd/releases/download/v{version}/containerd-{version}-linux-{arch}.tar.gz.sha256sum',
        'placeholder_version': ['containerd_version'],
        'placeholder_checksum' : 'containerd_archive_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'{arch}',
    },
    'crictl': {
        'owner': 'kubernetes-sigs',
        'repo': 'cri-tools',
-        'url_download': 'https://github.com/kubernetes-sigs/cri-tools/releases/download/VERSION/crictl-VERSION-linux-ARCH.tar.gz.sha256',
+        'url_download': 'https://github.com/kubernetes-sigs/cri-tools/releases/download/{version}/crictl-{version}-linux-{arch}.tar.gz.sha256',
        'placeholder_version': ['crictl_supported_versions', 'kube_major_version'],
        'placeholder_checksum' : 'crictl_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'simple', # only sha
    },
    'cri_dockerd': {
        'owner': 'Mirantis',
        'repo': 'cri-dockerd',
-        'url_download': 'https://github.com/Mirantis/cri-dockerd/releases/download/vVERSION/cri-dockerd-VERSION.ARCH.tgz',
+        'url_download': 'https://github.com/Mirantis/cri-dockerd/releases/download/v{version}/cri-dockerd-{version}.{arch}.tgz',
        'placeholder_version': ['cri_dockerd_version'],
        'placeholder_checksum' : 'cri_dockerd_archive_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'', # binary
    },
    'crio': {
        'owner': 'cri-o',
        'repo': 'cri-o',
-        'url_download': 'https://storage.googleapis.com/cri-o/artifacts/cri-o.ARCH.VERSION.tar.gz',
+        'url_download': 'https://storage.googleapis.com/cri-o/artifacts/cri-o.{arch}.{version}.tar.gz.sha256sum',
        'placeholder_version': ['crio_supported_versions', 'kube_major_version'],
        'placeholder_checksum' : 'crio_archive_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'{arch}',
    },
    'crun': {
        'owner': 'containers',
        'repo': 'crun',
-        'url_download': 'https://github.com/containers/crun/releases/download/VERSION/crun-VERSION-linux-ARCH',
+        'url_download': 'https://github.com/containers/crun/releases/download/{version}/crun-{version}-linux-{arch}',
        'placeholder_version': ['crun_version'],
        'placeholder_checksum' : 'crun_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'', # binary
    },
    'etcd': {
        'owner': 'etcd-io',
        'repo': 'etcd',
-        'url_download': 'https://github.com/etcd-io/etcd/releases/download/VERSION/SHA256SUMS',
+        'url_download': 'https://github.com/etcd-io/etcd/releases/download/{version}/SHA256SUMS',
        'placeholder_version': ['etcd_supported_versions', 'kube_major_version'],
        'placeholder_checksum' : 'etcd_binary_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'linux-{arch}\.',
    },
    'gvisor_containerd_shim': {
        'owner': 'google',
        'repo': 'gvisor',
-        'url_download': 'https://storage.googleapis.com/gvisor/releases/release/VERSION/ARCH/containerd-shim-runsc-v1',
+        'url_download': 'https://storage.googleapis.com/gvisor/releases/release/{version}/{arch}/containerd-shim-runsc-v1',
        'placeholder_version': ['gvisor_version'],
        'placeholder_checksum' : 'gvisor_containerd_shim_binary_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'tag',
+        'sha_regex' : r'', # binary
    },
    'gvisor_runsc': {
        'owner': 'google',
        'repo': 'gvisor',
-        'url_download': 'https://storage.googleapis.com/gvisor/releases/release/VERSION/ARCH/runsc',
+        'url_download': 'https://storage.googleapis.com/gvisor/releases/release/{version}/{arch}/runsc',
        'placeholder_version': ['gvisor_version'],
        'placeholder_checksum' : 'gvisor_runsc_binary_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'tag',
+        'sha_regex' : r'', # binary
    },
    'helm': {
        'owner': 'helm',
        'repo': 'helm',
-        'url_download': 'https://get.helm.sh/helm-VERSION-linux-ARCH.tar.gz',
+        'url_download': 'https://get.helm.sh/helm-{version}-linux-{arch}.tar.gz.sha256sum',
        'placeholder_version': ['helm_version'],
        'placeholder_checksum' : 'helm_archive_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'{arch}',
    },
 
    'kata_containers': {
        'owner': 'kata-containers',
        'repo': 'kata-containers',
-        'url_download': 'https://github.com/kata-containers/kata-containers/releases/download/VERSION/kata-static-VERSION-ARCH.tar.xz',
+        'url_download': 'https://github.com/kata-containers/kata-containers/releases/download/{version}/kata-static-{version}-{arch}.tar.xz',
        'placeholder_version': ['kata_containers_version'],
        'placeholder_checksum' : 'kata_containers_binary_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'', # binary
    },
    'krew': {
        'owner': 'kubernetes-sigs',
        'repo': 'krew',
-        'url_download': 'https://github.com/kubernetes-sigs/krew/releases/download/VERSION/krew-OS_ARCH.tar.gz.sha256',
+        'url_download': 'https://github.com/kubernetes-sigs/krew/releases/download/{version}/krew-{os_name}_{arch}.tar.gz.sha256',
        'placeholder_version': ['krew_version'],
        'placeholder_checksum' : 'krew_archive_checksums',
        'checksum_structure' : 'os_arch',
-        'release_type' : 'release',
+        'sha_regex' : r'simple', # only sha
    },
    'kubeadm': {
        'owner': 'kubernetes',
        'repo': 'kubernetes',
-        'url_download': 'https://dl.k8s.io/release/VERSION/bin/linux/ARCH/kubeadm.sha256',
+        'url_download': 'https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubeadm.sha256',
        'placeholder_version': ['kube_version'],
        'placeholder_checksum' : 'kubeadm_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'simple', # only sha
    },
    'kubectl': {
        'owner': 'kubernetes',
        'repo': 'kubernetes',
-        'url_download': 'https://dl.k8s.io/release/VERSION/bin/linux/ARCH/kubectl.sha256',
+        'url_download': 'https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubectl.sha256',
        'placeholder_version': ['kube_version'],
        'placeholder_checksum' : 'kubectl_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'simple', # only sha
    },
    'kubelet': {
        'owner': 'kubernetes',
        'repo': 'kubernetes',
-        'url_download': 'https://dl.k8s.io/release/VERSION/bin/linux/ARCH/kubelet.sha256',
+        'url_download': 'https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubelet.sha256',
        'placeholder_version': ['kube_version'],
        'placeholder_checksum' : 'kubelet_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'simple', # only sha
    },
    'nerdctl': {
        'owner': 'containerd',
        'repo': 'nerdctl',
-        'url_download': 'https://github.com/containerd/nerdctl/releases/download/vVERSION/SHA256SUMS',
+        'url_download': 'https://github.com/containerd/nerdctl/releases/download/v{version}/SHA256SUMS',
        'placeholder_version': ['nerdctl_version'],
        'placeholder_checksum' : 'nerdctl_archive_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'nerdctl-(?!full)[\w.-]+-linux-{arch}\.tar\.gz',
    },
    'runc': {
        'owner': 'opencontainers',
        'repo': 'runc',
-        'url_download': 'https://github.com/opencontainers/runc/releases/download/VERSION/runc.ARCH',
+        'url_download': 'https://github.com/opencontainers/runc/releases/download/{version}/runc.sha256sum',
        'placeholder_version': ['runc_version'],
        'placeholder_checksum' : 'runc_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'\.{arch}\b',
    },
    'skopeo': {
        'owner': 'containers',
        'repo': 'skopeo',
-        'url_download': 'https://github.com/lework/skopeo-binary/releases/download/VERSION/skopeo-linux-ARCH',
+        'url_download': 'https://github.com/lework/skopeo-binary/releases/download/{version}/skopeo-linux-{arch}',
        'placeholder_version': ['skopeo_version'],
        'placeholder_checksum' : 'skopeo_binary_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'', # binary
    },
    'youki': {
        'owner': 'containers',
        'repo': 'youki',
-        'url_download': 'https://github.com/containers/youki/releases/download/vVERSION/youki-VERSION-ARCH.tar.gz',
+        'url_download': 'https://github.com/containers/youki/releases/download/v{version}/youki-{version}-{arch}.tar.gz',
        'placeholder_version': ['youki_version'],
        'placeholder_checksum' : 'youki_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'', # binary
    },
    'yq': {
        'owner': 'mikefarah',
        'repo': 'yq',
-        'url_download': 'https://github.com/mikefarah/yq/releases/download/VERSION/checksums-bsd',
+        'url_download': 'https://github.com/mikefarah/yq/releases/download/{version}/checksums-bsd',
        'placeholder_version': ['yq_version'],
        'placeholder_checksum' : 'yq_checksums',
        'checksum_structure' : 'arch',
-        'release_type' : 'release',
+        'sha_regex' : r'SHA256 \([^)]+linux_{arch}\)',
    },
 }
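With this refactor, `dependency_updater.py` expands the `{version}`/`{arch}`/`{os_name}` placeholders with `str.format` and formats `sha_regex` the same way before using it to pull the right line out of a checksum file. A minimal sketch of one expansion, using the etcd entry above (the version value is illustrative, not taken from the diff):

```python
# Expand one COMPONENT_INFO entry the way get_checksums() does.
entry = {
    'url_download': 'https://github.com/etcd-io/etcd/releases/download/{version}/SHA256SUMS',
    'sha_regex': r'linux-{arch}\.',
}

version = 'v3.5.0'  # example value only
arch = 'amd64'

url = entry['url_download'].format(version=version, arch=arch)
sha_regex = entry['sha_regex'].format(arch=arch)

print(url)        # https://github.com/etcd-io/etcd/releases/download/v3.5.0/SHA256SUMS
print(sha_regex)  # linux-amd64\. -- later combined with SHA256REGEX to match one line
```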
@@ -3,7 +3,6 @@ import re
 import sys
 import logging
 import requests
-import time
 import json
 import argparse
 import hashlib
@@ -11,7 +10,7 @@ from ruamel.yaml import YAML
 from requests.adapters import HTTPAdapter
 from urllib3.util.retry import Retry
 from concurrent.futures import ThreadPoolExecutor
-from dependency_config import component_info, architectures, oses, path_download, path_checksum, path_main, path_readme, path_version_diff
+from dependency_config import ARCHITECTURES, OSES, README_COMPONENTS, PATH_DOWNLOAD, PATH_CHECKSUM, PATH_MAIN, PATH_README, PATH_VERSION_DIFF, COMPONENT_INFO, SHA256REGEX
 
 
 yaml = YAML()
@@ -48,28 +47,10 @@ def get_session_with_retries():
         pool_maxsize=50,
         max_retries=Retry(total=3, backoff_factor=1)
     )
 
     session.mount('http://', adapter)
     session.mount('https://', adapter)
     return session
-
-def load_from_cache(component):
-    cache_file = os.path.join(cache_dir, f'{component}.json')
-    if os.path.exists(cache_file):
-        file_age = time.time() - os.path.getmtime(cache_file)
-        if file_age < cache_expiry_seconds:
-            logging.info(f'Using cached release info for {component}')
-            with open(cache_file, 'r') as f:
-                return json.load(f)
-    return None
-
-def save_to_cache(component, data):
-    os.makedirs(cache_dir, exist_ok=True)
-    cache_file = os.path.join(cache_dir, f'{component}.json')
-    with open(cache_file, 'w') as f:
-        json.dump(data, f, indent=2)
-    logging.info(f'Cached release info for {component}')
 
 def get_current_version(component, component_data):
     kube_major_version = component_data['kube_major_version']
     placeholder_version = [kube_major_version if item == 'kube_major_version' else item for item in component_data['placeholder_version']]
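The JSON metadata cache removed above is no longer needed: the refactor fetches release and tag metadata for all components in a single batched GraphQL request (see `get_repository_metadata` in the next hunk), where each component becomes an alias on `repository(...)`; only downloaded artifact files remain cached under `cache/`. The aliasing idea in miniature — a sketch assuming a valid `GH_TOKEN` in the environment, with a reduced field set:

```python
import os
import requests

# One aliased sub-query per component, as get_repository_metadata() builds them.
components = {
    'etcd': ('etcd-io', 'etcd'),
    'runc': ('opencontainers', 'runc'),
}
parts = []
for alias, (owner, repo) in components.items():
    parts.append(f'''
    {alias}: repository(owner: "{owner}", name: "{repo}") {{
      releases(first: 5, orderBy: {{field: CREATED_AT, direction: DESC}}) {{
        nodes {{ tagName isLatest }}
      }}
    }}''')
query = f"query {{ {''.join(parts)} }}"

response = requests.post(
    'https://api.github.com/graphql',
    json={'query': query},
    headers={'Authorization': f"Bearer {os.getenv('GH_TOKEN')}"},
    timeout=10,
)
data = response.json().get('data', {})
# data['etcd'] and data['runc'] now each hold that repository's release nodes.
```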
@@ -81,180 +62,222 @@ def get_current_version(component, component_data):
         current_version = current_version.get(key)
     return current_version
 
-def get_release(component, component_data, session, number_of_releases=10):
-    release = load_from_cache(component)
-    if not release:
-        try:
-            query = """
-            query {
-                repository(owner: "%s", name: "%s") {
-                    releases(first: %s, orderBy: {field: CREATED_AT, direction: DESC}) {
-                        nodes {
-                            tagName
-                            url
-                            description
-                            publishedAt
-                            isLatest
-                        }
-                    }
-                }
-            }
-            """ % (component_data['owner'], component_data['repo'], number_of_releases)
-
-            headers = {
-                'Authorization': f'Bearer {gh_token}',
-                'Content-Type': 'application/json'
-            }
-
-            response = session.post(github_api_url, json={'query': query}, headers=headers)
-            response.raise_for_status()
-
-            data = response.json()
-            logging.debug(f'Component {component} releases: {data}')
-            # Look for the release marked as latest
-            for release_node in data['data']['repository']['releases']['nodes']:
-                if release_node['isLatest']:
-                    release = release_node
-                    save_to_cache(component, release)
-                    return release
-
-            logging.warning(f'No latest release found for {component}')
-            return None
-        except Exception as e:
-            logging.error(f'Error fetching latest release for {component}: {e}')
-            return None
-    return release
-
-def get_release_tag(component, component_data, session):
-    tag = load_from_cache(component)
-    if not tag:
-        try:
-            query = """
-            query {
-                repository(owner: "%s", name: "%s") {
-                    refs(refPrefix: "refs/tags/", first: 1, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}) {
-                        edges {
-                            node {
-                                name
-                            }
-                        }
-                    }
-                }
-            }
-            """ % (component_data['owner'], component_data['repo'])
-
-            headers = {
-                'Authorization': f'Bearer {gh_token}',
-                'Content-Type': 'application/json'
-            }
-
-            response = session.post(github_api_url, json={'query': query}, headers=headers)
-            response.raise_for_status()
-
-            data = response.json()
-            logging.debug(f'Component {component} releases: {data}')
-            tag = data['data']['repository']['refs']['edges'][0]['node']
-            save_to_cache(component, tag)
-            return tag
-        except Exception as e:
-            logging.error(f'Error fetching tags for {component}: {e}')
-            return None
-    return tag
+def get_latest_version(component_repo_metadata):
+    releases = component_repo_metadata.get('releases', {}).get('nodes', [])
+    for release in releases:
+        if release.get('isLatest', False):
+            return release['tagName']
+    tags = component_repo_metadata.get('refs', {}).get('nodes', []) # fallback on tags
+    if tags:
+        first_tag = tags[0]['name']
+        return first_tag
+    return None
+
+def get_patch_versions(component, latest_version, component_repo_metadata):
+    if component in ['gvisor_runsc','gvisor_containerd_shim']: # hack for gvisor
+        return [latest_version]
+    match = re.match(r'v?(\d+)\.(\d+)', latest_version)
+    if not match:
+        logging.error(f'Invalid version format: {latest_version}')
+        return []
+    major_version, minor_version = match.groups()
+    patch_versions = []
+    stable_version_pattern = re.compile(rf'^v?{major_version}\.{minor_version}(\.\d+)?$') # no rc, alpha, dev, etc.
+    # Search releases
+    releases = component_repo_metadata.get('releases', {}).get('nodes', [])
+    for release in releases:
+        version = release.get('tagName', '')
+        if stable_version_pattern.match(version):
+            patch_versions.append(version)
+    # Fallback to tags
+    if not patch_versions:
+        tags = component_repo_metadata.get('refs', {}).get('nodes', [])
+        for tag in tags:
+            version = tag.get('name', '')
+            if stable_version_pattern.match(version):
+                patch_versions.append(version)
+    patch_versions.sort(key=lambda v: list(map(int, re.findall(r'\d+', v)))) # sort for checksum update
+    return patch_versions
+
+def get_repository_metadata(component_info, session):
+    query_parts = []
+    for component, data in component_info.items():
+        owner = data['owner']
+        repo = data['repo']
+        query_parts.append(f"""
+            {component}: repository(owner: "{owner}", name: "{repo}") {{
+                releases(first: {args.graphql_number_of_entries}, orderBy: {{field: CREATED_AT, direction: DESC}}) {{
+                    nodes {{
+                        tagName
+                        url
+                        description
+                        publishedAt
+                        isLatest
+                    }}
+                }}
+                refs(refPrefix: "refs/tags/", first: {args.graphql_number_of_entries}, orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
+                    nodes {{
+                        name
+                        target {{
+                            ... on Tag {{
+                                target {{
+                                    ... on Commit {{
+                                        history(first: {args.graphql_number_of_commits}) {{
+                                            edges {{
+                                                node {{
+                                                    oid
+                                                    message
+                                                    url
+                                                }}
+                                            }}
+                                        }}
+                                    }}
+                                }}
+                            }}
+                            ... on Commit {{
+                                # In case the tag directly points to a commit
+                                history(first: {args.graphql_number_of_commits}) {{
+                                    edges {{
+                                        node {{
+                                            oid
+                                            message
+                                            url
+                                        }}
+                                    }}
+                                }}
+                            }}
+                        }}
+                    }}
+                }}
+            }}
+        """)
+
+    query = f"query {{ {''.join(query_parts)} }}"
+    headers = {
+        'Authorization': f'Bearer {gh_token}',
+        'Content-Type': 'application/json'
+    }
+
+    try:
+        response = session.post(github_api_url, json={'query': query}, headers=headers)
+        response.raise_for_status()
+        json_data = response.json()
+        data = json_data.get('data')
+        if data is not None and bool(data): # Ensure 'data' is not None and not empty
+            logging.debug(f'GraphQL data response:\n{json.dumps(data, indent=2)}')
+            return data
+        else:
+            logging.error(f'GraphQL query returned errors: {json_data}')
+            return None
+    except Exception as e:
+        logging.error(f'Error fetching repository metadata: {e}')
+        return None
 
-def calculate_checksum(cachefile, arch, url_download):
-    if url_download.endswith('.sha256sum'):
-        with open(f'cache/{cachefile}', 'r') as f:
-            checksum_line = f.readline().strip()
-            return checksum_line.split()[0]
-    elif url_download.endswith('SHA256SUMS'):
-        with open(f'cache/{cachefile}', 'r') as f:
-            for line in f:
-                if 'linux' in line and arch in line:
-                    return line.split()[0]
-    elif url_download.endswith('bsd'):
-        with open(f'cache/{cachefile}', 'r') as f:
-            for line in f:
-                if 'SHA256' in line and 'linux' in line and arch in line:
-                    return line.split()[0]
-    sha256_hash = hashlib.sha256()
-    with open(f'cache/{cachefile}', 'rb') as f:
-        for byte_block in iter(lambda: f.read(4096), b''):
-            sha256_hash.update(byte_block)
-    return sha256_hash.hexdigest()
+def calculate_checksum(cachefile, sha_regex):
+    if sha_regex:
+        logging.debug(f'Searching with regex {sha_regex} in file {cachefile}')
+        with open(f'cache/{cachefile}', 'r') as f:
+            for line in f:
+                if sha_regex == 'simple': # Only sha is present in the file
+                    pattern = re.compile(SHA256REGEX)
+                else:
+                    pattern = re.compile(rf'(?:{SHA256REGEX}.*{sha_regex}|{sha_regex}.*{SHA256REGEX})') # Sha may be at start or end
+                match = pattern.search(line)
+                if match:
+                    checksum = match.group(1) or match.group(2)
+                    logging.debug(f'Matched line: {line.strip()}')
+                    return checksum
+    else: # binary
+        sha256_hash = hashlib.sha256()
+        with open(f'cache/{cachefile}', 'rb') as f:
+            for byte_block in iter(lambda: f.read(4096), b''):
+                sha256_hash.update(byte_block)
+        checksum = sha256_hash.hexdigest()
+        return checksum
 
-def download_file_and_get_checksum(component, arch, url_download, session):
-    cache_file = f'{component}-{arch}'
+def download_file_and_get_checksum(component, arch, url_download, version, sha_regex, session):
+    logging.info(f'Download URL {url_download}')
+    cache_file = f'{component}-{arch}-{version}'
     if os.path.exists(f'cache/{cache_file}'):
         logging.info(f'Using cached file for {url_download}')
-        return calculate_checksum(cache_file, arch, url_download)
+        return calculate_checksum(cache_file, sha_regex)
     try:
         response = session.get(url_download, timeout=10)
         response.raise_for_status()
         with open(f'cache/{cache_file}', 'wb') as f:
             f.write(response.content)
         logging.info(f'Downloaded and cached file for {url_download}')
-        return calculate_checksum(cache_file, arch, url_download)
+        return calculate_checksum(cache_file, sha_regex)
     except Exception as e:
-        logging.error(e)
+        logging.warning(e)
         return None
 
-def get_checksums(component, component_data, version, session):
+def get_checksums(component, component_data, versions, session):
     checksums = {}
-    url_download_template = component_data['url_download'].replace('VERSION', version)
-    if component_data['checksum_structure'] == 'os_arch':
-        # OS -> Arch -> Checksum
-        for os_name in oses:
-            checksums[os_name] = {}
-            for arch in architectures:
-                url_download = url_download_template.replace('OS', os_name).replace('ARCH', arch)
-                checksum = download_file_and_get_checksum(component, arch, url_download, session)
-                if not checksum:
-                    checksum = 0
-                checksums[os_name][arch] = checksum
-    elif component_data['checksum_structure'] == 'arch':
-        # Arch -> Checksum
-        for arch in architectures:
-            url_download = url_download_template.replace('ARCH', arch)
-            checksum = download_file_and_get_checksum(component, arch, url_download, session)
-            if not checksum:
-                checksum = 0
-            checksums[arch] = checksum
-    elif component_data['checksum_structure'] == 'simple':
-        # Checksum
-        checksum = download_file_and_get_checksum(component, '', url_download_template, session)
-        if not checksum:
-            checksum = 0
-        checksums[version] = checksum
+    for version in versions:
+        processed_version = process_version_string(component, version)
+        checksums[version] = {}
+        url_download_template = component_data.get('url_download')
+        if component_data['checksum_structure'] == 'os_arch':
+            # OS -> Arch -> Checksum
+            for os_name in OSES:
+                if os_name not in checksums[version]:
+                    checksums[version][os_name] = {}
+                for arch in ARCHITECTURES:
+                    url_download = url_download_template.format(arch=arch, os_name=os_name, version=processed_version)
+                    sha_regex = component_data.get('sha_regex').format(arch=arch, os_name=os_name)
+                    checksum = download_file_and_get_checksum(component, arch, url_download, processed_version, sha_regex, session) or 0
+                    checksums[version][os_name][arch] = checksum
+        elif component_data['checksum_structure'] == 'arch':
+            # Arch -> Checksum
+            for arch in ARCHITECTURES:
+                tmp_arch = arch
+                if component == 'youki':
+                    tmp_arch = tmp_arch.replace('arm64', 'aarch64-gnu').replace('amd64', 'x86_64-gnu')
+                elif component in ['gvisor_containerd_shim','gvisor_runsc']:
+                    tmp_arch = tmp_arch.replace("arm64", "aarch64").replace("amd64", "x86_64")
+                url_download = url_download_template.format(arch=tmp_arch, version=processed_version)
+                sha_regex = component_data.get('sha_regex').format(arch=tmp_arch)
+                checksum = download_file_and_get_checksum(component, arch, url_download, processed_version, sha_regex, session) or 0
+                checksums[version][arch] = checksum
+        elif component_data['checksum_structure'] == 'simple':
+            # Checksum
+            url_download = url_download_template.format(version=processed_version)
+            sha_regex = component_data.get('sha_regex')
+            checksum = download_file_and_get_checksum(component, '', url_download, processed_version, sha_regex, session) or 0
+            checksums[version] = checksum # Store checksum for the version
     return checksums
 
-def update_yaml_checksum(component_data, checksums, version):
+def update_checksum(component, component_data, checksums, version):
+    processed_version = process_version_string(component, version)
     placeholder_checksum = component_data['placeholder_checksum']
     checksum_structure = component_data['checksum_structure']
     current = checksum_yaml_data[placeholder_checksum]
 
     if checksum_structure == 'simple':
         # Simple structure (placeholder_checksum -> version -> checksum)
-        current[(version)] = checksums[version]
+        checksum_yaml_data[placeholder_checksum] = {processed_version: checksums, **current}
     elif checksum_structure == 'os_arch':
         # OS structure (placeholder_checksum -> os -> arch -> version -> checksum)
         for os_name, arch_dict in checksums.items():
             os_current = current.setdefault(os_name, {})
             for arch, checksum in arch_dict.items():
-                os_current[arch] = {(version): checksum, **os_current.get(arch, {})}
+                os_current[arch] = {(processed_version): checksum, **os_current.get(arch, {})}
     elif checksum_structure == 'arch':
         # Arch structure (placeholder_checksum -> arch -> version -> checksum)
         for arch, checksum in checksums.items():
-            current[arch] = {(version): checksum, **current.get(arch, {})}
-    logging.info(f'Updated {placeholder_checksum} with {checksums}')
+            current[arch] = {(processed_version): checksum, **current.get(arch, {})}
+    logging.info(f'Updated {placeholder_checksum} with version {processed_version} and checksums {checksums}')
 
 def resolve_kube_dependent_component_version(component, component_data, version):
     kube_major_version = component_data['kube_major_version']
     if component in ['crictl', 'crio']:
         try:
-            component_major_minor_version = get_major_minor_version(version)
-            if component_major_minor_version == kube_major_version:
+            component_major_version = get_major_version(version)
+            if component_major_version == kube_major_version:
                 resolved_version = kube_major_version
             else:
-                resolved_version = component_major_minor_version
+                resolved_version = component_major_version
         except (IndexError, AttributeError):
             logging.error(f'Error parsing version for {component}: {version}')
             return
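To see what the rewritten `calculate_checksum` does with a checksum file: `SHA256REGEX` captures any 64-hex-digit token, and the component's formatted `sha_regex` ties that token to the right artifact name, whichever side of the line the digest sits on. A self-contained demo with fabricated checksum lines:

```python
import re

SHA256REGEX = r'(\b[a-f0-9]{64})\b'
sha_regex = r'linux-amd64\.'  # etcd's sha_regex with {arch} already formatted

# The digest may come before or after the filename, so try both orders.
pattern = re.compile(rf'(?:{SHA256REGEX}.*{sha_regex}|{sha_regex}.*{SHA256REGEX})')

lines = [
    'deadbeef' * 8 + '  etcd-v3.5.0-linux-amd64.tar.gz',  # fabricated digest
    'etcd-v3.5.0-linux-arm64.tar.gz  ' + 'deadbeef' * 8,  # wrong arch, no match
]
for line in lines:
    match = pattern.search(line)
    if match:
        print(match.group(1) or match.group(2))  # prints only the amd64 digest
```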
@@ -262,7 +285,7 @@ def resolve_kube_dependent_component_version(component, component_data, version)
             resolved_version = kube_major_version
     return resolved_version
 
-def update_yaml_version(component, component_data, version):
+def update_version(component, component_data, version):
     placeholder_version = component_data['placeholder_version']
     resolved_version = resolve_kube_dependent_component_version(component, component_data, version)
     updated_placeholder = [
@@ -288,10 +311,15 @@ def update_readme(component, version):
     for i, line in enumerate(readme_data):
         if component in line and re.search(r'v\d+\.\d+\.\d+', line):
             readme_data[i] = re.sub(r'v\d+\.\d+\.\d+', version, line)
-            logging.info(f"Updated {component} to {version} in README")
+            logging.info(f'Updated {component} to {version} in README')
             break
     return readme_data
 
+def safe_save_files(file_path, data=None, save_func=None):
+    if not save_func(file_path, data):
+        logging.error(f'Failed to save file {file_path}')
+        sys.exit(1)
+
 def create_json_file(file_path):
     new_data = {}
     try:
@@ -317,12 +345,13 @@ def load_yaml_file(yaml_file):
             return yaml.load(f)
     except Exception as e:
         logging.error(f'Failed to load {yaml_file}: {e}')
-        return {}
+        return None
 
 def save_yaml_file(yaml_file, data):
     try:
         with open(yaml_file, 'w') as f:
             yaml.dump(data, f)
+        return True
     except Exception as e:
         logging.error(f'Failed to save {yaml_file}: {e}')
         return False
@@ -333,12 +362,13 @@ def open_readme(path_readme):
             return f.readlines()
     except Exception as e:
         logging.error(f'Failed to load {path_readme}: {e}')
-        return False
+        return None
 
-def save_readme(path_readme):
+def save_readme(path_readme, data):
     try:
         with open(path_readme, 'w') as f:
-            f.writelines(readme_data)
+            f.writelines(data)
+        return True
     except Exception as e:
         logging.error(f'Failed to save {path_readme}: {e}')
         return False
@@ -353,124 +383,144 @@ def process_version_string(component, version):
         version = match.group(1)
     return version
 
-def get_major_minor_version(version):
+def get_major_version(version):
     match = re.match(r'^(v\d+)\.(\d+)', version)
     if match:
-        return f"{match.group(1)}.{match.group(2)}"
-    return version
+        return f'{match.group(1)}.{match.group(2)}'
+    return None
 
-def process_component(component, component_data, session):
+def process_component(component, component_data, repo_metadata, session):
     logging.info(f'Processing component: {component}')
+    component_repo_metada = repo_metadata.get(component, {})
+
+    # Get current kube version
     kube_version = main_yaml_data.get('kube_version')
-    kube_major_version = get_major_minor_version(kube_version)
+    kube_major_version = get_major_version(kube_version)
     component_data['kube_version'] = kube_version # needed for nested components
     component_data['kube_major_version'] = kube_major_version # needed for nested components
 
-    # Get current version
+    # Get current component version
     current_version = get_current_version(component, component_data)
     if not current_version:
         logging.info(f'Stop processing component {component}, current version unknown')
         return
 
-    # Get latest version
-    if component_data['release_type'] == 'tag':
-        release = get_release_tag(component, component_data, session)
-        if release:
-            latest_version = release.get('name')
-    else:
-        release = get_release(component, component_data, session)
-        latest_version = release.get('tagName')
-
+    # Get latest component version
+    latest_version = get_latest_version(component_repo_metada)
     if not latest_version:
         logging.info(f'Stop processing component {component}, latest version unknown.')
         return
+    # Kubespray version
+    processed_latest_version = process_version_string(component, latest_version)
 
-    latest_version = process_version_string(component, latest_version)
-
-    if current_version == latest_version:
+    # Log version comparison
+    if current_version == processed_latest_version:
         logging.info(f'Component {component}, version {current_version} is up to date')
-        if args.skip_checksum and (current_version == latest_version):
-            logging.info(f'Stop processing component {component} due to flag.')
-            return
     else:
-        logging.info(f'Component {component} version discrepancy, current={current_version}, latest={latest_version}')
+        logging.info(f'Component {component} version discrepancy, current={current_version}, latest={processed_latest_version}')
 
+    # CI - write data and return
     if args.ci_check:
-        release['component'] = component
-        release['owner'] = component_data['owner']
-        release['repo'] = component_data['repo']
-        release['release_type'] = component_data['release_type']
-        if (current_version != latest_version):
-            version_diff[component] = {
-                'current_version' : current_version, # needed for dependecy-check
-                'latest_version' : latest_version, # needed for dependecy-check
-                'release' : release # needed for generate_pr_body
-            }
+        version_diff[component] = {
+            # used in dependecy-check.yml workflow
+            'current_version' : current_version,
+            'latest_version' : latest_version, # used for PR name
+            # used in generate_pr_body.py script
+            'processed_latest_version': processed_latest_version, # used for PR body
+            'owner' : component_data['owner'],
+            'repo' : component_data['repo'],
+            'repo_metadata' : component_repo_metada,
+        }
         return
 
-    checksums = get_checksums(component, component_data, latest_version, session)
-    update_yaml_checksum(component_data, checksums, latest_version)
+    # Get patch versions
+    patch_versions = get_patch_versions(component, latest_version, component_repo_metada)
+    logging.info(f'Component {component} patch versions: {patch_versions}')
+
+    # Get checksums for all patch versions
+    checksums = get_checksums(component, component_data, patch_versions, session)
+    # Update checksums
+    for version in patch_versions:
+        version_checksum = checksums.get(version)
+        update_checksum(component, component_data, version_checksum, version)
+
+    # Update version in configuration
     if component not in ['kubeadm', 'kubectl', 'kubelet']: # kubernetes dependent components
-        update_yaml_version(component, component_data, latest_version)
-        if component in ['etcd', 'containerd', 'crio', 'calicoctl', 'krew', 'helm']: # in README
+        if component != 'calico_crds': # TODO double check if only calicoctl may change calico_version
+            update_version(component, component_data, processed_latest_version)
+
+        # Update version in README
+        if component in README_COMPONENTS:
             if component in ['crio', 'crictl']:
-                component_major_minor_version = get_major_minor_version(latest_version)
-                if component_major_minor_version != kube_major_version: # do not update README
+                component_major_version = get_major_version(processed_latest_version)
+                if component_major_version != kube_major_version: # do not update README, we just added checksums
                     return
-                component = component.replace('crio', 'cri-o')
-            elif component == 'containerd':
-                latest_version = f'v{latest_version}'
-            elif component == 'calicoctl':
-                component = component.replace('calicoctl', 'calico')
+            # replace component name to fit readme
+            component = component.replace('crio', 'cri-o').replace('calicoctl', 'calico')
             update_readme(component, latest_version)
 
-def main(loglevel, component, max_workers):
-    setup_logging(loglevel)
+def main():
+    # Setup logging
+    setup_logging(args.loglevel)
+    # Setup session with retries
     session = get_session_with_retries()
 
+    # Load configuration files
     global main_yaml_data, checksum_yaml_data, download_yaml_data, readme_data, version_diff
-    main_yaml_data = load_yaml_file(path_main)
-    checksum_yaml_data = load_yaml_file(path_checksum)
-    download_yaml_data = load_yaml_file(path_download)
-    readme_data = open_readme(path_readme)
+    main_yaml_data = load_yaml_file(PATH_MAIN)
+    checksum_yaml_data = load_yaml_file(PATH_CHECKSUM)
+    download_yaml_data = load_yaml_file(PATH_DOWNLOAD)
+    readme_data = open_readme(PATH_README)
 
     if not (main_yaml_data and checksum_yaml_data and download_yaml_data and readme_data):
-        logging.error(f'Failed to open required yaml file, current working directory is {pwd}. Exiting...')
+        logging.error(f'Failed to open one or more configuration files, current working directory is {pwd}. Exiting...')
         sys.exit(1)
 
+    # CI - create version_diff file
     if args.ci_check:
-        version_diff = create_json_file(path_version_diff)
+        version_diff = create_json_file(PATH_VERSION_DIFF)
         if version_diff is None:
-            logging.error(f'Failed to create version_diff.json file')
+            logging.error(f'Failed to create {PATH_VERSION_DIFF} file')
             sys.exit(1)
-    else:
-        version_diff = {}
 
-    if component != 'all':
-        if component in component_info:
-            process_component(component, component_info[component], session)
+    # Process single component
+    if args.component != 'all':
+        if args.component in COMPONENT_INFO:
+            specific_component_info = {args.component: COMPONENT_INFO[args.component]}
+            # Get repository metadata => releases, tags and commits
+            logging.info(f'Fetching repository metadata for the component {args.component}')
+            repo_metadata = get_repository_metadata(specific_component_info, session)
+            if not repo_metadata:
+                sys.exit(1)
+            process_component(args.component, COMPONENT_INFO[args.component], repo_metadata, session)
         else:
-            logging.error(f'Component {component} not found in config.')
+            logging.error(f'Component {args.component} not found in config.')
             sys.exit(1)
+    # Process all components in the configuration file concurrently
     else:
-        with ThreadPoolExecutor(max_workers=max_workers) as executor:
+        # Get repository metadata => releases, tags and commits
+        logging.info('Fetching repository metadata for all components')
+        repo_metadata = get_repository_metadata(COMPONENT_INFO, session)
+        if not repo_metadata:
+            sys.exit(1)
+        with ThreadPoolExecutor(max_workers=args.max_workers) as executor:
             futures = []
             logging.info(f'Running with {executor._max_workers} executors')
-            for component, component_data in component_info.items():
-                futures.append(executor.submit(process_component, component, component_data, session))
+            for component, component_data in COMPONENT_INFO.items():
+                futures.append(executor.submit(process_component, component, component_data, repo_metadata, session))
             for future in futures:
                 future.result()
 
+    # CI - save JSON file
     if args.ci_check:
-        save_json_file(path_version_diff, version_diff)
+        safe_save_files(PATH_VERSION_DIFF, version_diff, save_json_file)
 
-    save_yaml_file(path_checksum, checksum_yaml_data)
-    save_yaml_file(path_download, download_yaml_data)
-    save_readme(path_readme)
+    # Save configurations
+    else:
+        safe_save_files(PATH_CHECKSUM, checksum_yaml_data, save_yaml_file)
+        safe_save_files(PATH_DOWNLOAD, download_yaml_data, save_yaml_file)
+        safe_save_files(PATH_README, readme_data, save_readme)
+
+    logging.info('Finished.')
 
 
 if __name__ == '__main__':
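`process_component` now walks every stable patch release of the latest minor line instead of only the newest tag, so checksums for older patches land in `checksums.yml` as well. The filter-and-sort step of `get_patch_versions` in miniature, with fabricated tag names:

```python
import re

latest_version = 'v1.31.2'
tags = ['v1.31.2', 'v1.31.1', 'v1.31.0-rc.1', 'v1.30.5', 'v1.31.0']  # invented tags

major, minor = re.match(r'v?(\d+)\.(\d+)', latest_version).groups()
stable = re.compile(rf'^v?{major}\.{minor}(\.\d+)?$')  # no rc, alpha, dev, etc.

patch_versions = [t for t in tags if stable.match(t)]
patch_versions.sort(key=lambda v: list(map(int, re.findall(r'\d+', v))))
print(patch_versions)  # ['v1.31.0', 'v1.31.1', 'v1.31.2']
```

Sorting oldest-first matters because `update_checksum` prepends each version, leaving the newest checksum at the top of the resulting mapping.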
@ -478,10 +528,9 @@ if __name__ == '__main__':
|
||||||
     parser.add_argument('--loglevel', default='INFO', help='Set the log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)')
     parser.add_argument('--component', default='all', help='Specify a component to process, default is all components')
     parser.add_argument('--max-workers', type=int, default=4, help='Maximum number of concurrent workers, use with caution(sometimes less is more)')
-    parser.add_argument('--skip-checksum', action='store_true', help='Skip checksum if the current version is up to date')
     parser.add_argument('--ci-check', action='store_true', help='Check versions, store discrepancies in version_diff.json')
+    parser.add_argument('--graphql-number-of-entries', type=int, default=10, help='Number of releases/tags to retrieve from Github GraphQL per component (default: 10)')
+    parser.add_argument('--graphql-number-of-commits', type=int, default=5, help='Number of commits to retrieve from Github GraphQL per tag (default: 5)')
     args = parser.parse_args()

-    main(args.loglevel, args.component, args.max_workers)
+    main()
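The two new `--graphql-*` flags presumably cap how much history each GraphQL query pulls per component. A hedged sketch of how such a knob is typically interpolated into a GitHub GraphQL query; the exact query the script builds is not shown in this hunk, so the field layout here is an assumption:

```python
# Hedged sketch: how --graphql-number-of-entries might size a releases query.
# The field selection is assumed, not copied from the script.
def build_releases_query(owner, repo, entries=10):
    return """
    {
      repository(owner: "%s", name: "%s") {
        releases(first: %d, orderBy: {field: CREATED_AT, direction: DESC}) {
          nodes { tagName publishedAt }
        }
      }
    }
    """ % (owner, repo, entries)

print(build_releases_query('kubernetes-sigs', 'kubespray', entries=10))
```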
@@ -1,109 +1,68 @@
-import os
-import re
 import sys
 import json
 import argparse
-import requests

 # Do not commit any prints if the script doesn't exit with error code
 # Otherwise it will be part of the PR body

-github_api_url = 'https://api.github.com/graphql'
-gh_token = os.getenv('GH_TOKEN')
-
-def get_commits(tag, release, number_of_commits=5):
-    owner = release['owner']
-    repo = release['repo']
-    repo_url = f'https://github.com/{owner}/{repo}'
-
-    query = """
-    {
-      repository(owner: "%s", name: "%s") {
-        ref(qualifiedName: "refs/tags/%s") {
-          target {
-            ... on Tag {
-              target {
-                ... on Commit {
-                  history(first: %s) {
-                    edges {
-                      node {
-                        oid
-                        message
-                        url
-                      }
-                    }
-                  }
-                }
-              }
-            }
-            ... on Commit {
-              # In case the tag directly points to a commit
-              history(first: %s) {
-                edges {
-                  node {
-                    oid
-                    message
-                    url
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-    """ % (owner, repo, tag, number_of_commits, number_of_commits)
-
-    headers = {'Authorization': f'Bearer {gh_token}'}
-    response = requests.post(github_api_url, json={'query': query}, headers=headers)
-
-    if response.status_code == 200:
-        try:
-            data = response.json()
-            target = data['data']['repository']['ref']['target']
-
-            if 'history' in target:
-                commits = target['history']['edges']
-            elif 'target' in target and 'history' in target['target']:
-                commits = target['target']['history']['edges']
-            else:
-                # print('No commit history found.')
-                return None
-
-            pr_commits = '\n<details>\n<summary>Commits</summary>\n\n'
-            for commit in commits:
-                node = commit['node']
-                short_oid = node['oid'][:7]
-                commit_message = node['message'].split('\n')[0]
-                commit_message = link_pull_requests(commit_message, repo_url)
-                commit_url = node['url']
-                pr_commits += f'- [`{short_oid}`]({commit_url}) {commit_message} \n'
-            pr_commits += '\n</details>'
-            return pr_commits
-        except Exception as e:
-            # print(f'Error parsing commits: {e}')
-            # print(f'data: {response.text}')
-            return None
-    else:
-        # print(f'GraphQL query failed with status code {response.status_code}')
-        return None
+def load_json(component):
+    try:
+        with open('version_diff.json', 'r') as f:
+            repo_metadata = json.load(f)
+            component_data = repo_metadata.get(component)
+            return component_data
+    except Exception as e:
+        return None
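The deleted `get_commits()` resolved a tag to its recent commits with a single GraphQL call, covering both annotated tags (a Tag wrapping a Commit) and lightweight tags (a Commit directly). A condensed, runnable version of that request, using the same endpoint and bearer-token header as the removed code:

```python
import os
import requests

# Condensed reproduction of the deleted get_commits() request shape.
QUERY = """
{
  repository(owner: "%s", name: "%s") {
    ref(qualifiedName: "refs/tags/%s") {
      target {
        ... on Tag { target { ... on Commit { history(first: %d) { edges { node { oid message url } } } } } }
        ... on Commit { history(first: %d) { edges { node { oid message url } } } }
      }
    }
  }
}
"""

def tag_commits(owner, repo, tag, n=5):
    headers = {'Authorization': f"Bearer {os.getenv('GH_TOKEN')}"}
    resp = requests.post('https://api.github.com/graphql',
                         json={'query': QUERY % (owner, repo, tag, n, n)},
                         headers=headers)
    resp.raise_for_status()
    return resp.json()
```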
+
+def get_version_commits(version, component_metadata):
+    tags = component_metadata.get('refs', {}).get('nodes', [])
+    for tag in tags:
+        if tag['name'] == version:
+            target = tag.get('target', {})
+            # Check if the target is a Tag pointing to a Commit
+            if 'history' in target.get('target', {}):
+                commit_history = target['target']['history'].get('edges', [])
+            # Check if the target is directly a Commit object
+            elif 'history' in target:
+                commit_history = target['history'].get('edges', [])
+            else:
+                return None
+            commits = []
+            for commit in commit_history:
+                commit_node = commit.get('node', {})
+                commit_info = {
+                    'oid': commit_node.get('oid'),
+                    'message': commit_node.get('message'),
+                    'url': commit_node.get('url')
+                }
+                commits.append(commit_info)
+            if commits:
+                return commits
+    return None
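`get_version_commits()` has to probe two payload shapes because the GraphQL ref target differs by tag type. A small illustration with assumed minimal payloads (field values are made up):

```python
# Two payload shapes the traversal above must handle (assumed examples):
annotated = {'name': 'v1.2.3',
             'target': {'target': {'history': {'edges': [
                 {'node': {'oid': 'abc1234def', 'message': 'Fix', 'url': 'https://example.invalid'}}]}}}}
lightweight = {'name': 'v1.2.4',
               'target': {'history': {'edges': []}}}

def history_edges(tag):
    target = tag.get('target', {})
    if 'history' in target.get('target', {}):   # annotated: Tag -> Commit
        return target['target']['history'].get('edges', [])
    if 'history' in target:                     # lightweight: Commit directly
        return target['history'].get('edges', [])
    return None

print(len(history_edges(annotated)), len(history_edges(lightweight)))  # 1 0
```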
-def replace_match(match, repo_url):
-    pr_number = match.group(2)
-    return f'{match.group(1)}[# {pr_number}]({repo_url}/pull/{pr_number}){match.group(3)}'
-
-def link_pull_requests(input, repo_url):
-    return re.sub(r'(\(?)#(\d+)(\)?)', lambda match: replace_match(match, repo_url), input)
+def get_version_description(version, repo_metadata):
+    if repo_metadata:
+        releases = repo_metadata.get('releases', {}).get('nodes', [])
+        for release in releases:
+            if release.get('tagName') == version:
+                description = release.get('description', None)
+                return format_description(description)
+    return None

-def format_description(description, length=20):
+def handle_reference(input):
+    return input.replace('github.com', 'redirect.github.com') # Prevent reference in the sourced PR
+
+# Split description into visible and collapsed
+def format_description(description):
+    description = handle_reference(description)
     lines = description.splitlines()
-    if len(lines) > length:
-        first_part = '\n'.join(lines[:length])
-        collapsed_part = '\n'.join(lines[length:])
+    if len(lines) > args.description_number_of_lines:
+        first_part = '\n'.join(lines[:args.description_number_of_lines])
+        collapsed_part = '\n'.join(lines[args.description_number_of_lines:])

         formatted_description = f"""{first_part}

 <details>
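The new `format_description()` keeps the first N lines visible and folds the remainder into a `<details>` block, with N now coming from `--description-number-of-lines` instead of a function argument. A standalone sketch of the same split; the summary label and exact markup here are assumed, since the middle of the function falls outside this hunk:

```python
# Standalone variant of the split; `limit` stands in for the global
# args.description_number_of_lines used by the script.
def split_description(description, limit=20):
    lines = description.splitlines()
    if len(lines) <= limit:
        return description
    first = '\n'.join(lines[:limit])
    rest = '\n'.join(lines[limit:])
    return f"{first}\n\n<details>\n<summary>More</summary>\n\n{rest}\n\n</details>"

print(split_description('\n'.join(f'line {i}' for i in range(30)), limit=3))
```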
@@ -117,57 +76,55 @@ def format_description(description, length=20):
         else:
             return description

-def main(component):
-    try:
-        with open('version_diff.json') as f:
-            data = json.load(f)
-            data = data[component]
-    except Exception as e:
-        print(f'Error loading version_diff.json or component not found: {e}')
+def main():
+    component_data = load_json(args.component)
+    if not component_data:
+        print('Failed to load component data')
         sys.exit(1)
-
-    release = data['release']
-    owner = release['owner']
-    repo = release['repo']
-
-    if component in ['gvisor_containerd_shim','gvisor_runsc']:
-        name = release.get('name')
-        release_url = f'https://github.com/google/gvisor/releases/tag/{name}'
-        pr_body = f"""
-### {name}
-
-**URL**: [Release {name}]({release_url})
-
-"""
-        commits = get_commits(name, release)
-        if commits:
-            pr_body += commits
-    else:
-        name = release['tagName']
-        tag_name = release['tagName']
-        published_at = release['publishedAt']
-        release_url = release['url']
-        description = release['description']
-        repo_url = 'https://github.com/%s/%s' % (owner, repo)
-        description = link_pull_requests(description, repo_url)
-        pr_body = f"""
-### {name}
-
-**Tag**: {tag_name}
-**Published at**: {published_at}
-**URL**: [Release {tag_name}]({release_url})
-
-#### Description:
-{format_description(description)}
-"""
-        commits = get_commits(name, release)
-        if commits:
-            pr_body += commits
+    owner = component_data.get('owner')
+    repo = component_data.get('repo')
+    latest_version = component_data.get('latest_version')
+    repo_metadata = component_data.get('repo_metadata')
+    release_url = f'https://github.com/{owner}/{repo}/releases/tag/{latest_version}'
+    commits = get_version_commits(latest_version, repo_metadata)
+    description = get_version_description(latest_version, repo_metadata)
+
+    # General info
+    pr_body = f"""
+### {latest_version}
+
+**URL**: [Release {latest_version}]({release_url})
+
+"""
+
+    # Description
+    if description:
+        pr_body += f"""
+#### Description:
+{description}
+"""
+
+    # Commits
+    if commits:
+        pr_commits = '\n<details>\n<summary>Commits</summary>\n\n'
+        for commit in commits:
+            short_oid = commit.get('oid')[:7]
+            message = commit.get('message').split('\n')[0]
+            commit_message = handle_reference(message)
+            # commit_message = link_pull_requests(commit_message, repo_url)
+            commit_url = commit.get('url')
+            pr_commits += f'- [`{short_oid}`]({commit_url}) {commit_message} \n'
+        pr_commits += '\n</details>'
+        pr_body += pr_commits
+
+    # Print body
     print(pr_body)

 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description='Pull Request body generator')
     parser.add_argument('--component', required=True, help='Specify the component to process')
+    parser.add_argument('--description-number-of-lines', type=int, default=20, help='Number of lines to include from the description')
     args = parser.parse_args()

-    main(args.component)
+    main()
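Taken together, the rewritten `main()` implies a minimum shape for each version_diff.json entry: `owner`, `repo`, `latest_version`, and the embedded `repo_metadata` with `releases.nodes` and `refs.nodes`. A hedged reconstruction of one entry, with keys inferred only from the reads above; real entries may carry more, such as the `current_version` the workflows pass around:

```python
# Assumed shape of a version_diff.json entry, inferred from the fields
# main(), get_version_commits() and get_version_description() read.
example_entry = {
    'etcd': {
        'owner': 'etcd-io',
        'repo': 'etcd',
        'current_version': 'v3.5.10',
        'latest_version': 'v3.5.12',
        'repo_metadata': {
            'releases': {'nodes': [{'tagName': 'v3.5.12', 'description': '...'}]},
            'refs': {'nodes': [{'name': 'v3.5.12', 'target': {'history': {'edges': []}}}]},
        },
    }
}
```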