diff --git a/.github/workflows/dependency-check.yml b/.github/workflows/dependency-check.yml
index c6f91541b..07b5f4272 100644
--- a/.github/workflows/dependency-check.yml
+++ b/.github/workflows/dependency-check.yml
@@ -2,7 +2,7 @@ name: Dependency check version
on:
schedule:
- - cron: '0 0 * * 0' # Every Sunday at 00:00 UTC
+ - cron: '0 3 * * 1' # Every Monday at 03:00 UTC
workflow_dispatch:
jobs:
@@ -42,14 +42,14 @@ jobs:
component=$(_jq '.key')
current_version=$(_jq '.value.current_version')
- latest_version=$(_jq '.value.latest_version')
+ processed_latest_version=$(_jq '.value.processed_latest_version')
- echo "Triggering update for $component from $current_version to $latest_version"
+ echo "Triggering update for $component from $current_version to $processed_latest_version"
gh workflow run dependency-pull-request.yml \
-f component=$component \
-f current_version=$current_version \
- -f latest_version=$latest_version \
+ -f latest_version=$processed_latest_version \
count=$((count + 1))
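
Note: the loop above iterates over the entries that `dependency_updater.py --ci-check` writes into `version_diff.json`; the `_jq` helper (defined earlier in the workflow, outside this hunk) decodes one base64-encoded entry per row. A minimal sketch of the shape of one entry, with keys taken from the `process_component` hunk further down and purely illustrative values:

    # Sketch of a single version_diff.json entry (values are examples, not real data)
    version_diff = {
        'containerd': {
            'current_version': '1.7.13',
            'latest_version': 'v1.7.14',            # raw tag, used for the PR title
            'processed_latest_version': '1.7.14',   # normalized tag, passed to the bump workflow
            'owner': 'containerd',
            'repo': 'containerd',
            'repo_metadata': {},                    # releases/tags/commits from the GraphQL query
        },
    }
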
diff --git a/.github/workflows/dependency-pull-request.yml b/.github/workflows/dependency-pull-request.yml
index 863f9ba9f..1dc7dd807 100644
--- a/.github/workflows/dependency-pull-request.yml
+++ b/.github/workflows/dependency-pull-request.yml
@@ -1,4 +1,5 @@
name: Dependency create bump PR
+run-name: Create bump PR for ${{ inputs.component }} from ${{ inputs.current_version }} to ${{ inputs.latest_version }}
on:
workflow_dispatch:
@@ -38,7 +39,7 @@ jobs:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: python scripts/dependency_updater.py --ci-check --component ${{ github.event.inputs.component }}
- - name: Update component version
+ - name: Update component versions and checksums
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: python scripts/dependency_updater.py --component ${{ github.event.inputs.component }}
@@ -55,10 +56,13 @@ jobs:
echo "$pr_body" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
+ - name: Cleanup cache and version_diff.json
+ run: rm -r cache/ version_diff.json
+
- name: Create Pull Request
uses: peter-evans/create-pull-request@v5
with:
- branch: "dependency-${{ github.event.inputs.component }}-${{ github.event.inputs.latest_version }}"
+ branch: "update-dependency-${{ github.event.inputs.component }}"
commit-message: "Bump ${{ github.event.inputs.component }} from ${{ github.event.inputs.current_version }} to ${{ github.event.inputs.latest_version }}"
title: "Bump ${{ github.event.inputs.component }} from ${{ github.event.inputs.current_version }} to ${{ github.event.inputs.latest_version }}"
body: ${{ steps.generate_pr_body.outputs.pr_body }}
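
Note: the `generate_pr_body` step above uses GitHub Actions' documented heredoc syntax for multiline step outputs (`pr_body<<EOF ... EOF` appended to `$GITHUB_OUTPUT`). A hypothetical Python equivalent of what the shell snippet does, in case the script ever writes the output itself; in practice a unique random delimiter is safer than a literal `EOF` if the value might contain that string:

    import os

    def set_multiline_output(name, value):
        # GitHub Actions multiline outputs use a heredoc-style delimiter:
        #   name<<DELIMITER
        #   <value>
        #   DELIMITER
        with open(os.environ['GITHUB_OUTPUT'], 'a') as fh:
            fh.write(f'{name}<<EOF\n{value}\nEOF\n')

    # set_multiline_output('pr_body', pr_body)
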
diff --git a/scripts/dependency_config.py b/scripts/dependency_config.py
index 6b2afe671..8ff5da237 100644
--- a/scripts/dependency_config.py
+++ b/scripts/dependency_config.py
@@ -1,222 +1,221 @@
-# Arhitectures and OSes
-architectures = ['arm', 'arm64', 'amd64', 'ppc64le']
-oses = ['darwin', 'linux', 'windows']
+ARCHITECTURES = ['arm', 'arm64', 'amd64', 'ppc64le']
+OSES = ['darwin', 'linux', 'windows']
+README_COMPONENTS = ['etcd', 'containerd', 'crio', 'calicoctl', 'krew', 'helm']
+SHA256REGEX = r'(\b[a-f0-9]{64})\b'
-# Paths
-path_download = 'roles/kubespray-defaults/defaults/main/download.yml'
-path_checksum = 'roles/kubespray-defaults/defaults/main/checksums.yml'
-path_main = 'roles/kubespray-defaults/defaults/main/main.yml'
-path_readme = 'README.md'
-path_version_diff = 'version_diff.json'
+PATH_DOWNLOAD = 'roles/kubespray-defaults/defaults/main/download.yml'
+PATH_CHECKSUM = 'roles/kubespray-defaults/defaults/main/checksums.yml'
+PATH_MAIN = 'roles/kubespray-defaults/defaults/main/main.yml'
+PATH_README = 'README.md'
+PATH_VERSION_DIFF = 'version_diff.json'
-
-component_info = {
+COMPONENT_INFO = {
'calico_crds': {
'owner': 'projectcalico',
'repo': 'calico',
- 'url_download': 'https://github.com/projectcalico/calico/archive/VERSION.tar.gz',
+ 'url_download': 'https://github.com/projectcalico/calico/archive/{version}.tar.gz',
'placeholder_version': ['calico_version'],
'placeholder_checksum' : 'calico_crds_archive_checksums',
'checksum_structure' : 'simple',
- 'release_type' : 'release',
+ 'sha_regex' : r'', # binary
},
'calicoctl': {
'owner': 'projectcalico',
'repo': 'calico',
- 'url_download': 'https://github.com/projectcalico/calico/releases/download/VERSION/calicoctl-linux-ARCH',
+ 'url_download': 'https://github.com/projectcalico/calico/releases/download/{version}/SHA256SUMS',
'placeholder_version': ['calico_version'],
'placeholder_checksum' : 'calicoctl_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'linux-{arch}\b',
},
'ciliumcli': {
'owner': 'cilium',
'repo': 'cilium-cli',
- 'url_download': 'https://github.com/cilium/cilium-cli/releases/download/VERSION/cilium-linux-ARCH.tar.gz.sha256sum',
+ 'url_download': 'https://github.com/cilium/cilium-cli/releases/download/{version}/cilium-linux-{arch}.tar.gz.sha256sum',
'placeholder_version': ['cilium_cli_version'],
'placeholder_checksum' : 'ciliumcli_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'{arch}',
},
'cni': {
'owner': 'containernetworking',
'repo': 'plugins',
- 'url_download': 'https://github.com/containernetworking/plugins/releases/download/VERSION/cni-plugins-linux-ARCH-VERSION.tgz.sha256',
+ 'url_download': 'https://github.com/containernetworking/plugins/releases/download/{version}/cni-plugins-linux-{arch}-{version}.tgz.sha256',
'placeholder_version': ['cni_version'],
'placeholder_checksum' : 'cni_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'{arch}',
},
'containerd': {
'owner': 'containerd',
'repo': 'containerd',
- 'url_download': 'https://github.com/containerd/containerd/releases/download/vVERSION/containerd-VERSION-linux-ARCH.tar.gz.sha256sum',
+ 'url_download': 'https://github.com/containerd/containerd/releases/download/v{version}/containerd-{version}-linux-{arch}.tar.gz.sha256sum',
'placeholder_version': ['containerd_version'],
'placeholder_checksum' : 'containerd_archive_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'{arch}',
},
'crictl': {
'owner': 'kubernetes-sigs',
'repo': 'cri-tools',
- 'url_download': 'https://github.com/kubernetes-sigs/cri-tools/releases/download/VERSION/crictl-VERSION-linux-ARCH.tar.gz.sha256',
+ 'url_download': 'https://github.com/kubernetes-sigs/cri-tools/releases/download/{version}/crictl-{version}-linux-{arch}.tar.gz.sha256',
'placeholder_version': ['crictl_supported_versions', 'kube_major_version'],
'placeholder_checksum' : 'crictl_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'simple', # only sha
},
'cri_dockerd': {
'owner': 'Mirantis',
'repo': 'cri-dockerd',
- 'url_download': 'https://github.com/Mirantis/cri-dockerd/releases/download/vVERSION/cri-dockerd-VERSION.ARCH.tgz',
+ 'url_download': 'https://github.com/Mirantis/cri-dockerd/releases/download/v{version}/cri-dockerd-{version}.{arch}.tgz',
'placeholder_version': ['cri_dockerd_version'],
'placeholder_checksum' : 'cri_dockerd_archive_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'', # binary
},
'crio': {
'owner': 'cri-o',
'repo': 'cri-o',
- 'url_download': 'https://storage.googleapis.com/cri-o/artifacts/cri-o.ARCH.VERSION.tar.gz',
+ 'url_download': 'https://storage.googleapis.com/cri-o/artifacts/cri-o.{arch}.{version}.tar.gz.sha256sum',
'placeholder_version': ['crio_supported_versions', 'kube_major_version'],
'placeholder_checksum' : 'crio_archive_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'{arch}',
},
'crun': {
'owner': 'containers',
'repo': 'crun',
- 'url_download': 'https://github.com/containers/crun/releases/download/VERSION/crun-VERSION-linux-ARCH',
+ 'url_download': 'https://github.com/containers/crun/releases/download/{version}/crun-{version}-linux-{arch}',
'placeholder_version': ['crun_version'],
'placeholder_checksum' : 'crun_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'', # binary
},
'etcd': {
'owner': 'etcd-io',
'repo': 'etcd',
- 'url_download': 'https://github.com/etcd-io/etcd/releases/download/VERSION/SHA256SUMS',
+ 'url_download': 'https://github.com/etcd-io/etcd/releases/download/{version}/SHA256SUMS',
'placeholder_version': ['etcd_supported_versions', 'kube_major_version'],
'placeholder_checksum' : 'etcd_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'linux-{arch}\.',
},
'gvisor_containerd_shim': {
'owner': 'google',
'repo': 'gvisor',
- 'url_download': 'https://storage.googleapis.com/gvisor/releases/release/VERSION/ARCH/containerd-shim-runsc-v1',
+ 'url_download': 'https://storage.googleapis.com/gvisor/releases/release/{version}/{arch}/containerd-shim-runsc-v1',
'placeholder_version': ['gvisor_version'],
'placeholder_checksum' : 'gvisor_containerd_shim_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'tag',
+ 'sha_regex' : r'', # binary
},
'gvisor_runsc': {
'owner': 'google',
'repo': 'gvisor',
- 'url_download': 'https://storage.googleapis.com/gvisor/releases/release/VERSION/ARCH/runsc',
+ 'url_download': 'https://storage.googleapis.com/gvisor/releases/release/{version}/{arch}/runsc',
'placeholder_version': ['gvisor_version'],
'placeholder_checksum' : 'gvisor_runsc_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'tag',
+ 'sha_regex' : r'', # binary
},
'helm': {
'owner': 'helm',
'repo': 'helm',
- 'url_download': 'https://get.helm.sh/helm-VERSION-linux-ARCH.tar.gz',
+ 'url_download': 'https://get.helm.sh/helm-{version}-linux-{arch}.tar.gz.sha256sum',
'placeholder_version': ['helm_version'],
'placeholder_checksum' : 'helm_archive_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'{arch}',
},
'kata_containers': {
'owner': 'kata-containers',
'repo': 'kata-containers',
- 'url_download': 'https://github.com/kata-containers/kata-containers/releases/download/VERSION/kata-static-VERSION-ARCH.tar.xz',
+ 'url_download': 'https://github.com/kata-containers/kata-containers/releases/download/{version}/kata-static-{version}-{arch}.tar.xz',
'placeholder_version': ['kata_containers_version'],
'placeholder_checksum' : 'kata_containers_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'', # binary
},
'krew': {
'owner': 'kubernetes-sigs',
'repo': 'krew',
- 'url_download': 'https://github.com/kubernetes-sigs/krew/releases/download/VERSION/krew-OS_ARCH.tar.gz.sha256',
+ 'url_download': 'https://github.com/kubernetes-sigs/krew/releases/download/{version}/krew-{os_name}_{arch}.tar.gz.sha256',
'placeholder_version': ['krew_version'],
'placeholder_checksum' : 'krew_archive_checksums',
'checksum_structure' : 'os_arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'simple', # only sha
},
'kubeadm': {
'owner': 'kubernetes',
'repo': 'kubernetes',
- 'url_download': 'https://dl.k8s.io/release/VERSION/bin/linux/ARCH/kubeadm.sha256',
+ 'url_download': 'https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubeadm.sha256',
'placeholder_version': ['kube_version'],
'placeholder_checksum' : 'kubeadm_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'simple', # only sha
},
'kubectl': {
'owner': 'kubernetes',
'repo': 'kubernetes',
- 'url_download': 'https://dl.k8s.io/release/VERSION/bin/linux/ARCH/kubectl.sha256',
+ 'url_download': 'https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubectl.sha256',
'placeholder_version': ['kube_version'],
'placeholder_checksum' : 'kubectl_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'simple', # only sha
},
'kubelet': {
'owner': 'kubernetes',
'repo': 'kubernetes',
- 'url_download': 'https://dl.k8s.io/release/VERSION/bin/linux/ARCH/kubelet.sha256',
+ 'url_download': 'https://dl.k8s.io/release/{version}/bin/linux/{arch}/kubelet.sha256',
'placeholder_version': ['kube_version'],
'placeholder_checksum' : 'kubelet_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'simple', # only sha
},
'nerdctl': {
'owner': 'containerd',
'repo': 'nerdctl',
- 'url_download': 'https://github.com/containerd/nerdctl/releases/download/vVERSION/SHA256SUMS',
+ 'url_download': 'https://github.com/containerd/nerdctl/releases/download/v{version}/SHA256SUMS',
'placeholder_version': ['nerdctl_version'],
'placeholder_checksum' : 'nerdctl_archive_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'nerdctl-(?!full)[\w.-]+-linux-{arch}\.tar\.gz',
},
'runc': {
'owner': 'opencontainers',
'repo': 'runc',
- 'url_download': 'https://github.com/opencontainers/runc/releases/download/VERSION/runc.ARCH',
+ 'url_download': 'https://github.com/opencontainers/runc/releases/download/{version}/runc.sha256sum',
'placeholder_version': ['runc_version'],
'placeholder_checksum' : 'runc_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'\.{arch}\b',
},
'skopeo': {
'owner': 'containers',
'repo': 'skopeo',
- 'url_download': 'https://github.com/lework/skopeo-binary/releases/download/VERSION/skopeo-linux-ARCH',
+ 'url_download': 'https://github.com/lework/skopeo-binary/releases/download/{version}/skopeo-linux-{arch}',
'placeholder_version': ['skopeo_version'],
'placeholder_checksum' : 'skopeo_binary_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'', # binary
},
'youki': {
'owner': 'containers',
'repo': 'youki',
- 'url_download': 'https://github.com/containers/youki/releases/download/vVERSION/youki-VERSION-ARCH.tar.gz',
+ 'url_download': 'https://github.com/containers/youki/releases/download/v{version}/youki-{version}-{arch}.tar.gz',
'placeholder_version': ['youki_version'],
'placeholder_checksum' : 'youki_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'', # binary
},
'yq': {
'owner': 'mikefarah',
'repo': 'yq',
- 'url_download': 'https://github.com/mikefarah/yq/releases/download/VERSION/checksums-bsd',
+ 'url_download': 'https://github.com/mikefarah/yq/releases/download/{version}/checksums-bsd',
'placeholder_version': ['yq_version'],
'placeholder_checksum' : 'yq_checksums',
'checksum_structure' : 'arch',
- 'release_type' : 'release',
+ 'sha_regex' : r'SHA256 \([^)]+linux_{arch}\)',
},
}
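
Note: with this change the `VERSION`/`ARCH`/`OS` tokens become ordinary `str.format` placeholders, and `sha_regex` follows a three-way convention interpreted by `calculate_checksum` in the updater: an empty pattern means "no checksum file published, download the artifact and hash it", the literal `simple` means "the file contains only a checksum", and anything else is a per-line pattern combined with `SHA256REGEX`. A small sketch of the expansion, assuming the module is importable from the repository root:

    from dependency_config import COMPONENT_INFO

    entry = COMPONENT_INFO['containerd']
    url = entry['url_download'].format(version='1.7.14', arch='amd64')
    # https://github.com/containerd/containerd/releases/download/v1.7.14/containerd-1.7.14-linux-amd64.tar.gz.sha256sum
    sha_regex = entry['sha_regex'].format(arch='amd64')  # -> 'amd64'
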
diff --git a/scripts/dependency_updater.py b/scripts/dependency_updater.py
index ebb455f2e..2d78a19eb 100644
--- a/scripts/dependency_updater.py
+++ b/scripts/dependency_updater.py
@@ -3,7 +3,6 @@ import re
import sys
import logging
import requests
-import time
import json
import argparse
import hashlib
@@ -11,7 +10,7 @@ from ruamel.yaml import YAML
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from concurrent.futures import ThreadPoolExecutor
-from dependency_config import component_info, architectures, oses, path_download, path_checksum, path_main, path_readme, path_version_diff
+from dependency_config import ARCHITECTURES, OSES, README_COMPONENTS, PATH_DOWNLOAD, PATH_CHECKSUM, PATH_MAIN, PATH_README, PATH_VERSION_DIFF, COMPONENT_INFO, SHA256REGEX
yaml = YAML()
@@ -48,28 +47,10 @@ def get_session_with_retries():
pool_maxsize=50,
max_retries=Retry(total=3, backoff_factor=1)
)
-
session.mount('http://', adapter)
session.mount('https://', adapter)
return session
-def load_from_cache(component):
- cache_file = os.path.join(cache_dir, f'{component}.json')
- if os.path.exists(cache_file):
- file_age = time.time() - os.path.getmtime(cache_file)
- if file_age < cache_expiry_seconds:
- logging.info(f'Using cached release info for {component}')
- with open(cache_file, 'r') as f:
- return json.load(f)
- return None
-
-def save_to_cache(component, data):
- os.makedirs(cache_dir, exist_ok=True)
- cache_file = os.path.join(cache_dir, f'{component}.json')
- with open(cache_file, 'w') as f:
- json.dump(data, f, indent=2)
- logging.info(f'Cached release info for {component}')
-
def get_current_version(component, component_data):
kube_major_version = component_data['kube_major_version']
placeholder_version = [kube_major_version if item == 'kube_major_version' else item for item in component_data['placeholder_version']]
@@ -81,180 +62,222 @@ def get_current_version(component, component_data):
current_version = current_version.get(key)
return current_version
-def get_release(component, component_data, session, number_of_releases=10):
- release = load_from_cache(component)
- if not release:
- try:
- query = """
- query {
- repository(owner: "%s", name: "%s") {
- releases(first: %s, orderBy: {field: CREATED_AT, direction: DESC}) {
- nodes {
- tagName
- url
- description
- publishedAt
- isLatest
- }
- }
- }
- }
- """ % (component_data['owner'], component_data['repo'], number_of_releases)
+def get_latest_version(component_repo_metadata):
+ releases = component_repo_metadata.get('releases', {}).get('nodes', [])
+ for release in releases:
+ if release.get('isLatest', False):
+ return release['tagName']
+ tags = component_repo_metadata.get('refs', {}).get('nodes', []) # fall back to tags
+ if tags:
+ return tags[0]['name']
+ return None
- headers = {
- 'Authorization': f'Bearer {gh_token}',
- 'Content-Type': 'application/json'
- }
+def get_patch_versions(component, latest_version, component_repo_metadata):
+ if component in ['gvisor_runsc', 'gvisor_containerd_shim']: # gvisor uses date-based release tags with no major.minor patch series
+ return [latest_version]
+ match = re.match(r'v?(\d+)\.(\d+)', latest_version)
+ if not match:
+ logging.error(f'Invalid version format: {latest_version}')
+ return []
+ major_version, minor_version = match.groups()
+ patch_versions = []
+ stable_version_pattern = re.compile(rf'^v?{major_version}\.{minor_version}(\.\d+)?$') # no rc, alpha, dev, etc.
+ # Search releases
+ releases = component_repo_metadata.get('releases', {}).get('nodes', [])
+ for release in releases:
+ version = release.get('tagName', '')
+ if stable_version_pattern.match(version):
+ patch_versions.append(version)
+ # Fallback to tags
+ if not patch_versions:
+ tags = component_repo_metadata.get('refs', {}).get('nodes', [])
+ for tag in tags:
+ version = tag.get('name', '')
+ if stable_version_pattern.match(version):
+ patch_versions.append(version)
+ patch_versions.sort(key=lambda v: list(map(int, re.findall(r'\d+', v)))) # numeric sort, oldest first, so checksum entries are inserted in order
+ return patch_versions
- response = session.post(github_api_url, json={'query': query}, headers=headers)
- response.raise_for_status()
-
- data = response.json()
- logging.debug(f'Component {component} releases: {data}')
- # Look for the release marked as latest
- for release_node in data['data']['repository']['releases']['nodes']:
- if release_node['isLatest']:
- release = release_node
- save_to_cache(component, release)
- return release
-
- logging.warning(f'No latest release found for {component}')
- return None
- except Exception as e:
- logging.error(f'Error fetching latest release for {component}: {e}')
- return None
- return release
-
-def get_release_tag(component, component_data, session):
- tag = load_from_cache(component)
- if not tag:
- try:
- query = """
- query {
- repository(owner: "%s", name: "%s") {
- refs(refPrefix: "refs/tags/", first: 1, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}) {
- edges {
- node {
+def get_repository_metadata(component_info, session):
+ query_parts = []
+ for component, data in component_info.items():
+ owner = data['owner']
+ repo = data['repo']
+ query_parts.append(f"""
+ {component}: repository(owner: "{owner}", name: "{repo}") {{
+ releases(first: {args.graphql_number_of_entries}, orderBy: {{field: CREATED_AT, direction: DESC}}) {{
+ nodes {{
+ tagName
+ url
+ description
+ publishedAt
+ isLatest
+ }}
+ }}
+ refs(refPrefix: "refs/tags/", first: {args.graphql_number_of_entries}, orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
+ nodes {{
name
- }
- }
- }
- }
- }
- """ % (component_data['owner'], component_data['repo'])
+ target {{
+ ... on Tag {{
+ target {{
+ ... on Commit {{
+ history(first: {args.graphql_number_of_commits}) {{
+ edges {{
+ node {{
+ oid
+ message
+ url
+ }}
+ }}
+ }}
+ }}
+ }}
+ }}
+ ... on Commit {{
+ # In case the tag directly points to a commit
+ history(first: {args.graphql_number_of_commits}) {{
+ edges {{
+ node {{
+ oid
+ message
+ url
+ }}
+ }}
+ }}
+ }}
+ }}
+ }}
+ }}
+ }}
+ """)
- headers = {
- 'Authorization': f'Bearer {gh_token}',
- 'Content-Type': 'application/json'
- }
+ query = f"query {{ {''.join(query_parts)} }}"
+ headers = {
+ 'Authorization': f'Bearer {gh_token}',
+ 'Content-Type': 'application/json'
+ }
- response = session.post(github_api_url, json={'query': query}, headers=headers)
- response.raise_for_status()
-
- data = response.json()
- logging.debug(f'Component {component} releases: {data}')
- tag = data['data']['repository']['refs']['edges'][0]['node']
- save_to_cache(component, tag)
- return tag
- except Exception as e:
- logging.error(f'Error fetching tags for {component}: {e}')
+ try:
+ response = session.post(github_api_url, json={'query': query}, headers=headers)
+ response.raise_for_status()
+ json_data = response.json()
+ data = json_data.get('data')
+ if data: # ensure 'data' is not None and not empty
+ logging.debug(f'GraphQL data response:\n{json.dumps(data, indent=2)}')
+ return data
+ else:
+ logging.error(f'GraphQL query returned errors: {json_data}')
return None
- return tag
+ except Exception as e:
+ logging.error(f'Error fetching repository metadata: {e}')
+ return None
-def calculate_checksum(cachefile, arch, url_download):
- if url_download.endswith('.sha256sum'):
- with open(f'cache/{cachefile}', 'r') as f:
- checksum_line = f.readline().strip()
- return checksum_line.split()[0]
- elif url_download.endswith('SHA256SUMS'):
+def calculate_checksum(cachefile, sha_regex):
+ if sha_regex:
+ logging.debug(f'Searching with regex {sha_regex} in file {cachefile}')
with open(f'cache/{cachefile}', 'r') as f:
for line in f:
- if 'linux' in line and arch in line:
- return line.split()[0]
- elif url_download.endswith('bsd'):
- with open(f'cache/{cachefile}', 'r') as f:
- for line in f:
- if 'SHA256' in line and 'linux' in line and arch in line:
- return line.split()[0]
- sha256_hash = hashlib.sha256()
- with open(f'cache/{cachefile}', 'rb') as f:
- for byte_block in iter(lambda: f.read(4096), b''):
- sha256_hash.update(byte_block)
- return sha256_hash.hexdigest()
+ if sha_regex == 'simple': # Only sha is present in the file
+ pattern = re.compile(SHA256REGEX)
+ else:
+ pattern = re.compile(rf'(?:{SHA256REGEX}.*{sha_regex}|{sha_regex}.*{SHA256REGEX})') # Sha may be at start or end
+ match = pattern.search(line)
+ if match:
+ checksum = match.group(1) or match.group(2)
+ logging.debug(f'Matched line: {line.strip()}')
+ return checksum
+ else: # binary
+ sha256_hash = hashlib.sha256()
+ with open(f'cache/{cachefile}', 'rb') as f:
+ for byte_block in iter(lambda: f.read(4096), b''):
+ sha256_hash.update(byte_block)
+ checksum = sha256_hash.hexdigest()
+ return checksum
-def download_file_and_get_checksum(component, arch, url_download, session):
- cache_file = f'{component}-{arch}'
+def download_file_and_get_checksum(component, arch, url_download, version, sha_regex, session):
+ logging.info(f'Download URL {url_download}')
+ cache_file = f'{component}-{arch}-{version}'
if os.path.exists(f'cache/{cache_file}'):
logging.info(f'Using cached file for {url_download}')
- return calculate_checksum(cache_file, arch, url_download)
+ return calculate_checksum(cache_file, sha_regex)
try:
response = session.get(url_download, timeout=10)
response.raise_for_status()
with open(f'cache/{cache_file}', 'wb') as f:
f.write(response.content)
logging.info(f'Downloaded and cached file for {url_download}')
- return calculate_checksum(cache_file, arch, url_download)
+ return calculate_checksum(cache_file, sha_regex)
except Exception as e:
- logging.error(e)
+ logging.warning(e)
return None
-def get_checksums(component, component_data, version, session):
+def get_checksums(component, component_data, versions, session):
checksums = {}
- url_download_template = component_data['url_download'].replace('VERSION', version)
- if component_data['checksum_structure'] == 'os_arch':
- # OS -> Arch -> Checksum
- for os_name in oses:
- checksums[os_name] = {}
- for arch in architectures:
- url_download = url_download_template.replace('OS', os_name).replace('ARCH', arch)
- checksum = download_file_and_get_checksum(component, arch, url_download, session)
- if not checksum:
- checksum = 0
- checksums[os_name][arch] = checksum
- elif component_data['checksum_structure'] == 'arch':
- # Arch -> Checksum
- for arch in architectures:
- url_download = url_download_template.replace('ARCH', arch)
- checksum = download_file_and_get_checksum(component, arch, url_download, session)
- if not checksum:
- checksum = 0
- checksums[arch] = checksum
- elif component_data['checksum_structure'] == 'simple':
- # Checksum
- checksum = download_file_and_get_checksum(component, '', url_download_template, session)
- if not checksum:
- checksum = 0
- checksums[version] = checksum
+ for version in versions:
+ processed_version = process_version_string(component, version)
+ checksums[version] = {}
+ url_download_template = component_data.get('url_download')
+ if component_data['checksum_structure'] == 'os_arch':
+ # OS -> Arch -> Checksum
+ for os_name in OSES:
+ if os_name not in checksums[version]:
+ checksums[version][os_name] = {}
+ for arch in ARCHITECTURES:
+ url_download = url_download_template.format(arch=arch, os_name=os_name, version=processed_version)
+ sha_regex = component_data.get('sha_regex').format(arch=arch, os_name=os_name)
+ checksum = download_file_and_get_checksum(component, arch, url_download, processed_version, sha_regex, session) or 0
+ checksums[version][os_name][arch] = checksum
+ elif component_data['checksum_structure'] == 'arch':
+ # Arch -> Checksum
+ for arch in ARCHITECTURES:
+ tmp_arch = arch
+ if component == 'youki':
+ tmp_arch = tmp_arch.replace('arm64', 'aarch64-gnu').replace('amd64', 'x86_64-gnu')
+ elif component in ['gvisor_containerd_shim', 'gvisor_runsc']:
+ tmp_arch = tmp_arch.replace("arm64", "aarch64").replace("amd64", "x86_64")
+ url_download = url_download_template.format(arch=tmp_arch, version=processed_version)
+ sha_regex = component_data.get('sha_regex').format(arch=tmp_arch)
+ checksum = download_file_and_get_checksum(component, arch, url_download, processed_version, sha_regex, session) or 0
+ checksums[version][arch] = checksum
+ elif component_data['checksum_structure'] == 'simple':
+ # Checksum
+ url_download = url_download_template.format(version=processed_version)
+ sha_regex = component_data.get('sha_regex')
+ checksum = download_file_and_get_checksum(component, '', url_download, processed_version, sha_regex, session) or 0
+ checksums[version] = checksum # Store checksum for the version
return checksums
-def update_yaml_checksum(component_data, checksums, version):
+def update_checksum(component, component_data, checksums, version):
+ processed_version = process_version_string(component, version)
placeholder_checksum = component_data['placeholder_checksum']
checksum_structure = component_data['checksum_structure']
current = checksum_yaml_data[placeholder_checksum]
+
if checksum_structure == 'simple':
# Simple structure (placeholder_checksum -> version -> checksum)
- current[(version)] = checksums[version]
+ checksum_yaml_data[placeholder_checksum] = {processed_version: checksums, **current}
elif checksum_structure == 'os_arch':
# OS structure (placeholder_checksum -> os -> arch -> version -> checksum)
for os_name, arch_dict in checksums.items():
os_current = current.setdefault(os_name, {})
for arch, checksum in arch_dict.items():
- os_current[arch] = {(version): checksum, **os_current.get(arch, {})}
+ os_current[arch] = {(processed_version): checksum, **os_current.get(arch, {})}
elif checksum_structure == 'arch':
# Arch structure (placeholder_checksum -> arch -> version -> checksum)
for arch, checksum in checksums.items():
- current[arch] = {(version): checksum, **current.get(arch, {})}
- logging.info(f'Updated {placeholder_checksum} with {checksums}')
+ current[arch] = {(processed_version): checksum, **current.get(arch, {})}
+ logging.info(f'Updated {placeholder_checksum} with version {processed_version} and checksums {checksums}')
def resolve_kube_dependent_component_version(component, component_data, version):
kube_major_version = component_data['kube_major_version']
if component in ['crictl', 'crio']:
try:
- component_major_minor_version = get_major_minor_version(version)
- if component_major_minor_version == kube_major_version:
+ component_major_version = get_major_version(version)
+ if component_major_version == kube_major_version:
resolved_version = kube_major_version
else:
- resolved_version = component_major_minor_version
+ resolved_version = component_major_version
except (IndexError, AttributeError):
logging.error(f'Error parsing version for {component}: {version}')
return
@@ -262,7 +285,7 @@ def resolve_kube_dependent_component_version(component, component_data, version)
resolved_version = kube_major_version
return resolved_version
-def update_yaml_version(component, component_data, version):
+def update_version(component, component_data, version):
placeholder_version = component_data['placeholder_version']
resolved_version = resolve_kube_dependent_component_version(component, component_data, version)
updated_placeholder = [
@@ -288,10 +311,15 @@ def update_readme(component, version):
for i, line in enumerate(readme_data):
if component in line and re.search(r'v\d+\.\d+\.\d+', line):
readme_data[i] = re.sub(r'v\d+\.\d+\.\d+', version, line)
- logging.info(f"Updated {component} to {version} in README")
+ logging.info(f'Updated {component} to {version} in README')
break
return readme_data
+def safe_save_files(file_path, data, save_func):
+ if not save_func(file_path, data):
+ logging.error(f'Failed to save file {file_path}')
+ sys.exit(1)
+
def create_json_file(file_path):
new_data = {}
try:
@@ -317,12 +345,13 @@ def load_yaml_file(yaml_file):
return yaml.load(f)
except Exception as e:
logging.error(f'Failed to load {yaml_file}: {e}')
- return {}
+ return None
def save_yaml_file(yaml_file, data):
try:
with open(yaml_file, 'w') as f:
yaml.dump(data, f)
+ return True
except Exception as e:
logging.error(f'Failed to save {yaml_file}: {e}')
return False
@@ -333,12 +362,13 @@ def open_readme(path_readme):
return f.readlines()
except Exception as e:
logging.error(f'Failed to load {path_readme}: {e}')
- return False
+ return None
-def save_readme(path_readme):
+def save_readme(path_readme, data):
try:
with open(path_readme, 'w') as f:
- f.writelines(readme_data)
+ f.writelines(data)
+ return True
except Exception as e:
logging.error(f'Failed to save {path_readme}: {e}')
return False
@@ -353,124 +383,144 @@ def process_version_string(component, version):
version = match.group(1)
return version
-def get_major_minor_version(version):
+def get_major_version(version):
match = re.match(r'^(v\d+)\.(\d+)', version)
if match:
- return f"{match.group(1)}.{match.group(2)}"
- return version
+ return f'{match.group(1)}.{match.group(2)}'
+ return None
-def process_component(component, component_data, session):
+def process_component(component, component_data, repo_metadata, session):
logging.info(f'Processing component: {component}')
+ component_repo_metadata = repo_metadata.get(component, {})
+ # Get current kube version
kube_version = main_yaml_data.get('kube_version')
- kube_major_version = get_major_minor_version(kube_version)
+ kube_major_version = get_major_version(kube_version)
component_data['kube_version'] = kube_version # needed for nested components
component_data['kube_major_version'] = kube_major_version # needed for nested components
- # Get current version
+ # Get current component version
current_version = get_current_version(component, component_data)
if not current_version:
logging.info(f'Stop processing component {component}, current version unknown')
return
- # Get latest version
- if component_data['release_type'] == 'tag':
- release = get_release_tag(component, component_data, session)
- if release:
- latest_version = release.get('name')
- else:
- release = get_release(component, component_data, session)
- latest_version = release.get('tagName')
-
+ # Get latest component version
+ latest_version = get_latest_version(component_repo_metadata)
if not latest_version:
logging.info(f'Stop processing component {component}, latest version unknown.')
return
+ # Kubespray version
+ processed_latest_version = process_version_string(component, latest_version)
-
- latest_version = process_version_string(component, latest_version)
-
- if current_version == latest_version:
+ # Log version comparison
+ if current_version == processed_latest_version:
logging.info(f'Component {component}, version {current_version} is up to date')
- if args.skip_checksum and (current_version == latest_version):
- logging.info(f'Stop processing component {component} due to flag.')
- return
else:
- logging.info(f'Component {component} version discrepancy, current={current_version}, latest={latest_version}')
+ logging.info(f'Component {component} version discrepancy, current={current_version}, latest={processed_latest_version}')
+ # CI - write data and return
if args.ci_check:
- release['component'] = component
- release['owner'] = component_data['owner']
- release['repo'] = component_data['repo']
- release['release_type'] = component_data['release_type']
- if (current_version != latest_version):
- version_diff[component] = {
- 'current_version' : current_version, # needed for dependecy-check
- 'latest_version' : latest_version, # needed for dependecy-check
- 'release' : release # needed for generate_pr_body
- }
+ version_diff[component] = {
+ # used in dependency-check.yml workflow
+ 'current_version' : current_version,
+ 'latest_version' : latest_version, # used for PR name
+ # used in generate_pr_body.py script
+ 'processed_latest_version': processed_latest_version, # used for PR body
+ 'owner' : component_data['owner'],
+ 'repo' : component_data['repo'],
+ 'repo_metadata' : component_repo_metadata,
+ }
return
- checksums = get_checksums(component, component_data, latest_version, session)
- update_yaml_checksum(component_data, checksums, latest_version)
+ # Get patch versions
+ patch_versions = get_patch_versions(component, latest_version, component_repo_metadata)
+ logging.info(f'Component {component} patch versions: {patch_versions}')
+
+ # Get checksums for all patch versions
+ checksums = get_checksums(component, component_data, patch_versions, session)
+ # Update checksums
+ for version in patch_versions:
+ version_checksum = checksums.get(version)
+ update_checksum(component, component_data, version_checksum, version)
+
+ # Update version in configuration
if component not in ['kubeadm', 'kubectl', 'kubelet']: # kubernetes dependent components
- update_yaml_version(component, component_data, latest_version)
- if component in ['etcd', 'containerd', 'crio', 'calicoctl', 'krew', 'helm']: # in README
+ if component != 'calico_crds': # TODO double check if only calicoctl may change calico_version
+ update_version(component, component_data, processed_latest_version)
+
+ # Update version in README
+ if component in README_COMPONENTS:
if component in ['crio', 'crictl']:
- component_major_minor_version = get_major_minor_version(latest_version)
- if component_major_minor_version != kube_major_version: # do not update README
+ component_major_version = get_major_version(processed_latest_version)
+ if component_major_version != kube_major_version: # do not update README, we just added checksums
return
- component = component.replace('crio', 'cri-o')
- elif component == 'containerd':
- latest_version = f'v{latest_version}'
- elif component == 'calicoctl':
- component = component.replace('calicoctl', 'calico')
+ # replace component name to fit readme
+ component = component.replace('crio', 'cri-o').replace('calicoctl', 'calico')
update_readme(component, latest_version)
-
-def main(loglevel, component, max_workers):
- setup_logging(loglevel)
+def main():
+ # Setup logging
+ setup_logging(args.loglevel)
+ # Setup session with retries
session = get_session_with_retries()
+ # Load configuration files
global main_yaml_data, checksum_yaml_data, download_yaml_data, readme_data, version_diff
- main_yaml_data = load_yaml_file(path_main)
- checksum_yaml_data = load_yaml_file(path_checksum)
- download_yaml_data = load_yaml_file(path_download)
- readme_data = open_readme(path_readme)
-
+ main_yaml_data = load_yaml_file(PATH_MAIN)
+ checksum_yaml_data = load_yaml_file(PATH_CHECKSUM)
+ download_yaml_data = load_yaml_file(PATH_DOWNLOAD)
+ readme_data = open_readme(PATH_README)
if not (main_yaml_data and checksum_yaml_data and download_yaml_data and readme_data):
- logging.error(f'Failed to open required yaml file, current working directory is {pwd}. Exiting...')
+ logging.error(f'Failed to open one or more configuration files, current working directory is {pwd}. Exiting...')
sys.exit(1)
+ # CI - create version_diff file
if args.ci_check:
- version_diff = create_json_file(path_version_diff)
+ version_diff = create_json_file(PATH_VERSION_DIFF)
if version_diff is None:
- logging.error(f'Failed to create version_diff.json file')
+ logging.error(f'Failed to create {PATH_VERSION_DIFF} file')
sys.exit(1)
- else:
- version_diff = {}
- if component != 'all':
- if component in component_info:
- process_component(component, component_info[component], session)
+ # Process single component
+ if args.component != 'all':
+ if args.component in COMPONENT_INFO:
+ specific_component_info = {args.component: COMPONENT_INFO[args.component]}
+ # Get repository metadata => releases, tags and commits
+ logging.info(f'Fetching repository metadata for the component {args.component}')
+ repo_metadata = get_repository_metadata(specific_component_info, session)
+ if not repo_metadata:
+ sys.exit(1)
+ process_component(args.component, COMPONENT_INFO[args.component], repo_metadata, session)
else:
- logging.error(f'Component {component} not found in config.')
+ logging.error(f'Component {args.component} not found in config.')
sys.exit(1)
+ # Process all components in the configuration file concurrently
else:
- with ThreadPoolExecutor(max_workers=max_workers) as executor:
+ # Get repository metadata => releases, tags and commits
+ logging.info('Fetching repository metadata for all components')
+ repo_metadata = get_repository_metadata(COMPONENT_INFO, session)
+ if not repo_metadata:
+ sys.exit(1)
+ with ThreadPoolExecutor(max_workers=args.max_workers) as executor:
futures = []
logging.info(f'Running with {executor._max_workers} executors')
- for component, component_data in component_info.items():
- futures.append(executor.submit(process_component, component, component_data, session))
+ for component, component_data in COMPONENT_INFO.items():
+ futures.append(executor.submit(process_component, component, component_data, repo_metadata, session))
for future in futures:
future.result()
+ # CI - save JSON file
if args.ci_check:
- save_json_file(path_version_diff, version_diff)
+ safe_save_files(PATH_VERSION_DIFF, version_diff, save_json_file)
+ # Save configurations
+ else:
+ safe_save_files(PATH_CHECKSUM, checksum_yaml_data, save_yaml_file)
+ safe_save_files(PATH_DOWNLOAD, download_yaml_data, save_yaml_file)
+ safe_save_files(PATH_README, readme_data, save_readme)
- save_yaml_file(path_checksum, checksum_yaml_data)
- save_yaml_file(path_download, download_yaml_data)
- save_readme(path_readme)
+ logging.info('Finished.')
if __name__ == '__main__':
@@ -478,10 +528,9 @@ if __name__ == '__main__':
parser.add_argument('--loglevel', default='INFO', help='Set the log level (DEBUG, INFO, WARNING, ERROR, CRITICAL)')
parser.add_argument('--component', default='all', help='Specify a component to process, default is all components')
parser.add_argument('--max-workers', type=int, default=4, help='Maximum number of concurrent workers, use with caution (sometimes less is more)')
- parser.add_argument('--skip-checksum', action='store_true', help='Skip checksum if the current version is up to date')
parser.add_argument('--ci-check', action='store_true', help='Check versions, store discrepancies in version_diff.json')
-
-
+ parser.add_argument('--graphql-number-of-entries', type=int, default=10, help='Number of releases/tags to retrieve from GitHub GraphQL per component (default: 10)')
+ parser.add_argument('--graphql-number-of-commits', type=int, default=5, help='Number of commits to retrieve from GitHub GraphQL per tag (default: 5)')
args = parser.parse_args()
- main(args.loglevel, args.component, args.max_workers)
+ main()
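
Note: the biggest structural change in this file is that the per-component `get_release`/`get_release_tag` calls (and the file cache that throttled them) are replaced by a single batched GraphQL query: `get_repository_metadata` turns every component name into a GraphQL alias on `repository`, so one HTTP round trip returns releases, tags, and recent commits for all repositories at once. A stripped-down sketch of the aliasing idea (releases only, illustrative field list):

    def build_batched_query(component_info, number_of_entries=10):
        # Each component name becomes a GraphQL alias, so the response is
        # keyed by component: {"containerd": {"releases": ...}, "etcd": ...}
        parts = []
        for component, data in component_info.items():
            parts.append(
                f'{component}: repository(owner: "{data["owner"]}", name: "{data["repo"]}") {{'
                f' releases(first: {number_of_entries},'
                f' orderBy: {{field: CREATED_AT, direction: DESC}})'
                f' {{ nodes {{ tagName isLatest }} }} }}'
            )
        return f'query {{ {" ".join(parts)} }}'

    # build_batched_query({'etcd': {'owner': 'etcd-io', 'repo': 'etcd'}})
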
diff --git a/scripts/generate_pr_body.py b/scripts/generate_pr_body.py
index 8357ee93b..9ec937a8b 100644
--- a/scripts/generate_pr_body.py
+++ b/scripts/generate_pr_body.py
@@ -1,109 +1,68 @@
-import os
-import re
import sys
import json
import argparse
-import requests
-
# Do not add any prints unless the script exits with an error code,
# otherwise they will become part of the PR body
-github_api_url = 'https://api.github.com/graphql'
-gh_token = os.getenv('GH_TOKEN')
-def get_commits(tag, release, number_of_commits=5):
- owner = release['owner']
- repo = release['repo']
- repo_url = f'https://github.com/{owner}/{repo}'
-
- query = """
- {
- repository(owner: "%s", name: "%s") {
- ref(qualifiedName: "refs/tags/%s") {
- target {
- ... on Tag {
- target {
- ... on Commit {
- history(first: %s) {
- edges {
- node {
- oid
- message
- url
- }
- }
- }
- }
- }
- }
- ... on Commit {
- # In case the tag directly points to a commit
- history(first: %s) {
- edges {
- node {
- oid
- message
- url
- }
- }
- }
- }
- }
- }
- }
- }
- """ % (owner, repo, tag, number_of_commits, number_of_commits)
-
- headers = {'Authorization': f'Bearer {gh_token}'}
- response = requests.post(github_api_url, json={'query': query}, headers=headers)
-
- if response.status_code == 200:
- try:
- data = response.json()
- target = data['data']['repository']['ref']['target']
-
- if 'history' in target:
- commits = target['history']['edges']
- elif 'target' in target and 'history' in target['target']:
- commits = target['target']['history']['edges']
- else:
- # print('No commit history found.')
- return None
-
- pr_commits = '\n\n<details>\n<summary>Commits</summary>\n\n'
- for commit in commits:
- node = commit['node']
- short_oid = node['oid'][:7]
- commit_message = node['message'].split('\n')[0]
- commit_message = link_pull_requests(commit_message, repo_url)
- commit_url = node['url']
- pr_commits += f'- [`{short_oid}`]({commit_url}) {commit_message} \n'
- pr_commits += '\n</details>'
- return pr_commits
- except Exception as e:
- # print(f'Error parsing commits: {e}')
- # print(f'data: {response.text}')
- return None
- else:
- # print(f'GraphQL query failed with status code {response.status_code}')
+def load_json(component):
+ try:
+ with open('version_diff.json', 'r') as f:
+ repo_metadata = json.load(f)
+ component_data = repo_metadata.get(component)
+ return component_data
+ except Exception:
return None
+def get_version_commits(version, component_metadata):
+ tags = component_metadata.get('refs', {}).get('nodes', [])
+ for tag in tags:
+ if tag['name'] == version:
+ target = tag.get('target', {})
-def replace_match(match, repo_url):
- pr_number = match.group(2)
- return f'{match.group(1)}[# {pr_number}]({repo_url}/pull/{pr_number}){match.group(3)}'
+ # Check if the target is a Tag pointing to a Commit
+ if 'history' in target.get('target', {}):
+ commit_history = target['target']['history'].get('edges', [])
+ # Check if the target is directly a Commit object
+ elif 'history' in target:
+ commit_history = target['history'].get('edges', [])
+ else:
+ return None
-def link_pull_requests(input, repo_url):
- return re.sub(r'(\(?)#(\d+)(\)?)', lambda match: replace_match(match, repo_url), input)
+ commits = []
+ for commit in commit_history:
+ commit_node = commit.get('node', {})
+ commit_info = {
+ 'oid': commit_node.get('oid'),
+ 'message': commit_node.get('message'),
+ 'url': commit_node.get('url')
+ }
+ commits.append(commit_info)
-def format_description(description, length=20):
+ if commits:
+ return commits
+ return None
+
+def get_version_description(version, repo_metadata):
+ if repo_metadata:
+ releases = repo_metadata.get('releases', {}).get('nodes', [])
+ for release in releases:
+ if release.get('tagName') == version:
+ description = release.get('description')
+ return format_description(description) if description else None
+ return None
+
+def handle_reference(text):
+ return text.replace('github.com', 'redirect.github.com') # avoid reference notifications in the linked PRs/issues
+
+# Split description into visible and collapsed
+def format_description(description):
+ description = handle_reference(description)
lines = description.splitlines()
-
- if len(lines) > length:
- first_part = '\n'.join(lines[:length])
- collapsed_part = '\n'.join(lines[length:])
-
+ if len(lines) > args.description_number_of_lines:
+ first_part = '\n'.join(lines[:args.description_number_of_lines])
+ collapsed_part = '\n'.join(lines[args.description_number_of_lines:])
formatted_description = f"""{first_part}
@@ -117,57 +76,55 @@ def format_description(description, length=20):
else:
return description
-def main(component):
- try:
- with open('version_diff.json') as f:
- data = json.load(f)
- data = data[component]
- except Exception as e:
- print(f'Error loading version_diff.json or component not found: {e}')
+def main():
+ component_data = load_json(args.component)
+ if not component_data:
+ print('Failed to load component data')
sys.exit(1)
+ owner = component_data.get('owner')
+ repo = component_data.get('repo')
+ latest_version = component_data.get('latest_version')
+ repo_metadata = component_data.get('repo_metadata')
+ release_url = f'https://github.com/{owner}/{repo}/releases/tag/{latest_version}'
+ commits = get_version_commits(latest_version, repo_metadata)
+ description = get_version_description(latest_version, repo_metadata)
- release = data['release']
- owner = release['owner']
- repo = release['repo']
+ # General info
+ pr_body = f"""
+### {latest_version}
- if component in ['gvisor_containerd_shim','gvisor_runsc']:
- name = release.get('name')
- release_url = f'https://github.com/google/gvisor/releases/tag/{name}'
- pr_body = f"""
-### {name}
-
-**URL**: [Release {name}]({release_url})
+**URL**: [Release {latest_version}]({release_url})
"""
- commits = get_commits(name, release)
- if commits:
- pr_body += commits
- else:
- name = release['tagName']
- tag_name = release['tagName']
- published_at = release['publishedAt']
- release_url = release['url']
- description = release['description']
- repo_url = 'https://github.com/%s/%s' % (owner, repo)
- description = link_pull_requests(description, repo_url)
- pr_body = f"""
-### {name}
-**Tag**: {tag_name}
-**Published at**: {published_at}
-**URL**: [Release {tag_name}]({release_url})
+ # Description
+ if description:
+ pr_body += f"""
#### Description:
-{format_description(description)}
+{description}
"""
- commits = get_commits(name, release)
- if commits:
- pr_body += commits
+
+ # Commits
+ if commits:
+ pr_commits = '\n\n<details>\n<summary>Commits</summary>\n\n'
+ for commit in commits:
+ short_oid = commit.get('oid')[:7]
+ message = commit.get('message').split('\n')[0]
+ commit_message = handle_reference(message)
+ commit_url = commit.get('url')
+ pr_commits += f'- [`{short_oid}`]({commit_url}) {commit_message} \n'
+ pr_commits += '\n</details>'
+ pr_body += pr_commits
+
+ # Print body
print(pr_body)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Pull Request body generator')
parser.add_argument('--component', required=True, help='Specify the component to process')
+ parser.add_argument('--description-number-of-lines', type=int, default=20, help='Number of lines to include from the description')
args = parser.parse_args()
- main(args.component)
+ main()
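
Note: the PR body is now built entirely from the cached `repo_metadata` in `version_diff.json`, so this script no longer needs network access or a token. The collapsing behaviour of `format_description` (fold everything past `--description-number-of-lines` into a `<details>` block) can be sketched as follows; the exact markup around the collapsed part is partly elided in this diff, so treat the HTML as an approximation:

    def collapse(text, visible=20):
        # Keep the first `visible` lines readable and fold the rest into a
        # <details> block so long release notes don't dominate the PR body.
        lines = text.splitlines()
        if len(lines) <= visible:
            return text
        head = '\n'.join(lines[:visible])
        tail = '\n'.join(lines[visible:])
        return (f'{head}\n\n<details>\n  <summary>More</summary>\n\n'
                f'{tail}\n\n</details>\n')

    # print(collapse('\n'.join(f'line {i}' for i in range(40))))
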