kubespray/tests/cloud_playbooks/upload-logs-gcs.yml

---
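# Upload collected CI log archives to a Google Cloud Storage bucket:
# create a dated bucket, apply a lifecycle policy rendered from
# gcs_life.json.j2, upload {{ dir }}/logs.tar.gz, and print its public URL.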
- hosts: localhost
  become: false
  gather_facts: false
  vars:
    expire_days: 2

  tasks:
    - name: Generate unique bucket name prefix
      shell: date +%Y%m%d
      register: out
    - name: Set test bucket name
      set_fact:
        test_name: "kargo-ci-{{ out.stdout }}"
    - name: Set log archive file name
      set_fact:
        file_name: "{{ ostype }}-{{ kube_network_plugin }}-{{ commit }}-logs.tar.gz"
    - name: Create a bucket
      gc_storage:
        bucket: "{{ test_name }}"
        mode: create
        permission: public-read
        gs_access_key: "{{ gs_key }}"
        gs_secret_key: "{{ gs_skey }}"
      no_log: true
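    # The next two tasks render the bucket lifecycle policy and a .boto
    # credentials file; both are consumed by gsutil further below.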
    - name: Create a lifecycle template for the bucket
      template:
        src: gcs_life.json.j2
        dest: "{{ dir }}/gcs_life.json"

    - name: Create a boto config to access GCS
      template:
        src: boto.j2
        dest: "{{ dir }}/.boto"
      no_log: true
    - name: Download the Google Cloud SDK installer (provides gsutil)
      get_url:
        url: https://dl.google.com/dl/cloudsdk/channels/rapid/install_google_cloud_sdk.bash
        dest: "{{ dir }}/gcp-installer.sh"
    - name: Install the Google Cloud SDK to get gsutil
      script: "{{ dir }}/gcp-installer.sh"
      environment:
        CLOUDSDK_CORE_DISABLE_PROMPTS: "1"
        CLOUDSDK_INSTALL_DIR: "{{ dir }}"
      no_log: true
      ignore_errors: true
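    # gsutil picks up the generated credentials via BOTO_CONFIG when
    # applying the lifecycle policy to the bucket.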
    - name: Apply the lifecycle rules
      command: "{{ dir }}/google-cloud-sdk/bin/gsutil lifecycle set {{ dir }}/gcs_life.json gs://{{ test_name }}"
      environment:
        BOTO_CONFIG: "{{ dir }}/.boto"
      no_log: true
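    # Upload the collected archive; the Content-Encoding header marks the
    # object as gzip-compressed for clients that download it.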
    - name: Upload collected diagnostic info
      gc_storage:
        bucket: "{{ test_name }}"
        mode: put
        permission: public-read
        object: "{{ file_name }}"
        src: "{{ dir }}/logs.tar.gz"
        headers: '{"Content-Encoding": "x-gzip"}'
        gs_access_key: "{{ gs_key }}"
        gs_secret_key: "{{ gs_skey }}"
        expiration: "{{ (expire_days * 36000) | int }}"
      ignore_errors: true
      no_log: true
    - name: Show the public URL of the uploaded logs
      debug:
        msg: "Public URL: https://storage.googleapis.com/{{ test_name }}/{{ file_name }}"