Updated bake file and workflows for 6.0 release

This commit is contained in:
Alexey Pustovalov
2026-03-31 14:55:50 +09:00
parent f1ef2500ef
commit 49ff891892
20 changed files with 1931 additions and 2140 deletions

18
.github/actions/attest-image/action.yml vendored Normal file
View File

@@ -0,0 +1,18 @@
# Composite action: generate a build attestation for an already-pushed
# image and push the attestation to the registry next to it.
name: Attest image
description: Generate and push attestation for image

inputs:
  subject_name:
    required: true
    description: Fully qualified image name the attestation refers to
  subject_digest:
    required: true
    description: Built image digest (sha256:...)

runs:
  using: composite
  steps:
    - name: Attest image
      uses: actions/attest@59d89421af93a897026c735860bf21b6eb4f7b26 # v4.1.0
      with:
        subject-name: ${{ inputs.subject_name }}
        subject-digest: ${{ inputs.subject_digest }}
        # Store the attestation in the registry so consumers can verify it
        # without access to the workflow run.
        push-to-registry: true

View File

@@ -0,0 +1,20 @@
# Composite action: install a pinned cosign release and print its version
# so the signing toolchain is visible in the job log.
name: Install cosign
description: Install cosign and print its version

inputs:
  cosign_release:
    required: false
    default: v3.0.5
    description: Cosign release version to install

runs:
  using: composite
  steps:
    - name: Install cosign
      # NOTE(review): pinned by commit SHA; sibling actions annotate the SHA
      # with the release tag (e.g. "# vX.Y.Z") - consider adding one here too.
      uses: sigstore/cosign-installer@ba7bc0a3fef59531c69a25acd34668d6d3fe6f22
      with:
        cosign-release: ${{ inputs.cosign_release }}
    - name: Check cosign version
      shell: bash
      run: cosign version

71
.github/actions/cosign-sign/action.yml vendored Normal file
View File

@@ -0,0 +1,71 @@
# Composite action: sign every "<tag>@<digest>" reference with cosign.
# Has parallel Linux (bash) and Windows (pwsh) steps; exactly one runs,
# selected on runner.os.
name: Cosign sign images
description: Sign image tags with digest via cosign

inputs:
  digest:
    required: true
    description: Built image digest
  tags:
    required: true
    description: Newline-separated image tags

runs:
  using: composite
  steps:
    - name: Sign images on Linux
      if: runner.os != 'Windows'
      shell: bash
      env:
        DIGEST: ${{ inputs.digest }}
        TAGS: ${{ inputs.tags }}
      run: |
        set -euo pipefail
        # Build "<tag>@<digest>" references, skipping blank lines.
        images=()
        while IFS= read -r tag; do
          [[ -n "$tag" ]] || continue
          images+=("${tag}@${DIGEST}")
        done <<< "$TAGS"
        if [[ ${#images[@]} -eq 0 ]]; then
          echo "No image tags provided"
          exit 1
        fi
        echo "::group::Images to sign"
        printf '%s\n' "${images[@]}"
        echo "::endgroup::"
        echo "::group::Signing"
        # --yes skips the interactive confirmation; all refs are signed in
        # one cosign invocation.
        cosign sign --yes "${images[@]}"
        echo "::endgroup::"
    - name: Sign images on Windows
      if: runner.os == 'Windows'
      shell: pwsh
      env:
        DIGEST: ${{ inputs.digest }}
        TAGS: ${{ inputs.tags }}
      run: |
        $ErrorActionPreference = 'Stop'
        Set-StrictMode -Version Latest
        $images = @()
        # Split the newline-separated tag list, dropping blank entries.
        $tagsArray = $Env:TAGS -split "\r?\n" | Where-Object { $_.Trim() }
        foreach ($tag in $tagsArray) {
          $images += "$tag@$Env:DIGEST"
        }
        if ($images.Count -eq 0) {
          throw 'No image tags provided'
        }
        Write-Output '::group::Images to sign'
        $images | ForEach-Object { Write-Output $_ }
        Write-Output '::endgroup::'
        Write-Output '::group::Signing'
        cosign sign --yes $images
        Write-Output '::endgroup::'

View File

@@ -0,0 +1,63 @@
# Composite action: verify an image signature with cosign against the
# GitHub OIDC issuer and a certificate identity regexp. Parallel Linux
# (bash) and Windows (pwsh) steps; exactly one runs, selected on runner.os.
name: Cosign verify image
description: Verify container image signature with cosign and GitHub OIDC

inputs:
  image:
    required: true
    description: Image reference in name@digest format
  oidc_issuer:
    required: true
    description: OIDC issuer regexp
  identity_regexp:
    required: true
    description: Certificate identity regexp

runs:
  using: composite
  steps:
    - name: Verify image signature on Linux
      if: runner.os != 'Windows'
      shell: bash
      env:
        IMAGE: ${{ inputs.image }}
        OIDC_ISSUER: ${{ inputs.oidc_issuer }}
        IDENTITY_REGEX: ${{ inputs.identity_regexp }}
      run: |
        set -euo pipefail
        echo "::group::Image sign data"
        echo "OIDC issuer=$OIDC_ISSUER"
        echo "Identity=$IDENTITY_REGEX"
        echo "Image to verify=$IMAGE"
        echo "::endgroup::"
        echo "::group::Verify signature"
        # jq pretty-prints cosign's JSON verification output for the log.
        cosign verify \
          --certificate-oidc-issuer-regexp "$OIDC_ISSUER" \
          --certificate-identity-regexp "$IDENTITY_REGEX" \
          "$IMAGE" | jq
        echo "::endgroup::"
    - name: Verify image signature on Windows
      if: runner.os == 'Windows'
      shell: pwsh
      env:
        IMAGE: ${{ inputs.image }}
        OIDC_ISSUER: ${{ inputs.oidc_issuer }}
        IDENTITY_REGEX: ${{ inputs.identity_regexp }}
      run: |
        $ErrorActionPreference = 'Stop'
        Set-StrictMode -Version Latest
        Write-Output '::group::Image sign data'
        Write-Output "OIDC issuer=$env:OIDC_ISSUER"
        Write-Output "Identity=$env:IDENTITY_REGEX"
        Write-Output "Image to verify=$env:IMAGE"
        Write-Output '::endgroup::'
        Write-Output '::group::Verify signature'
        # Round-trip through ConvertFrom/ConvertTo-Json to pretty-print,
        # mirroring the jq step on Linux.
        cosign verify `
          --certificate-oidc-issuer-regexp "$env:OIDC_ISSUER" `
          --certificate-identity-regexp "$env:IDENTITY_REGEX" `
          "$env:IMAGE" | ConvertFrom-Json | ConvertTo-Json -Depth 100
        Write-Output '::endgroup::'

View File

@@ -0,0 +1,65 @@
# Composite action: thin wrapper around docker/build-push-action that
# exposes the build inputs used by this repository's workflows and
# re-exports the digest and metadata outputs.
name: Docker build for Linux
description: Build and optionally push a Linux image via docker/build-push-action

inputs:
  context:
    required: true
    description: Docker build context
  file:
    required: true
    description: Dockerfile path
  platforms:
    required: true
    description: Target platforms
  push:
    required: false
    default: "true"
    description: Whether to push the image
  sbom:
    required: false
    default: "true"
    description: Whether to generate SBOM
  provenance:
    required: false
    default: mode=max
    description: Provenance mode
  tags:
    required: true
    description: Image tags
  labels:
    required: true
    description: Image labels
  build_args:
    required: false
    default: ""
    description: Optional multiline build args
  build_contexts:
    required: false
    default: ""
    description: Optional multiline build contexts

outputs:
  digest:
    description: Built image digest
    value: ${{ steps.build.outputs.digest }}
  metadata:
    description: Build metadata
    value: ${{ steps.build.outputs.metadata }}

runs:
  using: composite
  steps:
    - name: Build and optionally push image
      id: build
      # NOTE(review): pinned by commit SHA without a version annotation;
      # sibling actions carry a "# vX.Y.Z" comment - consider adding one.
      uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294
      with:
        context: ${{ inputs.context }}
        file: ${{ inputs.file }}
        platforms: ${{ inputs.platforms }}
        push: ${{ inputs.push }}
        provenance: ${{ inputs.provenance }}
        sbom: ${{ inputs.sbom }}
        tags: ${{ inputs.tags }}
        labels: ${{ inputs.labels }}
        build-args: ${{ inputs.build_args }}
        build-contexts: ${{ inputs.build_contexts }}

View File

@@ -0,0 +1,188 @@
# Composite action: build a Windows image with plain `docker build`
# (buildx/BuildKit multi-platform builds are not used on Windows runners),
# optionally push every tag, and emit the image digest as an output.
name: Docker build and optionally push image on Windows
description: Build a Windows Docker image, optionally push it, and output its digest

inputs:
  context:
    required: true
    description: Docker build context path
  dockerfile:
    required: true
    description: Dockerfile path
  tags:
    required: true
    description: Newline-separated image tags
  label_revision:
    required: true
    description: org.opencontainers.image.revision label value
  label_created:
    required: true
    description: org.opencontainers.image.created label value
  pull_images:
    required: false
    default: ""
    description: Newline-separated image references to pull before build
  build_args:
    required: false
    default: ""
    description: Newline-separated build arguments in NAME=value format
  push:
    required: false
    default: "true"
    description: Whether to push built image tags
  remove_readme:
    required: false
    default: "true"
    description: Whether to remove README.md from the build context

outputs:
  digest:
    # RepoDigests digest when pushed, local image Id otherwise (see script).
    description: Built image digest or image ID
    value: ${{ steps.build_push.outputs.digest }}

runs:
  using: composite
  steps:
    - name: Build and optionally push image
      id: build_push
      shell: pwsh
      env:
        CONTEXT: ${{ inputs.context }}
        DOCKERFILE: ${{ inputs.dockerfile }}
        TAGS: ${{ inputs.tags }}
        LABEL_REVISION: ${{ inputs.label_revision }}
        LABEL_CREATED: ${{ inputs.label_created }}
        PULL_IMAGES: ${{ inputs.pull_images }}
        BUILD_ARGS_INPUT: ${{ inputs.build_args }}
        PUSH_IMAGES: ${{ inputs.push }}
        REMOVE_README: ${{ inputs.remove_readme }}
      run: |
        $ErrorActionPreference = 'Stop'
        Set-StrictMode -Version Latest
        Write-Output '::group::Docker version'
        docker version
        Write-Output '::endgroup::'
        Write-Output '::group::Docker info'
        docker info
        Write-Output '::endgroup::'
        # Drop README.md from the context so it is not sent to the daemon.
        if ($Env:REMOVE_README -eq 'true') {
          Remove-Item -ErrorAction Ignore -Force -Path (Join-Path $Env:CONTEXT 'README.md')
        }
        # Split the newline-separated tag list, dropping blank entries.
        $tagsArray = @($Env:TAGS -split "\r?\n" | Where-Object { $_.Trim() })
        if ($tagsArray.Count -eq 0) {
          throw 'No image tags provided'
        }
        $tagArgs = @()
        foreach ($tag in $tagsArray) {
          $tagArgs += "--tag=$tag"
        }
        Write-Output '::group::Image tags'
        $tagsArray | ForEach-Object { Write-Output $_ }
        Write-Output '::endgroup::'
        # Optionally pre-pull base images before the build.
        $pullImagesArray = @($Env:PULL_IMAGES -split "\r?\n" | Where-Object { $_.Trim() })
        if ($pullImagesArray.Count -gt 0) {
          Write-Output '::group::Pull base images'
          foreach ($image in $pullImagesArray) {
            Write-Output "docker pull $image"
            docker pull $image
            if (-not $?) {
              throw "Failed to pull $image"
            }
          }
          Write-Output '::endgroup::'
        }
        # Translate NAME=value lines into --build-arg flags.
        $buildArgsInputArray = @($Env:BUILD_ARGS_INPUT -split "\r?\n" | Where-Object { $_.Trim() })
        $buildArgCliArgs = @()
        foreach ($buildArg in $buildArgsInputArray) {
          $buildArgCliArgs += "--build-arg=$buildArg"
        }
        Write-Output '::group::Build arguments'
        if ($buildArgsInputArray.Count -gt 0) {
          $buildArgsInputArray | ForEach-Object { Write-Output $_ }
        }
        else {
          Write-Output 'No build arguments provided'
        }
        Write-Output '::endgroup::'
        Write-Output '::group::Build image'
        # Echo the effective build command for the log. The here-string
        # terminator "@ must stay at the start of the line.
        Write-Output @"
        docker build --label org.opencontainers.image.revision=$Env:LABEL_REVISION `
        --label org.opencontainers.image.created=$Env:LABEL_CREATED `
        $($buildArgCliArgs -join " ") `
        --file=$Env:DOCKERFILE `
        $($tagArgs -join " ") `
        $Env:CONTEXT
        "@
        docker build `
          --label "org.opencontainers.image.revision=$Env:LABEL_REVISION" `
          --label "org.opencontainers.image.created=$Env:LABEL_CREATED" `
          $buildArgCliArgs `
          --file "$Env:DOCKERFILE" `
          $tagArgs `
          "$Env:CONTEXT"
        if (-not $?) {
          throw 'Failed to build image'
        }
        Write-Output '::endgroup::'
        Write-Output '::group::Publish image'
        if ($Env:PUSH_IMAGES -eq 'true') {
          foreach ($tag in $tagsArray) {
            Write-Output "docker image push $tag"
            docker image push $tag
            if (-not $?) {
              throw "Failed to push $tag"
            }
          }
          # After a push, RepoDigests carries the registry digest; take the
          # part after '@' of the first entry.
          $repoDigest = docker inspect $tagsArray[0] --format "{{ index .RepoDigests 0}}"
          if (-not $?) {
            throw "Failed to inspect RepoDigests for $($tagsArray[0])"
          }
          $digest = $repoDigest.Split('@')[-1]
          if ([string]::IsNullOrWhiteSpace($digest)) {
            throw 'Image digest is empty'
          }
          Write-Output 'Image digest got from RepoDigests'
        }
        else {
          # Not pushing: fall back to the local image Id as the digest.
          $digest = docker inspect $tagsArray[0] --format "{{ .Id }}"
          if (-not $?) {
            throw "Failed to inspect image Id for $($tagsArray[0])"
          }
          if ([string]::IsNullOrWhiteSpace($digest)) {
            throw 'Image ID digest is empty'
          }
          Write-Output 'Image digest got from Id'
        }
        Write-Output '::endgroup::'
        Write-Output '::group::Digest'
        Write-Output $digest
        Write-Output '::endgroup::'
        "digest=$digest" >> $Env:GITHUB_OUTPUT

View File

@@ -0,0 +1,54 @@
# Composite action: prepare a Linux runner for multi-platform builds -
# QEMU binfmt emulation, a Buildx builder on a pinned BuildKit image, and
# an optional DockerHub login gated on auto_push_images.
name: Docker Linux setup
description: Set up QEMU, Docker Buildx, and optional registry login

inputs:
  auto_push_images:
    required: false
    # Quoted: action inputs are strings, and the login step compares
    # against the string 'true'.
    default: "false"
    description: Whether registry login is needed
  qemu_image:
    required: false
    default: tonistiigi/binfmt:qemu-v10.0.4
    description: QEMU binfmt image
  qemu_platforms:
    required: false
    default: all
    description: Platforms for QEMU
  buildkit_image:
    required: false
    default: moby/buildkit:v0.28.1
    description: BuildKit image
  docker_username:
    required: false
    default: ""
    description: Docker registry username
  docker_password:
    required: false
    default: ""
    description: Docker registry password

runs:
  using: composite
  steps:
    - name: Set up QEMU
      uses: docker/setup-qemu-action@ce360397dd3f832beb865e1373c09c0e9f86d70a # v4.0.0
      with:
        image: ${{ inputs.qemu_image }}
        platforms: ${{ inputs.qemu_platforms }}
    - name: Set up Docker Buildx
      uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
      with:
        driver-opts: image=${{ inputs.buildkit_image }}
    - name: Login to DockerHub
      if: ${{ inputs.auto_push_images == 'true' }}
      uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
      with:
        username: ${{ inputs.docker_username }}
        password: ${{ inputs.docker_password }}

56
.github/actions/docker-meta/action.yml vendored Normal file
View File

@@ -0,0 +1,56 @@
# Composite action: single place that generates Docker tags/labels via
# docker/metadata-action so every workflow tags images identically.
name: Docker metadata
description: Generate Docker tags and labels consistently

inputs:
  image:
    required: true
    description: 'Full image name, e.g. docker.io/org/prefix-image'
  os:
    required: true
    description: 'OS suffix/prefix for tags'
  current_branch:
    required: true
    description: 'Current branch name'
  is_default_branch:
    required: true
    description: '"true" or "false"'
  trunk_only_event:
    required: true
    # Fixed typo: was '"true" or false"' (unbalanced quote).
    description: '"true" or "false"'
  event_name:
    required: true
    description: 'GitHub event name'

outputs:
  tags:
    description: Generated Docker tags
    value: ${{ steps.meta.outputs.tags }}
  labels:
    description: Generated Docker labels
    value: ${{ steps.meta.outputs.labels }}
  json:
    description: Full metadata-action json output
    value: ${{ steps.meta.outputs.json }}
  version:
    description: Generated version
    value: ${{ steps.meta.outputs.version }}

runs:
  using: composite
  steps:
    - name: Generate tags
      id: meta
      uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
      with:
        images: ${{ inputs.image }}
        # For trunk-only events (e.g. schedule) take context from git
        # instead of the triggering workflow event.
        context: ${{ inputs.trunk_only_event == 'true' && 'git' || '' }}
        tags: |
          type=semver,enable=${{ inputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ inputs.os }}-
          type=semver,enable=${{ inputs.current_branch != 'trunk' }},pattern={{version}},suffix=-${{ inputs.os }}
          type=ref,enable=${{ inputs.current_branch != 'trunk' && inputs.event_name != 'workflow_dispatch' }},event=branch,prefix=${{ inputs.os }}-,suffix=-latest
          type=ref,enable=${{ inputs.current_branch != 'trunk' && inputs.event_name != 'workflow_dispatch' }},event=branch,suffix=-${{ inputs.os }}-latest
          type=raw,enable=${{ inputs.current_branch != 'trunk' && inputs.is_default_branch == 'true' }},value=${{ inputs.os }}-latest
          type=ref,enable=${{ inputs.current_branch == 'trunk' }},event=branch,prefix=${{ inputs.os }}-
          type=ref,enable=${{ inputs.current_branch == 'trunk' || inputs.event_name == 'workflow_dispatch' }},event=branch,suffix=-${{ inputs.os }}
        flavor: |
          latest=${{ inputs.os == 'alpine' && inputs.event_name != 'workflow_dispatch' && inputs.is_default_branch == 'true' }}

View File

@@ -0,0 +1,86 @@
# Composite action: derive branch metadata (normalized branch name,
# default-branch flag, RHEL secret prefix, short SHA) from the triggering
# event. Requires the repository to be checked out first: the script runs
# `git rev-parse` against the workspace.
name: Get branch info
description: Extract branch metadata and derived values for the workflow

inputs:
  trunk_version:
    required: false
    default: ""
    description: Major version to use when the branch resolves to trunk
  trunk_git_branch:
    required: false
    default: ""
    description: Git ref to use for scheduled runs

outputs:
  is_default_branch:
    description: Whether the current branch is the default branch
    value: ${{ steps.branch_info.outputs.is_default_branch }}
  current_branch:
    description: Normalized current branch or tag name
    value: ${{ steps.branch_info.outputs.current_branch }}
  secret_prefix:
    description: Derived secret prefix for the current branch
    value: ${{ steps.branch_info.outputs.secret_prefix }}
  sha_short:
    description: Short Git commit SHA
    value: ${{ steps.branch_info.outputs.sha_short }}

runs:
  using: composite
  steps:
    - name: Get branch info
      id: branch_info
      shell: bash
      env:
        DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
        GITHUB_REF_RAW: ${{ github.ref }}
        GITHUB_EVENT_NAME: ${{ github.event_name }}
        TRUNK_MAJOR_VERSION: ${{ inputs.trunk_version }}
        TRUNK_GIT_BRANCH: ${{ inputs.trunk_git_branch }}
      run: |
        set -euo pipefail
        # Scheduled runs have no meaningful github.ref for our purposes;
        # the caller must supply the ref explicitly.
        if [[ "$GITHUB_EVENT_NAME" == "schedule" ]]; then
          if [[ -z "$TRUNK_GIT_BRANCH" ]]; then
            echo "TRUNK_GIT_BRANCH must be set for schedule events" >&2
            exit 1
          fi
          ref="$TRUNK_GIT_BRANCH"
        else
          ref="$GITHUB_REF_RAW"
        fi
        sha_short="$(git rev-parse --short HEAD)"
        # For tag refs, strip the last dot-separated component
        # (e.g. refs/tags/6.0.1 -> refs/tags/6.0).
        if [[ "$ref" == refs/tags/* ]]; then
          ref="${ref%.*}"
        fi
        # Keep only the final path segment of the ref.
        current_branch="${ref##*/}"
        is_default_branch=false
        if [[ "$current_branch" == "$DEFAULT_BRANCH" ]]; then
          is_default_branch=true
        fi
        # Secret prefix drops dots: trunk uses the configured major version,
        # other branches use the branch name itself.
        if [[ "${current_branch//.}" == "trunk" && -n "$TRUNK_MAJOR_VERSION" ]]; then
          secret_prefix="RHEL_${TRUNK_MAJOR_VERSION//.}"
        else
          secret_prefix="RHEL_${current_branch//.}"
        fi
        echo "::group::Branch metadata"
        echo "event_name=$GITHUB_EVENT_NAME"
        echo "ref=$ref"
        echo "is_default_branch=$is_default_branch"
        echo "current_branch=$current_branch"
        echo "secret_prefix=$secret_prefix"
        echo "sha_short=$sha_short"
        echo "::endgroup::"
        {
          echo "is_default_branch=$is_default_branch"
          echo "current_branch=$current_branch"
          echo "secret_prefix=$secret_prefix"
          echo "sha_short=$sha_short"
        } >> "$GITHUB_OUTPUT"

459
.github/dependabot.yml vendored
View File

@@ -1,418 +1,45 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
time: "06:00"
open-pull-requests-limit: 10
- package-ecosystem: "docker"
directory: /Dockerfiles/agent/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent/windows
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent2/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent2/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent2/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent2/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent2/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/agent2/windows
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-base/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-base/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-base/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-base/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-base/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-mysql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-mysql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-mysql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-mysql/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-mysql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-pgsql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-pgsql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-pgsql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-pgsql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-sqlite3/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-sqlite3/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-sqlite3/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-sqlite3/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/build-sqlite3/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/java-gateway/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/java-gateway/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/java-gateway/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/java-gateway/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/java-gateway/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-mysql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-mysql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-mysql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-mysql/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-mysql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-sqlite3/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-sqlite3/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-sqlite3/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-sqlite3/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/proxy-sqlite3/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-mysql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-mysql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-mysql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-mysql/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-mysql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-pgsql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-pgsql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-pgsql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/server-pgsql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/snmptraps/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/snmptraps/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/snmptraps/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/snmptraps/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/snmptraps/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-mysql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-mysql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-mysql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-mysql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-pgsql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-pgsql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-pgsql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-apache-pgsql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-mysql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-mysql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-mysql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-mysql/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-mysql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-pgsql/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-pgsql/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-pgsql/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-nginx-pgsql/ubuntu
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-service/alpine
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-service/centos
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-service/ol
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-service/rhel
schedule:
interval: daily
- package-ecosystem: "docker"
directory: /Dockerfiles/web-service/ubuntu
schedule:
interval: daily
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "daily"
time: "06:00"
open-pull-requests-limit: 5
labels:
- "dependencies"
- "github_actions"
- package-ecosystem: "docker"
directories:
- "/Dockerfiles/agent/*"
- "/Dockerfiles/agent2/*"
- "/Dockerfiles/build-base/*"
- "/Dockerfiles/build-mysql/*"
- "/Dockerfiles/build-pgsql/*"
- "/Dockerfiles/build-sqlite3/*"
- "/Dockerfiles/java-gateway/*"
- "/Dockerfiles/proxy-mysql/*"
- "/Dockerfiles/proxy-sqlite3/*"
- "/Dockerfiles/server-mysql/*"
- "/Dockerfiles/server-pgsql/*"
- "/Dockerfiles/snmptraps/*"
- "/Dockerfiles/web-apache-mysql/*"
- "/Dockerfiles/web-apache-pgsql/*"
- "/Dockerfiles/web-nginx-mysql/*"
- "/Dockerfiles/web-nginx-pgsql/*"
- "/Dockerfiles/web-service/*"
schedule:
interval: "weekly"
time: "06:30"
open-pull-requests-limit: 3
labels:
- "dependencies"
- "docker"
groups:
docker-images:
applies-to: version-updates
patterns:
- "*"
group-by: dependency-name

View File

@@ -0,0 +1,2 @@
# Pinned Python dependencies for CI scripts (markdown rendering and
# HTTP requests to the Pyxis API).
markdown==3.10.2
requests==2.33.0

View File

@@ -1,48 +1,96 @@
import sys
import requests
import json
import markdown
import os
import sys
from pathlib import Path
repository_description = None
import markdown
import requests
if ("DESCRIPTION_FILE" not in os.environ or len(os.environ["DESCRIPTION_FILE"]) == 0):
print("::error::Description file environment variable is not specified")
sys.exit(1)
if ("PYXIS_API_TOKEN" not in os.environ or len(os.environ["PYXIS_API_TOKEN"]) == 0):
print("::error::API token environment variable is not specified")
sys.exit(1)
if ("API_URL" not in os.environ or len(os.environ["API_URL"]) == 0):
print("::error::API URL environment variable is not specified")
sys.exit(1)
if ("PROJECT_ID" not in os.environ or len(os.environ["PROJECT_ID"]) == 0):
print("RedHat project ID environment variable is not specified")
MAX_DESCRIPTION_LEN = 32768
REQUEST_TIMEOUT = 30
##################
def fail(msg: str) -> None:
print(f"::error::{msg}")
sys.exit(1)
if (os.path.isfile(os.environ["DESCRIPTION_FILE"] + '.html')):
file = open(os.environ["DESCRIPTION_FILE"] + '.html', mode='r')
repository_description = file.read()
file.close()
elif (os.path.isfile(os.environ["DESCRIPTION_FILE"] + '.md')):
file = open(os.environ["DESCRIPTION_FILE"] + '.md', mode='r')
markdown_data = file.read()
file.close()
repository_description=markdown.markdown(markdown_data)
if (repository_description is None or len(repository_description) == 0):
print("::error::No description file found")
sys.exit(1)
def get_env(name: str) -> str:
value = os.environ.get(name, "").strip()
if not value:
fail(f"{name} environment variable is not specified")
return value
data = dict()
data['container'] = dict()
data['container']['repository_description'] = repository_description[:32768]
##################
headers = {'accept' : 'application/json', 'X-API-KEY' : os.environ["PYXIS_API_TOKEN"], 'Content-Type' : 'application/json'}
result = requests.patch(os.environ["API_URL"] + os.environ["PROJECT_ID"],
headers = headers,
data = json.dumps(data))
def load_description(file_name: str) -> str:
html_path = Path(f"{file_name}.html")
md_path = Path(f"{file_name}.md")
print("::group::Result")
print("Response code: " + str(result.status_code))
print("Last update date: " + json.loads(result.content)['last_update_date'])
print("::endgroup::")
if html_path.is_file():
with html_path.open("r", encoding="utf-8") as f:
return f.read()
if md_path.is_file():
with md_path.open("r", encoding="utf-8") as f:
markdown_data = f.read()
return markdown.markdown(markdown_data)
fail(f"No description file found: expected '{html_path}' or '{md_path}'")
def main() -> int:
description_file = get_env("DESCRIPTION_FILE")
api_token = get_env("PYXIS_API_TOKEN")
api_url = get_env("API_URL")
project_id = get_env("PROJECT_ID")
repository_description = load_description(description_file)
if not repository_description.strip():
fail("Description file is empty")
was_truncated = len(repository_description) > MAX_DESCRIPTION_LEN
repository_description = repository_description[:MAX_DESCRIPTION_LEN]
payload = {
"container": {
"repository_description": repository_description,
}
}
headers = {
"accept": "application/json",
"X-API-KEY": api_token,
"Content-Type": "application/json",
}
url = f"{api_url}{project_id}"
try:
with requests.Session() as session:
response = session.patch(
url,
headers = headers,
json = payload,
timeout = REQUEST_TIMEOUT,
)
response.raise_for_status()
except requests.RequestException as exc:
fail(f"Request to Pyxis API failed: {exc}")
try:
response_data = response.json()
except ValueError:
fail(f"API returned non-JSON response: {response.text[:500]}")
print("::group::Result")
print(f"Response code: {response.status_code}")
if was_truncated:
print(f"Warning: repository_description was truncated to {MAX_DESCRIPTION_LEN} characters")
print(f"Last update date: {response_data.get('last_update_date', '<missing>')}")
print("::endgroup::")
return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -18,15 +18,17 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
api.github.com:443
github.com:443
api.deps.dev:443
api.securityscorecards.dev:443
- name: 'Checkout Repository'
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: 'Dependency Review'
uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
uses: actions/dependency-review-action@2031cfc080254a8a887f58cffee85186f0e49e48 # v4.9.0

View File

@@ -3,26 +3,28 @@ name: DockerHub Description
on:
push:
branches:
- 'trunk'
- trunk
paths:
- 'Dockerfiles/*/README.md'
- '.github/workflows/dockerhub_description.yml'
workflow_dispatch:
concurrency:
group: dockerhub-description-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
DOCKER_REPOSITORY: "zabbix"
IMAGES_PREFIX: "zabbix-"
DOCKERFILES_DIRECTORY: "./Dockerfiles"
permissions:
contents: read
contents: read
jobs:
main:
update-description:
name: Update description
runs-on: ubuntu-latest
env:
DOCKER_REPOSITORY: "zabbix"
permissions:
contents: read
strategy:
@@ -46,9 +48,10 @@ jobs:
- web-nginx-mysql
- web-nginx-pgsql
- web-service
steps:
- name: Block egress traffic
uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
@@ -61,7 +64,16 @@ jobs:
with:
fetch-depth: 1
- name: Update DockerHub repo description (zabbix-${{ matrix.component }})
- name: Check README file
run: |
set -Eeuo pipefail
readme="${DOCKERFILES_DIRECTORY}/${{ matrix.component }}/README.md"
if [[ ! -f "$readme" ]]; then
echo "::error::README file is missing: $readme"
exit 1
fi
- name: Update DockerHub repo description
uses: peter-evans/dockerhub-description@1b9a80c056b620d92cedb9d9b5a223409c68ddfa # v5.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}

File diff suppressed because it is too large Load Diff

View File

@@ -2,8 +2,7 @@ name: Build images (RedHat)
on:
release:
types:
- published
types: [published]
push:
branches:
- '[0-9]+.[0-9]+'
@@ -25,27 +24,27 @@ on:
description: 'Specify trunk major version'
type: string
concurrency:
group: rhel-build-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: bash
permissions:
contents: read
contents: read
env:
TRUNK_ONLY_EVENT: ${{ contains(fromJSON('["schedule"]'), github.event_name) }}
AUTO_PUSH_IMAGES: ${{ (! contains(fromJSON('["push"]'), github.event_name) && vars.AUTO_PUSH_IMAGES) || (contains(fromJSON('["workflow_dispatch"]'), github.event_name) && inputs.publish_images == 'true' ) }}
LATEST_BRANCH: ${{ github.event.repository.default_branch }}
TRUNK_GIT_BRANCH: "refs/heads/trunk"
IMAGES_PREFIX: "zabbix-"
BASE_BUILD_NAME: "build-base"
MATRIX_FILE: "build.json"
DOCKERFILES_DIRECTORY: "Dockerfiles"
OIDC_ISSUER: "https://token.actions.githubusercontent.com"
IDENITY_REGEX: "https://github.com/zabbix/zabbix-docker/.github/"
REGISTRY: "quay.io"
REGISTRY_NAMESPACE: "redhat-isv-containers"
PREFLIGHT_IMAGE: "quay.io/opdev/preflight:stable"
@@ -61,7 +60,6 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
actions: write
outputs:
platforms: ${{ steps.platform_list.outputs.list }}
database: ${{ steps.database.outputs.list }}
@@ -70,9 +68,10 @@ jobs:
current_branch: ${{ steps.branch_info.outputs.current_branch }}
sha_short: ${{ steps.branch_info.outputs.sha_short }}
secret_prefix: ${{ steps.branch_info.outputs.secret_prefix }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
@@ -86,119 +85,74 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 1
sparse-checkout: ${{ env.MATRIX_FILE }}
sparse-checkout: |
${{ env.MATRIX_FILE }}
.github
- name: Check ${{ env.MATRIX_FILE }} file
id: build_exists
- name: Check matrix file
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
if [[ ! -f "$MATRIX_FILE" ]]; then
echo "::error::File $MATRIX_FILE is missing"
exit 1
fi
if [[ ! -f "$MATRIX_FILE" ]]; then
echo "::error::File $MATRIX_FILE is missing"
exit 1
fi
- name: Prepare Platform list
- name: Prepare platform list
id: platform_list
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
platform_list=$(jq -r '.["os-linux"].rhel | @json' "$MATRIX_FILE")
platform_list="$(jq -c '.["os-linux"].rhel' "$MATRIX_FILE")"
echo "::group::Platform List"
echo "$platform_list"
echo "::endgroup::"
echo "::group::Platform List"
echo "$platform_list"
echo "::endgroup::"
echo "list=$platform_list" >> $GITHUB_OUTPUT
echo "list=$platform_list" >> "$GITHUB_OUTPUT"
- name: Prepare Database engine list
- name: Prepare database engine list
id: database
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
database_list=$(jq -r '[.components | map_values(select(.rhel == true)) | values[].base ] | sort | unique | del(.. | select ( . == "" ) ) | @json' "$MATRIX_FILE")
database_list="$(jq -c '
[.components
| map_values(select(.rhel == true))
| values[].base]
| map(select(length > 0))
| unique
' "$MATRIX_FILE")"
echo "::group::Database List"
echo "$database_list"
echo "::endgroup::"
echo "::group::Database List"
echo "$database_list"
echo "::endgroup::"
echo "list=$database_list" >> $GITHUB_OUTPUT
echo "list=$database_list" >> "$GITHUB_OUTPUT"
- name: Prepare Zabbix component list
id: components
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
component_list=$(jq -r '.components | map_values(select(.rhel == true)) | keys | @json' "$MATRIX_FILE")
component_list="$(jq -c '
.components
| map_values(select(.rhel == true))
| keys
' "$MATRIX_FILE")"
echo "::group::Zabbix Component List"
echo "$component_list"
echo "::endgroup::"
echo "::group::Zabbix Component List"
echo "$component_list"
echo "::endgroup::"
echo "list=$component_list" >> $GITHUB_OUTPUT
echo "list=$component_list" >> "$GITHUB_OUTPUT"
- name: Get branch info
id: branch_info
shell: bash
env:
LATEST_BRANCH: ${{ env.LATEST_BRANCH }}
github_ref: ${{ github.ref }}
TRUNK_MAJOR_VERSION: ${{ inputs.trunk_version }}
run: |
result=false
sha_short=$(git rev-parse --short HEAD)
if [[ "$github_ref" == "refs/tags/"* ]]; then
github_ref=${github_ref%.*}
fi
github_ref=${github_ref##*/}
if [[ "$github_ref" == "$LATEST_BRANCH" ]]; then
result=true
fi
echo "::group::Branch metadata"
echo "is_default_branch - $result"
echo "current_branch - $github_ref"
if [ "${github_ref//.}" == "trunk" ] && [ ! -z "$TRUNK_MAJOR_VERSION" ]; then
echo "secret_prefix=RHEL_${TRUNK_MAJOR_VERSION//.}"
else
echo "secret_prefix=RHEL_${github_ref//.}"
fi
echo "sha_short - $sha_short"
echo "::endgroup::"
echo "is_default_branch=$result" >> $GITHUB_OUTPUT
echo "current_branch=$github_ref" >> $GITHUB_OUTPUT
if [ "${github_ref//.}" == "trunk" ] && [ ! -z "$TRUNK_MAJOR_VERSION" ]; then
echo "secret_prefix=RHEL_${TRUNK_MAJOR_VERSION//.}" >> $GITHUB_OUTPUT
else
echo "secret_prefix=RHEL_${github_ref//.}" >> $GITHUB_OUTPUT
fi
echo "sha_short=$sha_short" >> $GITHUB_OUTPUT
- name: Cleanup existing cache
shell: bash
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
REPO: ${{ github.repository }}
BRANCH: ${{ steps.branch_info.outputs.current_branch }}
GH_RUN_ID: ${{ github.run_id }}
run: |
gh extension install actions/gh-actions-cache
cache_keys=$(gh actions-cache list -R "${REPO}" -B "${BRANCH}" -L 100 --sort created-at --order desc | cut -f 1)
## Setting this to not fail the workflow while deleting cache keys
set +e
echo "Deleting caches..."
for cache_key in $cache_keys
do
if [[ "$cache_key" == *"${GH_RUN_ID}" ]]; then
gh actions-cache delete $cache_key -R "${REPO}" -B "${BRANCH}" --confirm
fi
done
uses: ./.github/actions/get-branch-info
with:
trunk_version: ${{ inputs.trunk_version }}
trunk_git_branch: ${{ env.TRUNK_GIT_BRANCH }}
build_base:
timeout-minutes: 30
@@ -212,32 +166,23 @@ jobs:
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 1
- name: Install cosign
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad
with:
cosign-release: 'v2.4.0'
- name: Check cosign version
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
run: cosign version
- name: Fix string case
- name: Normalize architecture
id: lc
env:
ARCH: ${{ matrix.arch }}
run: |
echo "arch=${ARCH,,}" >> $GITHUB_OUTPUT
echo "arch=${ARCH,,}" >> "$GITHUB_OUTPUT"
- name: Generate tags
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
tags: |
@@ -256,38 +201,36 @@ jobs:
--pull
--iidfile=${{ github.workspace }}/iidfile
- name: Image metadata
- name: Save image metadata
id: image_metadata
env:
IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
CACHE_FILE_NAME: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
GITHUB_WORKSPACE: ${{ github.workspace }}
IIDFILE: ${{ github.workspace }}/iidfile
run: |
TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
[[ -f "$IIDFILE" ]] || { echo "::error::iidfile is missing: $IIDFILE"; exit 1; }
echo "::group::Image tag"
echo "image_tag=$IMAGE_TAG"
echo "::endgroup::"
echo "::group::Image Tag ID"
echo "tag_id=$TAG_ID"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$CACHE_FILE_NAME"
echo "::endgroup::"
TAG_ID="$(<"$IIDFILE")"
echo "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
echo "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
echo "::group::Image metadata"
echo "image_tag=$IMAGE_TAG"
echo "tag_id=$TAG_ID"
echo "cache_file_name=$CACHE_FILE_NAME"
echo "::endgroup::"
echo "image_tag_id=${TAG_ID}" >> $GITHUB_OUTPUT
echo "image_tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT
printf '%s' "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
printf '%s' "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
- name: Cache image metadata
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
echo "image_tag_id=$TAG_ID" >> "$GITHUB_OUTPUT"
echo "image_tag=$IMAGE_TAG" >> "$GITHUB_OUTPUT"
- name: Upload artifact metadata
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
path: |
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
name: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
- name: Push image to local storage
id: push_image
@@ -295,45 +238,57 @@ jobs:
IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: |
echo "::group::Result"
echo "Image ${IMAGE_TAG} location: \"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
podman push "${IMAGE_TAG}" dir:"${IMAGE_DIR}/${IMAGE_TAG_ID}"
echo "::endgroup::"
[[ -n "$IMAGE_TAG" ]] || { echo "::error::IMAGE_TAG is empty"; exit 1; }
[[ -n "$IMAGE_TAG_ID" ]] || { echo "::error::IMAGE_TAG_ID is empty"; exit 1; }
[[ -n "$IMAGE_DIR" ]] || { echo "::error::IMAGE_DIR is empty"; exit 1; }
- name: Post build image
run_dir="${IMAGE_DIR}/${GITHUB_RUN_ID}"
rm -rf "$run_dir"
mkdir -p "$run_dir"
echo "::group::Result"
echo "Image ${IMAGE_TAG} location: ${run_dir}/${IMAGE_TAG_ID}"
podman push "${IMAGE_TAG}" "dir:${run_dir}/${IMAGE_TAG_ID}"
echo "::endgroup::"
- name: Post build cleanup
if: ${{ success() || failure() }}
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
IIDFILE: ${{ github.workspace }}/iidfile
run: |
echo "::group::Result"
echo "::group::Cleanup"
rm -rf "$GITHUB_WORKSPACE/iidfile"
rm -f "$IIDFILE"
echo "Removing working containers"
buildah rm -a 2>/dev/null || true
echo "Removing container data in storage not controlled by podman"
podman system prune --external 2>/dev/null
echo "Removing all unused container data with volumes"
podman system prune -a --volumes -f 2>/dev/null
echo "Reseting podman storage to default state"
podman system reset -f 2>/dev/null || true
echo "Removing working containers"
buildah rm -a 2>/dev/null || true
echo "::endgroup::"
echo "Removing container data in storage not controlled by podman"
podman system prune --external 2>/dev/null || true
- name: Check on failures
if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
echo "Removing unused container data"
podman container prune -f 2>/dev/null || true
podman image prune -f 2>/dev/null || true
podman volume prune -f 2>/dev/null || true
echo "::endgroup::"
- name: Remove orphaned local image
if: ${{ (cancelled() || failure()) && (steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
env:
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: |
echo "::group::Removing orphaned image"
rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
echo "::endgroup::"
echo "::group::Removing orphaned image"
rm -rf "${IMAGE_DIR:?}/${GITHUB_RUN_ID:?}/${IMAGE_TAG_ID:?}"
echo "::endgroup::"
build_base_database:
timeout-minutes: 90
needs: [ "build_base", "init_build"]
needs: ["build_base", "init_build"]
name: Build ${{ matrix.build }} base (${{ matrix.arch }})
strategy:
fail-fast: false
@@ -343,59 +298,70 @@ jobs:
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 1
- name: Fix string case
- name: Normalize architecture
id: lc
env:
ARCH: ${{ matrix.arch }}
run: |
echo "arch=${ARCH,,}" >> $GITHUB_OUTPUT
echo "arch=${ARCH,,}" >> "$GITHUB_OUTPUT"
- name: Download metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
- name: Download artifact metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
path: |
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
name: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
- name: Pull ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} image from local storage
id: base_build
env:
MATRIX_ARCH: ${{ matrix.arch }}
BASE_IMAGE: ${{ env.BASE_BUILD_NAME }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
MATRIX_ARCH: ${{ matrix.arch }}
BASE_IMAGE: ${{ env.BASE_BUILD_NAME }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: |
BASE_TAG_ID=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag_id")
BASE_IMAGE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag")
tag_id_file="${BASE_IMAGE}_${MATRIX_ARCH}_tag_id"
tag_file="${BASE_IMAGE}_${MATRIX_ARCH}_tag"
echo "::group::Pull image"
echo "podman pull dir:\"${IMAGE_DIR}/${BASE_TAG_ID}\""
podman pull dir:"${IMAGE_DIR}/${BASE_TAG_ID}"
echo "::endgroup::"
[[ -f "$tag_id_file" ]] || { echo "::error::Missing file: $tag_id_file"; exit 1; }
[[ -f "$tag_file" ]] || { echo "::error::Missing file: $tag_file"; exit 1; }
[[ -n "$IMAGE_DIR" ]] || { echo "::error::IMAGE_DIR is empty"; exit 1; }
echo "::group::Tag image"
echo "podman tag \"${BASE_TAG_ID}\" \"${BASE_IMAGE_TAG}\""
podman tag "${BASE_TAG_ID}" "${BASE_IMAGE_TAG}"
echo "::endgroup::"
BASE_TAG_ID="$(<"$tag_id_file")"
BASE_IMAGE_TAG="$(<"$tag_file")"
echo "::group::SHA256 tag"
DIGEST=$(podman inspect "${BASE_TAG_ID}" --format '{{ .Digest }}')
BASE_BUILD_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${DIGEST}"
run_dir="${IMAGE_DIR}/${GITHUB_RUN_ID}"
echo "base_build_image=${BASE_BUILD_IMAGE}"
echo "::endgroup::"
echo "::group::Pull image"
echo "podman pull dir:${run_dir}/${BASE_TAG_ID}"
podman pull "dir:${run_dir}/${BASE_TAG_ID}"
echo "::endgroup::"
echo "base_build_image=${BASE_BUILD_IMAGE}" >> $GITHUB_OUTPUT
echo "::group::Tag image"
echo "podman tag ${BASE_TAG_ID} ${BASE_IMAGE_TAG}"
podman tag "${BASE_TAG_ID}" "${BASE_IMAGE_TAG}"
echo "::endgroup::"
DIGEST="$(podman inspect "${BASE_TAG_ID}" --format '{{ .Digest }}')"
[[ -n "$DIGEST" && "$DIGEST" != "<no value>" ]] || { echo "::error::Image digest is empty"; exit 1; }
BASE_BUILD_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${DIGEST}"
echo "::group::SHA256 tag"
echo "base_build_image=${BASE_BUILD_IMAGE}"
echo "::endgroup::"
echo "base_build_image=${BASE_BUILD_IMAGE}" >> "$GITHUB_OUTPUT"
- name: Generate tags
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
images: ${{ env.IMAGES_PREFIX }}${{ matrix.build }}
tags: |
@@ -415,38 +381,36 @@ jobs:
--iidfile=${{ github.workspace }}/iidfile
--build-context sources=./sources/
- name: Prepare image metadata
- name: Save image metadata
id: image_metadata
env:
IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
CACHE_FILE_NAME: ${{ matrix.build }}_${{ matrix.arch }}
GITHUB_WORKSPACE: ${{ github.workspace }}
IIDFILE: ${{ github.workspace }}/iidfile
run: |
TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
[[ -f "$IIDFILE" ]] || { echo "::error::iidfile is missing: $IIDFILE"; exit 1; }
echo "::group::Image tag"
echo "image_tag=$IMAGE_TAG"
echo "::endgroup::"
echo "::group::Image Tag ID"
echo "tag_id=$TAG_ID"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$CACHE_FILE_NAME"
echo "::endgroup::"
TAG_ID="$(<"$IIDFILE")"
echo "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
echo "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
echo "::group::Image metadata"
echo "image_tag=$IMAGE_TAG"
echo "tag_id=$TAG_ID"
echo "cache_file_name=$CACHE_FILE_NAME"
echo "::endgroup::"
echo "image_tag_id=${TAG_ID}" >> $GITHUB_OUTPUT
echo "image_tag=${IMAGE_TAG}" >> $GITHUB_OUTPUT
printf '%s' "$TAG_ID" > "${CACHE_FILE_NAME}_tag_id"
printf '%s' "$IMAGE_TAG" > "${CACHE_FILE_NAME}_tag"
- name: Cache image metadata
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
echo "image_tag_id=$TAG_ID" >> "$GITHUB_OUTPUT"
echo "image_tag=$IMAGE_TAG" >> "$GITHUB_OUTPUT"
- name: Upload artifact metadata
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
path: |
${{ matrix.build }}_${{ matrix.arch }}_tag_id
${{ matrix.build }}_${{ matrix.arch }}_tag
key: ${{ matrix.build }}-${{ matrix.arch }}-${{ github.run_id }}
${{ matrix.build }}_${{ matrix.arch }}_tag_id
${{ matrix.build }}_${{ matrix.arch }}_tag
name: ${{ matrix.build }}-${{ matrix.arch }}-${{ github.run_id }}
- name: Push image to local storage
id: push_image
@@ -454,45 +418,56 @@ jobs:
IMAGE_TAG: ${{ steps.image_metadata.outputs.image_tag }}
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: |
echo "::group::Result"
echo "podman push \"${IMAGE_TAG}\" dir:\"${IMAGE_DIR}/${IMAGE_TAG_ID}\""
podman push "${IMAGE_TAG}" dir:"${IMAGE_DIR}/${IMAGE_TAG_ID}"
echo "::endgroup::"
[[ -n "$IMAGE_TAG" ]] || { echo "::error::IMAGE_TAG is empty"; exit 1; }
[[ -n "$IMAGE_TAG_ID" ]] || { echo "::error::IMAGE_TAG_ID is empty"; exit 1; }
[[ -n "$IMAGE_DIR" ]] || { echo "::error::IMAGE_DIR is empty"; exit 1; }
- name: Post build image
run_dir="${IMAGE_DIR}/${GITHUB_RUN_ID}"
mkdir -p "$run_dir"
echo "::group::Result"
echo "Image ${IMAGE_TAG} location: ${run_dir}/${IMAGE_TAG_ID}"
podman push "${IMAGE_TAG}" "dir:${run_dir}/${IMAGE_TAG_ID}"
echo "::endgroup::"
- name: Post build cleanup
if: ${{ success() || failure() }}
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
IIDFILE: ${{ github.workspace }}/iidfile
run: |
echo "::group::Result"
echo "::group::Cleanup"
rm -rf "$GITHUB_WORKSPACE/iidfile"
rm -f "$IIDFILE"
echo "Removing working containers"
buildah rm -a 2>/dev/null || true
echo "Removing container data in storage not controlled by podman"
podman system prune --external 2>/dev/null
echo "Removing all unused container data with volumes"
podman system prune -a --volumes -f 2>/dev/null
echo "Reseting podman storage to default state"
podman system reset -f 2>/dev/null || true
echo "Removing working containers"
buildah rm -a 2>/dev/null || true
echo "::endgroup::"
echo "Removing container data in storage not controlled by podman"
podman system prune --external 2>/dev/null || true
- name: Check on failures
if: ${{ (cancelled() || failure()) && ( steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
echo "Removing unused container data"
podman container prune -f 2>/dev/null || true
podman image prune -f 2>/dev/null || true
podman volume prune -f 2>/dev/null || true
echo "::endgroup::"
- name: Remove orphaned local image
if: ${{ (cancelled() || failure()) && (steps.push_image.outcome == 'failure' || steps.push_image.outcome == 'cancelled') }}
env:
IMAGE_TAG_ID: ${{ steps.image_metadata.outputs.image_tag_id }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: |
echo "::group::Removing orphaned image"
rm -rf "${IMAGE_DIR}/${IMAGE_TAG_ID}"
echo "::endgroup::"
echo "::group::Removing orphaned image"
rm -rf "${IMAGE_DIR:?}/${GITHUB_RUN_ID:?}/${IMAGE_TAG_ID:?}"
echo "::endgroup::"
build_images:
timeout-minutes: 30
needs: [ "build_base_database", "init_build"]
needs: ["build_base_database", "init_build"]
name: Build ${{ matrix.build }} image (${{ matrix.arch }})
strategy:
fail-fast: false
@@ -502,88 +477,95 @@ jobs:
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 1
- name: Variables formating
- name: Format variables
id: var_format
env:
MATRIX_BUILD: ${{ matrix.build }}
run: |
MATRIX_BUILD=${MATRIX_BUILD^^}
MATRIX_BUILD=${MATRIX_BUILD//-/_}
matrix_build="${MATRIX_BUILD^^}"
matrix_build="${matrix_build//-/_}"
echo "::group::Result"
echo "matrix_build=${MATRIX_BUILD}"
echo "::endgroup::"
echo "matrix_build=${MATRIX_BUILD}" >> $GITHUB_OUTPUT
echo "::group::Result"
echo "matrix_build=${matrix_build}"
echo "::endgroup::"
- name: Detect Build Base Image
echo "matrix_build=${matrix_build}" >> "$GITHUB_OUTPUT"
- name: Detect build base image
id: build_base_image
if: ${{ matrix.build != 'snmptraps' }}
env:
MATRIX_BUILD: ${{ matrix.build }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
BUILD_BASE=$(jq -r ".components.\"$MATRIX_BUILD\".base" "$MATRIX_FILE")
build_base="$(jq -r ".components.\"$MATRIX_BUILD\".base" "$MATRIX_FILE")"
[[ -n "$build_base" && "$build_base" != "null" ]] || { echo "::error::Base image is not defined for $MATRIX_BUILD"; exit 1; }
echo "::group::Base Build Image"
echo "$BUILD_BASE"
echo "::endgroup::"
echo "::group::Base Build Image"
echo "$build_base"
echo "::endgroup::"
echo "build_base=${BUILD_BASE}" >> $GITHUB_OUTPUT
echo "build_base=$build_base" >> "$GITHUB_OUTPUT"
- name: Download metadata of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }}
- name: Download artifact metadata of ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }}
if: ${{ matrix.build != 'snmptraps' }}
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
path: |
${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}_tag_id
${{ steps.build_base_image.outputs.build_base }}_${{ matrix.arch }}_tag
key: ${{ steps.build_base_image.outputs.build_base }}-${{ matrix.arch }}-${{ github.run_id }}
name: ${{ steps.build_base_image.outputs.build_base }}-${{ matrix.arch }}-${{ github.run_id }}
- name: Pull ${{ steps.build_base_image.outputs.build_base }}:${{ matrix.arch }} image
id: base_build
if: ${{ matrix.build != 'snmptraps' }}
env:
MATRIX_ARCH: ${{ matrix.arch }}
BASE_IMAGE: ${{ steps.build_base_image.outputs.build_base }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
MATRIX_ARCH: ${{ matrix.arch }}
BASE_IMAGE: ${{ steps.build_base_image.outputs.build_base }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: |
BASE_TAG_ID=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag_id")
BASE_IMAGE_TAG=$(cat "${BASE_IMAGE}_${MATRIX_ARCH}_tag")
tag_id_file="${BASE_IMAGE}_${MATRIX_ARCH}_tag_id"
tag_file="${BASE_IMAGE}_${MATRIX_ARCH}_tag"
echo "::group::Pull image"
echo "podman pull dir:\"${IMAGE_DIR}/${BASE_TAG_ID}\""
podman pull dir:"${IMAGE_DIR}/${BASE_TAG_ID}"
echo "::endgroup::"
[[ -f "$tag_id_file" ]] || { echo "::error::Missing file: $tag_id_file"; exit 1; }
[[ -f "$tag_file" ]] || { echo "::error::Missing file: $tag_file"; exit 1; }
[[ -n "$IMAGE_DIR" ]] || { echo "::error::IMAGE_DIR is empty"; exit 1; }
echo "::group::Tag image"
echo "podman tag \"${BASE_TAG_ID}\" \"${BASE_IMAGE_TAG}\""
podman tag "${BASE_TAG_ID}" "${BASE_IMAGE_TAG}"
echo "::endgroup::"
base_tag_id="$(<"$tag_id_file")"
base_image_tag="$(<"$tag_file")"
echo "::group::SHA256 tag"
DIGEST=$(podman inspect "${BASE_TAG_ID}" --format '{{ .Digest }}')
BASE_BUILD_IMAGE="${IMAGES_PREFIX}${BASE_IMAGE}@${DIGEST}"
echo "digest=${BASE_BUILD_IMAGE}"
echo "::endgroup::"
run_dir="${IMAGE_DIR}/${GITHUB_RUN_ID}"
echo "base_build_image=${BASE_BUILD_IMAGE}" >> $GITHUB_OUTPUT
echo "::group::Pull image"
echo "podman pull dir:${run_dir}/${base_tag_id}"
podman pull "dir:${run_dir}/${base_tag_id}"
echo "::endgroup::"
- name: Remove smartmontools
if: ${{ matrix.build == 'agent2' }}
env:
DOCKERFILES_DIRECTORY: ${{ env.DOCKERFILES_DIRECTORY }}
run: |
sed -i '/smartmontools/d' "$DOCKERFILES_DIRECTORY/agent2/rhel/Dockerfile"
echo "::group::Tag image"
echo "podman tag ${base_tag_id} ${base_image_tag}"
podman tag "${base_tag_id}" "${base_image_tag}"
echo "::endgroup::"
digest="$(podman inspect "${base_tag_id}" --format '{{ .Digest }}')"
[[ -n "$digest" && "$digest" != "<no value>" ]] || { echo "::error::Image digest is empty"; exit 1; }
base_build_image="${IMAGES_PREFIX}${BASE_IMAGE}@${digest}"
echo "::group::SHA256 tag"
echo "digest=${base_build_image}"
echo "::endgroup::"
echo "base_build_image=${base_build_image}" >> "$GITHUB_OUTPUT"
- name: Generate tags
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
images: ${{ env.REGISTRY }}/${{ env.REGISTRY_NAMESPACE }}/${{ secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] || matrix.build }}
tags: |
@@ -607,13 +589,23 @@ jobs:
${{ env.DOCKERFILES_DIRECTORY }}/${{ matrix.build }}/rhel/Dockerfile
extra-args: |
--iidfile=${{ github.workspace }}/iidfile
build-args: BUILD_BASE_IMAGE=localhost/${{ steps.base_build.outputs.base_build_image }}
build-args: |
BUILD_BASE_IMAGE=${{ matrix.build != 'snmptraps' && format('localhost/{0}', steps.base_build.outputs.base_build_image) || '' }}
- name: Validate registry secrets
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
env:
PROJECT_NAME: ${{ secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] }}
PROJECT_SECRET: ${{ secrets[format('{0}_{1}_SECRET', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] }}
run: |
[[ -n "${PROJECT_NAME:-}" ]] || { echo "::error::Registry project secret is missing"; exit 1; }
[[ -n "${PROJECT_SECRET:-}" ]] || { echo "::error::Registry robot secret is missing"; exit 1; }
- name: Log in to ${{ env.REGISTRY }}
uses: redhat-actions/podman-login@9184318aae1ee5034fbfbacc0388acf12669171f # v1.6
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
with:
username: ${{ format('redhat-isv-containers+{0}-robot', secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)]) }}
username: ${{ format('redhat-isv-containers+{0}-robot', secrets[format('{0}_{1}_PROJECT', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)]) }}
password: ${{ secrets[format('{0}_{1}_SECRET', needs.init_build.outputs.secret_prefix, steps.var_format.outputs.matrix_build)] }}
registry: ${{ env.REGISTRY }}
auth_file_path: /tmp/.docker_${{ matrix.build }}_${{ matrix.arch }}_${{ needs.init_build.outputs.sha_short }}
@@ -636,27 +628,27 @@ jobs:
IMAGE_TAG: ${{ steps.build_image.outputs.image-with-tag }}
PREFLIGHT_IMAGE: ${{ env.PREFLIGHT_IMAGE }}
run: |
mkdir -p $PFLT_ARTIFACTS
echo "::group::Pull preflight \"$PREFLIGHT_IMAGE\" image"
podman pull "$PREFLIGHT_IMAGE"
echo "::endgroup::"
mkdir -p "$PFLT_ARTIFACTS"
echo "::group::Perform certification tests"
podman run \
-it \
--rm \
--security-opt=label=disable \
--env PFLT_LOGLEVEL=$PFLT_LOGLEVEL \
--env PFLT_ARTIFACTS=/artifacts \
--env PFLT_LOGFILE=/artifacts/preflight.log \
--env PFLT_CERTIFICATION_COMPONENT_ID=$PFLT_CERTIFICATION_PROJECT_ID \
--env PFLT_PYXIS_API_TOKEN=$PFLT_PYXIS_API_TOKEN \
--env PFLT_DOCKERCONFIG=/temp-authfile.json \
-v $PFLT_ARTIFACTS:/artifacts \
-v $PFLT_DOCKERCONFIG:/temp-authfile.json:ro \
"$PREFLIGHT_IMAGE" check container $IMAGE_TAG --submit
podman rmi -i -f "$PREFLIGHT_IMAGE"
echo "::endgroup::"
echo "::group::Pull preflight image"
podman pull "$PREFLIGHT_IMAGE"
echo "::endgroup::"
echo "::group::Perform certification tests"
podman run \
-it \
--rm \
--security-opt=label=disable \
--env PFLT_LOGLEVEL="$PFLT_LOGLEVEL" \
--env PFLT_ARTIFACTS=/artifacts \
--env PFLT_LOGFILE=/artifacts/preflight.log \
--env PFLT_CERTIFICATION_COMPONENT_ID="$PFLT_CERTIFICATION_PROJECT_ID" \
--env PFLT_PYXIS_API_TOKEN="$PFLT_PYXIS_API_TOKEN" \
--env PFLT_DOCKERCONFIG=/temp-authfile.json \
-v "$PFLT_ARTIFACTS:/artifacts" \
-v "$PFLT_DOCKERCONFIG:/temp-authfile.json:ro" \
"$PREFLIGHT_IMAGE" check container "$IMAGE_TAG" --submit
echo "::endgroup::"
- name: Push to RedHat certification procedure (all tags)
id: push_to_registry_all_tags
@@ -665,97 +657,78 @@ jobs:
with:
tags: ${{ steps.meta.outputs.tags }}
- name: Post Preflight certification
- name: Post preflight cleanup
if: ${{ env.AUTO_PUSH_IMAGES == 'true' && (success() || failure()) }}
env:
PREFLIGHT_IMAGE: ${{ env.PREFLIGHT_IMAGE }}
PFLT_ARTIFACTS: ${{ env.PFLT_ARTIFACTS }}
run: |
echo "::group::Result"
rm -rf "$PFLT_ARTIFACTS"
podman rmi -i -f "$PREFLIGHT_IMAGE"
echo "::endgroup::"
echo "::group::Cleanup"
rm -rf "$PFLT_ARTIFACTS"
podman rmi -i -f "$PREFLIGHT_IMAGE" 2>/dev/null || true
echo "::endgroup::"
- name: Image digest
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
IIDFILE: ${{ github.workspace }}/iidfile
run: |
TAG_ID=$(cat $GITHUB_WORKSPACE/iidfile)
echo "::group::Image digest"
echo "$TAG_ID"
echo "::endgroup::"
[[ -f "$IIDFILE" ]] || { echo "::error::iidfile is missing: $IIDFILE"; exit 1; }
tag_id="$(<"$IIDFILE")"
- name: Post build image
echo "::group::Image digest"
echo "$tag_id"
echo "::endgroup::"
- name: Post build cleanup
if: ${{ success() || failure() }}
env:
GITHUB_WORKSPACE: ${{ github.workspace }}
IIDFILE: ${{ github.workspace }}/iidfile
run: |
echo "::group::Result"
echo "::group::Cleanup"
rm -rf "$GITHUB_WORKSPACE/iidfile"
rm -f "$IIDFILE"
echo "Removing working containers"
buildah rm -a 2>/dev/null || true
echo "Removing container data in storage not controlled by podman"
podman system prune --external 2>/dev/null
echo "Removing all unused container data with volumes"
podman system prune -a --volumes -f 2>/dev/null
echo "Reseting podman storage to default state"
podman system reset -f 2>/dev/null || true
echo "Removing working containers"
buildah rm -a 2>/dev/null || true
echo "::endgroup::"
echo "Removing container data in storage not controlled by podman"
podman system prune --external 2>/dev/null || true
echo "Removing unused container data"
podman container prune -f 2>/dev/null || true
podman image prune -f 2>/dev/null || true
podman volume prune -f 2>/dev/null || true
echo "::endgroup::"
clear_artifacts:
timeout-minutes: 10
needs: [ "build_images", "init_build"]
name: Clear ${{ matrix.build }} image cache (${{ matrix.arch }})
needs: ["build_images", "build_base_database", "init_build"]
name: Clear images cache (${{ matrix.arch }})
strategy:
fail-fast: false
matrix:
build: ${{ fromJson(needs.init_build.outputs.database) }}
arch: ${{ fromJson(needs.init_build.outputs.platforms) }}
runs-on: [self-hosted, linux, "${{ matrix.arch }}"]
if: ${{ needs.build_base_database.result == 'success' }}
permissions: {}
steps:
- name: Download metadata of ${{ matrix.build }}:${{ matrix.arch }}
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: |
${{ matrix.build }}_${{ matrix.arch }}_tag_id
${{ matrix.build }}_${{ matrix.arch }}_tag
key: ${{ matrix.build }}-${{ matrix.arch }}-${{ github.run_id }}
- name: Remove ${{ matrix.build }}:${{ matrix.arch }} cache
- name: Remove local image storage for current run
if: ${{ always() }}
env:
CACHE_FILE_NAME: ${{ matrix.build }}_${{ matrix.arch }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
GITHUB_RUN_ID: ${{ github.run_id }}
run: |
echo "::group::Result"
set -u
BASE_TAG=$(cat "${CACHE_FILE_NAME}_tag_id")
echo "Removing ${IMAGE_DIR}/${BASE_TAG}"
rm -rf "${IMAGE_DIR}/${BASE_TAG}"
run_dir="${IMAGE_DIR}/${GITHUB_RUN_ID}"
echo "::endgroup::"
- name: Download metadata of ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }}
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: |
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag_id
${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}_tag
key: ${{ env.BASE_BUILD_NAME }}-${{ matrix.arch }}-${{ github.run_id }}
- name: Remove ${{ env.BASE_BUILD_NAME }}:${{ matrix.arch }} cache
env:
CACHE_FILE_NAME: ${{ env.BASE_BUILD_NAME }}_${{ matrix.arch }}
IMAGE_DIR: ${{ env.IMAGE_DIR }}
run: |
echo "::group::Result"
BASE_TAG=$(cat "${CACHE_FILE_NAME}_tag_id")
echo "Removing ${IMAGE_DIR}/${BASE_TAG}"
rm -rf "${IMAGE_DIR}/${BASE_TAG}"
echo "::endgroup::"
echo "::group::Cleanup run storage"
if [[ -d "$run_dir" ]]; then
echo "Removing $run_dir"
rm -rf "${run_dir:?}"
else
echo "Run directory does not exist: $run_dir"
fi
echo "::endgroup::"

View File

@@ -13,10 +13,15 @@ on:
- 'Dockerfiles/*/windows/*'
- '!**/README.md'
- '.github/workflows/images_build_windows.yml'
- '.github/actions/docker-build-push-windows/*'
schedule:
- cron: '05 02 * * *'
workflow_dispatch:
concurrency:
group: windows-build-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
defaults:
run:
shell: pwsh
@@ -30,7 +35,6 @@ env:
DOCKER_REGISTRY: ${{ vars.DOCKER_REGISTRY }}
DOCKER_REPOSITORY: ${{ vars.DOCKER_REPOSITORY }}
LATEST_BRANCH: ${{ github.event.repository.default_branch }}
TRUNK_GIT_BRANCH: "refs/heads/trunk"
IMAGES_PREFIX: "zabbix-"
@@ -44,7 +48,7 @@ env:
DOCKERFILES_DIRECTORY: "Dockerfiles"
OIDC_ISSUER: "https://token.actions.githubusercontent.com"
IDENITY_REGEX: "https://github.com/zabbix/zabbix-docker/.github/"
IDENTITY_REGEX: "${{ github.server_url }}/${{ github.repository }}/.github/"
jobs:
init_build:
@@ -52,6 +56,9 @@ jobs:
runs-on: ubuntu-latest
permissions:
contents: read
defaults:
run:
shell: bash
outputs:
os: ${{ steps.os.outputs.list }}
components: ${{ steps.components.outputs.list }}
@@ -60,10 +67,11 @@ jobs:
sha_short: ${{ steps.branch_info.outputs.sha_short }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
disable-telemetry: true
allowed-endpoints: >
github.com:443
release-assets.githubusercontent.com:443
@@ -73,32 +81,31 @@ jobs:
with:
ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || '' }}
fetch-depth: 1
sparse-checkout: ${{ env.MATRIX_FILE }}
sparse-checkout: |
${{ env.MATRIX_FILE }}
.github
- name: Check ${{ env.MATRIX_FILE }} file
id: build_exists
shell: bash
- name: Check matrix file
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
if [[ ! -f "$MATRIX_FILE" ]]; then
echo "::error::File $MATRIX_FILE is missing"
exit 1
fi
if [[ ! -f "$MATRIX_FILE" ]]; then
echo "::error::File $MATRIX_FILE is missing"
exit 1
fi
- name: Prepare Operating System list
id: os
shell: bash
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
os_list=$(jq -r '.["os-windows"] | keys | [ .[] | tostring ] | @json' "$MATRIX_FILE")
os_list="$(jq -c '.["os-windows"] | keys' "$MATRIX_FILE")"
echo "::group::Operating System List"
echo "$os_list"
echo "::endgroup::"
echo "::group::Operating System List"
echo "$os_list"
echo "::endgroup::"
echo "list=$os_list" >> $GITHUB_OUTPUT
echo "list=$os_list" >> "$GITHUB_OUTPUT"
- name: Prepare Zabbix component list
id: components
@@ -114,33 +121,10 @@ jobs:
- name: Get branch info
id: branch_info
shell: bash
env:
LATEST_BRANCH: ${{ env.LATEST_BRANCH }}
github_ref: ${{ env.TRUNK_ONLY_EVENT == 'true' && env.TRUNK_GIT_BRANCH || github.ref }}
run: |
result=false
sha_short=$(git rev-parse --short HEAD)
if [[ "$github_ref" == "refs/tags/"* ]]; then
github_ref=${github_ref%.*}
fi
github_ref=${github_ref##*/}
if [[ "$github_ref" == "$LATEST_BRANCH" ]]; then
result=true
fi
echo "::group::Branch data"
echo "is_default_branch - $result"
echo "current_branch - $github_ref"
echo "sha_short - $sha_short"
echo "::endgroup::"
echo "is_default_branch=$result" >> $GITHUB_OUTPUT
echo "current_branch=$github_ref" >> $GITHUB_OUTPUT
echo "sha_short=$sha_short" >> $GITHUB_OUTPUT
uses: ./.github/actions/get-branch-info
with:
trunk_version: ${{ inputs.trunk_version }}
trunk_git_branch: ${{ env.TRUNK_GIT_BRANCH }}
build_base:
name: Build ${{ matrix.component }} base on ${{ matrix.os }}
@@ -151,12 +135,48 @@ jobs:
contents: read
id-token: write
attestations: write
artifact-metadata: write
strategy:
fail-fast: false
matrix:
os: ${{ fromJson(needs.init_build.outputs.os) }}
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
disable-telemetry: true
allowed-endpoints: >
aka.ms:443
api.github.com:443
auth.docker.io:443
curl.se:443
dc.services.visualstudio.com:443
definitionupdates.microsoft.com:443
dl.google.com:443
download.visualstudio.microsoft.com:443
fulcio.sigstore.dev:443
github.com:443
go.dev:443
go.microsoft.com:443
index.docker.io:443
mcr.microsoft.com:443
mobile.events.data.microsoft.com:443
ocsp.digicert.com:80
ocsp.sectigo.com:80
production.cloudflare.docker.com:443
r12.c.lencr.org:80
raw.githubusercontent.com:443
registry-1.docker.io:443
rekor.sigstore.dev:443
tuf-repo-cdn.sigstore.dev:443
www.nasm.us:443
x1.c.lencr.org:80
zlib.net:443
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
@@ -164,15 +184,11 @@ jobs:
fetch-depth: 1
- name: Install cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad
with:
cosign-release: 'v2.4.0'
- name: Check cosign version
run: cosign version
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
uses: ./.github/actions/cosign-install
- name: Login to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -183,26 +199,30 @@ jobs:
MATRIX_OS: ${{ matrix.os }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
$os_tag=$(Get-Content -Path $Env:MATRIX_FILE | ConvertFrom-Json).'os-windows'."$Env:MATRIX_OS"
$osTag = (Get-Content -Path $Env:MATRIX_FILE | ConvertFrom-Json).'os-windows'."$Env:MATRIX_OS"
echo "::group::Base Microsoft Windows OS tag"
echo "$os_tag"
echo "::endgroup::"
if ([string]::IsNullOrWhiteSpace($osTag)) {
throw "Failed to detect Windows base OS tag for '$Env:MATRIX_OS'"
}
echo "os_tag=$os_tag" >> $Env:GITHUB_OUTPUT
echo "::group::Base Microsoft Windows OS tag"
echo "$osTag"
echo "::endgroup::"
echo "os_tag=$osTag" >> $Env:GITHUB_OUTPUT
- name: Generate tags
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
images: ${{ env.DOCKER_REGISTRY }}/${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_IMAGE_NAME }}
images: ${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY, env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, env.BASE_IMAGE_NAME ) }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
tags: |
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ matrix.component }}-${{ steps.base_os_tag.outputs.os_tag }}-
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},suffix=-${{ steps.base_os_tag.outputs.os_tag }},prefix=${{ matrix.component }}-
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,prefix=${{ matrix.component }}-${{ steps.base_os_tag.outputs.os_tag }}-,suffix=-latest
type=ref,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},event=branch,suffix=-${{ steps.base_os_tag.outputs.os_tag }}-latest,prefix=${{ matrix.component }}-
type=raw,enable=${{ (needs.init_build.outputs.current_branch != 'trunk') && (needs.init_build.outputs.is_default_branch == 'true') }},value=${{ matrix.component }}-${{ steps.base_os_tag.outputs.os_tag }}-latest
type=raw,enable=${{ needs.init_build.outputs.current_branch != 'trunk' && needs.init_build.outputs.is_default_branch == 'true' }},value=${{ matrix.component }}-${{ steps.base_os_tag.outputs.os_tag }}-latest
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,prefix=${{ matrix.component }}-${{ steps.base_os_tag.outputs.os_tag }}-
type=ref,enable=${{ needs.init_build.outputs.current_branch == 'trunk' }},event=branch,suffix=-${{ steps.base_os_tag.outputs.os_tag }},prefix=${{ matrix.component }}-
flavor: |
@@ -210,142 +230,68 @@ jobs:
- name: Build and push image
id: docker_build
env:
DOCKERFILES_DIRECTORY: ${{ env.DOCKERFILES_DIRECTORY }}
BASE_BUILD_IMAGE: ${{ env.MSFT_BASE_BUILD_IMAGE }}
BASE_IMAGE_NAME: ${{ env.BASE_IMAGE_NAME }}
MATRIX_COMPONENT: ${{ matrix.component }}
TAGS: ${{ steps.meta.outputs.tags }}
BASE_OS_TAG: ${{ steps.base_os_tag.outputs.os_tag }}
LABEL_REVISION: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
LABEL_CREATED: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
AUTO_PUSH_IMAGES: ${{ env.AUTO_PUSH_IMAGES }}
run: |
echo "::group::Docker version"
docker version
echo "::endgroup::"
echo "::group::Docker info"
docker info
echo "::endgroup::"
$context="$Env:DOCKERFILES_DIRECTORY\$Env:BASE_IMAGE_NAME\windows\"
$dockerfile= $context + 'Dockerfile.' + $Env:MATRIX_COMPONENT
$base_os_image= $Env:BASE_BUILD_IMAGE + ':' + $Env:BASE_OS_TAG
# Can not build on GitHub due existing symlink. Must be removed before build process
Remove-Item -ErrorAction Ignore -Force -Path $context\README.md
$tags_array=$( "$Env:TAGS".Split("`n") )
$tags=$( $tags_array | Foreach-Object { "--tag=$_" } )
echo "::group::Image tags"
echo "$Env:TAGS"
echo "::endgroup::"
echo "::group::Pull base image"
docker pull $base_os_image
if (-not $?) {throw "Failed"}
echo "::endgroup::"
echo "::group::Build Image"
Write-Host @"
docker build --label org.opencontainers.image.revision=$Env:LABEL_REVISION
--label org.opencontainers.image.created=$Env:LABEL_CREATED
--build-arg=OS_BASE_IMAGE=$base_os_image
--file=$dockerfile
$tags
$context
"@
docker build --label org.opencontainers.image.revision=$Env:LABEL_REVISION `
--label org.opencontainers.image.created=$Env:LABEL_CREATED `
--build-arg=OS_BASE_IMAGE=$base_os_image `
--file=$dockerfile `
$tags `
$context
if (-not $?) {throw "Failed"}
echo "::endgroup::"
echo "::group::Publish Image"
if ( $Env:AUTO_PUSH_IMAGES -eq 'true' ) {
Foreach ($tag in $tags_array) {
echo "docker image push $tag"
docker image push $tag
if (-not $?) {throw "Failed"}
}
$digest=$(docker inspect $tags_array[0] --format "{{ index .RepoDigests 0}}").Split('@')[-1]
if (-not $?) {throw "Failed"}
echo "Image digest got from RepoDigests"
}
else {
$digest=$(docker inspect $tags_array[0] --format "{{ index .Id}}")
if (-not $?) {throw "Failed"}
echo "Image digest got from Id"
}
echo "::endgroup::"
echo "::group::Digest"
echo "$digest"
echo "::endgroup::"
echo "digest=$digest" >> $Env:GITHUB_OUTPUT
uses: ./.github/actions/docker-build-push-windows
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}\${{ env.BASE_IMAGE_NAME }}\windows
dockerfile: ${{ env.DOCKERFILES_DIRECTORY }}\${{ env.BASE_IMAGE_NAME }}\windows\Dockerfile.${{ matrix.component }}
tags: ${{ steps.meta.outputs.tags }}
label_revision: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
label_created: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
pull_images: |
${{ env.MSFT_BASE_BUILD_IMAGE }}:${{ steps.base_os_tag.outputs.os_tag }}
build_args: |
OS_BASE_IMAGE=${{ env.MSFT_BASE_BUILD_IMAGE }}:${{ steps.base_os_tag.outputs.os_tag }}
push: true
- name: Sign the images with GitHub OIDC Token
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
TAGS: ${{ steps.meta.outputs.tags }}
run: |
$tags_array=$( "$Env:TAGS".Split("`n") )
$tag_list=@()
uses: ./.github/actions/cosign-sign
with:
digest: ${{ steps.docker_build.outputs.digest }}
tags: ${{ steps.meta.outputs.tags }}
foreach ($tag in $tags_array) {
$tag_name=$tag.Split(":")[0]
$tag_list+="$tag_name@$Env:DIGEST"
}
echo "::group::Images to sign"
echo "$tag_list"
echo "::endgroup::"
echo "::group::Signing"
echo "cosign sign --yes $tag_list"
cosign sign --yes $tag_list
echo "::endgroup::"
- name: Attest images
uses: ./.github/actions/attest-image
with:
subject_name: ${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY, env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, env.BASE_IMAGE_NAME ) }}
subject_digest: ${{ steps.docker_build.outputs.digest }}
- name: Image digest
if: ${{ env.AUTO_PUSH_IMAGES }}
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
CACHE_FILE_NAME: ${{ env.BASE_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
run: |
echo "::group::Image digest"
echo "$Env:DIGEST"
echo "::endgroup::"
echo "::group::Image digest"
echo "$Env:DIGEST"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$Env:CACHE_FILE_NAME"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$Env:CACHE_FILE_NAME"
echo "::endgroup::"
$Env:DIGEST | Set-Content -Path $Env:CACHE_FILE_NAME
Set-Content -Path $Env:CACHE_FILE_NAME -Value $Env:DIGEST -NoNewline
- name: Cache image digest
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
- name: Upload artifact metadata
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
path: ${{ env.BASE_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
name: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ matrix.component }}-${{ github.run_id }}
build_components:
name: Build ${{ matrix.component }} sources on ${{ matrix.os }}
needs: [ "build_base", "init_build"]
needs: ["build_base", "init_build"]
runs-on: ${{ matrix.os }}
timeout-minutes: 70
permissions:
contents: read
id-token: write
attestations: write
artifact-metadata: write
strategy:
fail-fast: false
matrix:
os: ${{ fromJson(needs.init_build.outputs.os) }}
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
@@ -354,15 +300,10 @@ jobs:
fetch-depth: 1
- name: Install cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad
with:
cosign-release: 'v2.4.0'
- name: Check cosign version
run: cosign version
uses: ./.github/actions/cosign-install
- name: Login to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -373,19 +314,23 @@ jobs:
MATRIX_OS: ${{ matrix.os }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
$os_tag=$(Get-Content -Path $Env:MATRIX_FILE | ConvertFrom-Json).'os-windows'."$Env:MATRIX_OS"
$osTag = (Get-Content -Path $Env:MATRIX_FILE | ConvertFrom-Json).'os-windows'."$Env:MATRIX_OS"
echo "::group::Base Windows OS tag"
echo "$os_tag"
echo "::endgroup::"
if ([string]::IsNullOrWhiteSpace($osTag)) {
throw "Failed to detect Windows base OS tag for '$Env:MATRIX_OS'"
}
echo "os_tag=$os_tag" >> $Env:GITHUB_OUTPUT
echo "::group::Base Windows OS tag"
echo "$osTag"
echo "::endgroup::"
echo "os_tag=$osTag" >> $Env:GITHUB_OUTPUT
- name: Generate tags
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
images: ${{ env.DOCKER_REGISTRY }}/${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ env.BASE_BUILD_IMAGE_NAME }}
images: ${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY, env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, env.BASE_BUILD_IMAGE_NAME ) }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
tags: |
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ matrix.component }}-${{ steps.base_os_tag.outputs.os_tag }}-
@@ -399,10 +344,9 @@ jobs:
latest=false
- name: Download SHA256 tag of ${{ env.BASE_IMAGE_NAME }}:${{ matrix.os }}
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
path: ${{ env.BASE_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
name: ${{ env.BASE_IMAGE_NAME }}-${{ matrix.os }}-${{ matrix.component }}-${{ github.run_id }}
- name: Retrieve ${{ env.BASE_IMAGE_NAME }}:${{ matrix.os }} SHA256 tag
id: base_build
@@ -413,168 +357,123 @@ jobs:
DOCKER_REPOSITORY: ${{ env.DOCKER_REPOSITORY }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
run: |
$base_image_file=$Env:BASE_IMAGE_NAME + '_' + $Env:MATRIX_OS + '_' + $Env:MATRIX_COMPONENT
$base_tag = Get-Content $base_image_file -Raw
$build_base_image="$Env:DOCKER_REPOSITORY/$Env:IMAGES_PREFIX$Env:BASE_IMAGE_NAME@" + $base_tag
$baseImageFile = "$Env:BASE_IMAGE_NAME" + "_" + "$Env:MATRIX_OS" + "_" + "$Env:MATRIX_COMPONENT"
echo "::group::Base image Info"
echo "base_tag=$base_tag"
echo "base_build_image=$build_base_image"
echo "::endgroup::"
if (-not (Test-Path $baseImageFile)) {
throw "Missing cached base image digest file: $baseImageFile"
}
echo "base_tag=$base_tag" >> $Env:GITHUB_OUTPUT
echo "base_build_image=$build_base_image" >> $Env:GITHUB_OUTPUT
$baseTag = (Get-Content $baseImageFile -Raw).Trim()
if ([string]::IsNullOrWhiteSpace($baseTag)) {
throw "Base image digest is empty in file: $baseImageFile"
}
$buildBaseImage = "$Env:DOCKER_REPOSITORY/$Env:IMAGES_PREFIX$Env:BASE_IMAGE_NAME@" + $baseTag
echo "::group::Base image info"
echo "base_tag=$baseTag"
echo "base_build_image=$buildBaseImage"
echo "::endgroup::"
echo "base_tag=$baseTag" >> $Env:GITHUB_OUTPUT
echo "base_build_image=$buildBaseImage" >> $Env:GITHUB_OUTPUT
- name: Verify ${{ env.BASE_IMAGE_NAME }}:${{ matrix.os }} cosign
env:
BASE_IMAGE: ${{ steps.base_build.outputs.base_build_image }}
OIDC_ISSUER: ${{ env.OIDC_ISSUER }}
IDENITY_REGEX: ${{ env.IDENITY_REGEX }}
run: |
cosign verify `
--certificate-oidc-issuer-regexp "$Env:OIDC_ISSUER" `
--certificate-identity-regexp "$Env:IDENITY_REGEX" `
"$Env:BASE_IMAGE" | jq
uses: ./.github/actions/cosign-verify
with:
image: ${{ steps.base_build.outputs.base_build_image }}
oidc_issuer: ${{ env.OIDC_ISSUER }}
identity_regexp: ${{ env.IDENTITY_REGEX }}
- name: Build and push image
id: docker_build
env:
DOCKERFILES_DIRECTORY: ${{ env.DOCKERFILES_DIRECTORY }}
BASE_BUILD_IMAGE: ${{ steps.base_build.outputs.base_build_image }}
BASE_BUILD_IMAGE_NAME: ${{ env.BASE_BUILD_IMAGE_NAME }}
BASE_BUILD_OS_TAG: ${{ steps.base_os_tag.outputs.os_tag }}
MATRIX_COMPONENT: ${{ matrix.component }}
TAGS: ${{ steps.meta.outputs.tags }}
LABEL_REVISION: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
LABEL_CREATED: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
AUTO_PUSH_IMAGES: ${{ env.AUTO_PUSH_IMAGES }}
run: |
echo "::group::Docker version"
docker version
echo "::endgroup::"
echo "::group::Docker info"
docker info
echo "::endgroup::"
uses: ./.github/actions/docker-build-push-windows
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}\${{ env.BASE_BUILD_IMAGE_NAME }}\windows
dockerfile: ${{ env.DOCKERFILES_DIRECTORY }}\${{ env.BASE_BUILD_IMAGE_NAME }}\windows\Dockerfile.${{ matrix.component }}
tags: ${{ steps.meta.outputs.tags }}
label_revision: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
label_created: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
pull_images: |
${{ steps.base_build.outputs.base_build_image }}
build_args: |
BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
push: true
$context="$Env:DOCKERFILES_DIRECTORY\$Env:BASE_BUILD_IMAGE_NAME\windows\"
$dockerfile= $context + 'Dockerfile.' + $Env:MATRIX_COMPONENT
$base_build_image= $Env:BASE_BUILD_IMAGE
# Can not build on GitHub due existing symlink. Must be removed before build process
Remove-Item -ErrorAction Ignore -Force -Path $context\README.md
$tags_array=$( "$Env:TAGS".Split("`n") )
$tags=$( $tags_array | Foreach-Object { "--tag=$_" } )
echo "::group::Image tags"
echo "$Env:TAGS"
echo "::endgroup::"
echo "::group::Pull base image"
docker pull $base_build_image
if (-not $?) {throw "Failed"}
echo "::endgroup::"
echo "::group::Build Image"
Write-Host @"
docker build --label org.opencontainers.image.revision=$Env:LABEL_REVISION
--label org.opencontainers.image.created=$Env:LABEL_CREATED
--build-arg=BUILD_BASE_IMAGE=$base_build_image
--file=$dockerfile
$tags
$context
"@
docker build --label org.opencontainers.image.revision=$Env:LABEL_REVISION `
--label org.opencontainers.image.created=$Env:LABEL_CREATED `
--build-arg=BUILD_BASE_IMAGE=$base_build_image `
--file=$dockerfile `
$tags `
$context
if (-not $?) {throw "Failed"}
echo "::endgroup::"
echo "::group::Publish Image"
if ( $Env:AUTO_PUSH_IMAGES -eq 'true' ) {
Foreach ($tag in $tags_array) {
echo "docker image push $tag"
docker image push $tag
if (-not $?) {throw "Failed"}
}
$digest=$(docker inspect $tags_array[0] --format "{{ index .RepoDigests 0}}").Split('@')[-1]
if (-not $?) {throw "Failed"}
echo "Image digest got from RepoDigests"
}
else {
$digest=$(docker inspect $tags_array[0] --format "{{ index .Id}}")
if (-not $?) {throw "Failed"}
echo "Image digest got from Id"
}
echo "::endgroup::"
echo "::group::Digest"
echo "$digest"
echo "::endgroup::"
echo "digest=$digest" >> $Env:GITHUB_OUTPUT
- name: Attest images
uses: ./.github/actions/attest-image
with:
subject_name: ${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY, env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, env.BASE_BUILD_IMAGE_NAME ) }}
subject_digest: ${{ steps.docker_build.outputs.digest }}
- name: Sign the images with GitHub OIDC Token
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
TAGS: ${{ steps.meta.outputs.tags }}
run: |
$tags_array=$( "$Env:TAGS".Split("`n") )
$tag_list=@()
foreach ($tag in $tags_array) {
$tag_name=$tag.Split(":")[0]
$tag_list+="$tag_name@$Env:DIGEST"
}
echo "::group::Images to sign"
echo "$tag_list"
echo "::endgroup::"
echo "::group::Signing"
echo "cosign sign --yes $tag_list"
cosign sign --yes $tag_list
echo "::endgroup::"
uses: ./.github/actions/cosign-sign
with:
digest: ${{ steps.docker_build.outputs.digest }}
tags: ${{ steps.meta.outputs.tags }}
- name: Image digest
if: ${{ env.AUTO_PUSH_IMAGES }}
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
CACHE_FILE_NAME: ${{ env.BASE_BUILD_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
run: |
echo "::group::Image digest"
echo "$Env:DIGEST"
echo "::endgroup::"
echo "::group::Image digest"
echo "$Env:DIGEST"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$Env:CACHE_FILE_NAME"
echo "::endgroup::"
echo "::group::Cache file name"
echo "$Env:CACHE_FILE_NAME"
echo "::endgroup::"
$Env:DIGEST | Set-Content -Path $Env:CACHE_FILE_NAME
Set-Content -Path $Env:CACHE_FILE_NAME -Value $Env:DIGEST -NoNewline
- name: Cache image digest
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
- name: Upload artifact metadata
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
with:
path: ${{ env.BASE_BUILD_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
name: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ matrix.component }}-${{ github.run_id }}
build_images:
name: Build ${{ matrix.component }} on ${{ matrix.os }}
needs: [ "build_components", "init_build"]
needs: ["build_components", "init_build"]
runs-on: ${{ matrix.os }}
timeout-minutes: 70
permissions:
contents: read
id-token: write
attestations: write
artifact-metadata: write
strategy:
fail-fast: false
matrix:
os: ${{ fromJson(needs.init_build.outputs.os) }}
component: ${{ fromJson(needs.init_build.outputs.components) }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
disable-telemetry: true
allowed-endpoints: >
api.github.com:443
auth.docker.io:443
dc.services.visualstudio.com:443
fulcio.sigstore.dev:443
github.com:443
index.docker.io:443
mcr.microsoft.com:443
mobile.events.data.microsoft.com:443
ocsp.sectigo.com:80
production.cloudflare.docker.com:443
r12.c.lencr.org:80
registry-1.docker.io:443
rekor.sigstore.dev:443
timestamp.sigstore.dev:443
tuf-repo-cdn.sigstore.dev:443
x1.c.lencr.org:80
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
@@ -582,15 +481,11 @@ jobs:
fetch-depth: 1
- name: Install cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad
with:
cosign-release: 'v2.4.0'
- name: Check cosign version
run: cosign version
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
uses: ./.github/actions/cosign-install
- name: Login to DockerHub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
@@ -601,19 +496,23 @@ jobs:
MATRIX_OS: ${{ matrix.os }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
$os_tag=$(Get-Content -Path $Env:MATRIX_FILE | ConvertFrom-Json).'os-windows'."$Env:MATRIX_OS"
$osTag = (Get-Content -Path $Env:MATRIX_FILE | ConvertFrom-Json).'os-windows'."$Env:MATRIX_OS"
echo "::group::Base OS tag"
echo "$os_tag"
echo "::endgroup::"
if ([string]::IsNullOrWhiteSpace($osTag)) {
throw "Failed to detect Windows base OS tag for '$Env:MATRIX_OS'"
}
echo "os_tag=$os_tag" >> $Env:GITHUB_OUTPUT
echo "::group::Base OS tag"
echo "$osTag"
echo "::endgroup::"
echo "os_tag=$osTag" >> $Env:GITHUB_OUTPUT
- name: Generate tags
id: meta
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # v6.0.0
with:
images: ${{ env.DOCKER_REGISTRY }}/${{ env.DOCKER_REPOSITORY }}/${{ env.IMAGES_PREFIX }}${{ matrix.component }}
images: ${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY, env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, matrix.component ) }}
context: ${{ env.TRUNK_ONLY_EVENT == 'true' && 'git' || '' }}
tags: |
type=semver,enable=${{ needs.init_build.outputs.current_branch != 'trunk' }},pattern={{version}},prefix=${{ steps.base_os_tag.outputs.os_tag }}-
@@ -627,10 +526,9 @@ jobs:
latest=false
- name: Download SHA256 tag of ${{ env.BASE_BUILD_IMAGE_NAME }}:${{ matrix.os }}
uses: actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1
with:
path: ${{ env.BASE_BUILD_IMAGE_NAME }}_${{ matrix.os }}_${{ matrix.component }}
key: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ github.run_id }}
name: ${{ env.BASE_BUILD_IMAGE_NAME }}-${{ matrix.os }}-${{ matrix.component }}-${{ github.run_id }}
- name: Retrieve ${{ env.BASE_BUILD_IMAGE_NAME }}:${{ matrix.os }} SHA256 tag
id: base_build
@@ -641,148 +539,60 @@ jobs:
DOCKER_REPOSITORY: ${{ env.DOCKER_REPOSITORY }}
IMAGES_PREFIX: ${{ env.IMAGES_PREFIX }}
run: |
$base_image_file=$Env:BASE_BUILD_IMAGE_NAME + '_' + $Env:MATRIX_OS + '_' + $Env:MATRIX_COMPONENT
$base_tag = Get-Content $base_image_file -Raw
$build_base_image="$Env:DOCKER_REPOSITORY/$Env:IMAGES_PREFIX$Env:BASE_BUILD_IMAGE_NAME@" + $base_tag
$baseImageFile = "$Env:BASE_BUILD_IMAGE_NAME" + "_" + "$Env:MATRIX_OS" + "_" + "$Env:MATRIX_COMPONENT"
echo "::group::Base image Info"
echo "base_tag=$base_tag"
echo "base_build_image=$build_base_image"
echo "::endgroup::"
if (-not (Test-Path $baseImageFile)) {
throw "Missing cached build-components digest file: $baseImageFile"
}
echo "base_tag=$base_tag" >> $Env:GITHUB_OUTPUT
echo "base_build_image=$build_base_image" >> $Env:GITHUB_OUTPUT
$baseTag = (Get-Content $baseImageFile -Raw).Trim()
if ([string]::IsNullOrWhiteSpace($baseTag)) {
throw "Build-components digest is empty in file: $baseImageFile"
}
$buildBaseImage = "$Env:DOCKER_REPOSITORY/$Env:IMAGES_PREFIX$Env:BASE_BUILD_IMAGE_NAME@" + $baseTag
echo "::group::Base image info"
echo "base_tag=$baseTag"
echo "base_build_image=$buildBaseImage"
echo "::endgroup::"
echo "base_tag=$baseTag" >> $Env:GITHUB_OUTPUT
echo "base_build_image=$buildBaseImage" >> $Env:GITHUB_OUTPUT
- name: Verify ${{ env.BASE_BUILD_IMAGE_NAME }}:${{ matrix.os }} cosign
env:
BASE_IMAGE: ${{ steps.base_build.outputs.base_build_image }}
OIDC_ISSUER: ${{ env.OIDC_ISSUER }}
IDENITY_REGEX: ${{ env.IDENITY_REGEX }}
run: |
cosign verify `
--certificate-oidc-issuer-regexp "$Env:OIDC_ISSUER" `
--certificate-identity-regexp "$Env:IDENITY_REGEX" `
"$Env:BASE_IMAGE" | jq
uses: ./.github/actions/cosign-verify
with:
image: ${{ steps.base_build.outputs.base_build_image }}
oidc_issuer: ${{ env.OIDC_ISSUER }}
identity_regexp: ${{ env.IDENTITY_REGEX }}
- name: Build and push image
id: docker_build
env:
DOCKERFILES_DIRECTORY: ${{ env.DOCKERFILES_DIRECTORY }}
BASE_BUILD_IMAGE: ${{ steps.base_build.outputs.base_build_image }}
BASE_BUILD_IMAGE_NAME: ${{ env.BASE_BUILD_IMAGE_NAME }}
MATRIX_COMPONENT: ${{ matrix.component }}
TAGS: ${{ steps.meta.outputs.tags }}
BASE_BUILD_OS_TAG: ${{ steps.base_os_tag.outputs.os_tag }}
LABEL_REVISION: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
LABEL_CREATED: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
OS_BASE_IMAGE_NAME: ${{ env.OS_BASE_IMAGE_NAME }}
AUTO_PUSH_IMAGES: ${{ env.AUTO_PUSH_IMAGES }}
run: |
echo "::group::Docker version"
docker version
echo "::endgroup::"
echo "::group::Docker info"
docker info
echo "::endgroup::"
$context="$Env:DOCKERFILES_DIRECTORY\$Env:MATRIX_COMPONENT\windows\"
$dockerfile= $context + 'Dockerfile'
$base_build_image= $Env:BASE_BUILD_IMAGE
# Can not build on GitHub due existing symlink. Must be removed before build process
Remove-Item -ErrorAction Ignore -Force -Path $context\README.md
$tags_array=$( "$Env:TAGS".Split("`n") )
$tags=$( $tags_array | Foreach-Object { "--tag=$_" } )
# PowerShell images based on LTSC 2019 and LTSC 2016 do not have "ltsc" prefix
$os_tag_suffix=$Env:BASE_BUILD_OS_TAG
# $os_tag_suffix=$os_tag_suffix -replace "ltsc2019",'1809'
$base_image=$Env:OS_BASE_IMAGE_NAME + ':' + $os_tag_suffix
echo "::group::Image tags"
echo "$Env:TAGS"
echo "::endgroup::"
echo "::group::Pull build base image"
docker pull $base_build_image
if (-not $?) {throw "Failed"}
echo "::endgroup::"
echo "::group::Pull Powershell base image"
docker pull $base_image
if (-not $?) {throw "Failed"}
echo "::endgroup::"
echo "::group::Build Image"
Write-Host @"
docker build --label org.opencontainers.image.revision=$Env:LABEL_REVISION
--label org.opencontainers.image.created=$Env:LABEL_CREATED
--build-arg=BUILD_BASE_IMAGE=$base_build_image
--build-arg=OS_BASE_IMAGE=$base_image
--file=$dockerfile
$tags
$context
"@
docker build --label org.opencontainers.image.revision=$Env:LABEL_REVISION `
--label org.opencontainers.image.created=$Env:LABEL_CREATED `
--build-arg=BUILD_BASE_IMAGE=$base_build_image `
--build-arg=OS_BASE_IMAGE=$base_image `
--file=$dockerfile `
$tags `
$context
if (-not $?) {throw "Failed"}
echo "::endgroup::"
echo "::group::Publish Image"
if ( $Env:AUTO_PUSH_IMAGES -eq 'true' ) {
Foreach ($tag in $tags_array) {
echo "docker image push $tag"
docker image push $tag
if (-not $?) {throw "Failed"}
}
$digest=$(docker inspect $tags_array[0] --format "{{ index .RepoDigests 0}}").Split('@')[-1]
if (-not $?) {throw "Failed"}
echo "Image digest got from RepoDigests"
}
else {
$digest=$(docker inspect $tags_array[0] --format "{{ index .Id}}")
if (-not $?) {throw "Failed"}
echo "Image digest got from Id"
}
echo "::endgroup::"
echo "::group::Digest"
echo "$digest"
echo "::endgroup::"
echo "digest=$digest" >> $Env:GITHUB_OUTPUT
uses: ./.github/actions/docker-build-push-windows
with:
context: ${{ env.DOCKERFILES_DIRECTORY }}\${{ matrix.component }}\windows
dockerfile: ${{ env.DOCKERFILES_DIRECTORY }}\${{ matrix.component }}\windows\Dockerfile
tags: ${{ steps.meta.outputs.tags }}
label_revision: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.revision'] }}
label_created: ${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.created'] }}
pull_images: |
${{ steps.base_build.outputs.base_build_image }}
${{ env.OS_BASE_IMAGE_NAME }}:${{ steps.base_os_tag.outputs.os_tag }}
build_args: |
BUILD_BASE_IMAGE=${{ steps.base_build.outputs.base_build_image }}
OS_BASE_IMAGE=${{ env.OS_BASE_IMAGE_NAME }}:${{ steps.base_os_tag.outputs.os_tag }}
push: ${{ env.AUTO_PUSH_IMAGES }}
- name: Sign the images with GitHub OIDC Token
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
TAGS: ${{ steps.meta.outputs.tags }}
run: |
$tags_array=$( "$Env:TAGS".Split("`n") )
$tag_list=@()
if: ${{ env.AUTO_PUSH_IMAGES == 'true' }}
uses: ./.github/actions/cosign-sign
with:
digest: ${{ steps.docker_build.outputs.digest }}
tags: ${{ steps.meta.outputs.tags }}
foreach ($tag in $tags_array) {
$tag_name=$tag.Split(":")[0]
$tag_list+="$tag_name@$Env:DIGEST"
}
echo "::group::Images to sign"
echo "$tag_list"
echo "::endgroup::"
echo "::group::Signing"
echo "cosign sign --yes $tag_list"
cosign sign --yes $tag_list
echo "::endgroup::"
- name: Image digest
if: ${{ env.AUTO_PUSH_IMAGES }}
env:
DIGEST: ${{ steps.docker_build.outputs.digest }}
run: |
echo "::group::Image digest"
echo "$Env:DIGEST"
echo "::endgroup::"
- name: Attest images
uses: ./.github/actions/attest-image
with:
subject_name: ${{ format('{0}/{1}/{2}{3}', env.DOCKER_REGISTRY, env.DOCKER_REPOSITORY, env.IMAGES_PREFIX, matrix.component ) }}
subject_digest: ${{ steps.docker_build.outputs.digest }}

View File

@@ -9,19 +9,26 @@ on:
- 'Dockerfiles/*/README.md'
- 'Dockerfiles/*/rhel/README.md'
- '.github/workflows/rhel_registry_description.yml'
- '.github/scripts/rhel_description.py'
- '.github/scripts/requirements-rhel-description.txt'
workflow_dispatch:
concurrency:
group: rhel-description-${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
DOCKERFILES_DIRECTORY: "./Dockerfiles"
API_URL: "https://catalog.redhat.com/api/containers/v1/projects/certification/id/"
MATRIX_FILE: "build.json"
PYTHON_REQUIREMENTS_FILE: ".github/scripts/requirements-rhel-description.txt"
defaults:
run:
shell: bash
permissions:
contents: read
contents: read
jobs:
init:
@@ -32,62 +39,68 @@ jobs:
outputs:
components: ${{ steps.components.outputs.list }}
zabbix_release: ${{ steps.branch_info.outputs.zabbix_release }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
github.com:443
api.github.com:443
- name: Checkout repository
- name: Checkout repository metadata
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 1
sparse-checkout: ${{ env.MATRIX_FILE }}
- name: Check ${{ env.MATRIX_FILE }} file
id: build_exists
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
if [[ ! -f "$MATRIX_FILE" ]]; then
echo "::error::File $MATRIX_FILE is missing"
exit 1
fi
set -Eeuo pipefail
if [[ ! -f "$MATRIX_FILE" ]]; then
echo "::error::File $MATRIX_FILE is missing"
exit 1
fi
- name: Get branch info
id: branch_info
shell: bash
env:
github_ref: ${{ github.ref }}
GITHUB_REF_NAME: ${{ github.ref_name }}
run: |
result=false
set -Eeuo pipefail
github_ref=${github_ref##*/}
echo "::group::Branch metadata"
echo "zabbix_release=${GITHUB_REF_NAME//.}"
echo "::endgroup::"
echo "::group::Branch metadata"
echo "zabbix_release=${github_ref//.}"
echo "::endgroup::"
echo "zabbix_release=${github_ref//.}" >> $GITHUB_OUTPUT
echo "zabbix_release=${GITHUB_REF_NAME//.}" >> "$GITHUB_OUTPUT"
- name: Prepare Zabbix component list
id: components
env:
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
component_list=$(jq -r '.components | map_values(select(.rhel == true)) | keys | @json' "$MATRIX_FILE")
set -Eeuo pipefail
echo "::group::Zabbix Component List"
echo "$component_list"
echo "::endgroup::"
component_list="$(jq -rc '.components | map_values(select(.rhel == true)) | keys' "$MATRIX_FILE")"
echo "list=$component_list" >> $GITHUB_OUTPUT
if [[ -z "$component_list" || "$component_list" == "[]" ]]; then
echo "::warning::No RHEL-enabled components found in $MATRIX_FILE"
fi
publish:
name: Initialize build
echo "::group::Zabbix Component List"
echo "$component_list"
echo "::endgroup::"
echo "list=$component_list" >> "$GITHUB_OUTPUT"
update-description:
name: Update description (${{ matrix.component }})
runs-on: ubuntu-latest
needs: init
permissions:
@@ -96,62 +109,98 @@ jobs:
fail-fast: false
matrix:
component: ${{ fromJson(needs.init.outputs.components) }}
steps:
- name: Block egress traffic
uses: step-security/harden-runner@20cf305ff2072d973412fa9b1e3a4f227bda3c76 # v2.14.0
uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
with:
disable-sudo: true
egress-policy: block
allowed-endpoints: >
catalog.redhat.com:443
files.pythonhosted.org:443
github.com:443
api.github.com:443
pypi.org:443
files.pythonhosted.org:443
catalog.redhat.com:443
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
fetch-depth: 1
- name: Variables formating
- name: Format matrix variables
id: var_format
env:
MATRIX_BUILD: ${{ matrix.component }}
run: |
MATRIX_BUILD=${MATRIX_BUILD^^}
MATRIX_BUILD=${MATRIX_BUILD//-/_}
set -Eeuo pipefail
echo "::group::Result"
echo "matrix_build=${MATRIX_BUILD}"
echo "::endgroup::"
echo "matrix_build=${MATRIX_BUILD}" >> $GITHUB_OUTPUT
MATRIX_BUILD="${MATRIX_BUILD^^}"
MATRIX_BUILD="${MATRIX_BUILD//-/_}"
- name: Setup Python 3.x
echo "::group::Result"
echo "matrix_build=${MATRIX_BUILD}"
echo "::endgroup::"
echo "matrix_build=${MATRIX_BUILD}" >> "$GITHUB_OUTPUT"
- name: Check Python requirements file
env:
PYTHON_REQUIREMENTS_FILE: ${{ env.PYTHON_REQUIREMENTS_FILE }}
run: |
set -Eeuo pipefail
if [[ ! -f "$PYTHON_REQUIREMENTS_FILE" ]]; then
echo "::error::File $PYTHON_REQUIREMENTS_FILE is missing"
exit 1
fi
- name: Setup Python 3.12
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: '3.x'
python-version: '3.12'
cache: 'pip'
cache-dependency-path: ${{ env.PYTHON_REQUIREMENTS_FILE }}
- name: Install Python packages
env:
PYTHON_REQUIREMENTS_FILE: ${{ env.PYTHON_REQUIREMENTS_FILE }}
run: |
set -Eeuo pipefail
python -m pip install --upgrade pip
pip install markdown requests
pip install -r "$PYTHON_REQUIREMENTS_FILE"
- name: Check description source
env:
DESCRIPTION_FILE: ${{ format('{0}/{1}/rhel/README', env.DOCKERFILES_DIRECTORY, matrix.component) }}
run: |
set -Eeuo pipefail
if [[ ! -f "${DESCRIPTION_FILE}.html" && ! -f "${DESCRIPTION_FILE}.md" ]]; then
echo "::error::No description file found: ${DESCRIPTION_FILE}.html or ${DESCRIPTION_FILE}.md"
exit 1
fi
- name: Update Red Hat project description
env:
DESCRIPTION_FILE: ${{ format('{0}/{1}/rhel/README', env.DOCKERFILES_DIRECTORY, matrix.component) }}
PROJECT_ID: ${{ secrets[format('RHEL_{0}_{1}_PROJECT', needs.init.outputs.zabbix_release, steps.var_format.outputs.matrix_build)] }}
PROJECT_ID: ${{ secrets[format('RHEL_{0}_{1}_PROJECT', needs.init.outputs.zabbix_release, steps.var_format.outputs.matrix_build)] }}
PYXIS_API_TOKEN: ${{ secrets.REDHAT_API_TOKEN }}
API_URL: ${{ env.API_URL }}
run: |
python ./.github/scripts/rhel_description.py
set -Eeuo pipefail
python ./.github/scripts/rhel_description.py
- name: Red Hat Gatalog URL
- name: Print Red Hat Catalog URL
env:
COMPONENT: ${{ matrix.component }}
MATRIX_FILE: ${{ env.MATRIX_FILE }}
run: |
PROJECT_ID=$(jq -r ".components.\"$COMPONENT\".rhel_project" "$MATRIX_FILE")
set -Eeuo pipefail
echo "::group::URL"
echo "https://catalog.redhat.com/software/containers/${PROJECT_ID}"
echo "::endgroup::"
PROJECT_ID="$(jq -r ".components.\"$COMPONENT\".rhel_project" "$MATRIX_FILE")"
echo "::group::URL"
echo "https://catalog.redhat.com/software/containers/${PROJECT_ID}"
echo "::endgroup::"

86
.github/workflows/scorecard.yml vendored Normal file
View File

@@ -0,0 +1,86 @@
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.

# Runs the OSSF Scorecard supply-chain checks and uploads the SARIF results
# to the repository's code-scanning dashboard.
name: Scorecard supply-chain security
on:
  # For Branch-Protection check. Only the default branch is supported. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
  branch_protection_rule:
  # To guarantee Maintained check is occasionally updated. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
  # Weekly: every Tuesday at 03:39 UTC.
  schedule:
    - cron: '39 3 * * 2'
  push:
    branches:
      # NOTE(review): scorecard-action only publishes results for the
      # repository's default branch — confirm '7.4' is the default branch,
      # otherwise publish_results below has no effect on pushes.
      - '7.4'

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecard analysis
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload the results to code-scanning dashboard.
      security-events: write
      # Needed to publish results and get a badge (see publish_results below).
      id-token: write
      # Uncomment the permissions below if installing in a private repository.
      # contents: read
      # actions: read

    steps:
      - name: Harden Runner
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          disable-sudo: true
          # Deny all outbound traffic except the endpoints listed below.
          # The sigstore endpoints are presumably required by scorecard-action's
          # signed result publishing (publish_results: true) — confirm against
          # the scorecard-action docs before removing any of them.
          egress-policy: block
          allowed-endpoints: >
            github.com:443
            api.github.com:443
            api.scorecard.dev:443
            rekor.sigstore.dev:443
            tuf-repo-cdn.sigstore.dev:443
            fulcio.sigstore.dev:443

      - name: "Checkout code"
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          # Do not leave the GITHUB_TOKEN in the local git config for later steps.
          persist-credentials: false

      - name: "Run analysis"
        uses: ossf/scorecard-action@4eaacf0543bb3f2c246792bd56e8cdeffafb205a # v2.4.3
        with:
          results_file: results.sarif
          results_format: sarif
          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
          # - you want to enable the Branch-Protection check on a *public* repository, or
          # - you are installing Scorecard on a *private* repository
          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
          # repo_token: ${{ secrets.SCORECARD_TOKEN }}

          # Public repositories:
          #   - Publish results to OpenSSF REST API for easy access by consumers
          #   - Allows the repository to include the Scorecard badge.
          #   - See https://github.com/ossf/scorecard-action#publishing-results.
          # For private repositories:
          #   - `publish_results` will always be set to `false`, regardless
          #     of the value entered here.
          publish_results: true

      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 5

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
        with:
          sarif_file: results.sarif

61
.github/workflows/sonarcloud.yml vendored Normal file
View File

@@ -0,0 +1,61 @@
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

# This workflow helps you trigger a SonarCloud analysis of your code and populates
# GitHub Code Scanning alerts with the vulnerabilities found.
# Free for open source project.

# 1. Login to SonarCloud.io using your GitHub account

# 2. Import your project on SonarCloud
#     * Add your GitHub organization first, then add your repository as a new project.
#     * Please note that many languages are eligible for automatic analysis,
#       which means that the analysis will start automatically without the need to set up GitHub Actions.
#     * This behavior can be changed in Administration > Analysis Method.
#
# 3. Follow the SonarCloud in-product tutorial
#     * a. Copy/paste the Project Key and the Organization Key into the args parameter below
#          (You'll find this information in SonarCloud. Click on "Information" at the bottom left)
#
#     * b. Generate a new token and add it to your Github repository's secrets using the name SONAR_TOKEN
#          (On SonarCloud, click on your avatar on top-right > My account > Security
#           or go directly to https://sonarcloud.io/account/security/)

# Feel free to take a look at our documentation (https://docs.sonarcloud.io/getting-started/github/)
# or reach out to our community forum if you need some help (https://community.sonarsource.com/c/help/sc/9)

name: SonarCloud analysis

on:
  push:
    branches: [ "7.4" ]
  pull_request:
    branches: [ "7.4" ]
  workflow_dispatch:

permissions:
  pull-requests: read # allows SonarCloud to decorate PRs with analysis results

jobs:
  Analysis:
    runs-on: ubuntu-latest
    steps:
      - name: Block egress traffic
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          # NOTE(review): log-only ('audit') policy here, unlike the 'block'
          # policy used in this repo's other workflows — confirm whether the
          # SonarCloud endpoints can be enumerated so 'block' can be used.
          egress-policy: audit
      - name: Checkout repository
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting
          fetch-depth: 0
      - name: Analyze with SonarCloud
        # NOTE(review): '@v7.0.0' is a mutable tag; every other action in this
        # repo is pinned to a full-length commit SHA. Pin this one to the SHA
        # of the v7.0.0 release for supply-chain hardening.
        uses: SonarSource/sonarqube-scan-action@v7.0.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} # Generate a token on Sonarcloud.io, add it to the secrets of this repo with the name SONAR_TOKEN (Settings > Secrets > Actions > add new repository secret)