Add support for getting ccache cache from S3.

The ccache directory is archived in a tar and downloaded from S3 Cloud
Storage. For push events, the ccache archive is then uploaded back to S3
so that the cache always stays fresh.

An additional workflow is added to upload files to S3 Cloud Storage from
artifacts uploaded to GitHub. The minio tool is used to upload the files
to S3.

If the ccache archive can't be downloaded from S3, we fall back to the
GitHub cache system.
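
As a rough illustration, the restore-with-fallback flow could look like
the sketch below. This is not the actual build.yml: the step ids, the
.ccache path and the hardcoded ath79/generic object name are
placeholders, and it assumes the bucket is publicly readable over HTTPS.

    name: ccache restore sketch

    on: [push]

    jobs:
      build:
        runs-on: ubuntu-latest
        steps:
          - name: Restore ccache cache from S3 (best effort)
            id: restore-ccache-s3
            continue-on-error: true
            run: |
              # Endpoint, bucket and filename pattern taken from kernel.yml below.
              curl --fail -o ccache-kernel.tar \
                https://storage.googleapis.com/openwrt-ci-cache/ccache-kernel-ath79-generic.tar
              tar -xf ccache-kernel.tar

          # Fall back to the GitHub cache system when the S3 download failed.
          - name: Restore ccache cache from GitHub
            if: steps.restore-ccache-s3.outcome == 'failure'
            uses: actions/cache@v3
            with:
              path: .ccache
              key: ccache-kernel-ath79-generic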

Also limit the S3 upload to the openwrt repository, since external forks
obviously won't have the required secrets to upload data to the S3 Cloud
Storage.

Signed-off-by: Christian Marangi <ansuelsmth@gmail.com>
(cherry picked from commit ebbc806d30)
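
The reusable upload-file-s3.yml workflow called from kernel.yml below is
not part of this excerpt. A minimal sketch of what it could look like,
using the minio client (mc): the input and secret names match the caller
below, while the step names and exact commands are assumptions.

    name: Upload file to S3

    on:
      workflow_call:
        inputs:
          endpoint:
            required: true
            type: string
          bucket:
            required: true
            type: string
          download_id:
            required: true
            type: string
          filename:
            required: true
            type: string
        secrets:
          s3_access_key:
            required: true
          s3_secret_key:
            required: true

    jobs:
      upload-file-in-s3:
        runs-on: ubuntu-latest
        steps:
          - name: Download the artifact previously uploaded to GitHub
            uses: actions/download-artifact@v3
            with:
              name: ${{ inputs.download_id }}

          - name: Install the minio client
            run: |
              curl -o mc https://dl.min.io/client/mc/release/linux-amd64/mc
              chmod +x mc

          - name: Upload the file to the S3 bucket
            run: |
              # Register the endpoint under the "s3" alias, then copy the file.
              ./mc alias set s3 ${{ inputs.endpoint }} \
                ${{ secrets.s3_access_key }} ${{ secrets.s3_secret_key }}
              ./mc cp ${{ inputs.filename }} s3/${{ inputs.bucket }}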
name: Build Kernel

on:
  pull_request:
    paths:
      - '.github/workflows/check-kernel-patches.yml'
      - '.github/workflows/build.yml'
      - '.github/workflows/kernel.yml'
      - 'include/kernel*'
      - 'package/kernel/**'
      - 'target/linux/**'
  push:
    paths:
      - '.github/workflows/check-kernel-patches.yml'
      - '.github/workflows/build.yml'
      - '.github/workflows/kernel.yml'
      - 'include/kernel*'
      - 'package/kernel/**'
      - 'target/linux/**'
    branches-ignore:
      - master

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

jobs:
  determine_targets:
    name: Set targets
    runs-on: ubuntu-latest
    outputs:
      targets_subtargets: ${{ steps.find_targets.outputs.targets_subtargets }}
      targets: ${{ steps.find_targets.outputs.targets }}

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          fetch-depth: 2

      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v35

      - name: Set targets
        id: find_targets
        run: |
          ALL_TARGETS="$(perl ./scripts/dump-target-info.pl targets 2>/dev/null)"
          CHANGED_FILES="$(echo ${{ steps.changed-files.outputs.all_changed_files }} | tr ' ' '\n')"

          TARGETS_SUBTARGETS="$(echo "$ALL_TARGETS" | sort -u -t '/' -k1 | awk '{ print $1 }')"
          TARGETS="$(echo "$ALL_TARGETS" | sort -u -t '/' -k1,1 | awk '{ print $1 }')"

          # On testing non-specific target, skip testing each subtarget
          if echo "$CHANGED_FILES" | grep -v -q target/linux ||
            echo "$CHANGED_FILES" | grep -q target/linux/generic; then
            TARGETS_SUBTARGETS=$TARGETS
          fi

          JSON_TARGETS_SUBTARGETS='['
          FIRST=1
          for TARGET in $TARGETS_SUBTARGETS; do
            if echo "$CHANGED_FILES" | grep -v -q target/linux ||
              echo "$CHANGED_FILES" | grep -q target/linux/generic ||
              echo "$CHANGED_FILES" | grep -q $(echo $TARGET | cut -d "/" -f 1); then
              TUPLE='{"target":"'"$(echo $TARGET | cut -d "/" -f 1)"'","subtarget":"'"$(echo $TARGET | cut -d "/" -f 2)"'"}'
              [[ $FIRST -ne 1 ]] && JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS"','
              JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS""$TUPLE"
              FIRST=0
            fi
          done
          JSON_TARGETS_SUBTARGETS="$JSON_TARGETS_SUBTARGETS"']'

          JSON_TARGETS='['
          FIRST=1
          for TARGET in $TARGETS; do
            if echo "$CHANGED_FILES" | grep -v -q target/linux ||
              echo "$CHANGED_FILES" | grep -q target/linux/generic ||
              echo "$CHANGED_FILES" | grep -q $(echo $TARGET | cut -d "/" -f 1); then
              TUPLE='{"target":"'"$(echo $TARGET | cut -d "/" -f 1)"'","subtarget":"'"$(echo $TARGET | cut -d "/" -f 2)"'"}'
              [[ $FIRST -ne 1 ]] && JSON_TARGETS="$JSON_TARGETS"','
              JSON_TARGETS="$JSON_TARGETS""$TUPLE"
              FIRST=0
            fi
          done
          JSON_TARGETS="$JSON_TARGETS"']'

          echo -e "\n---- targets to build ----\n"
          echo "$JSON_TARGETS_SUBTARGETS"
          echo -e "\n---- targets to build ----\n"

          echo -e "\n---- targets to check patch ----\n"
          echo "$JSON_TARGETS"
          echo -e "\n---- targets to check patch ----\n"

          echo "targets_subtargets=$JSON_TARGETS_SUBTARGETS" >> $GITHUB_OUTPUT
          echo "targets=$JSON_TARGETS" >> $GITHUB_OUTPUT

  build:
    name: Build Kernel with external toolchain
    needs: determine_targets
    permissions:
      contents: read
      packages: read
      actions: write
    strategy:
      fail-fast: False
      matrix:
        include: ${{fromJson(needs.determine_targets.outputs.targets_subtargets)}}
    uses: ./.github/workflows/build.yml
    with:
      container_name: toolchain
      target: ${{ matrix.target }}
      subtarget: ${{ matrix.subtarget }}
      build_kernel: true
      build_all_kmods: true
      upload_ccache_cache: ${{ github.repository_owner == 'openwrt' }}

  check-kernel-patches:
    name: Check Kernel patches
    needs: determine_targets
    permissions:
      contents: read
      packages: read
    strategy:
      fail-fast: False
      matrix:
        include: ${{fromJson(needs.determine_targets.outputs.targets)}}
    uses: ./.github/workflows/check-kernel-patches.yml
    with:
      target: ${{ matrix.target }}
      subtarget: ${{ matrix.subtarget }}

  upload-ccache-cache-in-s3:
    if: github.event_name == 'push' && github.repository_owner == 'openwrt'
    name: Upload ccache cache to s3
    needs: [determine_targets, build]
    strategy:
      fail-fast: False
      matrix:
        include: ${{fromJson(needs.determine_targets.outputs.targets_subtargets)}}
    secrets:
      s3_access_key: ${{ secrets.GCS_S3_ACCESS_KEY }}
      s3_secret_key: ${{ secrets.GCS_S3_SECRET_KEY }}
    uses: ./.github/workflows/upload-file-s3.yml
    with:
      endpoint: https://storage.googleapis.com
      bucket: openwrt-ci-cache
      download_id: ${{ matrix.target }}-${{ matrix.subtarget }}-ccache-cache
      filename: ccache-kernel-${{ matrix.target }}-${{ matrix.subtarget }}.tar